from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import argparse
import logging
import socket
import threading
import time
import colorlog
import requests
from plexapi.server import PlexServer
handler = colorlog.StreamHandler()
handler.setFormatter(colorlog.ColoredFormatter(
'%(log_color)s%(levelname)s:%(name)s:%(message)s'))
logger = colorlog.getLogger('[PlexTrafficDriver]')
logger.setLevel(logging.INFO)
logger.addHandler(handler)
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument("-v", "--verbose", help="increase output verbosity",
action="store_true")
parser.add_argument("--baseurl", help="baseurl", type=str, required=True)
parser.add_argument("--token", help="token", type=str, required=True)
parser.add_argument("--concurrency", type=int, required=True)
return parser.parse_args()
def play_movie(url, i, run_event):
logger.info('thread {} started: {}'.format(i, url))
mb = 1024 * 1024
while run_event.is_set():
total = 0
last = 0
try:
r = requests.get(url, stream=True, timeout=2)
for line in r.iter_lines(chunk_size=1024):
if not run_event.is_set():
break
        total += len(line)  # iter_lines strips delimiters, so this slightly undercounts
        mb_read = total // mb
        if mb_read > last:
          print("{}: {} mb".format(i, mb_read))
          last = mb_read
    except (socket.timeout, requests.exceptions.RequestException):
      # requests wraps stream read timeouts in its own exception types,
      # so catch those as well and retry the stream
      pass
logger.info('exit thread {}'.format(i))
if __name__ == '__main__':
args = parse_args()
if args.verbose:
logger.setLevel(logging.DEBUG)
existing_browsers = set()
plex = PlexServer(baseurl=args.baseurl, token=args.token) # Defaults to localhost:32400
run_event = threading.Event()
run_event.set()
threads = set()
for (i, video) in enumerate(plex.search('the')):
if i == args.concurrency:
break
url = video.getStreamURL(videoResolution='800x600')
t = threading.Thread(target=play_movie, args=(url, i, run_event))
threads.add(t)
t.start()
try:
    while True:
time.sleep(.1)
except KeyboardInterrupt:
logger.info("exit")
run_event.clear()
for t in threads:
t.join()
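# Example invocation (baseurl/token values are placeholders):
#   python fid_api.py --baseurl http://localhost:32400 --token XXXX --concurrency 4 -v
# Each worker thread streams one transcoded video until Ctrl-C clears run_event.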
| {
"repo_name": "wisechengyi/PlexTrafficDriver",
"path": "src/python/main/fid_api.py",
"copies": "1",
"size": "2221",
"license": "mit",
"hash": 8018842877147503000,
"line_mean": 25.7590361446,
"line_max": 93,
"alpha_frac": 0.6492570914,
"autogenerated": false,
"ratio": 3.576489533011272,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9653715066167456,
"avg_score": 0.01440631164876305,
"num_lines": 83
} |
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import os
import pkgutil
from collections import defaultdict
from collections import namedtuple
from twitter.common.collections import OrderedSet
from pants.backend.project_info.tasks.ide_gen import IdeGen
from pants.base.build_environment import get_buildroot
from pants.build_graph.address import Address
from pants.backend.jvm.targets.java_library import JavaLibrary
from pants.backend.jvm.targets.exclude import Exclude
from pants.base.generator import Generator, TemplateData
from pants.util.dirutil import safe_open
from pants.backend.jvm.subsystems import scala_platform
from pants.backend.jvm.subsystems.jvm_platform import JvmPlatform
_TEMPLATE_BASEDIR = os.path.join('templates', 'ensime')
_DEFAULT_PROJECT_DIR = './.pants.d/ensime/project'
SourceBase = namedtuple("SourceBase", "id path")
class EnsimeGen(IdeGen):
"""Create an Ensime project from the given targets."""
@classmethod
def register_options(cls, register):
register('--excluded-deps', type=list, advanced=True,
help='Exclude these targets from dependency resolution during workspace generation')
super(EnsimeGen, cls).register_options(register)
def __init__(self, *args, **kwargs):
super(EnsimeGen, self).__init__(*args, **kwargs)
self.project_template = os.path.join(_TEMPLATE_BASEDIR, 'ensime.mustache')
self.project_filename = os.path.join(self.cwd, '.ensime')
self.ensime_output_dir = os.path.join(self.gen_project_workdir, 'out')
def resolve_jars(self, targets):
excluded_targets = set()
for exclusion in self.get_options().excluded_deps:
for target in self.context.resolve(exclusion):
excluded_targets.add(target)
synthetic_target = self.context.add_new_target(
      address=Address('', 'exclusions'),
target_type=JavaLibrary,
dependencies=list(),
sources=list(),
excludes=[]
)
    filtered_targets = [target for target in targets if target not in excluded_targets] + [synthetic_target]
return super(EnsimeGen, self).resolve_jars(filtered_targets)
def generate_project(self, project):
def linked_folder_id(source_set):
return source_set.source_base.replace(os.path.sep, '.')
def base_path(source_set):
return os.path.join(source_set.root_dir, source_set.source_base, source_set.path)
def create_source_base_template(source_set):
source_base = base_path(source_set)
return SourceBase(
id=linked_folder_id(source_set),
path=source_base
)
source_sets = project.sources[:]
if project.has_python:
source_sets.extend(project.py_sources)
source_bases = frozenset(map(create_source_base_template, source_sets))
libs = []
def add_jarlibs(classpath_entries):
for classpath_entry in classpath_entries:
libs.append((classpath_entry.jar, classpath_entry.source_jar))
add_jarlibs(project.internal_jars)
add_jarlibs(project.external_jars)
scala_full_version = scala_platform.scala_build_info[self.context.options['scala-platform']['version']].full_version
scala = TemplateData(
language_level=scala_full_version,
compiler_classpath=project.scala_compiler_classpath
)
outdir = os.path.abspath(self.ensime_output_dir)
if not os.path.exists(outdir):
os.makedirs(outdir)
java_platform = JvmPlatform.global_instance().default_platform
jdk_home = JvmPlatform.preferred_jvm_distribution([java_platform], strict=True).home
configured_project = TemplateData(
name=self.project_name,
java_home=jdk_home,
scala=scala,
source_bases=source_bases,
has_tests=project.has_tests,
internal_jars=[cp_entry.jar for cp_entry in project.internal_jars],
internal_source_jars=[cp_entry.source_jar for cp_entry in project.internal_jars
if cp_entry.source_jar],
external_jars=[cp_entry.jar for cp_entry in project.external_jars],
external_javadoc_jars=[cp_entry.javadoc_jar for cp_entry in project.external_jars
if cp_entry.javadoc_jar],
external_source_jars=[cp_entry.source_jar for cp_entry in project.external_jars
if cp_entry.source_jar],
libs=libs,
outdir=os.path.relpath(outdir, get_buildroot()),
root_dir=get_buildroot(),
cache_dir=os.path.join(self.cwd, '.ensime_cache')
)
def apply_template(output_path, template_relpath, **template_data):
with safe_open(output_path, 'w') as output:
Generator(pkgutil.get_data(__name__, template_relpath), **template_data).write(output)
apply_template(self.project_filename, self.project_template, project=configured_project)
print('\nGenerated ensime project at {}{}'.format(self.gen_project_workdir, os.sep))
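# Invocation sketch (assuming the plugin's register.py installs this task
# under an 'ensime' goal):
#   ./pants ensime src/scala/myproject::
# which writes the .ensime file at self.project_filename for the given targets.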
| {
"repo_name": "toddgardner/pants-plugins",
"path": "src/python/verst/pants/ensime/tasks/ensime.py",
"copies": "1",
"size": "4930",
"license": "mit",
"hash": -2135752528696676000,
"line_mean": 38.7580645161,
"line_max": 120,
"alpha_frac": 0.7028397566,
"autogenerated": false,
"ratio": 3.659985152190052,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9839495001140468,
"avg_score": 0.0046659815299167325,
"num_lines": 124
} |
from __future__ import absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement
from django.core.exceptions import ObjectDoesNotExist
import os
import logging
import time
from celery import Celery
from celery.signals import task_prerun
from django.conf import settings
logger = logging.getLogger(__name__)
# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'dbs.settings')
app = Celery('dbs')
# Using a string here means the worker will not have to
# pickle the object when using Windows.
app.config_from_object('django.conf:settings')
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
@app.task(bind=True)
def debug_task(self):
print('Request: {0!r}'.format(self.request))
@task_prerun.connect
def task_sent_handler(**kwargs):
logger.info("kwargs = %s", kwargs)
    # The broker is quicker than the relational DB, so this handler usually
    # fires before the matching Task row exists in the DB; a short sleep
    # works around that race.
time.sleep(1)
try:
task_id = kwargs['task_id']
except KeyError:
logger.error("missing task_id in kwargs")
else:
from dbs.models import Task
try:
task = Task.objects.get(celery_id=task_id)
except ObjectDoesNotExist:
logger.error("No such task '%s'", task_id)
else:
task.state = Task.STATUS_RUNNING
task.save()
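# Typical flow (sketch, assuming the standard Django+Celery layout where
# dbs/__init__.py imports this app): start a worker with
#   celery -A dbs worker -l info
# then from Django code:
#   from dbs.celery import debug_task
#   debug_task.delay()  # task_sent_handler then marks the matching Task row running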
| {
"repo_name": "DBuildService/dbs-server",
"path": "dbs/celery.py",
"copies": "1",
"size": "1468",
"license": "bsd-3-clause",
"hash": 7232017375379604000,
"line_mean": 27.2307692308,
"line_max": 125,
"alpha_frac": 0.6982288828,
"autogenerated": false,
"ratio": 3.83289817232376,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.503112705512376,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement
from cgi import escape
from django.http import HttpResponse
from django.utils.encoding import force_text
from django.utils.translation import ugettext_lazy as _
from functools import partial
def lookup_attr(obj, name):
for n in name.split('__'):
obj = getattr(obj, n)
if callable(obj):
obj = obj()
return obj
class AdminExportMixin(object):
actions = ('export_as_csv', 'export_as_xls')
def get_list_export(self, request):
try:
return self.list_export
except AttributeError:
return self.list_display
def get_export_fields(self, request):
fields = []
for name in self.get_list_export(request):
try:
names = name.split('__')
field = self.model._meta.get_field(names[0])
for n in names[1:]:
field = field.related.model._meta.get_field(n)
fields.append({
'name': field.name,
'verbose_name': field.verbose_name,
'get_value': partial(lambda name, obj: lookup_attr(obj, name), name),
})
            except Exception:
if callable(name):
fields.append({
'name': name.__func__.__name__,
'verbose_name': getattr(name, 'short_description', name.__func__.__name__),
'get_value': partial(lambda name, obj: name(obj), name),
})
elif hasattr(self, name):
attr = getattr(self, name)
fields.append({
'name': name,
'verbose_name': getattr(attr, 'short_description', name),
'get_value': partial(lambda attr, obj: attr(obj), attr),
})
elif hasattr(self.model, name):
attr = getattr(self.model, name)
fields.append({
'name': name,
'verbose_name': getattr(attr, 'short_description', name),
'get_value': partial(lambda name, obj: lookup_attr(obj, name), name),
})
else:
                    raise Exception('Cannot resolve name "{}"'.format(name))
return fields
def get_export_data(self, request, queryset):
fields = self.get_export_fields(request)
yield [f['verbose_name'] for f in fields]
for obj in queryset.all():
yield [f['get_value'](obj) for f in fields]
def export_as_csv(self, request, queryset):
response = HttpResponse(content_type='text/csv')
response['Content-Disposition'] = 'attachment; filename="{}.csv"'.format(self.model._meta.model_name)
for row in self.get_export_data(request, queryset):
            response.write(','.join([
                '"{}"'.format(force_text(field).replace('"', '""'))  # RFC 4180 quoting
                for field in row
            ]) + '\n')
return response
export_as_csv.short_description = _('Export selected records as CSV')
def export_as_xls(self, request, queryset):
response = HttpResponse(content_type='application/vnd.ms-excel')
response['Content-Disposition'] = 'attachment; filename="{}.xls"'.format(self.model._meta.model_name)
response.write('<html><head><meta charset="UTF-8" /></head><body><table>')
for row in self.get_export_data(request, queryset):
response.write('<tr><td>')
response.write('</td><td>'.join(
escape(force_text(value)).encode('ascii', 'xmlcharrefreplace')
for value in row
))
response.write('</td></tr>')
response.write('</table></body></html>')
return response
export_as_xls.short_description = _('Export selected records as XLS')
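# Usage sketch (hypothetical Book model/admin; list_export falls back to
# list_display when absent, per get_list_export above):
#   from django.contrib import admin
#   class BookAdmin(AdminExportMixin, admin.ModelAdmin):
#       list_display = ('title', 'author')
#       list_export = ('title', 'author__email')
#   admin.site.register(Book, BookAdmin)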
| {
"repo_name": "misli/django-domecek",
"path": "domecek/admin/export.py",
"copies": "1",
"size": "4075",
"license": "bsd-3-clause",
"hash": -6670251024757385000,
"line_mean": 41.0103092784,
"line_max": 125,
"alpha_frac": 0.527607362,
"autogenerated": false,
"ratio": 4.434167573449401,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5461774935449402,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement
from cms.menu_bases import CMSAttachMenu
from django.core.urlresolvers import reverse
from django.db.models.signals import post_delete, post_save
from django.utils.translation import ugettext_lazy as _
from menus.base import NavigationNode, Modifier
from menus.menu_pool import menu_pool
from .conf import settings
from .models import EventType
from .utils import url_with_back, current_url
class DomecekMenu(CMSAttachMenu):
name = _('Domecek')
def get_nodes(self, request):
"""
This method is used to build the menu tree.
"""
nodes = []
nodes.append(NavigationNode(
_('Log in'),
reverse('domecek:user_login'),
len(nodes),
attr={'visible_for_authenticated': False, 'add_url_back': True},
))
nodes.append(NavigationNode(
_('Create account'),
reverse('domecek:user_create'),
len(nodes),
attr={'visible_for_authenticated': False},
))
nodes.append(NavigationNode(
_('Reset password'),
reverse('domecek:password_reset'),
len(nodes),
attr={'visible_for_authenticated': False},
))
nodes.append(NavigationNode(
_('Summary'),
reverse('domecek:summary'),
len(nodes),
attr={'visible_for_anonymous': False},
))
nodes.append(NavigationNode(
_('Registrations'),
reverse('domecek:registrations'),
len(nodes),
attr={'visible_for_anonymous': False},
))
nodes.append(NavigationNode(
_('Participants'),
reverse('domecek:participant_list'),
len(nodes),
attr={'visible_for_anonymous': False},
))
nodes.append(NavigationNode(
_('Add participant'),
reverse('domecek:participant_create'),
len(nodes),
parent_id=len(nodes)-1,
attr={'visible_for_anonymous': False, 'add_url_back': True},
))
nodes.append(NavigationNode(
_('Add parent'),
reverse('domecek:parent_create'),
len(nodes),
parent_id=len(nodes)-2,
attr={'visible_for_anonymous': False, 'add_url_back': True},
))
nodes.append(NavigationNode(
_('Clubs'),
reverse('domecek:club_list'),
len(nodes),
))
nodes.append(NavigationNode(
_('My Clubs'),
reverse('domecek:club_list_mine'),
len(nodes),
parent_id=len(nodes)-1,
attr={'require_leader': True},
))
nodes.append(NavigationNode(
_('Alternating'),
reverse('domecek:club_alternating'),
len(nodes),
parent_id=len(nodes)-2,
attr={'require_leader': True},
))
nodes.append(NavigationNode(
_('Events'),
reverse('domecek:event_list'),
len(nodes),
))
        events_id = len(nodes) - 1
nodes.append(NavigationNode(
_('My Events'),
reverse('domecek:event_list_mine'),
len(nodes),
parent_id=events_id,
attr={'require_leader': True},
))
for event_type in EventType.objects.all():
nodes.append(NavigationNode(
event_type.name,
reverse('domecek:event_list', kwargs={'event_type': event_type.slug}),
len(nodes),
parent_id=events_id,
))
nodes.append(NavigationNode(
_('Leaders'),
reverse('domecek:leader_list'),
len(nodes),
))
nodes.append(NavigationNode(
_('Timesheets'),
reverse('domecek:timesheet_list'),
len(nodes),
attr={'require_leader': True},
))
nodes.append(NavigationNode(
_('Reports'),
reverse('domecek:reports'),
len(nodes),
attr={'require_staff': True},
))
nodes.append(NavigationNode(
_('Log out'),
reverse('domecek:user_logout'),
len(nodes),
attr={'visible_for_anonymous': False},
))
return nodes
menu_pool.register_menu(DomecekMenu)
class DomecekModifier(Modifier):
def modify(self, request, nodes, namespace, root_id, post_cut, breadcrumb):
if post_cut or breadcrumb:
return nodes
final = []
for node in nodes:
if ((node.attr.get('require_leader', False) and not request.leader)
or (node.attr.get('require_staff', False) and not request.user.is_staff)):
if node.parent and node in node.parent.children:
node.parent.children.remove(node)
continue
else:
if node.attr.get('add_url_back', False):
node.url = url_with_back(node.url, current_url(request))
final.append(node)
return final
menu_pool.register_modifier(DomecekModifier)
def invalidate_menu_cache(sender, **kwargs):
menu_pool.clear()
post_save.connect(invalidate_menu_cache, sender=EventType)
post_delete.connect(invalidate_menu_cache, sender=EventType)
| {
"repo_name": "misli/django-domecek",
"path": "domecek/menu.py",
"copies": "1",
"size": "5479",
"license": "bsd-3-clause",
"hash": -8917284332624275000,
"line_mean": 31.6130952381,
"line_max": 125,
"alpha_frac": 0.5422522358,
"autogenerated": false,
"ratio": 4.138217522658611,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.518046975845861,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement
from cms.models.fields import PageField
from django.db import models
from django.utils.encoding import python_2_unicode_compatible, smart_text
from django.utils.functional import cached_property
from django.utils.translation import ugettext_lazy as _
from djangocms_text_ckeditor.fields import HTMLField
from filer.fields.image import FilerImageField
from ..conf import settings
from .agegroup import AgeGroup
from .school import School
from .schoolyear import SchoolYear
from .fields import BirthNumberField, PostalCodeField
@python_2_unicode_compatible
class Leader(models.Model):
user = models.OneToOneField(settings.AUTH_USER_MODEL, verbose_name=_('user'),
related_name='domecek_leader')
description = HTMLField(_('description'), blank=True, default='')
photo = FilerImageField(verbose_name=_('photo'),
related_name='domecek_leaders', blank=True, null=True)
page = PageField(verbose_name=_('page'), blank=True, null=True)
school_years = models.ManyToManyField('domecek.SchoolYear', verbose_name=_('school years'),
related_name='leaders')
class Meta:
app_label = 'domecek'
ordering = ('user__first_name', 'user__last_name')
verbose_name = _('leader')
verbose_name_plural = _('leaders')
def __str__(self):
return self.full_name
@cached_property
def first_name(self):
return self.user.first_name
@cached_property
def last_name(self):
return self.user.last_name
@cached_property
def full_name(self):
return '{} {}'.format(self.first_name, self.last_name)
@cached_property
def all_contacts(self):
return list(self.contacts.all())
@cached_property
def all_public_contacts(self):
return list(self.contacts.filter(public=True))
@cached_property
def all_clubs(self):
return list(self.clubs.all())
@cached_property
def all_events(self):
return list(self.events.all())
@cached_property
def all_school_years(self):
return list(self.school_years.all())
def get_alternate_leader_entries(self, school_year):
from .clubs import ClubJournalLeaderEntry
return ClubJournalLeaderEntry.objects.filter(
timesheet__leader = self,
club_entry__club__school_year = school_year,
).exclude(club_entry__club__in = self.clubs.all())
@python_2_unicode_compatible
class Contact(models.Model):
leader = models.ForeignKey(Leader, verbose_name=_('leader'), related_name='contacts')
contact_type = models.CharField(_('contact type'), max_length=30,
choices=settings.DOMECEK_CONTACT_TYPES)
contact = models.CharField(_('contact'), max_length=250)
order = models.IntegerField(_('order'), blank=True, default=0)
public = models.BooleanField(_('public'), default=False)
CONTACT_TYPES = dict(settings.DOMECEK_CONTACT_TYPES)
class Meta:
app_label = 'domecek'
ordering = ('order',)
verbose_name = _('contact')
verbose_name_plural = _('contacts')
def __str__(self):
return '{}, {}: {}'.format(self.leader.full_name, self.contact_type_name, self.contact)
@cached_property
def contact_type_name(self):
return self.CONTACT_TYPES[self.contact_type]
@python_2_unicode_compatible
class Parent(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL, verbose_name=_('user'),
related_name='domecek_parents')
first_name = models.CharField(_('first name'), max_length=30)
last_name = models.CharField(_('last name'), max_length=30)
street = models.CharField(_('street'), max_length=150)
city = models.CharField(_('city'), max_length=150)
postal_code = PostalCodeField(_('postal code'))
email = models.EmailField(_('email address'), blank=True, default='')
phone = models.CharField(_('phone'), max_length=30)
class Meta:
app_label = 'domecek'
verbose_name = _('parent')
verbose_name_plural = _('parents')
def __str__(self):
return self.full_name
@cached_property
def address(self):
return '{}, {}, {}'.format(self.street, self.city, self.postal_code)
@cached_property
def contact(self):
if self.email and self.phone:
return '{}, {}'.format(self.phone, self.email)
else:
return self.email or self.phone or ''
@cached_property
def all_participants(self):
return list(self.participants.all())
@cached_property
def full_name(self):
return '{} {}'.format(self.first_name, self.last_name)
@python_2_unicode_compatible
class Participant(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL, verbose_name=_('user'),
related_name='domecek_participants')
parents = models.ManyToManyField(Parent, verbose_name=_('parents'), related_name='participants')
age_group = models.ForeignKey(AgeGroup, verbose_name=_('age group'), related_name='+')
first_name = models.CharField(_('first name'), max_length=30)
last_name = models.CharField(_('last name'), max_length=30)
birth_num = BirthNumberField(_('birth number'), unique=True)
street = models.CharField(_('street'), max_length=150)
city = models.CharField(_('city'), max_length=150)
postal_code = PostalCodeField(_('postal code'))
email = models.EmailField(_('email address'), blank=True, default='')
phone = models.CharField(_('phone'), max_length=30, blank=True, default='')
citizenship = models.CharField(_('citizenship'), max_length=50)
insurance = models.CharField(_('insurance'), max_length=50)
school = models.ForeignKey(School, verbose_name=_('school'), related_name='participants', blank=True, null=True)
school_other = models.CharField(_('other school'), max_length=150, blank=True, default='')
school_class = models.CharField(_('class'), max_length=30, blank=True, default='')
health = models.TextField(_('health'), blank=True, default='')
class Meta:
app_label = 'domecek'
verbose_name = _('participant')
verbose_name_plural = _('participants')
def __str__(self):
return _('{first_name} {last_name} ({birth_num})').format(
first_name = self.first_name,
last_name = self.last_name,
birth_num = self.birth_num,
)
@cached_property
def all_parents(self):
return list(self.parents.all())
@cached_property
def full_name(self):
return '{} {}'.format(self.first_name, self.last_name)
@cached_property
def address(self):
return '{}, {}, {}'.format(self.street, self.city, self.postal_code)
@cached_property
def contact(self):
if self.email and self.phone:
return '{}, {}'.format(self.phone, self.email)
else:
return self.email or self.phone or ''
@cached_property
def school_name(self):
return self.school and smart_text(self.school) or self.school_other
@cached_property
def school_and_class(self):
if self.school_name and self.school_class:
return '{}, {}'.format(self.school_name, self.school_class)
else:
return self.school_name or self.school_class or ''
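# Usage sketch (hypothetical data): the cached_property accessors above are
# cheap denormalized views, e.g.
#   parent = Parent.objects.get(pk=1)
#   parent.full_name   # 'Jana Novakova'
#   parent.contact     # phone and email joined when both are set
# Note that cached_property values persist for the instance's lifetime, so
# re-fetch the object after changing related contacts or participants.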
| {
"repo_name": "misli/django-domecek",
"path": "domecek/models/roles.py",
"copies": "1",
"size": "7934",
"license": "bsd-3-clause",
"hash": -7866174576632153000,
"line_mean": 36.780952381,
"line_max": 125,
"alpha_frac": 0.6082682128,
"autogenerated": false,
"ratio": 3.9064500246184144,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9956561739848843,
"avg_score": 0.011631299513914277,
"num_lines": 210
} |
from __future__ import absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement
from cms.models import CMSPlugin
from cms.models.fields import PageField
from collections import namedtuple
from datetime import date, datetime, time, timedelta
from django.core.urlresolvers import reverse_lazy as reverse
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.functional import cached_property
from django.utils.encoding import smart_text, force_text
from django.utils.text import slugify
from django.utils.translation import ugettext_lazy as _
from djangocms_text_ckeditor.fields import HTMLField
from filer.fields.file import FilerFileField
from filer.fields.image import FilerImageField
from json import loads
from ..conf import settings
from ..utils import get_mailer, currency, comma_separated
from .fields import DAY_OF_WEEK, DayOfWeekField
from .fields import ColorField, BirthNumberField, PostalCodeField, PriceField
from .question import Question
from .agegroup import AgeGroup
from .place import Place
from .roles import Leader, Participant
from .school import School
from .schoolyear import SchoolYear
from .startend import StartEndMixin
from .utils import PaymentStatus
@python_2_unicode_compatible
class ClubGroup(models.Model):
name = models.CharField(_('name'), max_length=150)
plural = models.CharField(_('plural'), max_length=150)
color = ColorField(_('color'))
order = models.IntegerField(_('order'), blank=True, default=0)
class Meta:
app_label = 'domecek'
ordering = ('order',)
verbose_name = _('club group')
verbose_name_plural = _('club groups')
def __str__(self):
return self.name
@python_2_unicode_compatible
class Club(models.Model):
school_year = models.ForeignKey(SchoolYear, verbose_name=_('school year'), related_name='clubs')
name = models.CharField(_('name'), max_length=150)
description = HTMLField(_('description'), blank=True, default='')
groups = models.ManyToManyField(ClubGroup, verbose_name=_('groups'), related_name='clubs')
place = models.ForeignKey(Place, verbose_name=_('place'), related_name='clubs', blank=True, null=True)
age_groups = models.ManyToManyField(AgeGroup, verbose_name=_('age groups'), related_name='clubs', blank=True)
leaders = models.ManyToManyField(Leader, verbose_name=_('leaders'), related_name='clubs', blank=True)
price = PriceField(_('price'))
unit = models.CharField(_('unit'), max_length=150)
public = models.BooleanField(_('public'), default=False)
reg_active = models.BooleanField(_('active registration'), default=False)
photo = FilerImageField(verbose_name=_('photo'), blank=True, null=True)
page = PageField(verbose_name=_('page'), blank=True, null=True)
min_count = models.IntegerField(_('minimal count'), blank=True, null=True)
max_count = models.IntegerField(_('maximal count'), blank=True, null=True)
risks = HTMLField(_('risks'), blank=True)
plan = HTMLField(_('plan'), blank=True)
evaluation = HTMLField(_('evaluation'), blank=True)
note = models.CharField(_('note'), max_length=300, blank=True, default='')
questions = models.ManyToManyField(Question, verbose_name=_('additional questions'),
blank=True,
help_text=_('Add additional questions to be asked in the registration form.'))
class Meta:
app_label = 'domecek'
ordering = ('name',)
verbose_name = _('club')
verbose_name_plural = _('clubs')
def __str__(self):
return '{} {}'.format(self.school_year, self.name)
@cached_property
def all_groups(self):
return list(self.groups.all())
@cached_property
def all_age_groups(self):
return list(self.age_groups.all())
@cached_property
def all_leaders(self):
return list(self.leaders.all())
@cached_property
def all_times(self):
return list(self.times.all())
@cached_property
def all_periods(self):
return list(self.periods.all())
@cached_property
def all_questions(self):
return list(self.questions.all())
@cached_property
def all_attachments(self):
return list(self.attachments.all())
@cached_property
def all_registrations(self):
return list(self.registrations.all())
@cached_property
def all_journal_entries(self):
return list(self.journal_entries.all())
def get_current_period(self):
return self.periods.filter(end__gte=date.today()).first() or self.periods.last()
def get_absolute_url(self):
return reverse('domecek:club_detail', args=(self.id,))
def get_public_registration_url(self):
return reverse('domecek:club_registration_public', args=(self.id,))
def get_registration_url(self, participant):
return reverse('domecek:club_registration_form', kwargs={'club': self.id, 'participant': participant.id})
def get_edit_url(self):
return reverse('admin:domecek_club_change', args=(self.id,))
def get_groups_list(self):
return comma_separated(self.all_groups)
get_groups_list.short_description = _('groups list')
def get_leaders_list(self):
return comma_separated(self.all_leaders)
get_leaders_list.short_description = _('leaders list')
def get_times_list(self):
return comma_separated(self.all_times)
get_times_list.short_description = _('times')
def get_periods_list(self):
return '<br/>'.join(smart_text(p) for p in self.all_periods)
get_periods_list.short_description = _('periods')
get_periods_list.allow_tags = True
def get_next_time(self, now = None):
try:
return min(t.get_next_time(now) for t in self.all_times)
except ValueError:
return None
@python_2_unicode_compatible
class ClubTime(StartEndMixin, models.Model):
club = models.ForeignKey(Club, verbose_name=_('club'), related_name='times')
day_of_week = DayOfWeekField(_('day of week'))
start = models.TimeField(_('start time'), blank=True, null=True)
end = models.TimeField(_('end time'), blank=True, null=True)
class Meta:
app_label = 'domecek'
ordering = ('day_of_week', 'start')
verbose_name = _('time')
verbose_name_plural = _('times')
def __str__(self):
if self.start is not None and self.end is not None:
return _('{day}, {start:%H:%M} - {end:%H:%M}').format(
day = self.day,
start = self.start,
end = self.end,
)
elif self.start is not None:
return _('{day}, {start:%H:%M}').format(
day = self.day,
start = self.start,
)
else:
return force_text(self.day)
@cached_property
def day(self):
return DAY_OF_WEEK[self.day_of_week]
Time = namedtuple('Time', ('date', 'start', 'end'))
def get_next_time(self, now = None):
now = now or datetime.now()
daydelta = (self.day_of_week - now.isoweekday()) % 7
        # datetime subclasses date, so test for datetime explicitly; a plain
        # date carries no time of day, so today's occurrence counts as passed
        if daydelta == 0 and (not isinstance(now, datetime) or self.start is None or self.start <= now.time()):
daydelta = 7
if isinstance(now, datetime):
next_date = now.date() + timedelta(daydelta)
else:
next_date = now + timedelta(daydelta)
return self.Time(
date = next_date,
start = self.start,
end = self.end,
)
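    # Worked example (DayOfWeekField is assumed to use ISO numbering, matching
    # the isoweekday() call above): with day_of_week=3 (Wednesday), start=15:00:
    #   get_next_time(datetime(2015, 9, 1, 12, 0))  # Tue noon -> Wed 2015-09-02
    #   get_next_time(datetime(2015, 9, 2, 14, 0))  # Wed 14:00 -> same Wednesday
    #   get_next_time(date(2015, 9, 2))             # plain date -> next Wednesday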
@python_2_unicode_compatible
class ClubPeriod(StartEndMixin, models.Model):
club = models.ForeignKey(Club, verbose_name=_('club'), related_name='periods')
name = models.CharField(_('name'), max_length=150)
start = models.DateField(_('start date'))
end = models.DateField(_('end date'))
class Meta:
app_label = 'domecek'
ordering = ('club__name', 'start')
verbose_name = _('period')
verbose_name_plural = _('periods')
def __str__(self):
return _('{name}, {start:%m/%d %y} - {end:%m/%d %y}').format(
name = self.name,
start = self.start,
end = self.end,
)
@cached_property
def journal_entries(self):
return self.club.journal_entries.filter(date__gte=self.start, date__lte=self.end)
@cached_property
def all_journal_entries(self):
return list(self.journal_entries.all())
@cached_property
def all_registrations(self):
return list(self.club.registrations.filter(created__lt=self.end))
@cached_property
def all_alternates(self):
alternates = set()
for entry in self.all_journal_entries:
for alternate in entry.all_alternates:
alternates.add(alternate)
return list(alternates)
PresenceRecord = namedtuple('PresenceRecord', ('name', 'presences'))
def get_participant_presences(self):
return [
self.PresenceRecord(
reg.participant,
[
reg.participant in entry.all_participants
for entry in self.all_journal_entries
]
) for reg in self.all_registrations
]
def get_leader_presences(self):
return [
self.PresenceRecord(
leader,
[
entry.all_leader_entries_by_leader.get(leader, None)
for entry in self.all_journal_entries
]
) for leader in self.club.all_leaders
]
def get_alternate_presences(self):
return [
self.PresenceRecord(
alternate,
[
entry.all_leader_entries_by_leader.get(alternate, None)
for entry in self.all_journal_entries
]
) for alternate in self.all_alternates
]
@python_2_unicode_compatible
class ClubAttachment(models.Model):
club = models.ForeignKey(Club, verbose_name=_('club'), related_name='attachments')
file = FilerFileField()
order = models.IntegerField(_('order'), blank=True, default=0)
class Meta:
app_label = 'domecek'
ordering = ('order',)
verbose_name = _('attachment')
verbose_name_plural = _('attachments')
def __str__(self):
return force_text(self.file)
@python_2_unicode_compatible
class ClubRegistration(models.Model):
slug = models.SlugField(editable=False)
created = models.DateTimeField(_('time of registration'), editable=False, auto_now_add=True)
club = models.ForeignKey(Club, verbose_name=_('club'), related_name='registrations')
participant = models.ForeignKey(Participant, verbose_name=_('participant'), related_name='club_registrations')
age_group = models.ForeignKey(AgeGroup, verbose_name=_('age group'), related_name='+')
citizenship = models.CharField(_('citizenship'), max_length=50)
insurance = models.CharField(_('insurance'), max_length=50)
school = models.ForeignKey(School, verbose_name=_('school'), related_name='club_registrations', blank=True, null=True)
school_other = models.CharField(_('other school'), max_length=150, blank=True, default='')
school_class = models.CharField(_('class'), max_length=30, blank=True, default='')
health = models.TextField(_('health'), blank=True, default='')
answers = models.TextField(_('additional answers'), blank=True, default='{}', editable=False)
cancel_request = models.BooleanField(_('cancel request'), default=False)
canceled = models.DateField(_('date of cancellation'), blank=True, null=True)
discount = PriceField(_('discount'), blank=True, default=0)
explanation = models.TextField(_('discount explanation'), blank=True, default='')
class Meta:
app_label = 'domecek'
verbose_name = _('club registration')
verbose_name_plural = _('club registrations')
unique_together = (('club', 'participant'),)
def __str__(self):
return _('{participant} - {subject}').format(
participant = self.participant,
subject = self.club,
)
def get_answers(self):
return loads(self.answers)
@property
def subject(self):
return self.club
@cached_property
def all_periods(self):
if self.canceled:
return list(self.club.periods.filter(end__gt=self.created, start__lt=self.canceled))
else:
return list(self.club.periods.filter(end__gt=self.created))
@cached_property
def all_payments(self):
return list(self.payments.all())
@cached_property
def school_name(self):
return self.school and smart_text(self.school) or self.school_other
@cached_property
def school_and_class(self):
if self.school_name and self.school_class:
return '{}, {}'.format(self.school_name, self.school_class)
else:
return self.school_name or self.school_class or ''
@cached_property
def all_recipients(self):
recipients = set()
if self.participant.user.email:
recipients.add(self.participant.user.email)
for parent in self.participant.all_parents:
if parent.email:
recipients.add(parent.email)
return recipients
def get_payments(self, d=None):
if d:
            return [p for p in self.all_payments if p.date <= d]
else:
return self.all_payments
def get_paid(self, d=None):
return sum(p.amount for p in self.get_payments(d))
    @cached_property
    def period_payment_statuses(self):
        # materialize the generator so the cached value survives repeated iteration
        return list(self.get_period_payment_statuses())
PeriodPaymentStatus = namedtuple('PeriodPaymentStatus', ('period', 'status'))
def get_period_payment_statuses(self, d=None):
price = self.club.price
paid = self.get_paid(d)
discount = self.discount
for period in self.all_periods:
yield self.PeriodPaymentStatus(
period = period,
status = PaymentStatus(
price = price,
discount = discount,
paid = min(price - discount, paid),
),
)
paid = max(paid - (price - discount), 0)
            discount = 0  # the discount is applied only once, across all periods
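    # Worked example for get_period_payment_statuses: price=300 per period,
    # discount=50, paid=400, two periods -> period 1 settles at min(250, 400)=250,
    # the remaining 150 carries over, period 2 gets min(300, 150)=150 (150 still due).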
@cached_property
def payment_statuses(self):
return self.get_payment_statuses()
PaymentStatuses = namedtuple('PaymentStatuses', ('partial', 'total'))
def get_payment_statuses(self, d=None):
if d is None:
d = date.today()
price = self.club.price
        partial_price = price * len([p for p in self.all_periods if p.start <= d])
total_price = price * len(self.all_periods)
paid = self.get_paid(d)
return self.PaymentStatuses(
partial = PaymentStatus(price = partial_price, discount = self.discount, paid = paid),
total = PaymentStatus(price = total_price, discount = self.discount, paid = paid),
)
def get_absolute_url(self):
return reverse('domecek:club_registration_pdf', kwargs={'slug':self.slug})
def send_mail(self):
get_mailer('ClubRegistration')().send_mail(self)
def save(self, *args, **kwargs):
if not self.slug:
self.slug = slugify(smart_text(self))
if self.canceled:
self.cancel_request = False
super(ClubRegistration, self).save(*args, **kwargs)
@python_2_unicode_compatible
class ClubPayment(models.Model):
registration = models.ForeignKey(ClubRegistration, verbose_name=_('registration'), related_name='payments', on_delete=models.PROTECT)
date = models.DateField(_('payment date'))
amount = PriceField(_('amount'))
class Meta:
app_label = 'domecek'
verbose_name = _('club payment')
verbose_name_plural = _('club payments')
def __str__(self):
return '{registration}, {amount}'.format(
registration = self.registration,
amount = currency(self.amount),
)
def get_default_agenda():
return '<p>{}</p>'.format(_('instruction on OSH'))
@python_2_unicode_compatible
class ClubJournalEntry(StartEndMixin, models.Model):
club = models.ForeignKey(Club, verbose_name=_('club'), related_name='journal_entries', editable=False)
date = models.DateField(_('date'))
start = models.TimeField(_('start time'), blank=True, null=True,
help_text=_('Leave empty, if the club does not take place'))
end = models.TimeField(_('end time'), blank=True, null=True,
help_text=_('Leave empty, if the club does not take place'))
agenda = HTMLField(_('session agenda'), default=get_default_agenda)
participants = models.ManyToManyField(Participant, verbose_name=_('participants'),
blank=True, related_name='journal_entries')
class Meta:
app_label = 'domecek'
ordering = ('date', 'start', 'end')
verbose_name = _('journal entry')
verbose_name_plural = _('journal entries')
def __str__(self):
return '{club}, {date}'.format(
club = self.club.name,
date = self.date,
)
    @cached_property
    def datetime_start(self):
        try:
            return datetime.combine(self.date, self.start)
        except TypeError:  # start is None
            return None
    @cached_property
    def datetime_end(self):
        try:
            return datetime.combine(self.date, self.end)
        except TypeError:  # end is None
            return None
    @cached_property
    def duration(self):
        try:
            return self.datetime_end - self.datetime_start
        except TypeError:  # start or end missing
            return timedelta()
    duration.short_description = _('duration')
@cached_property
def all_participants(self):
return list(self.participants.all())
@cached_property
def all_leader_entries(self):
return list(self.leader_entries.all())
@cached_property
def all_leader_entries_by_leader(self):
return dict((e.timesheet.leader, e) for e in self.all_leader_entries)
@cached_property
def all_leaders(self):
return list(
le.timesheet.leader for le in self.all_leader_entries
if le.timesheet.leader in self.club.all_leaders
)
@cached_property
def all_alternates(self):
return list(
le.timesheet.leader for le in self.all_leader_entries
if le.timesheet.leader not in self.club.all_leaders
)
@property
def timesheets(self):
from .timesheets import Timesheet
        return Timesheet.objects.by_date(self.date).filter(
leader__in = self.all_leaders + self.all_alternates,
)
def save(self, *args, **kwargs):
if self.end is None:
self.end = self.start
super(ClubJournalEntry, self).save(*args, **kwargs)
def get_edit_url(self):
return reverse('domecek:clubjournalentry_update', args=(self.id,))
def get_delete_url(self):
return reverse('domecek:clubjournalentry_delete', args=(self.id,))
@python_2_unicode_compatible
class ClubJournalLeaderEntry(StartEndMixin, models.Model):
club_entry = models.ForeignKey(ClubJournalEntry, verbose_name=_('club journal entry'), related_name='leader_entries', editable=False)
timesheet = models.ForeignKey('domecek.Timesheet', verbose_name=_('timesheet'), related_name='club_entries', editable=False)
start = models.TimeField(_('start time'))
end = models.TimeField(_('end time'))
class Meta:
app_label = 'domecek'
verbose_name = _('club journal leader entry')
verbose_name_plural = _('club journal leader entries')
unique_together = (('club_entry', 'timesheet'),)
def __str__(self):
return '{}'.format(self.duration)
@cached_property
def date(self):
return self.club_entry.date
date.short_description = _('date')
date.admin_order_field = 'club_entry__date'
@cached_property
def club(self):
return self.club_entry.club
club.short_description = _('club')
@cached_property
def datetime_start(self):
return datetime.combine(self.date, self.start)
@cached_property
def datetime_end(self):
return datetime.combine(self.date, self.end)
@cached_property
def duration(self):
return self.datetime_end - self.datetime_start
duration.short_description = _('duration')
@property
def group(self):
return self.club
def get_edit_url(self):
return reverse('domecek:clubjournalleaderentry_update', args=(self.id,))
def get_delete_url(self):
return reverse('domecek:clubjournalleaderentry_delete', args=(self.id,))
class ClubListPlugin(CMSPlugin):
school_year = models.ForeignKey(SchoolYear, verbose_name=_('school year'),
blank=True, null=True)
age_groups = models.ManyToManyField(AgeGroup, verbose_name=_('age groups'),
blank=True,
help_text=_('Keep empty to skip filtering by age groups.'))
groups = models.ManyToManyField(ClubGroup, verbose_name=_('club groups'),
blank=True,
help_text=_('Keep empty to skip filtering by groups.'))
leaders = models.ManyToManyField(Leader, verbose_name=_('leaders'),
blank=True,
help_text=_('Keep empty to skip filtering by leaders.'))
template = models.CharField(_('template'), max_length=100,
choices=settings.DOMECEK_CLUBLIST_TEMPLATES,
default=settings.DOMECEK_CLUBLIST_TEMPLATES[0][0],
help_text=_('The template used to render plugin.'))
class Meta:
app_label = 'domecek'
def copy_relations(self, oldinstance):
self.groups = oldinstance.groups.all()
self.age_groups = oldinstance.age_groups.all()
self.leaders = oldinstance.leaders.all()
class FilteredClubListPlugin(CMSPlugin):
school_year = models.ForeignKey(SchoolYear, verbose_name=_('school year'),
blank=True, null=True)
class Meta:
app_label = 'domecek'
| {
"repo_name": "misli/django-domecek",
"path": "domecek/models/clubs.py",
"copies": "1",
"size": "23041",
"license": "bsd-3-clause",
"hash": 4416411186328813600,
"line_mean": 35.0015625,
"line_max": 140,
"alpha_frac": 0.6059198819,
"autogenerated": false,
"ratio": 3.9555364806866953,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5061456362586696,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement
from cms.plugin_base import CMSPluginBase
from collections import namedtuple
from django.utils.translation import ugettext as _
from ..forms.clubs import ClubFilterForm
from ..forms.events import EventFilterForm
from ..models import (
ClubListPlugin, FilteredClubListPlugin, ClubGroup,
EventListPlugin, FilteredEventListPlugin, EventGroup,
)
Group = namedtuple('Group', ('group', 'objects'))
class ClubListPlugin(CMSPluginBase):
cache = False
model = ClubListPlugin
name = _('Club list')
text_enabled = True
filter_horizontal = ('age_groups', 'groups', 'leaders')
def render(self, context, instance, placeholder):
school_year = instance.school_year or context['request'].school_year
clubs = school_year.clubs.filter(public=True).distinct()
if instance.age_groups.count():
clubs = clubs.filter(age_groups__in = instance.age_groups.all())
if instance.leaders.count():
clubs = clubs.filter(leaders__in = instance.leaders.all())
if instance.groups.count():
clubs = clubs.filter(groups__in = instance.groups.all())
groups = instance.groups.all()
else:
groups = ClubGroup.objects.all()
context.update({
'school_year': school_year,
'clubs': clubs,
'groups': [
Group(group = group, objects = clubs.filter(groups = group))
for group in groups
],
'instance': instance,
'placeholder': placeholder,
})
return context
def get_render_template(self, context, instance, placeholder):
return 'domecek/club_list/%s.html' % instance.template
class FilteredClubListPlugin(CMSPluginBase):
cache = False
model = FilteredClubListPlugin
name = _('Club list with filter form')
render_template = 'domecek/filtered_club_list.html'
def render(self, context, instance, placeholder):
school_year = instance.school_year or context['request'].school_year
clubs = school_year.clubs.filter(public=True).distinct()
form = ClubFilterForm(context['request'], data=context['request'].GET)
if form.is_valid():
clubs = form.filter_queryset(context['request'], clubs)
context.update({
'school_year': school_year,
'form': form,
'clubs': clubs,
'instance': instance,
'placeholder': placeholder,
})
return context
class EventListPlugin(CMSPluginBase):
cache = False
model = EventListPlugin
name = _('Event list')
text_enabled = True
filter_horizontal = ('age_groups', 'groups', 'leaders')
def render(self, context, instance, placeholder):
school_year = instance.school_year or context['request'].school_year
events = school_year.events.filter(event_type=instance.event_type, public=True).distinct()
if instance.age_groups.count():
events = events.filter(age_groups__in = instance.age_groups.all())
if instance.leaders.count():
events = events.filter(leaders__in = instance.leaders.all())
if instance.groups.count():
events = events.filter(groups__in = instance.groups.all())
groups = instance.groups.all()
else:
groups = EventGroup.objects.all()
context.update({
'school_year': school_year,
'events': events,
'groups': [
Group(group = group, objects = events.filter(groups = group))
for group in groups
],
'instance': instance,
'placeholder': placeholder,
})
return context
def get_render_template(self, context, instance, placeholder):
return 'domecek/event_list/%s.html' % instance.template
class FilteredEventListPlugin(CMSPluginBase):
cache = False
model = FilteredEventListPlugin
name = _('Event list with filter form')
render_template = 'domecek/filtered_event_list.html'
def render(self, context, instance, placeholder):
school_year = instance.school_year or context['request'].school_year
events = school_year.events.filter(event_type=instance.event_type, public=True).distinct()
form = EventFilterForm(context['request'], instance.event_type, data=context['request'].GET)
if form.is_valid():
events = form.filter_queryset(context['request'], events)
context.update({
'school_year': school_year,
'form': form,
'events': events,
'instance': instance,
'placeholder': placeholder,
})
return context
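# Registration sketch: CMS plugin classes must also be added to the plugin
# pool, presumably elsewhere in the package, e.g.:
#   from cms.plugin_pool import plugin_pool
#   plugin_pool.register_plugin(ClubListPlugin)
# Note the plugin classes shadow the model names imported above; the
# `model = ...` attributes still resolve to the models because the class
# name is not rebound until the class statement completes.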
| {
"repo_name": "misli/django-domecek",
"path": "domecek/cms_plugins/default_plugins.py",
"copies": "1",
"size": "4975",
"license": "bsd-3-clause",
"hash": 6431998997566060000,
"line_mean": 33.7902097902,
"line_max": 125,
"alpha_frac": 0.6088442211,
"autogenerated": false,
"ratio": 4.098023064250412,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.01083975510158007,
"num_lines": 143
} |
from __future__ import absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement
from collections import namedtuple
from decimal import Decimal
from django.utils.encoding import python_2_unicode_compatible
from itertools import groupby
from .settings import PRICE_TYPE
from .utils import quantize, currency
@python_2_unicode_compatible
class BasePrice(object):
    def __lt__(self, other):
        try:
            if PRICE_TYPE == 'gross':
                return self.gross < other.gross
            else:
                return self.net < other.net
        except AttributeError:
            return NotImplemented
    def __le__(self, other):
        return self < other or self == other
    def __eq__(self, other):
        try:
            if PRICE_TYPE == 'gross':
                return self.gross == other.gross
            else:
                return self.net == other.net
        except AttributeError:
            return False
def __ne__(self, other):
return not self == other
def __radd__(self, other):
return self + other
def __rmul__(self, other):
return self * other
@property
def tax(self):
return self.gross - self.net
def __str__(self):
if PRICE_TYPE == 'gross':
return currency(self.gross)
else:
return currency(self.net)
class Price(BasePrice, namedtuple('Price', 'net gross rate')):
def __new__(cls, price, rate=0):
price = Decimal(price)
rate = Decimal(rate)
if rate:
if PRICE_TYPE == 'gross':
gross = quantize(price)
net = quantize(gross / rate)
else:
net = quantize(price)
gross = quantize(net * rate)
else:
gross = net = quantize(price)
return super(Price, cls).__new__(cls, net, gross, rate)
def __mul__(self, other):
try:
other = Decimal(other)
if PRICE_TYPE == 'gross':
return Price(self.gross * other, self.rate)
else:
return Price(self.net * other, self.rate)
except TypeError:
return NotImplemented
def __add__(self, other):
if isinstance(other, Price):
return ComplexPrice((self, other))
elif isinstance(other, ComplexPrice):
return ComplexPrice((self,) + other.prices)
else:
try:
return self + Price(other, self.rate)
except TypeError:
return NotImplemented
class ComplexPrice(BasePrice, namedtuple('ComplexPrice', 'net gross prices')):
def __new__(cls, prices):
prices = tuple(prices)
net = Decimal(0)
gross = Decimal(0)
for price in prices:
net += price.net
gross += price.gross
return super(ComplexPrice, cls).__new__(cls, net, gross, prices)
def __add__(self, other):
if isinstance(other, Price):
return ComplexPrice(self.prices + (other,))
elif isinstance(other, ComplexPrice):
return ComplexPrice(self.prices + other.prices)
else:
return NotImplemented
    @property
    def rates(self):
        # tuple-parameter lambdas are Python 2 only; unpack explicitly instead
        return [
            (rate, ComplexPrice(prices))
            for rate, prices in groupby(self.prices, lambda price: price.rate)
        ]
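# Usage sketch (assuming PRICE_TYPE == 'gross', rate given as a gross/net
# multiplier such as 1.21 for 21% VAT, and quantize() rounding to 2 places):
#   p = Price('121.00', rate='1.21')   # gross 121.00, net 100.00, tax 21.00
#   p + Price('60.50', rate='1.21')    # ComplexPrice: gross 181.50, net 150.00
#   3 * p                              # Price: gross 363.00 via __rmul__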
| {
"repo_name": "misli/cmsplugin-shop",
"path": "cmsplugin_shop/price.py",
"copies": "1",
"size": "3431",
"license": "bsd-3-clause",
"hash": -7996568578702619000,
"line_mean": 27.1229508197,
"line_max": 125,
"alpha_frac": 0.5476537453,
"autogenerated": false,
"ratio": 4.267412935323383,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.012383144465937058,
"num_lines": 122
} |
from __future__ import absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement
from collections import namedtuple
from django.core.urlresolvers import reverse_lazy as reverse
from django.db.models import Sum
from django.template.response import TemplateResponse
from django.utils.functional import cached_property
from django.utils.translation import ugettext_lazy as _
from ...conf import settings
from ...forms.reports.clubs import ClubPaymentsForm, ClubPaymentsStatusForm
from ...models import ClubPayment
from ...models.utils import PaymentStatus
from ..generic import FormView
class ReportClubPaymentsView(FormView):
form_class = ClubPaymentsForm
template_name = 'domecek/reports/club_payments.html'
title = _('Club payments')
submit_label = _('Show')
back_url = reverse('domecek:reports')
def form_valid(self, form):
context = form.cleaned_data
context['form'] = form
context['payments'] = ClubPayment.objects.filter(
date__gte=context['date_start'],
date__lte=context['date_end'],
)
context['sum'] = context['payments'].aggregate(Sum('amount'))['amount__sum']
return TemplateResponse(self.request, self.template_name, self.get_context_data(**context))
class ReportClubPaymentsStatusView(FormView):
form_class = ClubPaymentsStatusForm
template_name = 'domecek/reports/club_payments_status.html'
title = _('Club payments status')
submit_label = _('Show')
back_url = reverse('domecek:reports')
ClubPaymentsStatusSums = namedtuple('ClubPaymentsStatusSums', ('registrations', 'partial', 'total'))
def form_valid(self, form):
context = form.cleaned_data
context['form'] = form
context['reports'] = [
self.Report(club, context['date'])
for club in self.request.school_year.clubs.all()
]
context['sum'] = self.ClubPaymentsStatusSums(
registrations = sum(len(r.registrations) for r in context['reports']),
partial = sum(r.partial for r in context['reports']),
total = sum(r.total for r in context['reports']),
)
return TemplateResponse(self.request, self.template_name, self.get_context_data(**context))
    class Report(object):
def __init__(self, club, d):
self.club = club
self.date = d
@cached_property
def periods(self):
return list(self.club.periods.filter(start__lte=self.date))
@cached_property
def registrations(self):
return list(self.club.registrations.filter(
created__lte=self.date,
))
RegPaymentStatuses = namedtuple('RegPaymentStatuses', ('registration', 'statuses'))
@cached_property
def registration_statuses(self):
return [
self.RegPaymentStatuses(
registration = registration,
statuses = registration.get_payment_statuses(self.date),
)
for registration in self.registrations
]
@cached_property
def partial(self):
return sum(rs.statuses.partial for rs in self.registration_statuses)
@cached_property
def total(self):
return sum(rs.statuses.total for rs in self.registration_statuses)
| {
"repo_name": "misli/django-domecek",
"path": "domecek/views/reports/clubs.py",
"copies": "1",
"size": "3511",
"license": "bsd-3-clause",
"hash": 890851834145140000,
"line_mean": 36.3510638298,
"line_max": 125,
"alpha_frac": 0.6271717459,
"autogenerated": false,
"ratio": 4.204790419161677,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5331962165061677,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement
from collections import namedtuple
from django.utils.translation import ugettext_lazy as _
from ..conf import settings
from ..utils import currency
class PaymentStatus(namedtuple('_PaymentsStatus', ('price', 'discount', 'paid'))):
@property
def balance(self):
return self.paid - (self.price - self.discount)
@property
def color(self):
if self.balance == 0:
return settings.DOMECEK_COLOR_PAID
elif self.balance < 0:
return settings.DOMECEK_COLOR_NOTPAID
else:
return settings.DOMECEK_COLOR_OVERPAID
@property
def title(self):
if self.balance == 0:
return _('paid')
elif self.balance < 0:
            return _('{} left to pay').format(currency(-self.balance))
else:
return _('{} overpaid').format(currency(self.balance))
def __repr__(self):
return 'PaymentStatus(price={price}, discount={discount}, paid={paid}, balance={balance})'.format(
price = self.price,
discount = self.discount,
paid = self.paid,
balance = self.balance,
)
def __add__(self, other):
if other == 0:
return self
return PaymentStatus(
price = self.price + other.price,
discount = self.discount + other.discount,
paid = self.paid + other.paid,
)
__radd__ = __add__
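# Example: PaymentStatus(price=300, discount=50, paid=200) has balance -50,
# so color is DOMECEK_COLOR_NOTPAID and title reads '50 left to pay' (currency
# formatted). __add__/__radd__ accept 0, so sum() over statuses works directly.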
| {
"repo_name": "misli/django-domecek",
"path": "domecek/models/utils.py",
"copies": "1",
"size": "1600",
"license": "bsd-3-clause",
"hash": -2170101302592127500,
"line_mean": 29.7692307692,
"line_max": 125,
"alpha_frac": 0.573125,
"autogenerated": false,
"ratio": 4.244031830238727,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5317156830238727,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement
from datetime import date, datetime, timedelta
from django import forms
from django.contrib.auth import get_user_model
from django.core.exceptions import ValidationError
from django.db.models import Q
from django.utils import formats, timezone
from django.utils.text import slugify
from django.utils.translation import ugettext_lazy as _, ungettext_lazy as ungettext
from json import dumps
from ..models import Leader, Place, AgeGroup, Timesheet, TimesheetPeriod
from ..models.clubs import ClubGroup, Club, ClubRegistration, ClubJournalEntry, ClubJournalLeaderEntry
from ..models.fields import DAY_OF_WEEK
from ..utils import comma_separated
from .fields import ReadonlyField
from .form import FormMixin
from .questions import QuestionsFormMixin
User = get_user_model()
class ClubFilterForm(FormMixin, forms.Form):
q = forms.CharField(label=_('Search term'), required=False)
group = forms.ModelMultipleChoiceField(queryset=None, label=_('Group'), required=False)
leader = forms.ModelMultipleChoiceField(queryset=None, label=_('Leader'), required=False)
place = forms.ModelMultipleChoiceField(queryset=None, label=_('Place'), required=False)
age_group = forms.ModelMultipleChoiceField(queryset=None, label=_('Age group'), required=False)
    day_of_week = forms.MultipleChoiceField(label=_('Day of week'),
        choices=tuple(sorted(DAY_OF_WEEK.items())), required=False)
invisible = forms.BooleanField(label=_('Show invisible'), required=False)
def __init__(self, request, *args, **kwargs):
super(ClubFilterForm, self).__init__(*args, **kwargs)
self.fields['group' ].queryset = ClubGroup.objects.all()
self.fields['leader' ].queryset = Leader.objects.filter(school_years=request.school_year).order_by('user__first_name', 'user__last_name')
self.fields['place' ].queryset = Place.objects.all()
self.fields['age_group' ].queryset = AgeGroup.objects.all()
if not request.user.is_staff:
del self.fields['invisible']
for f in self.fields:
self.fields[f].help_text=None
def filter_queryset(self, request, qs):
for word in self.cleaned_data['q'].split():
qs = qs.filter(
Q(name__icontains = word)
| Q(description__icontains = word)
)
if self.cleaned_data['group']:
qs = qs.filter(groups__in = self.cleaned_data['group'])
if self.cleaned_data['place']:
qs = qs.filter(place__in = self.cleaned_data['place'])
if self.cleaned_data['leader']:
qs = qs.filter(leaders__in = self.cleaned_data['leader'])
if self.cleaned_data['age_group']:
qs = qs.filter(age_groups__in = self.cleaned_data['age_group'])
if self.cleaned_data['day_of_week']:
qs = qs.filter(times__day_of_week__in = self.cleaned_data['day_of_week'])
if request.user.is_staff and not self.cleaned_data['invisible']:
qs = qs.filter(public=True)
return qs
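# Typical wiring in a list view (a sketch; the surrounding view code is
# assumed, not part of this module):
#
#     form = ClubFilterForm(request, request.GET)
#     if form.is_valid():
#         clubs = form.filter_queryset(request, Club.objects.all())
#
# Each selected filter narrows the queryset further; the free-text query is
# split into words and every word must match the name or the description.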
class ClubForm(FormMixin, forms.ModelForm):
class Meta:
model = Club
fields = ['description', 'risks', 'plan', 'evaluation']
class ClubJournalLeaderEntryAdminForm(forms.ModelForm):
class Meta:
model = ClubJournalLeaderEntry
fields = ['start', 'end']
def __init__(self, *args, **kwargs):
super(ClubJournalLeaderEntryAdminForm, self).__init__(*args, **kwargs)
if self.instance.timesheet.submitted:
try:
del(self.fields['start'])
del(self.fields['end'])
except KeyError:
pass
def clean(self):
cleaned_data = super(ClubJournalLeaderEntryAdminForm, self).clean()
        # keep start and end read-only once the timesheet is submitted
if self.instance.id and self.instance.timesheet.submitted:
cleaned_data['start'] = self.instance.start
cleaned_data['end'] = self.instance.end
# check entry start and end
errors = {}
club_entry = self.instance.club_entry
if cleaned_data['start'] < club_entry.start:
errors['start'] = _('The club journal entry starts at {start}').format(
start = club_entry.start,
)
if cleaned_data['end'] > club_entry.end:
errors['end'] = _('The club journal entry ends at {end}').format(
end = club_entry.end,
)
if errors:
raise ValidationError(errors)
return cleaned_data
class ClubJournalLeaderEntryForm(FormMixin, ClubJournalLeaderEntryAdminForm):
def __init__(self, *args, **kwargs):
super(ClubJournalLeaderEntryForm, self).__init__(*args, **kwargs)
self.readonly_fields = [
ReadonlyField(label=_('Club'), value=self.instance.club_entry.club.name),
ReadonlyField(label=_('Date'), value=self.instance.club_entry.date),
ReadonlyField(label=_('Leader'),value=self.instance.timesheet.leader),
]
if self.instance.timesheet.submitted:
self.readonly_fields += [
ReadonlyField(label=_('Start'), value=self.instance.start),
ReadonlyField(label=_('End'), value=self.instance.end),
]
class ClubJournalEntryAdminForm(forms.ModelForm):
class Meta:
model = ClubJournalEntry
fields = ['date', 'start', 'end', 'agenda', 'participants']
def __init__(self, *args, **kwargs):
self.club = kwargs.pop('club', None) or kwargs['instance'].club
super(ClubJournalEntryAdminForm, self).__init__(*args, **kwargs)
self.instance.club = self.club
self.fields['participants'].widget.choices.queryset = \
self.fields['participants'].widget.choices.queryset.filter(
club_registrations__club = self.instance.club,
)
self.fields['participants'].help_text = None
if self.instance.id:
self.readonly_fields = [
ReadonlyField(label=_('Date'), value=self.instance.date),
]
try:
del(self.fields['date'])
            except KeyError:
pass
if not self.instance.id:
last = self.instance.club.journal_entries.last()
if last:
last_end = last.datetime_end or last.date
else:
last_end = None
next_time = self.instance.club.get_next_time(last_end)
if next_time:
self.initial['date'] = next_time.date
self.initial['start'] = next_time.start
self.initial['end'] = next_time.end
else:
self.initial['date'] = date.today()
def clean(self):
        # check for overlapping entries
if self.cleaned_data.get('start', None) and self.cleaned_data.get('end', None):
qs = ClubJournalEntry.objects.filter(
club = self.instance.club,
date = self.cleaned_data.get('date', self.instance.date),
start__lt = self.cleaned_data['end'],
end__gt = self.cleaned_data['start'],
)
if self.instance.id:
qs = qs.exclude(id=self.instance.id)
if qs.exists():
                raise ValidationError(_('An overlapping entry has already been added to the club journal.'))
# check submitted leader entries
submitted_leader_entries = [
e for e in self.instance.all_leader_entries
if e.timesheet.submitted
]
if submitted_leader_entries:
max_start = min(e.start for e in submitted_leader_entries)
min_end = max(e.end for e in submitted_leader_entries)
errors = {}
if self.cleaned_data['start'] > max_start:
errors['start'] = _('Some submitted timesheet entries start at {start}').format(
start = max_start,
)
if self.cleaned_data['end'] < min_end:
errors['end'] = _('Some submitted timesheet entries end at {end}').format(
end = min_end,
)
if errors:
raise ValidationError(errors)
return self.cleaned_data
class ClubJournalEntryForm(FormMixin, ClubJournalEntryAdminForm):
leaders = forms.ModelMultipleChoiceField(Leader.objects, label=_('Leaders'), required=False)
alternates = forms.ModelMultipleChoiceField(Leader.objects, label=_('Alternates'), required=False)
def __init__(self, *args, **kwargs):
super(ClubJournalEntryForm, self).__init__(*args, **kwargs)
self.readonly_fields = [
ReadonlyField(label=_('Club'), value=self.club),
]
        # only allow selecting leaders or alternates whose timesheets are not yet submitted
d = self.initial['date']
leaders = self.instance.club.all_leaders
alternates = [ l for l in Leader.objects.all() if l not in leaders ]
self.fields['leaders'].widget.choices = tuple((l.id, l) for l in leaders)
self.fields['leaders'].help_text = None
self.fields['alternates'].widget.choices = tuple((l.id, l) for l in alternates)
self.fields['alternates'].help_text = None
if self.instance.id:
self.initial['leaders'] = [ l.id for l in self.instance.all_leaders ]
self.initial['alternates'] = [ l.id for l in self.instance.all_alternates ]
else:
self.initial['leaders'] = [ l.id for l in leaders ]
def clean(self):
self.cleaned_data = super(ClubJournalEntryForm, self).clean()
self.cleaned_entries = []
self.deleted_entries = []
d = self.cleaned_data.get('date', self.instance.date)
if d is None:
# no other validation makes sense without date
return self.cleaned_data
if 'start' in self.cleaned_data and 'end' in self.cleaned_data:
errors = {}
entries_by_leader = {
'leaders': dict(
(entry.timesheet.leader, entry) for entry in self.instance.all_leader_entries
if entry.timesheet.leader in self.instance.all_leaders
),
'alternates': dict(
(entry.timesheet.leader, entry) for entry in self.instance.all_leader_entries
if entry.timesheet.leader in self.instance.all_alternates
),
}
leaders_with_submitted_timesheets = {}
period = TimesheetPeriod.objects.for_date(d)
for group in ('leaders', 'alternates'):
leaders_with_submitted_timesheets[group] = []
for leader in self.cleaned_data[group]:
if leader not in entries_by_leader[group]:
# try to create new leader entry
timesheet = Timesheet.objects.for_leader_and_date(leader=leader, date=d)
if timesheet.submitted:
                            # cannot create the entry
leaders_with_submitted_timesheets[group].append(leader)
continue
entry = ClubJournalLeaderEntry()
entry.club_entry = self.instance
entry.timesheet = Timesheet.objects.for_leader_and_date(leader=leader, date=d)
entry.start = self.cleaned_data['start']
entry.end = self.cleaned_data['end']
else:
# try to update existing leader entry
entry = entries_by_leader[group].pop(leader)
if not entry.timesheet.submitted:
                            if self.cleaned_data['start'] != self.instance.start:
if entry.start == self.instance.start:
entry.start = self.cleaned_data['start']
else:
entry.start = max(entry.start, self.cleaned_data['start'])
                            if self.cleaned_data['end'] != self.instance.end:
if entry.end == self.instance.end:
entry.end = self.cleaned_data['end']
else:
entry.end = min(entry.end, self.cleaned_data['end'])
                    # store the cleaned entry, or delete it if the update left it with no duration
if entry.start < entry.end:
self.cleaned_entries.append(entry)
elif entry.id:
self.deleted_entries.append(entry)
# try to delete stale entries
for entry in entries_by_leader[group].values():
if entry.timesheet.submitted:
                        # cannot delete the entry
leaders_with_submitted_timesheets[group].append(entry.timesheet.leader)
continue
# store deleted entry
self.deleted_entries.append(entry)
if leaders_with_submitted_timesheets['leaders']:
errors['leaders'] = ungettext(
'Leader {leaders} has already submitted timesheet for {period}.',
'Leaders {leaders} have already submitted timesheet for {period}.',
len(leaders_with_submitted_timesheets['leaders'])
).format(
leaders = comma_separated(leaders_with_submitted_timesheets['leaders']),
period = period.name,
)
if leaders_with_submitted_timesheets['alternates']:
errors['alternates'] = ungettext(
'Alternate {leaders} has already submitted timesheet for {period}.',
'Alternates {leaders} have already submitted timesheet for {period}.',
len(leaders_with_submitted_timesheets['alternates'])
).format(
leaders = comma_separated(leaders_with_submitted_timesheets['alternates']),
period = period.name,
)
if errors:
raise ValidationError(errors)
return self.cleaned_data
def save(self, commit=True):
self.instance = super(ClubJournalEntryForm, self).save(commit)
for entry in self.cleaned_entries:
entry.club_entry = self.instance
entry.save()
for entry in self.deleted_entries:
entry.delete()
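# Note on the intended flow (a summary, not new behaviour): clean() collects
# leader entries to create or update in self.cleaned_entries and stale ones
# in self.deleted_entries; save() then applies both lists, so callers only
# need the usual form.is_valid() / form.save() sequence.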
class ClubRegistrationPublicForm(FormMixin, QuestionsFormMixin, forms.ModelForm):
from .parent import ParentForm as _ParentForm
from .participant import ParticipantForm
from .user import UserFormMixin
from django.contrib.auth.forms import UserCreationForm
class ParentForm(UserFormMixin, _ParentForm):
pass
def __init__(self, club, *args, **kwargs):
self.club = club
self.questions = self.club.all_questions
super(ClubRegistrationPublicForm, self).__init__(*args, **kwargs)
kwargs['prefix'] = 'parent'
self.parent_form = self.ParentForm(user=User(), *args, **kwargs)
kwargs['prefix'] = 'participant'
self.participant_form = self.ParticipantForm(user=User(), *args, **kwargs)
kwargs['prefix'] = 'user'
self.user_form = self.UserCreationForm(*args, **kwargs)
self.parent_form.fields['email'].required = True
del self.participant_form.fields['parents']
def is_valid(self):
return super(ClubRegistrationPublicForm, self).is_valid() \
and self.parent_form.is_valid() \
and self.participant_form.is_valid() \
and self.user_form.is_valid()
def save(self, commit=True):
user = self.user_form.save()
parent = self.parent_form.instance
participant = self.participant_form.instance
user.first_name = parent.first_name
user.last_name = parent.last_name
user.email = parent.email
user.save()
parent.user = user
parent.save()
participant.user = user
participant.save()
participant.parents.add(parent)
self.instance.club = self.club
self.instance.participant = participant
self.instance.age_group = participant.age_group
self.instance.citizenship = participant.citizenship
self.instance.insurance = participant.insurance
self.instance.school = participant.school
self.instance.school_other = participant.school_other
self.instance.school_class = participant.school_class
self.instance.health = participant.health
return super(ClubRegistrationPublicForm, self).save(commit)
save.alters_data = True
class Meta:
model = ClubRegistration
fields = ()
class ClubRegistrationBaseForm(QuestionsFormMixin, forms.ModelForm):
def __init__(self, *args, **kwargs):
if kwargs.get('instance', None):
self.club = kwargs['instance'].club
self.participant = kwargs['instance'].participant
else:
self.club = kwargs.pop('club')
self.participant = kwargs.pop('participant')
self.questions = self.club.all_questions
super(ClubRegistrationBaseForm, self).__init__(*args, **kwargs)
if not self.instance.id:
for attr in ['age_group', 'citizenship', 'insurance', 'school', 'school_other', 'school_class', 'health']:
self.initial[attr] = getattr(self.participant, attr)
self.fields['age_group'].widget.choices.queryset = self.club.age_groups
class ClubRegistrationForm(FormMixin, ClubRegistrationBaseForm):
def __init__(self, *args, **kwargs):
super(ClubRegistrationForm, self).__init__(*args, **kwargs)
self.readonly_fields = [
ReadonlyField(label=_('Club'), value=self.club),
ReadonlyField(label=_('Participant'), value=self.participant),
]
def save(self, commit=True):
self.instance.club = self.club
self.instance.participant = self.participant
return super(ClubRegistrationForm, self).save(commit)
save.alters_data = True
class Meta:
model = ClubRegistration
fields = [
'age_group', 'citizenship', 'insurance',
'school', 'school_other', 'school_class', 'health',
]
class ClubRegistrationAdminForm(ClubRegistrationBaseForm):
class Meta:
model = ClubRegistration
fields = [
'club', 'participant', 'canceled',
'age_group', 'citizenship', 'insurance',
'school', 'school_other', 'school_class', 'health',
'discount', 'explanation',
]
| {
"repo_name": "misli/django-domecek",
"path": "domecek/forms/clubs.py",
"copies": "1",
"size": "19194",
"license": "bsd-3-clause",
"hash": -3430397390144174000,
"line_mean": 41,
"line_max": 148,
"alpha_frac": 0.5804939043,
"autogenerated": false,
"ratio": 4.2,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.52804939043,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement
from datetime import date
from ..models import ClubRegistration, EventRegistration
from .generic import TemplateView
class SummaryView(TemplateView):
summary = True
template_name = 'domecek/summary.html'
def get_context_data(self, **kwargs):
context = super(SummaryView, self).get_context_data(**kwargs)
context['user'] = self.request.user
context['payment_status'] = sum(
reg.payment_statuses.partial
for reg in ClubRegistration.objects.filter(
club__school_year = self.request.school_year,
participant__user = self.request.user,
)
) + sum(
reg.payment_status
for reg in EventRegistration.objects.filter(
event__school_year = self.request.school_year,
participant__user = self.request.user,
)
)
if self.request.leader:
context['clubs'] = self.request.leader.clubs.filter(school_year=self.request.school_year)
context['events'] = self.request.leader.events.filter(school_year=self.request.school_year)
context['timesheets'] = self.request.leader.timesheets.filter(submitted=False, period__end__lte=date.today())
return context
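# The sum() calls above rely on PaymentStatus.__radd__ accepting the plain
# integer 0 that sum() starts with (see domecek/models/utils.py), so an
# empty registration queryset yields 0 instead of raising a TypeError.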
| {
"repo_name": "misli/django-domecek",
"path": "domecek/views/summary.py",
"copies": "1",
"size": "1408",
"license": "bsd-3-clause",
"hash": -4301671805149510700,
"line_mean": 38.1111111111,
"line_max": 125,
"alpha_frac": 0.6420454545,
"autogenerated": false,
"ratio": 4.104956268221574,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.03429349148129812,
"num_lines": 36
} |
from __future__ import absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement
from datetime import datetime, timedelta
from django import forms
from django.core.exceptions import ValidationError
from django.db.models import Q
from django.utils import formats, timezone
from django.utils.translation import ugettext_lazy as _, ungettext_lazy as ungettext
from json import dumps
from ..forms.widgets import CheckboxSelectMultipleBootstrap
from ..models import TimesheetEntry
from ..models.fields import DAY_OF_WEEK
from ..utils import comma_separated
from .fields import ReadonlyField
from .form import FormMixin
class TimesheetEntryAdminForm(forms.ModelForm):
"""
Validation of ManyToManyField must be performed in form.clean()
Always use TimesheetEntryAdminForm to change TimesheetEntry
"""
class Meta:
model = TimesheetEntry
fields = ('date', 'start', 'end', 'entry_type', 'description')
def __init__(self, *args, **kwargs):
timesheet = kwargs.pop('timesheet', None) or kwargs['instance'].timesheet
super(TimesheetEntryAdminForm, self).__init__(*args, **kwargs)
self.instance.timesheet = timesheet
        # make date, start and end read-only if the timesheet is already submitted
if self.instance.id and self.instance.timesheet.submitted:
self.readonly = True
self.readonly_fields = [
ReadonlyField(label=_('Date'), value=self.instance.date),
ReadonlyField(label=_('Start'), value=self.instance.start),
ReadonlyField(label=_('End'), value=self.instance.end),
ReadonlyField(label=_('Entry type'),value=self.instance.entry_type),
]
self.fields = {'description': self.fields['description']}
else:
self.readonly = False
def clean_date(self):
if self.cleaned_data['date'] < self.instance.timesheet.period.start:
raise ValidationError(_('The timesheet period {period} starts {start}').format(
period = self.instance.timesheet.period.name,
start = formats.date_format(self.instance.timesheet.period.start, "SHORT_DATE_FORMAT"),
))
if self.cleaned_data['date'] > self.instance.timesheet.period.end:
raise ValidationError(_('The timesheet period {period} ends {end}').format(
period = self.instance.timesheet.period.name,
end = formats.date_format(self.instance.timesheet.period.end, "SHORT_DATE_FORMAT"),
))
return self.cleaned_data['date']
def clean(self):
        # check for overlapping entries
if 'date' in self.cleaned_data and 'start' in self.cleaned_data and 'end' in self.cleaned_data:
qs = self.instance.timesheet.timesheet_entries.filter(
date = self.cleaned_data['date'],
start__lt = self.cleaned_data['end'],
end__gt = self.cleaned_data['start'],
)
if self.instance.id:
qs = qs.exclude(id=self.instance.id)
if qs.exists():
                raise ValidationError(_('An overlapping entry has already been added to the timesheet.'))
return self.cleaned_data
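# The overlap check in clean() above is the classic interval predicate: two
# entries collide iff existing.start < new.end and existing.end > new.start.
# A standalone sketch with assumed times:
#
#     from datetime import time
#     a = (time(10, 0), time(11, 0))
#     b = (time(10, 30), time(11, 30))
#     overlaps = a[0] < b[1] and a[1] > b[0]  # True: they share 10:30-11:00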
class TimesheetEntryForm(FormMixin, TimesheetEntryAdminForm):
pass
| {
"repo_name": "misli/django-domecek",
"path": "domecek/forms/timesheets.py",
"copies": "1",
"size": "3397",
"license": "bsd-3-clause",
"hash": -6645191351583758000,
"line_mean": 42,
"line_max": 125,
"alpha_frac": 0.6464527524,
"autogenerated": false,
"ratio": 4.327388535031847,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.018825770515494077,
"num_lines": 79
} |
from __future__ import absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement
from django.conf import settings
from django.conf.urls import include, url
from django.views.generic.base import RedirectView
from . import views
from .conf import settings
# dictionary of all DOMECEK_URL_* attributes of settings
DOMECEK_URL = dict(
(attr.lower()[len('DOMECEK_URL_'):], getattr(settings, attr))
for attr in dir(settings) if attr.startswith('DOMECEK_URL_')
)
def d_url(pattern, name):
return url(pattern.format(**DOMECEK_URL), getattr(views, name), name=name)
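# Example expansion (assuming settings.DOMECEK_URL_CLUBS == 'clubs'):
#
#     d_url(r'^{clubs}/$', 'club_list')
#     # == url(r'^clubs/$', views.club_list, name='club_list')
#
# Note the doubled braces in patterns such as {{1,13}} below: they escape
# str.format() placeholders so the regex quantifiers survive formatting.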
urlpatterns = [
url(r'^$', RedirectView.as_view(url='{summary}/'.format(**DOMECEK_URL), permanent=True), name='index'),
d_url(r'^{summary}/$', 'summary'),
d_url(r'^{create_account}/$', 'user_create'),
d_url(r'^{user}/$', 'user_update'),
d_url(r'^{user}/{password}/$', 'user_password'),
d_url(r'^{registrations}/$', 'registrations'),
d_url(r'^{participants}/$', 'participant_list'),
d_url(r'^{participants}/{add}/$', 'participant_create'),
d_url(r'^{participants}/(?P<pk>[0-9]+)/$', 'participant_update'),
d_url(r'^{parent}/$', 'parent_create'),
d_url(r'^{parent}/(?P<pk>[0-9]+)/$', 'parent_update'),
d_url(r'^{clubs}/$', 'club_list'),
d_url(r'^{clubs}/{mine}/$', 'club_list_mine'),
d_url(r'^{clubs}/{alternating}/$', 'club_alternating'),
d_url(r'^{clubs}/(?P<pk>[0-9]+)/$', 'club_detail'),
d_url(r'^{clubs}/(?P<pk>[0-9]+)/{participants}/$', 'club_participants'),
d_url(r'^{clubs}/(?P<pk>[0-9]+)/{journal}/$', 'club_journal'),
d_url(r'^{clubs}/(?P<pk>[0-9]+)/{edit}/$', 'club_update'),
d_url(r'^{clubs}/(?P<club>[0-9]+)/{registration}/$', 'club_registration_public'),
d_url(r'^{clubs}/(?P<club>[0-9]+)/(?P<participant>[0-9]+)/$', 'club_registration_form'),
d_url(r'^{clubs}/{registration}/(?P<slug>[^.]+).pdf$', 'club_registration_pdf'),
d_url(r'^{clubs}/{registration}/(?P<pk>[0-9]+)/{cancel}/$', 'club_registration_cancel'),
d_url(r'^{clubs}/{journal}/{entry}/{add}/(?P<club>[0-9]+)/$', 'clubjournalentry_create'),
d_url(r'^{clubs}/{journal}/{entry}/(?P<pk>[0-9]+)/$', 'clubjournalentry_update'),
d_url(r'^{clubs}/{journal}/{entry}/(?P<pk>[0-9]+)/{delete}/$', 'clubjournalentry_delete'),
d_url(r'^{timesheets}/{journal}/{entry}/(?P<pk>[0-9]+)/$', 'clubjournalleaderentry_update'),
d_url(r'^{timesheets}/{journal}/{entry}/(?P<pk>[0-9]+)/{delete}/$', 'clubjournalleaderentry_delete'),
d_url(r'^{events}/$', 'event_list'),
d_url(r'^{events}/{mine}/$', 'event_list_mine'),
d_url(r'^{events}/(?P<pk>[0-9]+)/$', 'event_detail'),
d_url(r'^{events}/(?P<event_type>[^/]+)/$', 'event_list'),
d_url(r'^{events}/(?P<pk>[0-9]+)/{participants}/$', 'event_participants'),
d_url(r'^{events}/(?P<pk>[0-9]+)/{edit}/$', 'event_update'),
d_url(r'^{events}/(?P<event>[0-9]+)/{registration}/$', 'event_registration_public'),
d_url(r'^{events}/(?P<event>[0-9]+)/(?P<participant>[0-9]+)/$', 'event_registration_form'),
d_url(r'^{events}/{registration}/(?P<slug>[^.]+).pdf$', 'event_registration_pdf'),
d_url(r'^{events}/{registration}/(?P<pk>[0-9]+)/{cancel}/$', 'event_registration_cancel'),
d_url(r'^{leaders}/$', 'leader_list'),
d_url(r'^{timesheets}/$', 'timesheet_list'),
d_url(r'^{timesheets}/(?P<pk>[0-9]+)/$', 'timesheet_detail'),
d_url(r'^{timesheets}/(?P<pk>[0-9]+)/{submit}/$', 'timesheet_submit'),
d_url(r'^{timesheets}/(?P<pk>[0-9]+)/{add}/$', 'timesheetentry_create'),
d_url(r'^{timesheets}/{entry}/(?P<pk>[0-9]+)/$', 'timesheetentry_update'),
d_url(r'^{timesheets}/{entry}/(?P<pk>[0-9]+)/{delete}/$', 'timesheetentry_delete'),
d_url(r'^{login}/$', 'user_login'),
d_url(r'^{logout}/$', 'user_logout'),
d_url(r'^{password_reset}/$', 'password_reset'),
d_url(r'^{password_reset}/done/$', 'password_reset_done'),
d_url(r'^{password_reset}/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{{1,13}}-[0-9A-Za-z]{{1,20}})/$',
'password_reset_confirm'),
d_url(r'^{password_reset}/complete/$', 'password_reset_complete'),
d_url(r'^{school_year}/$', 'school_year'),
d_url(r'^{support}/$', 'support'),
d_url(r'^{reports}/$', 'reports'),
d_url(r'^{reports}/{clubs}/{payments}/$', 'report_club_payments'),
d_url(r'^{reports}/{clubs}/{payments_status}/$', 'report_club_payments_status'),
d_url(r'^{reports}/{events}/{payments}/$', 'report_event_payments'),
d_url(r'^{reports}/{events}/{payments_status}/$', 'report_event_payments_status'),
]
| {
"repo_name": "misli/django-domecek",
"path": "domecek/urls.py",
"copies": "1",
"size": "5921",
"license": "bsd-3-clause",
"hash": 7810270007500836000,
"line_mean": 64.7888888889,
"line_max": 125,
"alpha_frac": 0.4538084783,
"autogenerated": false,
"ratio": 3.4166185804962494,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9274650634367849,
"avg_score": 0.019155284885680204,
"num_lines": 90
} |
from __future__ import absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement
from django.conf import settings
from django.contrib import admin
from django.contrib.admin.util import get_model_from_relation
from django.core.urlresolvers import reverse
from django.utils.encoding import smart_text, force_text
from django.utils.translation import ugettext_lazy as _
from django_mptt_admin.admin import DjangoMpttAdmin
from mptt.models import TreeForeignKey
from cms.utils import get_language_from_request
from . import models
from .utils import get_form, get_admin
class CategoryTreeListFilter(admin.FieldListFilter):
def __init__(self, field, request, params, model, model_admin, field_path):
self.lookup_kwarg_tid = '%s__tree_id' % (field_path)
self.lookup_kwarg_lft = '%s__lft__gte' % (field_path)
self.lookup_kwarg_rght = '%s__rght__lte' % (field_path)
self.lookup_kwarg_isnull = '%s__isnull' % field_path
self.lookup_val_tid = request.GET.get(self.lookup_kwarg_tid, None)
self.lookup_val_lft = request.GET.get(self.lookup_kwarg_lft, None)
self.lookup_val_rght = request.GET.get(self.lookup_kwarg_rght, None)
self.lookup_val_isnull = request.GET.get(self.lookup_kwarg_isnull, None)
self.lookup_choices = models.Category.objects.order_by('tree_id', 'lft')
super(CategoryTreeListFilter, self).__init__(
field, request, params, model, model_admin, field_path
)
        if hasattr(field, 'verbose_name'):
            self.lookup_title = field.verbose_name
        else:
            self.lookup_title = get_model_from_relation(field)._meta.verbose_name
self.title = self.lookup_title
def has_output(self):
if hasattr(self.field, 'rel') and self.field.null:
extra = 1
else:
extra = 0
return len(self.lookup_choices) + extra > 1
def expected_parameters(self):
return [self.lookup_kwarg_lft, self.lookup_kwarg_rght, self.lookup_kwarg_isnull]
def choices(self, cl):
from django.contrib.admin.views.main import EMPTY_CHANGELIST_VALUE
yield {
'selected': self.lookup_val_tid is None
and self.lookup_val_lft is None
and self.lookup_val_rght is None
and not self.lookup_val_isnull,
'query_string': cl.get_query_string({}, [
self.lookup_kwarg_tid,
self.lookup_kwarg_lft,
self.lookup_kwarg_rght,
self.lookup_kwarg_isnull,
]),
'display': _('All'),
}
for val in self.lookup_choices:
yield {
'selected': self.lookup_val_lft == val.lft and self.lookup_val_rght == val.rght,
'query_string': cl.get_query_string({
self.lookup_kwarg_tid: val.tree_id,
self.lookup_kwarg_lft: val.lft,
self.lookup_kwarg_rght: val.rght,
}, [self.lookup_kwarg_isnull]),
'display': '{}{}'.format(val.level*'- ', val),
}
if self.field.null:
yield {
'selected': bool(self.lookup_val_isnull),
'query_string': cl.get_query_string({
self.lookup_kwarg_isnull: 'True',
}, [
self.lookup_kwarg_tid,
self.lookup_kwarg_lft,
self.lookup_kwarg_rght,
]),
'display': EMPTY_CHANGELIST_VALUE,
}
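# How the filter works (an MPTT nested-set sketch, values assumed): every
# descendant of a category sits in the same tree_id with lft/rght bounds
# inside its parent's, so choosing a category produces a query string like
#
#     ?parent__tree_id=1&parent__lft__gte=2&parent__rght__lte=7
#
# which matches objects whose parent lies anywhere in the chosen subtree.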
class CategoryAdmin(admin.ModelAdmin):
form = get_form('Category')
ordering = ['tree_id', 'lft']
list_display = ['name', 'parent', 'active']
list_filter = [('parent', CategoryTreeListFilter)]
search_fields = ['name', 'summary', 'description']
prepopulated_fields = {'slug': ('name',)}
trigger_save_after_move = True
    def lookup_allowed(self, key, value):
        if key in ['parent__lft__gte', 'parent__rght__lte', 'parent__tree_id'] and value.isdigit():
            return True
        return super(CategoryAdmin, self).lookup_allowed(key, value)
class ProductPackageInlineAdmin(admin.TabularInline):
model = models.ProductPackage
extra = 0
class ProductAdmin(admin.ModelAdmin):
form = get_form('Product')
ordering = ['tree_id', 'lft']
list_display = ['name', 'parent', 'active', 'multiple', 'unit', 'price', 'tax_rate']
list_editable = ['active', 'price', 'tax_rate']
list_filter = ['active', ('parent', CategoryTreeListFilter)]
search_fields = ['name', 'summary', 'description']
inlines = [ProductPackageInlineAdmin]
filter_horizontal = ['related']
prepopulated_fields = {'slug': ('name',)}
    def lookup_allowed(self, key, value):
        if key in ['parent__lft__gte', 'parent__rght__lte', 'parent__tree_id'] and value.isdigit():
            return True
        return super(ProductAdmin, self).lookup_allowed(key, value)
class NodeAdmin(DjangoMpttAdmin):
def has_add_permission(self, request):
# Nodes must always be added as Product or Category
return False
class OrderStateAdmin(admin.ModelAdmin):
pass
class CartItemInlineAdmin(admin.TabularInline):
model = models.CartItem
extra = 0
class CartAdmin(admin.ModelAdmin):
ordering = ['-last_updated']
inlines = [CartItemInlineAdmin]
readonly_fields = ['last_updated', 'get_price']
class VoucherAdmin(admin.ModelAdmin):
filter_horizontal = ['categories', 'delivery_methods', 'payment_methods']
ordering = ['-valid_from']
prepopulated_fields = {'slug': ('name',)}
class OrderAdmin(admin.ModelAdmin):
actions = ('send_customer_mail',)
readonly_fields = ['slug', 'cart_link']
list_filter = ['state']
list_display = ['id', 'date', 'first_name', 'last_name', 'email',
'phone', 'address', 'delivery_method', 'payment_method',
'state', 'price', 'cart_link']
list_editable = ['state']
search_fields = ['first_name', 'last_name', 'email', 'phone', 'address']
def cart_link(self, order):
return '<a href="{}">{}</a>'.format(
reverse('admin:cmsplugin_shop_cart_change', args=(order.cart_id,)),
order.cart,
)
cart_link.short_description = _('cart')
cart_link.allow_tags = True
def send_customer_mail(self, request, queryset):
for order in queryset.all():
try:
order.send_customer_mail()
except Exception as e:
self.message_user(request,
_('Failed to send notification e-mail to {}').format(order.email)
)
send_customer_mail.short_description = _('Resend notification e-mail to the customer')
class DeliveryMethodAdmin(admin.ModelAdmin):
pass
class PaymentMethodAdmin(admin.ModelAdmin):
pass
| {
"repo_name": "misli/cmsplugin-shop",
"path": "cmsplugin_shop/admins.py",
"copies": "1",
"size": "7086",
"license": "bsd-3-clause",
"hash": -6726605387474755000,
"line_mean": 35.7150259067,
"line_max": 125,
"alpha_frac": 0.5907423088,
"autogenerated": false,
"ratio": 3.8679039301310043,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.49586462389310043,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement
from django.conf import settings
from django.core.mail import send_mail, get_connection
from django.template import Context as _Context
from django.template.loader import get_template
# Context for with statement (will be in Django 1.7)
class Context(_Context):
def update(self, data):
super(Context, self).update(data)
return self
def __exit__(self, *args):
self.pop()
def __enter__(self):
pass
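# Usage sketch: update() pushes a new layer and returns self, so the result
# can be used directly as a context manager that pops the layer on exit:
#
#     with context.update({'order': order}):
#         body = template.render(context)
#     # the 'order' layer has been popped again here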
class TemplateMailer(object):
template = None
template_name = None
from_email = settings.SERVER_EMAIL
recipient_list = None
auth_user = None
auth_password = None
connection = None
fail_silently = False
context = None
context_class = Context
def __init__(self, **kwargs):
for name in kwargs:
if hasattr(self, name):
setattr(self, name, kwargs.pop(name))
self.kwargs = kwargs
if not self.template:
self.template = self.get_template()
if not self.context:
self.context = self.context_class(kwargs)
if not self.connection:
self.connection = get_connection(
username=self.auth_user,
password=self.auth_password,
fail_silently=self.fail_silently
)
def get_template(self):
return get_template(self.template_name)
def get_context(self, dictionary):
context = self.context_class(**self.kwargs)
context.update(dictionary)
return context
def send_mail(self, from_email=None, recipient_list=None, **kwargs):
with self.context.update(kwargs):
content = self.template.render(self.context).split('\n', 1)
subject = content[0]
try:
message = content[1]
            except IndexError:
message = ''
send_mail(
subject = subject,
message = message,
from_email = from_email or self.from_email,
recipient_list = recipient_list or self.recipient_list,
connection = self.connection,
)
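# A minimal usage sketch (template name and keyword arguments are assumed):
#
#     mailer = TemplateMailer(
#         template_name='shop/order_mail.txt',
#         recipient_list=['shop@example.com'],
#     )
#     mailer.send_mail(order=order)
#
# The first line of the rendered template becomes the subject and the rest
# becomes the message body.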
| {
"repo_name": "misli/django-domecek",
"path": "domecek/templatemailer.py",
"copies": "1",
"size": "2287",
"license": "bsd-3-clause",
"hash": -8940577900569347000,
"line_mean": 29.9054054054,
"line_max": 125,
"alpha_frac": 0.5889811981,
"autogenerated": false,
"ratio": 4.274766355140187,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.02596425459699375,
"num_lines": 74
} |
from __future__ import absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
def S(name, default_value):
return getattr(settings, 'CMSPLUGIN_SHOP_'+name, default_value)
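# Example: S('PRICE_TYPE', 'gross') returns settings.CMSPLUGIN_SHOP_PRICE_TYPE
# when that attribute is defined and falls back to 'gross' otherwise.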
AUTH_USER_MODEL = settings.AUTH_USER_MODEL
LOCALECONV = S('LOCALECONV', {
'currency_symbol': '$',
'int_curr_symbol': 'USD ',
})
DECIMAL_PLACES = S('PRICE_DECIMAL_PLACES', 2)
MAX_DIGITS = S('PRICE_MAX_DIGITS', 9)
TAX_RATES = S('TAX_RATES', {0:_('no tax')})
DEFAULT_TAX_RATE = S('DEFAULT_TAX_RATE', TAX_RATES.keys()[0])
PRICE_TYPE = S('PRICE_TYPE', 'gross')
PRODUCT_TEMPLATES = S('PRODUCT_TEMPLATES', (('default', _('default')),))
CATEGORY_TEMPLATES = S('CATEGORY_TEMPLATES', (('default', _('default')),))
CART_EXPIRY_DAYS = S('CART_EXPIRY_DAYS', 1)
SESSION_KEY_CART = S('SESSION_KEY_CART', 'cmsplugin_shop_cart_id')
SHOP_EMAIL = S('EMAIL', settings.SERVER_EMAIL)
SEND_MAIL_KWARGS = S('SEND_MAIL_KWARGS', {})
INITIAL_ORDER_STATE = S('INITIAL_ORDER_STATE', 'new')
PROFILE_ATTRIBUTE = getattr(settings, 'AUTH_USER_PROFILE_ATTRIBUTE', 'profile')
| {
"repo_name": "misli/cmsplugin-shop",
"path": "cmsplugin_shop/settings.py",
"copies": "1",
"size": "1250",
"license": "bsd-3-clause",
"hash": -6140712433513801000,
"line_mean": 32.7837837838,
"line_max": 125,
"alpha_frac": 0.6512,
"autogenerated": false,
"ratio": 3.0637254901960786,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.42149254901960787,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement
from django.conf.urls import url as urls_url
from django.contrib import admin
from django.contrib.admin.templatetags.admin_list import _boolean_icon
from django.core.urlresolvers import reverse
from django.db.models import Count, Sum
from django.http import HttpResponseRedirect
from django.utils.encoding import smart_text
from django.utils.functional import cached_property
from django.utils.html import format_html
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext_lazy as _
from ..conf import settings
from ..forms.events import EventRegistrationAdminForm
from ..models import *
from ..utils import currency, comma_separated
from .export import AdminExportMixin
from .filters import SchoolYearListFilter, EventTypeListFilter, EventListFilter, LeaderListFilter
class EventTypeAttachmentInlineAdmin(admin.TabularInline):
model = EventTypeAttachment
extra = 3
class EventTypeAdmin(admin.ModelAdmin):
list_display = ('name', 'order')
list_editable = ('order',)
inlines = (
EventTypeAttachmentInlineAdmin,
)
class EventGroupAdmin(admin.ModelAdmin):
list_display = ('name', 'order')
list_editable = ('order',)
class EventAttachmentInlineAdmin(admin.TabularInline):
model = EventAttachment
extra = 3
class EventAdmin(AdminExportMixin, admin.ModelAdmin):
list_display = (
'id', 'start_date', 'start_time', 'end_date', 'end_time',
'name', 'event_type', 'get_groups_list', 'get_leaders_list', 'place', 'public', 'reg_active',
'get_registrations_link', 'icon', 'note',
)
list_editable = ('public', 'reg_active', 'note')
list_filter = (
('school_year', SchoolYearListFilter),
('event_type', EventTypeListFilter),
'age_groups',
'groups',
('leaders', LeaderListFilter),
)
inlines = (
EventAttachmentInlineAdmin,
)
filter_horizontal = ('age_groups', 'groups', 'leaders')
date_hierarchy = 'start_date'
actions = (
'publish', 'unpublish',
'allow_registration', 'disallow_registration',
)
search_fields = ('name', 'description')
save_as = True
def get_queryset(self, request):
return super(EventAdmin, self).get_queryset(request)\
.annotate(registrations_count=Count('registrations'))
def get_form(self, request, obj=None, **kwargs):
        form = super(EventAdmin, self).get_form(request, obj=obj, **kwargs)
if obj:
school_year = obj.school_year
else:
school_year = request.school_year
leaders_choices = form.base_fields['leaders'].widget.widget.choices
leaders_choices.queryset = leaders_choices.queryset.filter(school_years = school_year)
form.base_fields['leaders'].choices = leaders_choices
return form
def publish(self, request, queryset):
Event.objects.filter(id__in=[reg['id'] for reg in queryset.values('id')]).update(public = True)
self.message_user(request, _('Selected events were published.'))
publish.short_description = _('Publish selected events')
def unpublish(self, request, queryset):
Event.objects.filter(id__in=[reg['id'] for reg in queryset.values('id')]).update(public = False)
self.message_user(request, _('Selected events were unpublished.'))
unpublish.short_description = _('Unpublish selected events')
def allow_registration(self, request, queryset):
Event.objects.filter(id__in=[reg['id'] for reg in queryset.values('id')]).update(reg_active = True)
self.message_user(request, _('Registration was allowed for selected events.'))
allow_registration.short_description = _('Allow registration for selected events')
def disallow_registration(self, request, queryset):
Event.objects.filter(id__in=[reg['id'] for reg in queryset.values('id')]).update(reg_active = False)
self.message_user(request, _('Registration was disallowed for selected events.'))
disallow_registration.short_description = _('Disallow registration for selected events')
def get_registrations_link(self, obj):
icon = False
if obj.registrations_count == 0:
title = _('There are no registrations for this event.')
elif obj.min_count is not None and obj.registrations_count < obj.min_count:
title = _('The number of registrations is lower than {}.').format(obj.min_count)
elif obj.max_count is not None and obj.registrations_count > obj.max_count:
title = _('The number of registrations is greater than {}.').format(obj.max_count)
else:
icon = True
title = ''
return '<a href="{url}" title="{title}">{icon} {count}</a>'.format(
url = reverse('admin:{}_{}_changelist'.format(
EventRegistration._meta.app_label,
EventRegistration._meta.model_name,
)) + '?event={}'.format(obj.id),
title = title,
icon = _boolean_icon(icon),
count = obj.registrations_count,
)
get_registrations_link.short_description = _('registrations')
get_registrations_link.admin_order_field = 'registrations_count'
get_registrations_link.allow_tags = True
def icon(self, obj):
return obj.photo and '<a href="{admin_url}"><img src="{icon_url}" alt=""/>'.format(
admin_url = obj.photo.get_admin_url_path(),
icon_url = obj.photo.icons['48'],
) or ''
icon.allow_tags = True
icon.short_description = _('photo')
class EventRegistrationAdmin(AdminExportMixin, admin.ModelAdmin):
form = EventRegistrationAdminForm
list_display = (
'id', 'get_download_tag', 'event', 'participant', 'parents_link',
'discount', 'get_payments_html', 'created',
'cancel_request', 'canceled',
)
list_export = (
'id', 'created', 'event', 'age_group',
'participant__first_name', 'participant__last_name', 'participant__birth_num',
'participant__email', 'participant__phone', 'school_name', 'school_class',
'participant__street', 'participant__city', 'participant__postal_code', 'citizenship', 'insurance', 'health',
'parents', 'parent_emails',
'get_payments_paid', 'get_payments_balance',
)
list_filter = (
('event__school_year', SchoolYearListFilter),
('event__event_type', EventTypeListFilter),
('event', EventListFilter),
('event__leaders', LeaderListFilter),
)
actions = ('send_mail',)
search_fields = (
'participant__first_name', 'participant__last_name',
'participant__birth_num', 'participant__email',
'participant__parents__first_name', 'participant__parents__last_name', 'participant__parents__email',
'school__name', 'event__name',
)
ordering = ('-cancel_request', '-created')
raw_id_fields = ('event', 'participant')
def has_add_permission(self, request):
return False
def get_fields(self, request, obj=None):
fields = super(EventRegistrationAdmin, self).get_fields(request, obj)
if obj:
fields += ['q_'+q.name for q in obj.event.all_questions]
return fields
def parents(self, obj):
return comma_separated(obj.participant.all_parents)
parents.short_description = _('parents')
def parent_emails(self, obj):
return ', '.join(
'{} <{}>'.format(p.full_name, p.email)
for p in obj.participant.all_parents if p.email
)
parent_emails.short_description = _('parent emails')
def school_name(self, obj):
return obj.school_name
school_name.short_description = _('school')
def get_download_tag(self, obj):
return '<a href="{}">PDF</a>'.format(reverse('admin:domecek_eventregistration_pdf', args=(obj.id,)))
get_download_tag.short_description = _('download')
get_download_tag.allow_tags = True
def get_fullname(self, obj):
return '{} {}'.format(obj.participant.first_name, obj.participant.last_name)
get_fullname.short_description = _('full name')
@cached_property
def parents_url(self):
return reverse('admin:domecek_parent_changelist')
def parents_link(self, obj):
return '<a href="{url}?participants__id={participant}">{names}</a>'.format(
url = self.parents_url,
participant = obj.participant.id,
names = ', '.join(smart_text(parent) for parent in obj.participant.all_parents),
)
parents_link.allow_tags = True
parents_link.short_description = _('parents')
def get_payments_paid(self, obj):
return obj.get_payment_status().paid
get_payments_paid.short_description = _('paid')
def get_payments_balance(self, obj):
return obj.get_payment_status().balance
get_payments_balance.short_description = _('balance')
def get_payments_html(self, obj):
status = obj.get_payment_status()
return format_html('<a target="_blank" style="color: {color}" href="{href_list}" title="{title}"><b>{amount}</b></a> ' \
'<a target="_blank" class="addlink" href="{href_add}" style="background-position: 0 0" title="{add}"></a>',
color = status.color,
href_list = reverse('admin:domecek_eventpayment_changelist') + '?registration={}'.format(obj.id),
href_add = reverse('admin:domecek_eventpayment_add') + '?registration={}'.format(obj.id),
title = status.title,
add = _('add payment'),
amount = currency(status.paid),
)
get_payments_html.allow_tags = True
get_payments_html.short_description = _('event payments')
def get_urls(self):
urls = super(EventRegistrationAdmin, self).get_urls()
return [
urls_url(r'(?P<reg_id>\d+).pdf$', self.admin_site.admin_view(self.pdf), name='domecek_eventregistration_pdf'),
] + urls
def pdf(self, request, reg_id):
from ..views.events import EventRegistrationPdfView
return EventRegistrationPdfView.as_view()(request, pk=reg_id)
def send_mail(self, request, queryset):
for registration in queryset.all():
recipients = registration.all_recipients
if recipients:
registration.send_mail()
self.message_user(
request,
_('Registration {registration} ({id}) successfully sent to {recipients}.').format(
registration = registration,
id = registration.id,
recipients = comma_separated(recipients),
),
)
else:
self.message_user(
request,
_('Registration {registration} ({id}) has no recipients.').format(
registration = registration,
id = registration.id,
),
)
send_mail.short_description = _('Send selected registrations by email')
class EventPaymentAdmin(AdminExportMixin, admin.ModelAdmin):
list_display = ('registration', 'date', 'amount')
list_filter = (
('registration__event__school_year', SchoolYearListFilter),
('registration__event__event_type', EventTypeListFilter),
('registration__event', EventListFilter),
('registration__event__leaders', LeaderListFilter),
)
search_fields = ('registration__event__name', 'registration__participant__first_name', 'registration__participant__last_name',
'registration__participant__birth_num')
date_hierarchy = 'date'
ordering = ('-date',)
raw_id_fields = ('registration',)
| {
"repo_name": "misli/django-domecek",
"path": "domecek/admin/events.py",
"copies": "1",
"size": "12203",
"license": "bsd-3-clause",
"hash": -2793844249821315000,
"line_mean": 41.0793103448,
"line_max": 135,
"alpha_frac": 0.61394739,
"autogenerated": false,
"ratio": 4.090848139456923,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.013404978278800217,
"num_lines": 290
} |
from __future__ import absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement
from django.contrib.auth.decorators import login_required
from django.utils.decorators import available_attrs
from functools import wraps
from ..conf import settings
from .summary import *
from .user import *
from .participants import *
from .parents import *
from .clubs import *
from .events import *
from .leaders import *
from .timesheets import *
from .schoolyear import *
from .support import *
from .registrations import *
from .reports import *
from .reports.clubs import *
from .reports.events import *
def lr(function):
from django.core.urlresolvers import reverse_lazy as reverse
return login_required(function,
login_url=reverse('domecek:user_login'),
redirect_field_name=settings.DOMECEK_PARAM_BACK,
)
def leader_required(view):
@wraps(view, assigned=available_attrs(view))
def wrapped_view(request, *args, **kwargs):
if request.leader:
return view(request, *args, **kwargs)
else:
from django.http import Http404
raise Http404()
return wrapped_view
def staff_required(view):
@wraps(view, assigned=available_attrs(view))
def wrapped_view(request, *args, **kwargs):
if request.user.is_staff:
return view(request, *args, **kwargs)
else:
from django.http import Http404
raise Http404()
return wrapped_view
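# The bindings below wrap the class-based views at module level, e.g.
# timesheet_list = leader_required(TimesheetListView.as_view()) raises
# Http404 for requests without request.leader, while lr() redirects
# anonymous users to this app's login view first.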
summary = lr(SummaryView.as_view())
user_create = UserCreateView.as_view()
user_update = lr(UserUpdateView.as_view())
registrations = lr(RegistrationsView.as_view())
participant_list = lr(ParticipantListView.as_view())
participant_create = lr(ParticipantCreateView.as_view())
participant_update = lr(ParticipantUpdateView.as_view())
parent_create = lr(ParentCreateView.as_view())
parent_update = lr(ParentUpdateView.as_view())
club_list = ClubListView.as_view()
club_list_mine = leader_required(ClubListMineView.as_view())
club_alternating = leader_required(ClubAlternatingView.as_view())
club_detail = ClubDetailView.as_view()
club_participants = lr(ClubParticipantsView.as_view())
club_journal = lr(ClubJournalView.as_view())
club_update = lr(ClubUpdateView.as_view())
clubjournalentry_create = lr(ClubJournalEntryCreateView.as_view())
clubjournalentry_update = lr(ClubJournalEntryUpdateView.as_view())
clubjournalentry_delete = lr(ClubJournalEntryDeleteView.as_view())
clubjournalleaderentry_update = lr(ClubJournalLeaderEntryUpdateView.as_view())
clubjournalleaderentry_delete = lr(ClubJournalLeaderEntryDeleteView.as_view())
club_registration_public = ClubRegistrationPublicFormView.as_view()
club_registration_form = lr(ClubRegistrationFormView.as_view())
club_registration_confirm = lr(ClubRegistrationConfirmView.as_view())
club_registration_pdf = lr(ClubRegistrationPdfView.as_view())
club_registration_cancel = lr(ClubRegistrationCancelView.as_view())
event_list = EventListView.as_view()
event_list_mine = leader_required(EventListMineView.as_view())
event_detail = EventDetailView.as_view()
event_participants = lr(EventParticipantsView.as_view())
event_update = lr(EventUpdateView.as_view())
event_registration_public = EventRegistrationPublicFormView.as_view()
event_registration_form = lr(EventRegistrationFormView.as_view())
event_registration_confirm = lr(EventRegistrationConfirmView.as_view())
event_registration_pdf = lr(EventRegistrationPdfView.as_view())
event_registration_cancel = lr(EventRegistrationCancelView.as_view())
leader_list = LeaderListView.as_view()
timesheet_list = leader_required(TimesheetListView.as_view())
timesheet_detail = leader_required(TimesheetDetailView.as_view())
timesheet_submit = leader_required(TimesheetSubmitView.as_view())
timesheetentry_create = leader_required(TimesheetEntryCreateView.as_view())
timesheetentry_update = leader_required(TimesheetEntryUpdateView.as_view())
timesheetentry_delete = leader_required(TimesheetEntryDeleteView.as_view())
school_year = SchoolYearView.as_view()
support = lr(SupportView.as_view())
reports = staff_required(ReportsView.as_view())
report_club_payments = staff_required(ReportClubPaymentsView.as_view())
report_club_payments_status = staff_required(ReportClubPaymentsStatusView.as_view())
report_event_payments = staff_required(ReportEventPaymentsView.as_view())
report_event_payments_status = staff_required(ReportEventPaymentsStatusView.as_view())
| {
"repo_name": "misli/django-domecek",
"path": "domecek/views/__init__.py",
"copies": "1",
"size": "5136",
"license": "bsd-3-clause",
"hash": 1268926850511441700,
"line_mean": 44.8571428571,
"line_max": 125,
"alpha_frac": 0.6691978193,
"autogenerated": false,
"ratio": 3.798816568047337,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4968014387347337,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement
from django.contrib.auth import get_user_model
from django.contrib.auth.views import (
password_change, login, logout,
password_reset as pr,
password_reset_done as pr_done,
password_reset_confirm as pr_confirm,
password_reset_complete as pr_complete,
)
from django.core.urlresolvers import reverse_lazy as reverse
from django.utils.translation import ugettext_lazy as _
from ..conf import settings
from ..forms.user import (
UserCreateForm, UserUpdateForm, UserPasswordForm,
UserLoginForm, PasswordResetForm, SetPasswordForm,
)
from .generic import CreateView, UpdateView
__all__ = [
'UserCreateView', 'UserUpdateView',
'user_password', 'user_login', 'user_logout',
'password_reset', 'password_reset_done',
'password_reset_confirm', 'password_reset_complete',
]
class UserCreateView(CreateView):
model = get_user_model()
form_class = UserCreateForm
title = _('Create account')
def get_message(self, form):
return _('User account {} has been created.').format(self.object)
class UserUpdateView(UpdateView):
model = get_user_model()
form_class = UserUpdateForm
title = _('Change user')
def get_object(self):
return self.request.user
def get_message(self, form):
return _('User account {} has been updated.').format(self.object)
def user_password(request):
return password_change(request,
template_name='domecek/form.html',
password_change_form=UserPasswordForm,
post_change_redirect=reverse('domecek:summary'),
extra_context={
'submit_label': _('Change password'),
'back_label': _('Back'),
'back_url': reverse('domecek:summary'),
'placeholder': 'user_password',
},
)
def user_login(request):
return login(request,
template_name='domecek/form.html',
authentication_form=UserLoginForm,
redirect_field_name=settings.DOMECEK_PARAM_BACK,
extra_context={
'title': _('Log in'),
'submit_label': _('Log in'),
'back_label': _('Reset password'),
'back_url': reverse('domecek:password_reset'),
'placeholder': 'user_login',
},
)
def user_logout(request):
return logout(request, next_page='/',
redirect_field_name=settings.DOMECEK_PARAM_BACK,
)
def password_reset(request):
return pr(request,
template_name='domecek/form.html',
password_reset_form=PasswordResetForm,
email_template_name='domecek/password_reset_email.html',
from_email=settings.SERVER_EMAIL,
extra_context={
            'instructions': '<p>{}</p>'.format(_('Enter your email address, and we\'ll email instructions for setting a new password.')),
'submit_label': _('Reset my password'),
'placeholder': 'password_reset',
},
post_reset_redirect=reverse('domecek:password_reset_done'),
)
def password_reset_done(request):
return pr_done(request,
template_name='domecek/password_reset_done.html',
)
def password_reset_confirm(request, uidb64=None, token=None):
return pr_confirm(request, uidb64, token,
template_name='domecek/form.html',
set_password_form=SetPasswordForm,
extra_context={
            'instructions': '<p>{}</p>'.format(_('Please enter your new password twice so we can verify you typed it in correctly.')),
'submit_label': _('Set my password'),
'placeholder': 'password_set',
},
post_reset_redirect=reverse('domecek:password_reset_complete'),
)
def password_reset_complete(request):
return pr_complete(request,
template_name='domecek/password_reset_complete.html',
)
| {
"repo_name": "misli/django-domecek",
"path": "domecek/views/user.py",
"copies": "1",
"size": "3890",
"license": "bsd-3-clause",
"hash": 6464590371036659000,
"line_mean": 30.3709677419,
"line_max": 133,
"alpha_frac": 0.6467866324,
"autogenerated": false,
"ratio": 3.8062622309197653,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9862449961803381,
"avg_score": 0.018119780303276874,
"num_lines": 124
} |
from __future__ import absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement
from django.contrib.auth import login
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect, Http404
from django.shortcuts import get_object_or_404
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext_lazy as _
from ..forms.clubs import ClubForm, ClubFilterForm, ClubRegistrationForm, ClubRegistrationPublicForm, ClubJournalEntryForm, ClubJournalLeaderEntryForm
from ..models import Club, ClubPeriod, ClubJournalEntry, ClubJournalLeaderEntry, ClubRegistration, Participant
from .generic import FilteredListView, DetailView, CreateView, UpdateView, ConfirmUpdateView, DeleteView, TemplateView, PdfView
class ClubListView(FilteredListView):
model = Club
form_class = ClubFilterForm
preview_template = 'domecek/club_preview.html'
template_name = 'domecek/club_list.html'
    message_empty = _('No clubs matching the given filter.')
paginate_by = 10
def get_title(self):
return _('Clubs in school year {}').format(self.request.school_year)
def get_queryset(self):
qs = super(ClubListView, self).get_queryset()
qs = qs.filter(school_year=self.request.school_year)
if not self.request.user.is_staff:
qs = qs.filter(public=True)
form = self.get_form()
if form.is_valid():
qs = form.filter_queryset(self.request, qs)
return qs
class ClubListMineView(ClubListView):
def get_queryset(self):
return super(ClubListMineView, self).get_queryset().filter(leaders=self.request.leader)
def get_title(self):
return _('My clubs in school year {}').format(self.request.school_year)
class ClubAlternatingView(TemplateView):
template_name = 'domecek/club_alternating.html'
def get_title(self):
return _('Alternating in school year {}').format(self.request.school_year)
def get_context_data(self, **kwargs):
context = super(ClubAlternatingView, self).get_context_data(**kwargs)
context['alternate_leader_entries'] = self.request.leader.get_alternate_leader_entries(self.request.school_year)
return context
class ClubDetailView(DetailView):
model = Club
def get_queryset(self):
qs = super(ClubDetailView, self).get_queryset()
if not self.request.user.is_staff:
qs = qs.filter(public=True)
return qs
class ClubParticipantsView(DetailView):
model = Club
template_name_suffix = '_participants'
def get_queryset(self):
qs = super(ClubParticipantsView, self).get_queryset()
if not self.request.user.is_staff:
qs = qs.filter(leaders=self.request.leader)
return qs
class ClubJournalView(DetailView):
model = Club
template_name_suffix = '_journal'
def get_queryset(self):
qs = super(ClubJournalView, self).get_queryset()
if not self.request.user.is_staff:
qs = qs.filter(leaders=self.request.leader)
return qs
class ClubUpdateView(UpdateView):
model = Club
form_class = ClubForm
title = _('Change club')
def get_queryset(self):
qs = super(ClubUpdateView, self).get_queryset()
if not self.request.user.is_staff:
qs = qs.filter(leaders=self.request.leader)
return qs
def get_message(self, form):
return _('The club {} has been updated.').format(self.object)
class ClubJournalEntryCreateView(CreateView):
model = ClubJournalEntry
form_class = ClubJournalEntryForm
template_name = 'domecek/clubjournalentry_form.html'
title = _('New journal entry')
def dispatch(self, request, *args, **kwargs):
if self.request.user.is_staff:
self.club = get_object_or_404(Club,
id = int(kwargs.pop('club')),
)
else:
self.club = get_object_or_404(Club,
id = int(kwargs.pop('club')),
leaders = self.request.leader,
)
return super(ClubJournalEntryCreateView, self).dispatch(request, *args, **kwargs)
def get_form_kwargs(self):
kwargs = super(ClubJournalEntryCreateView, self).get_form_kwargs()
kwargs['club'] = self.club
return kwargs
def get_message(self, form):
return _('The journal entry has been created.')
class ClubJournalEntryUpdateView(UpdateView):
model = ClubJournalEntry
form_class = ClubJournalEntryForm
template_name = 'domecek/clubjournalentry_form.html'
title = _('Change journal entry')
def get_object(self):
obj = super(ClubJournalEntryUpdateView, self).get_object()
if (self.request.user.is_staff
or self.request.leader in obj.club.all_leaders + obj.all_alternates):
return obj
else:
raise Http404()
def get_message(self, form):
return _('The journal entry has been updated.')
class ClubJournalEntryDeleteView(DeleteView):
model = ClubJournalEntry
title = _('Delete journal entry')
def get_queryset(self):
return super(ClubJournalEntryDeleteView, self).get_queryset().filter(
club__leaders = self.request.leader,
)
def get_object(self):
obj = super(ClubJournalEntryDeleteView, self).get_object()
if obj.timesheets.filter(submitted = True).exists():
raise Http404()
return obj
def get_question(self):
        return _('Do you really want to delete this club journal entry?')
def get_message(self):
return _('The journal entry has been deleted.')
class ClubJournalLeaderEntryUpdateView(UpdateView):
model = ClubJournalLeaderEntry
form_class = ClubJournalLeaderEntryForm
template_name = 'domecek/clubjournalleaderentry_form.html'
title = _('Change timesheet entry')
def get_object(self):
obj = super(ClubJournalLeaderEntryUpdateView, self).get_object()
if self.request.user.is_staff \
or obj.timesheet.leader == self.request.leader \
or self.request.leader in obj.club_entry.club.all_leaders:
return obj
else:
raise Http404()
def get_message(self, form):
return _('The timesheet entry has been updated.')
class ClubJournalLeaderEntryDeleteView(DeleteView):
model = ClubJournalLeaderEntry
title = _('Delete timesheet entry')
def get_queryset(self):
return super(ClubJournalLeaderEntryDeleteView, self).get_queryset().filter(
timesheet__leader = self.request.leader,
timesheet__submitted = False,
)
def get_question(self):
        return _('Do you really want to delete this timesheet entry?')
def get_message(self):
return _('The timesheet entry has been deleted.')
class ClubRegistrationPublicFormView(CreateView):
model = ClubRegistration
form_class = ClubRegistrationPublicForm
template_name = 'domecek/registration_form.html'
def get_title(self):
return _('Registration for club {}').format(self.club.name)
def dispatch(self, request, *args, **kwargs):
club_kwargs = {
'id': int(kwargs.pop('club')),
'school_year': self.request.school_year,
'reg_active': True,
}
if not self.request.user.is_staff:
club_kwargs['public'] = True
self.club = get_object_or_404(Club, **club_kwargs)
if self.request.user.is_authenticated() and not self.request.toolbar.use_draft:
return HttpResponseRedirect(reverse('domecek:club_detail', args=(self.club.id, )))
return super(ClubRegistrationPublicFormView, self).dispatch(request, *args, **kwargs)
def get_form_kwargs(self):
kwargs = super(ClubRegistrationPublicFormView, self).get_form_kwargs()
kwargs['club'] = self.club
return kwargs
def form_valid(self, form):
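        # after saving the registration, authenticate and log in the newly created user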
response = super(ClubRegistrationPublicFormView, self).form_valid(form)
user = form.instance.participant.user
user.backend = 'django.contrib.auth.backends.ModelBackend'
login(self.request, user)
return response
def get_message(self, form):
return _('The registration has been accepted.')
class ClubRegistrationFormView(CreateView):
model = ClubRegistration
form_class = ClubRegistrationForm
def get_title(self):
return _('Registration for club {}').format(self.club.name)
def dispatch(self, request, *args, **kwargs):
club_kwargs = {
'id': int(kwargs.pop('club')),
'school_year': self.request.school_year,
'reg_active': True,
}
if not self.request.user.is_staff:
club_kwargs['public'] = True
self.club = get_object_or_404(Club, **club_kwargs)
self.participant = get_object_or_404(Participant,
user = self.request.user,
id = int(kwargs.pop('participant')),
)
# user may get back to this page after successful registration
if self.club.registrations.filter(participant=self.participant).exists():
return HttpResponseRedirect(reverse('domecek:summary'))
return super(ClubRegistrationFormView, self).dispatch(request, *args, **kwargs)
def get_form_kwargs(self):
kwargs = super(ClubRegistrationFormView, self).get_form_kwargs()
kwargs['club'] = self.club
kwargs['participant'] = self.participant
return kwargs
def get_message(self, form):
return _('The registration has been accepted.')
class ClubRegistrationConfirmView(DetailView):
model = ClubRegistration
template_name_suffix = '_confirm'
class ClubRegistrationPdfView(PdfView):
model = ClubRegistration
template_name_suffix = '_pdf'
class ClubRegistrationCancelView(ConfirmUpdateView):
model = ClubRegistration
title = _('Cancellation request')
def get_queryset(self):
return super(ClubRegistrationCancelView, self).get_queryset().filter(participant__user=self.request.user)
def get_question(self):
        return _('Are you sure you want to cancel the registration "{}"?').format(self.object)
def get_message(self, form):
return _('The cancellation request for {} has been saved.').format(self.object)
def confirmed(self):
self.object.cancel_request = True
self.object.save()
| {
"repo_name": "misli/django-domecek",
"path": "domecek/views/clubs.py",
"copies": "1",
"size": "10650",
"license": "bsd-3-clause",
"hash": 5836589517587520000,
"line_mean": 31.9721362229,
"line_max": 150,
"alpha_frac": 0.6537089202,
"autogenerated": false,
"ratio": 3.8392213410237925,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.49929302612237925,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement
from django.contrib import admin
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.utils.encoding import smart_text
from django.utils.functional import cached_property
from django.utils.translation import ugettext_lazy as _
from ..models import *
from .filters import SchoolYearListFilter
class ContactInlineAdmin(admin.TabularInline):
model = Contact
extra = 0
class LeaderAdmin(admin.ModelAdmin):
filter_horizontal = ('school_years',)
inlines = (ContactInlineAdmin,)
search_fields = ('user__first_name', 'user__last_name', 'contacts__contact')
list_display = ('id', 'user_link', 'first_name', 'last_name', 'email', 'clubs_link', 'events_link', 'contacts')
ordering = ('user__first_name', 'user__last_name')
actions = ('add_current_school_year',)
list_filter = (('school_years', SchoolYearListFilter),)
raw_id_fields = ('user',)
def add_current_school_year(self, request, queryset):
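        # bulk action: assign the request's current school year to all selected leaders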
for leader in queryset.all():
leader.school_years.add(request.school_year)
        self.message_user(request, _('Selected leaders were added to school year {}.').format(request.school_year))
add_current_school_year.short_description = _('Add to current school year')
def first_name(self, obj):
return obj.user.first_name
first_name.short_description = _('first name')
first_name.admin_order_field = 'user__first_name'
def last_name(self, obj):
return obj.user.last_name
last_name.short_description = _('last name')
last_name.admin_order_field = 'user__last_name'
def email(self, obj):
return obj.user.email
email.short_description = _('email')
email.admin_order_field = 'user__email'
def contacts(self, obj):
return ', '.join(c.contact for c in obj.all_contacts)
contacts.short_description = _('contacts')
def user_link(self, obj):
return '<a href="{url}">{user}</a>'.format(
url = reverse('admin:auth_user_change', args=(obj.user.id,)),
user = obj.user,
)
user_link.allow_tags = True
user_link.short_description = _('user')
@cached_property
def clubs_url(self):
return reverse('admin:domecek_club_changelist')
@cached_property
def events_url(self):
return reverse('admin:domecek_event_changelist')
def clubs_link(self, obj):
return '<a href="{url}?leaders__id={leader}">{count}</a>'.format(
url = self.clubs_url,
leader = obj.id,
count = obj.clubs.count(),
)
clubs_link.allow_tags = True
clubs_link.short_description = _('clubs')
def events_link(self, obj):
return '<a href="{url}?leaders__id={leader}">{count}</a>'.format(
url = self.events_url,
leader = obj.id,
count = obj.events.count(),
)
events_link.allow_tags = True
events_link.short_description = _('events')
class ParentAdmin(admin.ModelAdmin):
search_fields = ('first_name', 'last_name', 'street', 'email', 'phone',
'user__first_name', 'user__last_name', 'user__username', 'user__email')
list_display = ('id', 'user_link', 'first_name', 'last_name', 'address', 'email', 'phone', 'participants_link')
raw_id_fields = ('user',)
def first_name(self, obj):
return obj.user.first_name
first_name.short_description = _('first name')
first_name.admin_order_field = 'user__first_name'
def last_name(self, obj):
return obj.user.last_name
last_name.short_description = _('last name')
last_name.admin_order_field = 'user__last_name'
def address(self, obj):
return obj.address
address.short_description = _('address')
def email(self, obj):
return obj.user.email
email.short_description = _('email')
email.admin_order_field = 'user__email'
def user_link(self, obj):
return '<a href="{url}">{user}</a>'.format(
url = reverse('admin:auth_user_change', args=(obj.user.id,)),
user = obj.user,
)
user_link.allow_tags = True
user_link.short_description = _('user')
@cached_property
def participants_url(self):
return reverse('admin:domecek_participant_changelist')
def participants_link(self, obj):
return '<a href="{url}?parents__id={parent}">{names}</a>'.format(
url = self.participants_url,
parent = obj.id,
names = ', '.join(smart_text(participant) for participant in obj.all_participants),
)
participants_link.allow_tags = True
participants_link.short_description = _('participants')
class ParticipantAdmin(admin.ModelAdmin):
search_fields = ('first_name', 'last_name', 'birth_num', 'street', 'email', 'phone',
'user__first_name', 'user__last_name', 'user__username', 'user__email')
list_display = ('id', 'user_link', 'first_name', 'last_name', 'birth_num', 'address', 'email', 'phone', 'school_name',
'registrations_links', 'parents_link')
filter_horizontal = ('parents',)
raw_id_fields = ('user',)
def user_link(self, obj):
return '<a href="{url}">{user}</a>'.format(
url = reverse('admin:auth_user_change', args=(obj.user.id,)),
user = obj.user,
)
user_link.allow_tags = True
user_link.short_description = _('user')
def address(self, obj):
return obj.address
address.short_description = _('address')
@cached_property
def club_regs_url(self):
return reverse('admin:domecek_clubregistration_changelist')
@cached_property
def event_regs_url(self):
return reverse('admin:domecek_eventregistration_changelist')
@cached_property
def parents_url(self):
return reverse('admin:domecek_parent_changelist')
def registrations_links(self, obj):
return '<a href="{club_regs_url}?participant__id={participant}">{club_regs_name}</a>, '\
'<a href="{event_regs_url}?participant__id={participant}">{event_regs_name}</a>'.format(
club_regs_url = self.club_regs_url,
event_regs_url = self.event_regs_url,
club_regs_name = _('clubs'),
event_regs_name = _('events'),
participant = obj.id,
)
registrations_links.allow_tags = True
registrations_links.short_description = _('registrations')
def parents_link(self, obj):
return '<a href="{url}?participants__id={participant}">{names}</a>'.format(
url = self.parents_url,
participant = obj.id,
names = ', '.join(smart_text(parent) for parent in obj.all_parents),
)
parents_link.allow_tags = True
parents_link.short_description = _('parents')
def school_name(self, obj):
return obj.school_name
school_name.short_description = _('school')
| {
"repo_name": "misli/django-domecek",
"path": "domecek/admin/roles.py",
"copies": "1",
"size": "7212",
"license": "bsd-3-clause",
"hash": 4174990594411280400,
"line_mean": 36.175257732,
"line_max": 125,
"alpha_frac": 0.607598447,
"autogenerated": false,
"ratio": 3.659056316590563,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4766654763590563,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement
from django.contrib import admin
from django.http import HttpResponseRedirect
from django.utils.translation import ugettext_lazy as _
from ..models import SchoolYear, Club, Event, Leader
class SchoolYearListFilter(admin.FieldListFilter):
def __init__(self, field, request, params, model, model_admin, field_path):
try:
request.school_year = SchoolYear.objects.get(year=params['year'])
        except (KeyError, ValueError, SchoolYear.DoesNotExist):
            pass
self.school_year = request.school_year
super(SchoolYearListFilter, self).__init__(field, request, params, model, model_admin, field_path)
def expected_parameters(self):
return ['year']
def choices(self, cl):
return [
{
'selected': school_year == self.school_year,
'query_string': cl.get_query_string({'year': school_year.year}),
'display': school_year,
}
for school_year in SchoolYear.objects.all()
]
def queryset(self, request, queryset):
return queryset.filter(**{self.field_path: self.school_year})
class ClubListFilter(admin.RelatedFieldListFilter):
def __init__(self, field, request, params, model, model_admin, field_path):
self.clubs = Club.objects.filter(school_year=request.school_year)
super(ClubListFilter, self).__init__(field, request, params, model, model_admin, field_path)
def field_choices(self, field, request, model_admin):
return [(club.id, club.name) for club in self.clubs]
class EventTypeListFilter(admin.RelatedFieldListFilter):
def __init__(self, field, request, params, model, model_admin, field_path):
super(EventTypeListFilter, self).__init__(field, request, params, model, model_admin, field_path)
request.event_type_id = self.lookup_val
class EventListFilter(admin.RelatedFieldListFilter):
def __init__(self, field, request, params, model, model_admin, field_path):
self.events = Event.objects.filter(school_year=request.school_year)
if hasattr(request, 'event_type_id') and request.event_type_id:
self.events = self.events.filter(event_type__id=request.event_type_id)
super(EventListFilter, self).__init__(field, request, params, model, model_admin, field_path)
def field_choices(self, field, request, model_admin):
return [(event.id, event.name) for event in self.events]
class LeaderListFilter(admin.RelatedFieldListFilter):
def __init__(self, field, request, params, model, model_admin, field_path):
self.leaders = Leader.objects.filter(school_years=request.school_year)
super(LeaderListFilter, self).__init__(field, request, params, model, model_admin, field_path)
def field_choices(self, field, request, model_admin):
return [(leader.id, leader) for leader in self.leaders]
| {
"repo_name": "misli/django-domecek",
"path": "domecek/admin/filters.py",
"copies": "1",
"size": "2982",
"license": "bsd-3-clause",
"hash": 1387595110796866000,
"line_mean": 39.2972972973,
"line_max": 125,
"alpha_frac": 0.6773977197,
"autogenerated": false,
"ratio": 3.8181818181818183,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9913337317757829,
"avg_score": 0.016448444024797833,
"num_lines": 74
} |
from __future__ import absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement
from django.contrib import messages
from django.core.urlresolvers import reverse_lazy as reverse
from django.http import HttpResponseRedirect
from django.utils.translation import ugettext_lazy as _
from ..conf import settings
from ..forms.participant import ParticipantForm
from ..models import Participant
from ..utils import reverse_with_back
from .generic import ListView, CreateView, UpdateView
class ParticipantListView(ListView):
add_label = _('add participant')
model = Participant
template_name = 'domecek/participants.html'
preview_template = 'domecek/participant_preview.html'
def get_title(self):
return _('Participants and parents')
def get_queryset(self):
return self.request.user.domecek_participants.all()
def get_add_url(self):
return reverse_with_back(self.request, 'domecek:participant_create')
class ParticipantCreateView(CreateView):
model = Participant
form_class = ParticipantForm
success_url = reverse('domecek:summary')
title = _('New participant')
def get(self, request, *args, **kwargs):
if self.request.user.domecek_parents.count():
return super(ParticipantCreateView, self).get(request, *args, **kwargs)
else:
            messages.info(self.request, _('Before adding a participant, you need to add a parent.'))
return HttpResponseRedirect(
reverse_with_back(request, 'domecek:parent_create')
)
def get_form_kwargs(self):
kwargs = super(ParticipantCreateView, self).get_form_kwargs()
kwargs['user'] = self.request.user
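        # prefill the address fields from the user's first parent and preselect all parents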
parent = self.request.user.domecek_parents.first()
if parent:
kwargs['initial'] = dict((attr, getattr(parent, attr))
for attr in ['street', 'city', 'postal_code'])
kwargs['initial']['parents'] = self.request.user.domecek_parents.all()
return kwargs
def get_message(self, form):
return _('New participant {} has been created.').format(self.object)
class ParticipantUpdateView(UpdateView):
model = Participant
form_class = ParticipantForm
success_url = reverse('domecek:summary')
title = _('Change participant')
def get_queryset(self):
        # only allow editing the user's own participants
return super(ParticipantUpdateView, self).get_queryset().filter(user=self.request.user)
def get_form_kwargs(self):
kwargs = super(ParticipantUpdateView, self).get_form_kwargs()
kwargs['user'] = self.request.user
return kwargs
def get_message(self, form):
return _('The participant {} has been updated.').format(self.object)
| {
"repo_name": "misli/django-domecek",
"path": "domecek/views/participants.py",
"copies": "1",
"size": "2818",
"license": "bsd-3-clause",
"hash": -3311492203951022000,
"line_mean": 33.7901234568,
"line_max": 125,
"alpha_frac": 0.6795599716,
"autogenerated": false,
"ratio": 4.002840909090909,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.015349511896030748,
"num_lines": 81
} |
from __future__ import absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement
from django.contrib import messages
from django.http import HttpResponseRedirect, Http404
from django.shortcuts import get_object_or_404
from django.utils.translation import ugettext_lazy as _
from ..conf import settings
from ..forms.timesheets import TimesheetEntryForm
from ..models import Timesheet, TimesheetEntry
from ..utils import reverse_with_back
from .generic import ListView, DetailView, CreateView, UpdateView, ConfirmUpdateView, DeleteView
class TimesheetListView(ListView):
model = Timesheet
preview_template = 'domecek/timesheet_preview.html'
paginate_by = 6
add_label = _('add entry')
def get_title(self):
return _('Timesheets')
def get_queryset(self):
return super(TimesheetListView, self).get_queryset().filter(leader=self.request.leader)
class TimesheetDetailView(DetailView):
model = Timesheet
def get_queryset(self):
qs = super(TimesheetDetailView, self).get_queryset()
if not self.request.user.is_staff:
qs = qs.filter(leader = self.request.leader)
return qs
class TimesheetSubmitView(ConfirmUpdateView):
model = Timesheet
title = _('Submit timesheet')
def get_queryset(self):
return super(TimesheetSubmitView, self).get_queryset().filter(
leader = self.request.leader,
submitted = False,
)
def get_question(self):
return _(
            'Are you sure you want to submit the timesheet now? '
'You won\'t be able to edit the entries for {} any more.'
).format(self.object.period.name)
def get_message(self, form):
        return _('The timesheet for {} has been submitted.').format(self.object.period.name)
def confirmed(self):
self.object.submitted = True
self.object.save()
class TimesheetEntryCreateView(CreateView):
model = TimesheetEntry
form_class = TimesheetEntryForm
template_name = 'domecek/timesheetentry_form.html'
title = _('New timesheet entry')
def dispatch(self, request, *args, **kwargs):
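        # entries may only be added to the leader's own, not yet submitted timesheet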
self.timesheet = get_object_or_404(Timesheet,
id = int(kwargs.pop('pk')),
leader = self.request.leader,
submitted = False,
)
return super(TimesheetEntryCreateView, self).dispatch(request, *args, **kwargs)
def get_form_kwargs(self):
kwargs = super(TimesheetEntryCreateView, self).get_form_kwargs()
kwargs['timesheet'] = self.timesheet
return kwargs
def get_message(self, form):
return _('New timesheet entry has been created.')
class TimesheetEntryUpdateView(UpdateView):
model = TimesheetEntry
form_class = TimesheetEntryForm
template_name = 'domecek/timesheetentry_form.html'
title = _('Change timesheet entry')
    def get_queryset(self):
        # only allow leaders to edit entries in their own timesheets
        qs = super(TimesheetEntryUpdateView, self).get_queryset()
        if not self.request.user.is_staff:
            qs = qs.filter(timesheet__leader = self.request.leader)
        return qs
def get_message(self, form):
return _('The timesheet entry has been updated.')
class TimesheetEntryDeleteView(DeleteView):
model = TimesheetEntry
title = _('Delete timesheet entry')
def get_queryset(self):
qs = super(TimesheetEntryDeleteView, self).get_queryset()
if not self.request.user.is_staff:
qs = qs.filter(timesheet__leader = self.request.leader)
return qs
def get_object(self):
obj = super(TimesheetEntryDeleteView, self).get_object()
if obj.timesheet.submitted:
raise Http404()
return obj
def get_question(self):
        return _('Do you really want to delete this timesheet entry?')
def get_message(self):
return _('The timesheet entry has been deleted.')
| {
"repo_name": "misli/django-domecek",
"path": "domecek/views/timesheets.py",
"copies": "1",
"size": "4211",
"license": "bsd-3-clause",
"hash": -570380281228082400,
"line_mean": 30.4253731343,
"line_max": 125,
"alpha_frac": 0.6568511043,
"autogenerated": false,
"ratio": 4.088349514563107,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5245200618863106,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement
from django.core.exceptions import ValidationError
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.functional import cached_property
from django.utils.translation import ugettext_lazy as _
from json import loads
from ..conf import settings
from ..utils import import_string
@python_2_unicode_compatible
class Question(models.Model):
name = models.CharField(_('name'), max_length=50, unique=True)
question = models.CharField(_('question'), max_length=50)
help_text = models.TextField(_('help text'), blank=True, null=True,
        help_text=_('The help text is shown next to the form field.'))
field = models.CharField(_('field'), max_length=150,
        choices=[(key, val['name']) for key, val in settings.DOMECEK_QUESTION_FIELDS.items()])
field_args = models.TextField(_('field_args'), blank=True, default='{}', help_text=_('Enter valid JSON structure representing field configuration.'))
class Meta:
app_label = 'domecek'
verbose_name = _('additional question')
verbose_name_plural = _('additional questions')
def __str__(self):
return self.question
@cached_property
def field_class(self):
return import_string(settings.DOMECEK_QUESTION_FIELDS[self.field]['class'])
@cached_property
def field_kwargs(self):
return loads(self.field_args)
@cached_property
def field_label(self):
return self.question[0].upper() + self.question[1:]
def get_field(self, initial=None):
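        # instantiate the configured field class with label, initial value, help text and JSON-decoded kwargs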
return self.field_class(
label = self.field_label,
initial = initial,
help_text = self.help_text,
**self.field_kwargs
)
def clean(self):
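        # validate field_args by actually trying to construct the field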
try:
self.get_field()
except Exception as e:
raise ValidationError({'field_args': [_('Failed to create field with given field args: {}').format(e)]})
class AnswersBaseModel(models.Model):
answers = models.TextField(_('additional answers'), blank=True, default='{}', editable=False)
class Meta:
abstract = True
def get_answers(self):
return loads(self.answers)
def get_questions_and_answers(self):
answers = self.get_answers()
for q in self.all_questions:
yield {
'question': q.question,
'answer': q.get_value(answers.get(q.name, None)),
}
| {
"repo_name": "misli/django-domecek",
"path": "domecek/models/question.py",
"copies": "1",
"size": "2677",
"license": "bsd-3-clause",
"hash": -6435941802051084000,
"line_mean": 33.7662337662,
"line_max": 154,
"alpha_frac": 0.6290623833,
"autogenerated": false,
"ratio": 4.1248073959938365,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5253869779293836,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement
from django.core.urlresolvers import reverse_lazy as reverse
from django.utils.translation import ugettext_lazy as _
from ..forms.parent import ParentForm
from ..models import Parent
from .generic import CreateView, UpdateView
class ParentCreateView(CreateView):
model = Parent
form_class = ParentForm
success_url = reverse('domecek:summary')
title = _('New parent')
def get_form_kwargs(self):
kwargs = super(ParentCreateView, self).get_form_kwargs()
kwargs['user'] = self.request.user
if self.request.user.domecek_parents.count() == 0:
kwargs['initial'] = dict((attr, getattr(self.request.user, attr))
for attr in ['first_name', 'last_name', 'email'])
return kwargs
def get_message(self, form):
return _('New parent {} has been created.').format(self.object)
class ParentUpdateView(UpdateView):
model = Parent
form_class = ParentForm
success_url = reverse('domecek:summary')
title = _('Change parent')
def get_queryset(self):
        # only allow editing the user's own parents
return super(ParentUpdateView, self).get_queryset().filter(user=self.request.user)
def get_form_kwargs(self):
kwargs = super(ParentUpdateView, self).get_form_kwargs()
kwargs['user'] = self.request.user
return kwargs
def get_message(self, form):
return _('The parent {} has been updated.').format(self.object)
| {
"repo_name": "misli/django-domecek",
"path": "domecek/views/parents.py",
"copies": "1",
"size": "1584",
"license": "bsd-3-clause",
"hash": -3729124628848840700,
"line_mean": 32,
"line_max": 125,
"alpha_frac": 0.6742424242,
"autogenerated": false,
"ratio": 3.9402985074626864,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5114540931662686,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.functional import cached_property
from django.utils.translation import ugettext_lazy as _
@python_2_unicode_compatible
class School(models.Model):
name = models.CharField(_('name'), max_length=50)
street = models.CharField(_('street'), max_length=50, blank=True, default='')
city = models.CharField(_('city'), max_length=50, blank=True, default='')
class Meta:
app_label = 'domecek'
ordering = ('city', 'name')
verbose_name = _('school')
verbose_name_plural = _('schools')
def __str__(self):
if self.address:
return '{}, {}'.format(self.name, self.address)
else:
return self.name
@cached_property
def address(self):
if self.street and self.city:
return '{}, {}'.format(self.street, self.city)
else:
return self.street or self.city
| {
"repo_name": "misli/django-domecek",
"path": "domecek/models/school.py",
"copies": "1",
"size": "1152",
"license": "bsd-3-clause",
"hash": -1937571958196099600,
"line_mean": 33.9090909091,
"line_max": 125,
"alpha_frac": 0.6223958333,
"autogenerated": false,
"ratio": 3.986159169550173,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5108555002850172,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement
from django.db.models import Q
from django.utils.translation import ugettext_lazy as _
from ..models import Leader
from ..forms.leaders import LeaderFilterForm
from .generic import FilteredListView
class LeaderListView(FilteredListView):
model = Leader
form_class = LeaderFilterForm
preview_template = 'domecek/leader_preview.html'
template_name = 'domecek/leader_list.html'
message_empty = _('No leaders matching given query.')
def get_title(self):
return _('Leaders in school year {}').format(self.request.school_year)
def get_queryset(self):
qs = super(LeaderListView, self).get_queryset()
qs = qs.filter(school_years=self.request.school_year)
form = self.get_form()
if form.is_valid():
for word in form.cleaned_data['q'].split():
qs = qs.filter(
Q(user__first_name__icontains = word)
| Q(user__last_name__icontains = word)
| Q(description__icontains = word)
)
return qs
| {
"repo_name": "misli/django-domecek",
"path": "domecek/views/leaders.py",
"copies": "1",
"size": "1221",
"license": "bsd-3-clause",
"hash": -1319599554896457500,
"line_mean": 36,
"line_max": 125,
"alpha_frac": 0.6257166257,
"autogenerated": false,
"ratio": 3.9387096774193546,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5064426303119355,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement
from django import forms
from django.conf.urls import url as urls_url
from django.contrib import admin
from django.contrib.admin import helpers
from django.contrib.admin.templatetags.admin_list import _boolean_icon
from django.core.urlresolvers import reverse
from django.db.models import Count, Sum
from django.shortcuts import get_object_or_404, render_to_response
from django.template import RequestContext
from django.utils.encoding import smart_text
from django.utils.functional import cached_property
from django.utils.html import format_html
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext_lazy as _
from ..conf import settings
from ..forms.clubs import ClubRegistrationAdminForm, ClubJournalEntryAdminForm, ClubJournalLeaderEntryAdminForm
from ..models import *
from ..utils import currency, comma_separated
from .export import AdminExportMixin
from .filters import SchoolYearListFilter, ClubListFilter, LeaderListFilter
class ClubGroupAdmin(admin.ModelAdmin):
list_display = ('name', 'order')
list_editable = ('order',)
class ClubTimeInlineAdmin(admin.TabularInline):
model = ClubTime
extra = 0
class ClubPeriodInlineAdmin(admin.TabularInline):
model = ClubPeriod
extra = 0
ordering = ('start',)
class ClubAttachmentInlineAdmin(admin.TabularInline):
model = ClubAttachment
extra = 3
class ClubAdmin(AdminExportMixin, admin.ModelAdmin):
list_display = (
'name', 'get_groups_list', 'get_leaders_list',
'get_times_list', 'get_periods_list',
'place', 'public', 'reg_active',
'get_registrations_link', 'get_journal_link', 'icon', 'note',
)
list_editable = ('public', 'reg_active', 'note')
list_filter = (
('school_year', SchoolYearListFilter),
'age_groups',
'groups',
('leaders', LeaderListFilter),
)
inlines = (
ClubTimeInlineAdmin,
ClubPeriodInlineAdmin,
ClubAttachmentInlineAdmin,
)
filter_horizontal = ('age_groups', 'groups', 'leaders')
actions = (
'publish', 'unpublish',
'allow_registration', 'disallow_registration',
'merge',
)
search_fields = ('name', 'description')
save_as = True
def get_queryset(self, request):
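        # annotate the registration count used by the registrations column and its ordering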
return super(ClubAdmin, self).get_queryset(request)\
.annotate(registrations_count=Count('registrations'))
def get_form(self, request, obj=None, **kwargs):
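        # restrict leader choices to the school year of the edited club (or the current one)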
form = super(ClubAdmin, self).get_form(request, obj, **kwargs)
if obj:
school_year = obj.school_year
else:
school_year = request.school_year
leaders_choices = form.base_fields['leaders'].widget.widget.choices
leaders_choices.queryset = leaders_choices.queryset.filter(school_years = school_year)
form.base_fields['leaders'].choices = leaders_choices
return form
def publish(self, request, queryset):
Club.objects.filter(id__in=[reg['id'] for reg in queryset.values('id')]).update(public = True)
self.message_user(request, _('Selected clubs were published.'))
publish.short_description = _('Publish selected clubs')
def unpublish(self, request, queryset):
Club.objects.filter(id__in=[reg['id'] for reg in queryset.values('id')]).update(public = False)
self.message_user(request, _('Selected clubs were unpublished.'))
unpublish.short_description = _('Unpublish selected clubs')
def allow_registration(self, request, queryset):
Club.objects.filter(id__in=[reg['id'] for reg in queryset.values('id')]).update(reg_active = True)
self.message_user(request, _('Registration was allowed for selected clubs.'))
allow_registration.short_description = _('Allow registration for selected clubs')
def disallow_registration(self, request, queryset):
Club.objects.filter(id__in=[reg['id'] for reg in queryset.values('id')]).update(reg_active = False)
self.message_user(request, _('Registration was disallowed for selected clubs.'))
disallow_registration.short_description = _('Disallow registration for selected clubs')
def merge(self, request, queryset):
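        # merge action: move groups, age groups, leaders, times, questions,
        # registrations and journal entries of the selected clubs into one target club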
class MergeForm(forms.Form):
target = forms.ModelChoiceField(
label=_('Target club'),
help_text=_('All information will be merged into selected club.'),
queryset=queryset,
)
if request.POST.get('post', 'no') == 'yes':
form = MergeForm(request.POST)
if form.is_valid():
target = form.cleaned_data['target']
clubs = [ club for club in queryset.all() if club != target ]
for club in clubs:
# merge groups
for group in club.all_groups:
target.groups.add(group)
# merge age_groups
for age_group in club.all_age_groups:
target.age_groups.add(age_group)
# merge leaders
for leader in club.all_leaders:
target.leaders.add(leader)
# merge times
for time in club.all_times:
time.club = target
time.save()
# merge questions
for question in club.all_questions:
target.questions.add(question)
# merge registrations
for registration in club.all_registrations:
registration.club = target
registration.save()
# merge journal_entries
for entry in club.all_journal_entries:
entry.club = target
entry.save()
club.delete()
                self.message_user(request, _('Selected clubs were merged into club {}.').format(target))
return
else:
form = MergeForm()
return render_to_response('domecek/admin/club_merge.html', {
'title': _('Select target club for merge'),
'queryset': queryset,
'opts': self.model._meta,
'form': form,
'action_checkbox_name': helpers.ACTION_CHECKBOX_NAME,
}, context_instance=RequestContext(request))
merge.short_description = _('Merge selected clubs into one')
def get_registrations_link(self, obj):
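        # show the registration count as a link, warning when below min_count or above max_count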
icon = False
if obj.registrations_count == 0:
title = _('There are no registrations for this club.')
elif obj.min_count is not None and obj.registrations_count < obj.min_count:
title = _('The number of registrations is lower than {}.').format(obj.min_count)
elif obj.max_count is not None and obj.registrations_count > obj.max_count:
title = _('The number of registrations is greater than {}.').format(obj.max_count)
else:
icon = True
title = ''
return '<a href="{url}" title="{title}">{icon} {count}</a>'.format(
url = reverse('admin:{}_{}_changelist'.format(
ClubRegistration._meta.app_label,
ClubRegistration._meta.model_name,
)) + '?club={}'.format(obj.id),
title = title,
icon = _boolean_icon(icon),
count = obj.registrations_count,
)
get_registrations_link.short_description = _('registrations')
get_registrations_link.admin_order_field = 'registrations_count'
get_registrations_link.allow_tags = True
def get_journal_link(self, obj):
return '<a href="{url}" title="{title}" target="_blank">{journal}</a>'.format(
url = reverse('admin:domecek_club_journal', args=[obj.id]),
title = _('printable club journal'),
journal = _('journal'),
)
get_journal_link.short_description = _('journal')
get_journal_link.allow_tags = True
def get_urls(self):
urls = super(ClubAdmin, self).get_urls()
return [
urls_url(r'(?P<club_id>\d+)/journal/$', self.admin_site.admin_view(self.journal), name='domecek_club_journal'),
] + urls
def journal(self, request, club_id):
club = get_object_or_404(Club, id=club_id)
return render_to_response('domecek/club_journal.html', {
'club': club,
'admin': True,
}, context_instance=RequestContext(request))
def icon(self, obj):
        return obj.photo and '<a href="{admin_url}"><img src="{icon_url}" alt=""/></a>'.format(
admin_url = obj.photo.get_admin_url_path(),
icon_url = obj.photo.icons['48'],
) or ''
icon.allow_tags = True
icon.short_description = _('photo')
class ClubRegistrationAdmin(AdminExportMixin, admin.ModelAdmin):
form = ClubRegistrationAdminForm
list_display = (
'id', 'get_download_tag', 'club', 'participant', 'parents_link',
'discount', 'get_payments_partial_balance_html', 'get_payments_total_balance_html', 'get_club_payments', 'created',
'cancel_request', 'canceled',
)
list_export = (
'id', 'created', 'club', 'age_group',
'participant__first_name', 'participant__last_name', 'participant__birth_num',
'participant__email', 'participant__phone', 'school_name', 'school_class',
'participant__street', 'participant__city', 'participant__postal_code', 'citizenship', 'insurance', 'health',
'parents', 'parent_emails',
'get_payments_partial_balance', 'get_payments_total_balance',
)
list_filter = (
('club__school_year', SchoolYearListFilter),
('club', ClubListFilter),
('club__leaders', LeaderListFilter),
)
actions = ('send_mail',)
search_fields = (
'participant__first_name', 'participant__last_name',
'participant__birth_num', 'participant__email',
'participant__parents__first_name', 'participant__parents__last_name', 'participant__parents__email',
'school__name', 'club__name',
)
ordering = ('-cancel_request', '-created')
raw_id_fields = ('club', 'participant')
def has_add_permission(self, request):
return False
def get_fields(self, request, obj=None):
fields = super(ClubRegistrationAdmin, self).get_fields(request, obj)
if obj:
fields += ['q_'+q.name for q in obj.club.all_questions]
return fields
def parents(self, obj):
return comma_separated(obj.participant.all_parents)
parents.short_description = _('parents')
def parent_emails(self, obj):
return ', '.join(
'{} <{}>'.format(p.full_name, p.email)
for p in obj.participant.all_parents if p.email
)
parent_emails.short_description = _('parent emails')
def school_name(self, obj):
return obj.school_name
school_name.short_description = _('school')
def get_download_tag(self, obj):
return '<a href="{}">PDF</a>'.format(reverse('admin:domecek_clubregistration_pdf', args=(obj.id,)))
get_download_tag.short_description = _('download')
get_download_tag.allow_tags = True
def get_fullname(self, obj):
return '{} {}'.format(obj.participant.first_name, obj.participant.last_name)
get_fullname.short_description = _('full name')
@cached_property
def parents_url(self):
return reverse('admin:domecek_parent_changelist')
def parents_link(self, obj):
return '<a href="{url}?participants__id={participant}">{names}</a>'.format(
url = self.parents_url,
participant = obj.participant.id,
names = ', '.join(smart_text(parent) for parent in obj.participant.all_parents),
)
parents_link.allow_tags = True
parents_link.short_description = _('parents')
def get_club_payments(self, obj):
html = []
for period in obj.get_period_payment_statuses():
html.append(format_html('{period}: <a target="_blank" style="color: {color}" href="{href}" title="{title}"><b>{amount}</b></a>',
period = period.period.name,
color = period.status.color,
href = reverse('admin:domecek_clubpayment_changelist') + '?registration={}'.format(obj.id),
title = period.status.title,
amount = currency(period.status.paid),
))
return mark_safe('<br/>'.join(html) + format_html(' <a target="_blank" class="addlink" href="{href}" style="background-position: 0 0" title="{title}"></a>',
href = reverse('admin:domecek_clubpayment_add') + '?registration={}'.format(obj.id),
title = _('add payment'),
))
get_club_payments.short_description = _('club payments')
def get_payments_partial_balance(self, obj):
return obj.get_payment_statuses().partial.balance
get_payments_partial_balance.short_description = _('actual balance')
def get_payments_total_balance(self, obj):
return obj.get_payment_statuses().total.balance
get_payments_total_balance.short_description = _('total balance')
def get_payments_partial_balance_html(self, obj):
status = obj.get_payment_statuses().partial
return '<strong title="{title}" style="color: {color}">{balance}</strong>'.format(
color = status.color,
balance = currency(status.balance),
title = status.title,
)
get_payments_partial_balance_html.allow_tags = True
get_payments_partial_balance_html.short_description = _('actual balance')
def get_payments_total_balance_html(self, obj):
status = obj.get_payment_statuses().total
return '<strong title="{title}" style="color: {color}">{balance}</strong>'.format(
color = status.color,
balance = currency(status.balance),
title = status.title,
)
get_payments_total_balance_html.allow_tags = True
get_payments_total_balance_html.short_description = _('total balance')
def get_urls(self):
urls = super(ClubRegistrationAdmin, self).get_urls()
return [
urls_url(r'(?P<reg_id>\d+).pdf$', self.admin_site.admin_view(self.pdf), name='domecek_clubregistration_pdf'),
] + urls
def pdf(self, request, reg_id):
from ..views.clubs import ClubRegistrationPdfView
return ClubRegistrationPdfView.as_view()(request, pk=reg_id)
def send_mail(self, request, queryset):
for registration in queryset.all():
recipients = registration.all_recipients
if recipients:
registration.send_mail()
self.message_user(
request,
_('Registration {registration} ({id}) successfully sent to {recipients}.').format(
registration = registration,
id = registration.id,
recipients = comma_separated(recipients),
),
)
else:
self.message_user(
request,
_('Registration {registration} ({id}) has no recipients.').format(
registration = registration,
id = registration.id,
),
)
send_mail.short_description = _('Send selected registrations by email')
class ClubPaymentAdmin(AdminExportMixin, admin.ModelAdmin):
list_display = ('registration', 'date', 'amount')
list_filter = (
('registration__club__school_year', SchoolYearListFilter),
('registration__club', ClubListFilter),
)
search_fields = ('registration__club__name', 'registration__participant__first_name', 'registration__participant__last_name',
'registration__participant__birth_num')
date_hierarchy = 'date'
ordering = ('-date',)
raw_id_fields = ('registration',)
class ClubJournalLeaderEntryAdmin(AdminExportMixin, admin.ModelAdmin):
form = ClubJournalLeaderEntryAdminForm
list_display = ('timesheet', 'date', 'start', 'end', 'duration', 'club')
list_filter = (('timesheet__leader', LeaderListFilter),)
ordering = ('-club_entry__date', '-start')
def has_add_permission(self, request):
return False
def has_delete_permission(self, request, obj=None):
if obj and obj.timesheet.submitted:
return False
return super(ClubJournalLeaderEntryAdmin, self).has_delete_permission(request, obj)
def get_readonly_fields(self, request, obj=None):
if obj and obj.timesheet.submitted:
return ('start', 'end')
return self.readonly_fields
class ClubJournalLeaderEntryInlineAdmin(admin.TabularInline):
class form(forms.ModelForm):
class Meta:
model = ClubJournalLeaderEntry
fields = []
model = ClubJournalLeaderEntry
ordering = ('club_entry__date', 'start')
readonly_fields = ('date', 'start', 'end', 'edit_link')
def has_add_permission(self, request):
return False
def has_delete_permission(self, request, obj=None):
if obj:
# obj may be Timesheet or ClubJournalEntry
# this inline is used in both ClubJournalEntryAdmin and TimesheetAdmin
try:
entries = obj.leader_entries
except AttributeError:
entries = obj.club_entries
if entries.filter(timesheet__submitted=True).exists():
return False
return super(ClubJournalLeaderEntryInlineAdmin, self).has_delete_permission(request, obj)
def edit_link(self, obj):
return '<a href="{url}" title="{title}" target="_blank">{edit}</a>'.format(
url = reverse('admin:domecek_clubjournalleaderentry_change', args=[obj.id]),
title = _('update entry'),
edit = _('edit'),
)
edit_link.short_description = ''
edit_link.allow_tags = True
class ClubJournalEntryAdmin(AdminExportMixin, admin.ModelAdmin):
form = ClubJournalEntryAdminForm
date_hierarchy = 'date'
list_display = ('club_name', 'period_name', 'date', 'start', 'end', 'duration', 'agenda_html')
list_filter = (
('period__club__school_year', SchoolYearListFilter),
('period__club', ClubListFilter),
)
filter_horizontal = ('participants',)
inlines = (ClubJournalLeaderEntryInlineAdmin,)
ordering = ('-date', '-start')
readonly_fields = ('club_name', 'period_name', 'date',)
def has_add_permission(self, request):
return False
def has_delete_permission(self, request, obj=None):
if obj:
if obj.leader_entries.filter(timesheet__submitted=True).exists():
return False
else:
return super(ClubJournalEntryAdmin, self).has_delete_permission(request, obj)
return False
def get_actions(self, request):
actions = super(ClubJournalEntryAdmin, self).get_actions(request)
if 'delete_selected' in actions:
del(actions['delete_selected'])
return actions
def club_name(self, obj):
return obj.period.club.name
club_name.short_description = _('club')
club_name.admin_order_field = 'period__club__name'
def period_name(self, obj):
return obj.period.name
period_name.short_description = _('period')
period_name.admin_order_field = 'period__name'
def agenda_html(self, obj):
return obj.agenda
agenda_html.short_description = _('agenda')
agenda_html.admin_order_field = 'agenda'
agenda_html.allow_tags = True
| {
"repo_name": "misli/django-domecek",
"path": "domecek/admin/clubs.py",
"copies": "1",
"size": "20269",
"license": "bsd-3-clause",
"hash": -3344847173302274600,
"line_mean": 40.7057613169,
"line_max": 171,
"alpha_frac": 0.5991908826,
"autogenerated": false,
"ratio": 4.050559552358114,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.01008571186775132,
"num_lines": 486
} |
from __future__ import absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement
from django import forms
from django.contrib.auth import get_user_model
from django.contrib.auth.forms import (
UserCreationForm as _UserCreationForm,
AuthenticationForm as _AuthenticationForm,
PasswordResetForm as _PasswordResetForm,
SetPasswordForm as _SetPasswordForm,
PasswordChangeForm as _PasswordChangeForm,
)
from django.core.exceptions import ValidationError
from django.utils.translation import ugettext_lazy as _
from ..models import Parent
from .form import FormMixin
User = get_user_model()
class UserFormMixin(FormMixin):
def clean_email(self):
if self.cleaned_data['email'] and \
User.objects.filter(email=self.cleaned_data['email']).first():
raise ValidationError(
_('User with this email already exists. '
                  'You may use the password reset form if you have forgotten your user name or password.'),
code='exists',
params={'email': self.cleaned_data['email']},
)
else:
return self.cleaned_data['email']
class UserAdminCreateForm(UserFormMixin, forms.ModelForm):
# create user with random password
# users without password may not reset the password themselves
def save(self, commit=True):
user = super(UserAdminCreateForm, self).save(commit)
user.set_password(User.objects.make_random_password())
user.save()
return user
class Meta:
model = User
fields = ['username', 'first_name', 'last_name', 'email']
class UserCreateForm(UserFormMixin, _UserCreationForm):
def save(self, commit=True):
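        # also create a parent record prefilled from the new user's details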
user = super(UserCreateForm, self).save()
parent = Parent()
parent.first_name = user.first_name
parent.last_name = user.last_name
parent.email = user.email
parent.user = user
parent.save()
return user
class Meta:
model = User
fields = ['username', 'first_name', 'last_name', 'email']
class UserUpdateForm(FormMixin, forms.ModelForm):
class Meta:
model = User
fields = ['username', 'first_name', 'last_name', 'email']
class UserLoginForm(FormMixin, _AuthenticationForm):
pass
class UserPasswordForm(FormMixin, _PasswordChangeForm):
pass
class PasswordResetForm(FormMixin, _PasswordResetForm):
pass
class SetPasswordForm(FormMixin, _SetPasswordForm):
pass
| {
"repo_name": "misli/django-domecek",
"path": "domecek/forms/user.py",
"copies": "1",
"size": "2554",
"license": "bsd-3-clause",
"hash": -4927483343973540000,
"line_mean": 25.3298969072,
"line_max": 125,
"alpha_frac": 0.6683633516,
"autogenerated": false,
"ratio": 4.2214876033057855,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0019665412449288502,
"num_lines": 97
} |
from __future__ import absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement
from django import forms
from django.contrib import admin
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from ..forms.timesheets import TimesheetEntryAdminForm
from ..models import *
from .export import AdminExportMixin
from .filters import LeaderListFilter
from .clubs import ClubJournalLeaderEntryInlineAdmin
class TimesheetEntryAdmin(AdminExportMixin, admin.ModelAdmin):
form = TimesheetEntryAdminForm
date_hierarchy = 'date'
list_display = ('timesheet', 'date', 'start', 'end', 'duration', 'entry_type')
list_filter = ('entry_type', ('timesheet__leader', LeaderListFilter))
ordering = ('-date', '-start',)
def get_readonly_fields(self, request, obj=None):
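        # entries of a submitted timesheet become read-only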
if obj and obj.timesheet.submitted:
return ('timesheet', 'entry_type', 'date', 'start', 'end')
return ()
def has_add_permission(self, request):
return False
def has_delete_permission(self, request, obj=None):
if obj and obj.timesheet.submitted:
return False
return super(TimesheetEntryAdmin, self).has_delete_permission(request, obj)
class TimesheetEntryInlineAdmin(admin.TabularInline):
class form(forms.ModelForm):
class Meta:
model = TimesheetEntry
fields = []
model = TimesheetEntry
ordering = ('date', 'start')
readonly_fields = ('date', 'start', 'end', 'entry_type', 'description_html', 'edit_link')
def has_add_permission(self, request):
return False
def has_delete_permission(self, request, obj=None):
if obj and obj.submitted:
return False
return super(TimesheetEntryInlineAdmin, self).has_delete_permission(request, obj)
def description_html(self, obj):
return obj.description
description_html.short_description = _('description')
description_html.admin_order_field = 'description'
description_html.allow_tags = True
def edit_link(self, obj):
return '<a href="{url}" title="{title}" target="_blank">{edit}</a>'.format(
url = reverse('admin:domecek_timesheetentry_change', args=[obj.id]),
title = _('update entry'),
edit = _('edit'),
)
edit_link.short_description = ''
edit_link.allow_tags = True
class CJLEInlineAdmin(ClubJournalLeaderEntryInlineAdmin):
readonly_fields = ('date', 'start', 'end', 'club', 'edit_link')
class TimesheetAdmin(AdminExportMixin, admin.ModelAdmin):
list_display = ('leader', 'period', 'group_durations', 'submitted', 'paid')
list_filter = (('leader', LeaderListFilter), 'period')
inlines = (TimesheetEntryInlineAdmin, CJLEInlineAdmin)
actions = ('submit', 'set_paid')
    # do not allow adding timesheets in admin
# timesheets are created automatically
def has_add_permission(self, request):
return False
    # do not allow deleting submitted timesheets
def has_delete_permission(self, request, obj=None):
if obj:
return not obj.submitted
return False
def get_actions(self, request):
actions = super(TimesheetAdmin, self).get_actions(request)
if 'delete_selected' in actions:
del(actions['delete_selected'])
return actions
def group_durations(self, obj):
return '<br/>'.join(
'<label>{name}</label>: {duration}'.format(
name = group.name,
duration = group.duration,
) for group in obj.groups
)
group_durations.allow_tags = True
def set_paid(self, request, queryset):
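        # a paid timesheet is implicitly submitted as well, so set both flags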
queryset.update(submitted=True, paid=True)
        self.message_user(request, _('Selected timesheets were marked as paid.'))
set_paid.short_description = _('Mark selected timesheets as paid')
def submit(self, request, queryset):
queryset.update(submitted=True)
        self.message_user(request, _('Selected timesheets were submitted.'))
submit.short_description = _('Submit selected timesheets')
class TimesheetEntryTypeAdmin(admin.ModelAdmin):
list_display = ('name', 'order')
list_editable = ('order',)
| {
"repo_name": "misli/django-domecek",
"path": "domecek/admin/timesheets.py",
"copies": "1",
"size": "4374",
"license": "bsd-3-clause",
"hash": -8414952984543917000,
"line_mean": 35.45,
"line_max": 125,
"alpha_frac": 0.6502057613,
"autogenerated": false,
"ratio": 4.084033613445378,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.014614560741545749,
"num_lines": 120
} |
from __future__ import absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement
from django import forms
from ..forms.widgets import CheckboxSelectMultipleBootstrap
from ..models import Participant
from .form import FormMixin
class ParticipantForm(FormMixin, forms.ModelForm):
def __init__(self, user, *args, **kwargs):
self.user = user
super(ParticipantForm, self).__init__(*args, **kwargs)
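        # offer only the user's own parents, rendered as bootstrap checkboxes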
self.fields['parents'].widget.choices.queryset = \
self.fields['parents'].widget.choices.queryset.filter(user=self.user)
self.fields['parents'].widget = CheckboxSelectMultipleBootstrap(
choices = self.fields['parents'].widget.choices,
attrs = {},
)
self.fields['parents'].help_text = None
def save(self, commit=True):
self.instance.user = self.user
return super(ParticipantForm, self).save(commit)
save.alters_data = True
class Meta:
model = Participant
fields = [
'first_name', 'last_name', 'birth_num',
'age_group', 'insurance',
'street', 'city', 'postal_code', 'citizenship',
'email', 'phone',
'school', 'school_other', 'school_class',
'health',
'parents',
]
| {
"repo_name": "misli/django-domecek",
"path": "domecek/forms/participant.py",
"copies": "1",
"size": "1330",
"license": "bsd-3-clause",
"hash": 5712827468609002000,
"line_mean": 31.4390243902,
"line_max": 125,
"alpha_frac": 0.6165413534,
"autogenerated": false,
"ratio": 4.079754601226994,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5196295954626994,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement
from django import template
from django.core.urlresolvers import reverse_lazy as reverse
from ..conf import settings
from ..forms.schoolyear import SchoolYearForm
from ..utils import (
currency as _currency,
comma_separated as _comma_separated,
current_url as _current_url,
url_back as _url_back,
url_with_back as _url_with_back,
)
register = template.Library()
@register.filter
def currency(value):
try:
return _currency(value)
except ValueError:
return ''
@register.filter
def comma_separated(value):
return _comma_separated(value)
@register.filter
def filter_current_school_year(value, school_year):
return value.filter(school_year=school_year)
@register.simple_tag
def param_back():
return settings.DOMECEK_PARAM_BACK
@register.simple_tag(takes_context=True)
def url_back(context):
return _url_back(context['request'])
@register.simple_tag(takes_context=True)
def current_url(context):
return _current_url(context['request'])
@register.inclusion_tag('domecek/registration_links.html', takes_context=True)
def registration_links(context, subject):
context = context.__copy__()
context['reg_active'] = subject.reg_active
if context['request'].user.is_authenticated():
context['links'] = [
{
'participant': participant,
'registration': subject.registrations.filter(participant=participant).first(),
'url': _url_with_back(
subject.get_registration_url(participant),
_current_url(context['request']),
),
}
for participant in context['request'].user.domecek_participants.all()
]
context['participant_create_url'] = _url_with_back(
reverse('domecek:participant_create'),
subject.get_absolute_url(),
)
else:
context['public_registration_url'] = subject.get_public_registration_url()
return context
@register.inclusion_tag('domecek/schoolyear_form.html', takes_context=True)
def school_year_form(context):
context = context.__copy__()
context['school_year_form'] = SchoolYearForm(initial={'school_year': context['request'].school_year})
return context
class URLWithBackNode(template.base.Node):
def __init__(self, original_node):
self.original_node = original_node
def render(self, context):
return _url_with_back(
self.original_node.render(context),
current_url(context),
)
@register.tag
def url_with_back(parser, token):
"""
Returns an absolute URL as built-in tag url does,
but adds parameter back with current url.
"""
return URLWithBackNode(template.defaulttags.url(parser, token))
| {
"repo_name": "misli/django-domecek",
"path": "domecek/templatetags/domecek_tags.py",
"copies": "1",
"size": "2938",
"license": "bsd-3-clause",
"hash": 9166240583002760000,
"line_mean": 25.4684684685,
"line_max": 125,
"alpha_frac": 0.6586113002,
"autogenerated": false,
"ratio": 3.891390728476821,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9982516497820663,
"avg_score": 0.013497106171231638,
"num_lines": 111
} |
from __future__ import absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement
from django import template
from ..models import Seat
register = template.Library()
@register.filter
def split(value):
return value.split()
@register.inclusion_tag('seat.html', takes_context=True)
def seat(context, room, table):
    # this should not happen with a valid template
    if room not in Seat.ROOMS:
        raise Exception('"{}" is not a valid room'.format(room))
    if not table:
        raise Exception('"{}" is not a valid table'.format(table))
    # use form.seat_counts to store per-table counters;
    # the form instance stays the same across different copies of the context
if not hasattr(context['form'], 'seat_counts'):
context['form'].seat_counts = {}
rt = '{}-{}'.format(room, table)
s = context['form'].seat_counts[rt] = context['form'].seat_counts.get(rt, 0) + 1
seat = Seat.objects.get_or_create(room=room, table=table, seat=s)[0]
seat_context = {}
seat_context['seat'] = seat
try:
seat_context['variant'] = context['form']['seat-{}-variant'.format(seat.id)]
except KeyError:
pass
return seat_context
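# Template usage sketch (the room and table values are illustrative; room has
# to be one of Seat.ROOMS):
#
#     {% load ples %}
#     {% seat 'hall' 1 %}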
| {
"repo_name": "misli/ples",
"path": "app/ples/templatetags/ples.py",
"copies": "1",
"size": "1195",
"license": "bsd-3-clause",
"hash": -3278893823172250000,
"line_mean": 31.2972972973,
"line_max": 125,
"alpha_frac": 0.6627615063,
"autogenerated": false,
"ratio": 3.4941520467836256,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4656913553083626,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement
from django.utils.safestring import mark_safe
from django.forms.widgets import (
RadioSelect, CheckboxSelectMultiple,
CheckboxFieldRenderer, RadioFieldRenderer,
)
class BootstrapRenderer:
def render_option(self, value, label, i):
widget = self.choice_input_class(self.name, self.value, self.attrs.copy(), (value, label), i)
row = '<div class="row select">' \
'<div class="col-md-1 right">{tag}</div>' \
'<div class="col-md-11"><label class="form-control" style="font-weight:normal" for="{id}">{label}</label></div>' \
'</div>'
return row.format(
tag = widget.tag(),
id = widget.attrs['id'],
label = widget.choice_label,
)
def render(self):
rows = []
i = 0
for value, label in self.choices:
if isinstance(label, (list, tuple)):
for v, l in label:
rows.append(self.render_option(v, l, i))
i += 1
else:
rows.append(self.render_option(value, label, i))
i += 1
return mark_safe('\n'.join(rows))
class RadioFieldRendererBootstrap(BootstrapRenderer, RadioFieldRenderer):
''' Renders RadioSelect in a nice table '''
class CheckboxFieldRendererBootstrap(BootstrapRenderer, CheckboxFieldRenderer):
''' Renders CheckboxSelectMultiple in a nice table '''
class RadioSelectBootstrap(RadioSelect):
renderer = RadioFieldRendererBootstrap
class CheckboxSelectMultipleBootstrap(CheckboxSelectMultiple):
renderer = CheckboxFieldRendererBootstrap
| {
"repo_name": "misli/django-domecek",
"path": "domecek/forms/widgets.py",
"copies": "1",
"size": "1793",
"license": "bsd-3-clause",
"hash": -8793057984603707000,
"line_mean": 29.9137931034,
"line_max": 134,
"alpha_frac": 0.6157278305,
"autogenerated": false,
"ratio": 4.131336405529954,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5247064236029954,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement
import calendar
from collections import namedtuple
from datetime import date, datetime, time, timedelta
from django.core.urlresolvers import reverse_lazy as reverse
from django.db import models
from django.db.models import Q
from django.utils.dateformat import DateFormat
from django.utils.encoding import python_2_unicode_compatible
from django.utils.functional import cached_property
from django.utils.translation import ugettext_lazy as _
from djangocms_text_ckeditor.fields import HTMLField
from .clubs import ClubJournalLeaderEntry
from .roles import Leader
from .startend import StartEndMixin
def start_end_by_date(d):
# we work with monthly timesheets by default
# TODO: allow weekly and quarterly timesheets by settings
return {
'start': date(d.year, d.month, 1),
'end': date(d.year, d.month, calendar.monthrange(d.year, d.month)[1]),
}
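# A quick worked example, assuming the default monthly periods: any day in
# March 2015 maps to the first and last day of that month.
#
#     start_end_by_date(date(2015, 3, 15))
#     # -> {'start': date(2015, 3, 1), 'end': date(2015, 3, 31)}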
class TimesheetPeriodManager(models.Manager):
def for_date(self, date):
return self.get_or_create(**start_end_by_date(date))[0]
@python_2_unicode_compatible
class TimesheetPeriod(StartEndMixin, models.Model):
start = models.DateField(_('start date'), editable=False, unique=True)
end = models.DateField(_('end date'), editable=False, unique=True)
objects = TimesheetPeriodManager()
class Meta:
app_label = 'domecek'
ordering = ('-start',)
verbose_name = _('timesheet period')
verbose_name_plural = _('timesheet periods')
def __str__(self):
return self.name
@cached_property
def name(self):
# we work with monthly timesheets by default
# TODO: allow weekly and quarterly timesheets by settings
return DateFormat(self.start).format('F Y')
@cached_property
def all_timesheets(self):
return list(self.timesheets.all())
class TimesheetManager(models.Manager):
def for_leader_and_date(self, leader, date):
return self.get_or_create(
leader = leader,
period = TimesheetPeriod.objects.for_date(date),
)[0]
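# Usage sketch (leader is any Leader instance): fetch -- or lazily create --
# the timesheet covering a given day.
#
#     timesheet = Timesheet.objects.for_leader_and_date(leader, date.today())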
@python_2_unicode_compatible
class Timesheet(models.Model):
period = models.ForeignKey(TimesheetPeriod, verbose_name=_('period'), related_name='timesheets', editable=False)
leader = models.ForeignKey(Leader, verbose_name=_('leader'), related_name='timesheets', editable=False)
submitted = models.BooleanField(_('submitted'), default=False)
paid = models.BooleanField(_('paid'), default=False)
objects = TimesheetManager()
class Meta:
app_label = 'domecek'
ordering = ('-period__start',)
unique_together = (('period', 'leader'),)
verbose_name = _('timesheet')
verbose_name_plural = _('timesheets')
def __str__(self):
return '{leader}, {period}'.format(
leader = self.leader,
period = self.period.name,
)
@property
def club_entries(self):
return ClubJournalLeaderEntry.objects.filter(
club_entry__date__gte = self.period.start,
club_entry__date__lte = self.period.end,
leader = self.leader,
)
@cached_property
def all_club_entries(self):
return list(self.club_entries.all())
@cached_property
def all_timesheet_entries(self):
return list(self.timesheet_entries.all())
@cached_property
def all_entries(self):
return sorted(
self.all_timesheet_entries + self.all_club_entries,
cmp = lambda e1, e2: cmp(e1.datetime_start, e2.datetime_start),
)
class EntryGroup(namedtuple('_EntryGroup', ('name', 'entries'))):
@property
def duration(self):
return sum((e.duration for e in self.entries), timedelta())
@cached_property
def groups(self):
gs = {}
for entry in self.all_entries:
if entry.group not in gs:
gs[entry.group] = self.EntryGroup(
name = entry.group.name,
entries = [],
)
gs[entry.group].entries.append(entry)
return gs.values()
@python_2_unicode_compatible
class TimesheetEntryType(models.Model):
name = models.CharField(_('name'), max_length=150)
order = models.IntegerField(_('order'), blank=True, default=0)
class Meta:
app_label = 'domecek'
ordering = ('order',)
verbose_name = _('timesheet entry type')
verbose_name_plural = _('timesheet entry types')
def __str__(self):
return self.name
@python_2_unicode_compatible
class TimesheetEntry(StartEndMixin, models.Model):
timesheet = models.ForeignKey(Timesheet, editable=False,
verbose_name=_('timesheet'), related_name='timesheet_entries')
entry_type = models.ForeignKey(TimesheetEntryType, null=True,
verbose_name=_('entry type'), related_name='entries')
date = models.DateField(_('date'))
start = models.TimeField(_('start time'))
end = models.TimeField(_('end time'))
description = HTMLField(_('work description'))
class Meta:
app_label = 'domecek'
ordering = ('start',)
verbose_name = _('timesheet entry')
verbose_name_plural = _('timesheet entries')
def __str__(self):
return '{}'.format(self.duration)
@cached_property
def datetime_start(self):
return datetime.combine(self.date, self.start)
@cached_property
def datetime_end(self):
return datetime.combine(self.date, self.end)
@cached_property
def duration(self):
return self.datetime_end - self.datetime_start
duration.short_description = _('duration')
@property
def group(self):
return self.entry_type
def get_edit_url(self):
return reverse('domecek:timesheetentry_update', args=(self.id,))
def get_delete_url(self):
return reverse('domecek:timesheetentry_delete', args=(self.id,))
| {
"repo_name": "misli/django-domecek",
"path": "domecek/models/timesheets.py",
"copies": "1",
"size": "6326",
"license": "bsd-3-clause",
"hash": 4417428202660593700,
"line_mean": 31.7772020725,
"line_max": 125,
"alpha_frac": 0.6184002529,
"autogenerated": false,
"ratio": 4.047344849648113,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.013549223596555169,
"num_lines": 193
} |
from __future__ import absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement
import cStringIO
from cms.views import details as cms_page
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.core.exceptions import ImproperlyConfigured
from django.core.urlresolvers import reverse
from django.forms import ModelForm
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import render, get_object_or_404
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext_lazy as _
from django.views.generic import (
CreateView, DetailView, FormView, ListView,
TemplateView, UpdateView, View,
)
from formtools.wizard.views import SessionWizardView
from os.path import basename
from xhtml2pdf import pisa
from . import models, settings
from .utils import get_view, get_form
class PdfViewMixin(object):
"""
A base view for displaying a Pdf
"""
def get_attachment_filename(self):
if hasattr(self, 'attachment_filename'):
return self.attachment_filename
filename = basename(self.request.path)
return filename.endswith('.pdf') and filename or '{}.pdf'.format(filename)
def get(self, request, *args, **kwargs):
content = super(PdfViewMixin, self).get(request, *args, **kwargs).render().content
result = cStringIO.StringIO()
pdf = pisa.CreatePDF(cStringIO.StringIO(content), result, encoding='UTF-8')
if pdf.err:
raise Exception(pdf.err)
response = HttpResponse(content_type='application/pdf')
response['Content-Disposition'] = 'attachment; filename="{}"'.format(self.get_attachment_filename())
response.write(result.getvalue())
result.close()
return response
class ProductView(FormView):
form_class = get_form('CartItem')
def get_context_data(self, **kwargs):
self.kwargs.update(kwargs)
return self.kwargs
def get_form_kwargs(self):
kwargs = super(ProductView, self).get_form_kwargs()
kwargs['product'] = self.kwargs['product']
return kwargs
def form_valid(self, form):
self.request.save_cart()
form.instance.cart = self.request.cart
form.instance.product = self.kwargs['product']
try:
item = form.instance.__class__.objects.get(
cart = form.instance.cart,
product = form.instance.product,
package = form.instance.package,
)
item.quantity += form.instance.quantity
item.save()
except form.instance.__class__.DoesNotExist:
form.instance.tax_rate = form.instance.product.tax_rate
form.instance.price = form.instance.package \
and form.instance.package.price \
or form.instance.product.price
form.instance.save()
messages.add_message(self.request, messages.INFO,
mark_safe(_('Product has been added to <a href="{}">shopping cart</a>').format(reverse('Cart:cart')))
)
return super(ProductView, self).form_valid(form)
def get_success_url(self):
if 'add-and-cart' in self.request.POST:
return reverse('Cart:cart')
else:
return self.kwargs['product'].get_absolute_url()
# catalog views
root = TemplateView.as_view(template_name='cmsplugin_shop/root.html')
category = TemplateView.as_view(template_name='cmsplugin_shop/category.html')
product = ProductView.as_view(template_name='cmsplugin_shop/product.html')
class CatalogView(View):
root_view = staticmethod(get_view('root'))
category_view = staticmethod(get_view('category'))
product_view = staticmethod(get_view('product'))
category_model = models.Category
product_model = models.Product
def dispatch(self, request, path):
slug_list = [slug for slug in path.split('/') if slug]
# do not allow disabled nodes if user is not staff
if request.toolbar.use_draft:
active = {}
else:
active = {'active':True}
# display root view, if the path is empty
if not slug_list:
return self.root_view(request,
categories = self.category_model.objects.filter(parent=None, **active),
products = self.product_model.objects.filter(parent=None, **active),
)
# handle cms subpages
if request.current_page.application_namespace != 'Catalog':
return cms_page(request, path)
# lookup node by path
node = None
for slug in slug_list:
node = get_object_or_404(models.Node, parent=node, slug=slug, **active)
# display product view
try:
product = node.product
return self.product_view(request,
node = product,
product = product,
)
except self.product_model.DoesNotExist:
# or category view
category = node.category
return self.category_view(request,
node = category,
category = category,
categories = self.category_model.objects.filter(parent=node, **active),
products = self.product_model.objects.filter(parent=node, **active),
)
catalog = CatalogView.as_view()
class CartView(UpdateView):
form_class = get_form('Cart')
model = models.Cart
template_name = 'cmsplugin_shop/cart.html'
def get_object(self, queryset=None):
return self.request.cart
def form_valid(self, form):
self.request.save_cart()
return super(CartView, self).form_valid(form)
def get_success_url(self):
if 'update-and-order' in self.request.POST:
return reverse('Order:form')
else:
return reverse('Cart:cart')
cart = CartView.as_view()
class OrderFormView(SessionWizardView):
template_name = 'cmsplugin_shop/order_form.html'
form_list = [
get_form('Order'),
get_form('OrderConfirm')
]
def get_form_initial(self, step):
initial = {}
if step == '0' and self.request.user.is_authenticated():
for attr in 'first_name', 'last_name', 'email':
if hasattr(self.request.user, attr):
initial[attr] = getattr(self.request.user, attr)
if hasattr(self.request.user, settings.PROFILE_ATTRIBUTE):
profile = getattr(self.request.user, settings.PROFILE_ATTRIBUTE)
for attr in 'phone', 'address':
if hasattr(profile, attr):
initial[attr] = getattr(profile, attr)
return initial
def get_order(self):
# create order using data from first form
order = models.Order()
for attr, value in self.get_cleaned_data_for_step('0').items():
setattr(order, attr, value)
        # find or create the initial order state
try:
state = models.OrderState.objects.get(code=settings.INITIAL_ORDER_STATE)
except models.OrderState.DoesNotExist:
state = models.OrderState(code=settings.INITIAL_ORDER_STATE, name='New')
state.save()
# set order.state and cart
self.request.save_cart()
order.cart = self.request.cart
order.state = state
if self.request.user.is_authenticated():
order.user = self.request.user
return order
def get_context_data(self, form, **kwargs):
context = super(OrderFormView, self).get_context_data(form=form, **kwargs)
if self.steps.current == '1':
context.update({'order': self.get_order()})
return context
def done(self, form_list, **kwargs):
# get order
order = self.get_order()
# save order
order.save()
# send notifications
order.send_customer_mail()
order.send_manager_mail()
messages.add_message(self.request, messages.INFO, mark_safe(_(
'Your order has been accepted. The confirmation email has been sent to {}.'
).format(order.email)))
# redirect to order detail
return HttpResponseRedirect(order.get_absolute_url())
order_form = OrderFormView.as_view()
class OrderDetailView(DetailView):
model = models.Order
def get_queryset(self):
return super(OrderDetailView, self).get_queryset().filter(user = None)
class MyOrderDetailView(DetailView):
model = models.Order
def get_queryset(self):
return super(MyOrderDetailView, self).get_queryset().filter(user = self.request.user)
class OrderPdfView(PdfViewMixin, OrderDetailView):
template_name_suffix = '_pdf'
class MyOrderPdfView(PdfViewMixin, MyOrderDetailView):
template_name_suffix = '_pdf'
order_detail = OrderDetailView.as_view()
order_pdf = OrderPdfView.as_view()
my_order_detail = login_required(MyOrderDetailView.as_view())
my_order_pdf = login_required(MyOrderPdfView.as_view())
class MyOrdersView(ListView):
model = models.Order
def get_queryset(self):
user = self.request.user
return self.model._default_manager.filter(user=user)
def get_context_data(self):
context = super(MyOrdersView, self).get_context_data()
context['orders'] = context['object_list']
return context
my_orders = login_required(MyOrdersView.as_view())
| {
"repo_name": "misli/cmsplugin-shop",
"path": "cmsplugin_shop/views.py",
"copies": "1",
"size": "9684",
"license": "bsd-3-clause",
"hash": 4470866412899456500,
"line_mean": 32.1643835616,
"line_max": 125,
"alpha_frac": 0.6250516316,
"autogenerated": false,
"ratio": 4.098180279305967,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.010498510098786091,
"num_lines": 292
} |
from __future__ import absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement
import cStringIO
from django.http import HttpResponse
from os.path import basename
from xhtml2pdf import pisa
class PdfViewMixin(object):
"""
A base view for displaying a Pdf
"""
def get_attachment_filename(self):
if hasattr(self, 'attachment_filename'):
return self.attachment_filename
filename = basename(self.request.path)
return filename.endswith('.pdf') and filename or '{}.pdf'.format(filename)
def get(self, request, *args, **kwargs):
content = super(PdfViewMixin, self).get(request, *args, **kwargs).render().content
result = cStringIO.StringIO()
pdf = pisa.CreatePDF(cStringIO.StringIO(content), result, encoding='UTF-8')
if pdf.err:
raise Exception(pdf.err)
response = HttpResponse(content_type='application/pdf')
response['Content-Disposition'] = 'attachment; filename="{}"'.format(self.get_attachment_filename())
response.write(result.getvalue())
result.close()
return response
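# Usage sketch -- mix the mixin into any TemplateResponse-based view
# (SomeDetailView is a stand-in, not part of this module):
#
#     class InvoicePdfView(PdfViewMixin, SomeDetailView):
#         attachment_filename = 'invoice.pdf'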
| {
"repo_name": "misli/django-domecek",
"path": "domecek/views/pdf.py",
"copies": "1",
"size": "1166",
"license": "bsd-3-clause",
"hash": 5899626489889660000,
"line_mean": 32.3142857143,
"line_max": 125,
"alpha_frac": 0.6740994854,
"autogenerated": false,
"ratio": 4.209386281588448,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5383485766988448,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement
import datetime
import warnings
from .models import Leader, SchoolYear
class school_year(object):
def __get__(self, request, type=None):
if request is None:
return self
try:
return request._domecek_school_year
except AttributeError:
try:
# return year stored in the session
school_year = SchoolYear.objects.get(id=request.session['school_year_id'])
except (KeyError, SchoolYear.DoesNotExist):
try:
# return last active year
school_year = SchoolYear.objects.filter(active=True).order_by('-year')[0]
except IndexError:
# Create or activate current year
if datetime.date.today().month < 7:
year = datetime.date.today().year - 1
else:
year = datetime.date.today().year
school_year = SchoolYear.objects.get_or_create(year=year)[0]
school_year.active = True
school_year.save()
request._domecek_school_year = school_year
return request._domecek_school_year
def __set__(self, request, school_year):
if request:
request._domecek_school_year = school_year
request.session['school_year_id'] = school_year.id
def __delete__(self, request):
if request:
del(request._domecek_school_year)
class leader(object):
def __get__(self, request, type=None):
if request is None:
return self
try:
return request._domecek_leader
except AttributeError:
try:
request._domecek_leader = request.user.domecek_leader
except (AttributeError, Leader.DoesNotExist):
request._domecek_leader = None
return request._domecek_leader
def __set__(self, request, leader):
pass
def __delete__(self, request):
if request:
del(request._domecek_leader)
class SchoolYearMiddleware(object):
def process_request(self, request):
warnings.warn("Using SchoolYearMiddleware is deprecated. Use DomecekMiddleware instead.")
type(request).school_year = school_year()
type(request).leader = leader()
class DomecekMiddleware(object):
def process_request(self, request):
type(request).school_year = school_year()
type(request).leader = leader()
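# With DomecekMiddleware enabled, request-handling code can use the lazy
# descriptors directly (sketch only):
#
#     def my_view(request):
#         year = request.school_year # resolved lazily, cached on the request
#         request.school_year = year # also stores the id in the session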
| {
"repo_name": "misli/django-domecek",
"path": "domecek/middleware.py",
"copies": "1",
"size": "2660",
"license": "bsd-3-clause",
"hash": 2583316239998467000,
"line_mean": 31.0481927711,
"line_max": 125,
"alpha_frac": 0.5823308271,
"autogenerated": false,
"ratio": 4.262820512820513,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.013923481096123031,
"num_lines": 83
} |
from __future__ import absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement
import json
import logging
from django.core.exceptions import (
ObjectDoesNotExist, PermissionDenied, SuspiciousOperation
)
from django.db.models import Model, QuerySet
from django.http import JsonResponse
from django.utils.encoding import force_text
from django.views.generic import View
from django.views.generic.edit import FormMixin
from functools import partial
from .core import (
new_image_callback, move_image_callback,
)
from .forms import NewImageForm, MoveImageForm
from ..task_api import TaskApi
from ..models import Image, Task, TaskData
logger = logging.getLogger(__name__)
builder_api = TaskApi()
def translate_args(translation_dict, values):
"""
translate keys in dict values using translation_dict
"""
response = {}
for key, value in values.items():
try:
response[translation_dict[key]] = value
except KeyError:
response[key] = value
return response
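# A minimal example of the key translation (the mapping is hypothetical):
#
#     translate_args({'image': 'image_id'}, {'image': 'abc', 'tag': 'v1'})
#     # -> {'image_id': 'abc', 'tag': 'v1'}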
class ModelJSONEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, Model):
return obj.__json__()
elif isinstance(obj, QuerySet):
return [o.__json__() for o in obj]
else:
return super(ModelJSONEncoder, self).default(obj)
class JsonView(View):
"""
Overrides dispatch method to always return JsonResponse.
"""
def dispatch(self, request, *args, **kwargs):
try:
response = super(JsonView, self).dispatch(request, *args, **kwargs)
if not isinstance(response, JsonResponse):
response = JsonResponse(response, encoder=ModelJSONEncoder, safe=False)
except ObjectDoesNotExist:
logger.warning('Not Found: %s', request.path,
extra={'status_code': 404, 'request': request})
response = JsonResponse({'error': 'Not Found'})
response.status_code = 404
except PermissionDenied:
logger.warning('Forbidden (Permission denied): %s', request.path,
extra={'status_code': 403, 'request': request})
response = JsonResponse({'error': 'Forbidden'})
response.status_code = 403
except SuspiciousOperation as e:
logger.error(force_text(e),
extra={'status_code': 400, 'request': request})
response = JsonResponse({'error': 'Bad Request'})
response.status_code = 400
except SystemExit:
# Allow sys.exit()
raise
except:
logger.exception('Failed to handle request: %s', request.path,
extra={'status_code': 500, 'request': request})
response = JsonResponse({'error': 'Internal Server Error'})
response.status_code = 500
return response
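# Usage sketch (hypothetical endpoint): whatever a handler returns -- dict,
# Model or QuerySet -- is wrapped in a JsonResponse via ModelJSONEncoder.
#
#     class PingCall(JsonView):
#         def get(self, request):
#             return {'pong': True}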
class FormJsonView(FormMixin, JsonView):
def post(self, request, *args, **kwargs):
"""
Handles POST requests, instantiating a form instance with the passed
        POST variables and then checking it for validity.
"""
#self.args = args
#self.kwargs = kwargs
form_class = self.get_form_class()
form = self.get_form(form_class)
if form.is_valid():
return self.form_valid(form)
else:
return self.form_invalid(form)
def put(self, *args, **kwargs):
return self.post(*args, **kwargs)
def form_valid(self, form):
return {'message': 'OK'}
def form_invalid(self, form):
return {'errors': form.errors}
def get_form_kwargs(self):
kwargs = super(FormJsonView, self).get_form_kwargs()
if self.request.method in ('POST', 'PUT'):
kwargs.update({
'data': json.loads(self.request.body),
})
return kwargs
class ImageStatusCall(JsonView):
def get(self, request, image_id):
img = Image.objects.get(hash=image_id)
return {
'image_id': image_id,
'status': img.get_status_display(),
}
class ImageInfoCall(JsonView):
def get(self, request, image_id):
return Image.objects.get(hash=image_id)
class ImageDepsCall(JsonView):
def get_deps(self, image):
return {
'image_id': image.hash,
'deps': [self.get_deps(i) for i in image.children()],
}
def get(self, request, image_id):
image = Image.objects.get(hash=image_id)
return self.get_deps(image)
class ListImagesCall(JsonView):
def get(self, request):
return Image.objects.all()
class TaskStatusCall(JsonView):
def get(self, request, task_id):
return Task.objects.get(id=task_id)
class ListTasksCall(JsonView):
def get(self, request):
return Task.objects.all()
class NewImageCall(FormJsonView):
form_class = NewImageForm
def form_valid(self, form):
""" initiate a new build """
cleaned_data = form.cleaned_data
owner = 'testuser' # XXX: hardcoded
logger.debug('cleaned_data = %s', cleaned_data)
local_tag = '%s.%s' % (owner, cleaned_data['tag'])
td = TaskData(json=json.dumps(cleaned_data))
td.save()
t = Task(builddev_id='buildroot-fedora', status=Task.STATUS_PENDING,
type=Task.TYPE_BUILD, owner=owner, task_data=td)
t.save()
cleaned_data.update({'build_image': 'buildroot-fedora', 'local_tag': local_tag,
'callback': partial(new_image_callback, t.id)})
task_id = builder_api.build_docker_image(**cleaned_data)
t.celery_id = task_id
t.save()
return {'task_id': t.id}
class MoveImageCall(FormJsonView):
form_class = MoveImageForm
def form_valid(self, form):
data = form.cleaned_data
data['image_id'] = self.kwargs['image_id']
td = TaskData(json=json.dumps(data))
td.save()
owner = 'testuser' # XXX: hardcoded
t = Task(type=Task.TYPE_MOVE, owner=owner, task_data=td)
t.save()
data['callback'] = partial(move_image_callback, t.id)
task_id = builder_api.push_docker_image(**data)
t.celery_id = task_id
t.save()
return {'task_id': t.id}
class RebuildImageCall(JsonView):
""" rebuild provided image; use same response as new_image """
def post(self, request, image_id):
post_args = json.loads(self.request.body)
try:
data = json.loads(
Image.objects.get(hash=image_id).task.task_data.json
)
except (ObjectDoesNotExist, AttributeError) as e:
logger.error(repr(e))
            # ErrorDuringRequest is assumed to be defined or imported elsewhere
            # in the project; it is not defined in this module as published.
            raise ErrorDuringRequest('Image does not exist or was not built from task.')
else:
if post_args:
data.update(post_args)
data['image_id'] = image_id
td = TaskData(json=json.dumps(data))
td.save()
owner = 'testuser' # XXX: hardcoded
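            # NOTE: the code below mirrors MoveImageCall (a TYPE_MOVE task sent
            # through push_docker_image); a true rebuild would presumably use
            # TYPE_BUILD with build_docker_image -- kept here as published.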
t = Task(type=Task.TYPE_MOVE, owner=owner, task_data=td)
t.save()
data['callback'] = partial(move_image_callback, t.id)
task_id = builder_api.push_docker_image(**data)
t.celery_id = task_id
t.save()
return {'task_id': t.id}
class InvalidateImageCall(JsonView):
def post(self, request, image_id):
count = Image.objects.invalidate(image_id)
return {'message': 'Invalidated {} images.'.format(count)}
| {
"repo_name": "DBuildService/dbs-server",
"path": "dbs/api/views.py",
"copies": "1",
"size": "7495",
"license": "bsd-3-clause",
"hash": -5388774310777787000,
"line_mean": 29.8436213992,
"line_max": 125,
"alpha_frac": 0.6016010674,
"autogenerated": false,
"ratio": 3.9488935721812433,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5050494639581243,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement
import json
import re
import logging
import socket
from django.contrib.contenttypes.fields import GenericForeignKey, GenericRelation
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ObjectDoesNotExist
from django.db import models
logger = logging.getLogger(__name__)
class TaskData(models.Model):
json = models.TextField()
def __unicode__(self):
return json.dumps(json.loads(self.json), indent=4)
class Task(models.Model):
STATUS_PENDING = 1
STATUS_RUNNING = 2
STATUS_FAILED = 3
STATUS_SUCCESS = 4
_STATUS_NAMES = {
STATUS_PENDING: 'Pending',
STATUS_RUNNING: 'Running',
STATUS_FAILED: 'Failed',
STATUS_SUCCESS: 'Successful',
}
TYPE_BUILD = 1
TYPE_MOVE = 2
_TYPE_NAMES = {
TYPE_BUILD: 'Build',
TYPE_MOVE: 'Move',
}
celery_id = models.CharField(max_length=42, blank=True, null=True)
date_started = models.DateTimeField(auto_now_add=True)
date_finished = models.DateTimeField(null=True, blank=True)
builddev_id = models.CharField(max_length=38)
status = models.IntegerField(choices=_STATUS_NAMES.items(), default=STATUS_PENDING)
type = models.IntegerField(choices=_TYPE_NAMES.items())
owner = models.CharField(max_length=38)
task_data = models.ForeignKey(TaskData)
log = models.TextField(blank=True, null=True)
class Meta:
ordering = ['-date_finished']
def __unicode__(self):
return "%d [%s]" % (self.id, self.get_status())
def get_type(self):
return self._TYPE_NAMES[self.type]
def get_status(self):
return self._STATUS_NAMES[self.status]
def __json__(self):
response = {
"task_id": self.id,
"status": self.get_status_display(),
"type": self.get_type_display(),
"owner": self.owner,
"started": str(self.date_started),
"finished": str(self.date_finished),
"builddev-id": self.builddev_id,
}
if hasattr(self, 'image'):
response['image_id'] = self.image.hash
task_data = json.loads(self.task_data.json)
domain = socket.gethostname()
response['message'] = 'You can pull your image with command: \'docker pull {}:5000/{}\''.format(
domain, task_data['tag']
)
return response
class Package(models.Model):
""" TODO: software collections """
name = models.CharField(max_length=64)
class RpmQuerySet(models.QuerySet):
def get_or_create_from_nvr(self, nvr):
re_nvr = re.match('(.*)-(.*)-(.*)', nvr)
if re_nvr:
name, version, release = re_nvr.groups()
p, _ = Package.objects.get_or_create(name=name)
rpm, _ = Rpm.objects.get_or_create(package=p, nvr=nvr)
return rpm
else:
logger.error('"%s" is not an N-V-R', nvr)
class Rpm(models.Model):
package = models.ForeignKey(Package)
nvr = models.CharField(max_length=128)
part_of = GenericRelation('Content')
objects = RpmQuerySet.as_manager()
def __unicode__(self):
return '%s: %s' % (self.package, self.nvr)
class Content(models.Model):
"""
generic many to many
"""
content_type = models.ForeignKey(ContentType)
object_id = models.PositiveIntegerField()
content_object = GenericForeignKey('content_type', 'object_id')
class Registry(models.Model):
url = models.URLField()
class YumRepo(models.Model):
url = models.URLField()
class ImageQuerySet(models.QuerySet):
def children_as_list(self, image_id):
return self.filter(parent=image_id).values_list('hash', flat=True)
def invalidate(self, image_id):
"""
TODO:
make this more efficient
:param image_id:
:return:
"""
count = 0
to_invalidate = [image_id]
while True:
try:
parent_image = to_invalidate.pop()
except IndexError:
break
            count += self.filter(hash=parent_image, is_invalidated=False).update(is_invalidated=True) # filter on this image's own flag, not the children's
to_invalidate.extend(self.children_as_list(parent_image))
return count
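# Usage sketch for ImageQuerySet.invalidate (some_image_hash stands for any
# stored image hash):
#
#     count = Image.objects.invalidate(some_image_hash)
#     # -> number of images newly marked as invalidated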
class Image(models.Model):
STATUS_BUILD = 1
STATUS_TESTING = 2
STATUS_STABLE = 3
STATUS_BASE = 4
_STATUS_NAMES = {
STATUS_BUILD: 'Built',
STATUS_TESTING: 'Pushed-Testing',
STATUS_STABLE: 'Pushed-Stable',
STATUS_BASE: 'Base-Image',
}
hash = models.CharField(max_length=64, primary_key=True)
    hash = models.CharField(max_length=64, primary_key=True)
    parent = models.ForeignKey('self', null=True, blank=True) # base images don't have parents
task = models.OneToOneField(Task, null=True, blank=True)
status = models.IntegerField(choices=_STATUS_NAMES.items(), default=STATUS_BUILD)
content = models.ManyToManyField(Content)
dockerfile = models.ForeignKey('Dockerfile', null=True, blank=True)
is_invalidated = models.BooleanField(default=False)
objects = ImageQuerySet.as_manager()
def __unicode__(self):
return u'%s: %s' % (self.hash[:12], self.get_status())
def get_status(self):
return self._STATUS_NAMES[self.status]
@classmethod
def create(cls, image_id, status, tags=None, task=None, parent=None, dockerfile=None):
image, _ = cls.objects.get_or_create(hash=image_id, status=status)
image.task = task
image.parent = parent
if dockerfile:
image.dockerfile = dockerfile
image.save()
        for tag in tags or []: # tags defaults to None; guard the iteration
t, _ = Tag.objects.get_or_create(name=tag)
t.save()
rel = ImageRegistryRelation(tag=t, image=image)
rel.save()
return image
@property
def tags(self):
return Tag.objects.for_image_as_list(self)
@property
def children(self):
return Image.objects.filter(parent=self)
def ordered_rpms_list(self):
return list(Rpm.objects.filter(part_of__image=self).values_list('nvr', flat=True).order_by('nvr'))
@property
def rpms_count(self):
return Rpm.objects.filter(part_of__image=self).count()
def add_rpms_list(self, nvr_list):
"""
provide a list of RPM nvrs and link them to image
"""
for nvr in nvr_list:
rpm = Rpm.objects.get_or_create_from_nvr(nvr)
if rpm:
rpm_ct = ContentType.objects.get(model='rpm')
content, _ = Content.objects.get_or_create(object_id=rpm.id, content_type=rpm_ct)
self.content.add(content)
def __json__(self):
response = {
'hash': self.hash,
'status': self.get_status_display(),
'is_invalidated': self.is_invalidated,
'rpms': self.ordered_rpms_list(),
'tags': self.tags,
# 'registries': copy.copy(registries),
'parent': getattr(self.parent, 'hash', None)
}
if self.task:
response['built_on'] = str(self.task.date_finished)
return response
class TagQuerySet(models.QuerySet):
def for_image(self, image):
return self.filter(registry_bindings__image=image)
def for_image_as_list(self, image):
return list(self.for_image(image).values_list('name', flat=True))
# TODO: do relations with this
class Tag(models.Model):
name = models.CharField(max_length=64)
objects = TagQuerySet.as_manager()
class ImageRegistryRelation(models.Model):
tag = models.ForeignKey(Tag, related_name='registry_bindings')
image = models.ForeignKey(Image)
registry = models.ForeignKey(Registry, blank=True, null=True)
class Dockerfile(models.Model):
content = models.TextField()
| {
"repo_name": "DBuildService/dbs-server",
"path": "dbs/models.py",
"copies": "1",
"size": "8105",
"license": "bsd-3-clause",
"hash": 4261915397677616000,
"line_mean": 28.2599277978,
"line_max": 125,
"alpha_frac": 0.600987045,
"autogenerated": false,
"ratio": 3.7453789279112755,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4846365972911275,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement
import locale
import os
import string
from datetime import date
from django.core.urlresolvers import reverse_lazy as reverse
from django.utils.encoding import iri_to_uri, smart_text
from django.utils.translation import get_language, ugettext_lazy as _
from urllib import urlencode
from .conf import settings
try:
from django.utils.module_loading import import_string
except ImportError:
from django.utils.module_loading import import_by_path as import_string
def get_mailer(name):
return import_string(getattr(settings,
'DOMECEK_{}_MAILER'.format(name.upper()),
'domecek.mailers.{}Mailer'.format(name),
))
def get_plugin(name):
return import_string(getattr(settings,
'DOMECEK_{}_PLUGIN'.format(name.upper()),
'domecek.cms_plugins.default_plugins.{}Plugin'.format(name),
))
class LocaleConv:
def __init__(self, languages):
"""
This function loads localeconv for all languages during module load.
It is necessary, because using locale.setlocale later may be dangerous
(It is not thread-safe in most of the implementations.)
"""
original_locale_name = locale.setlocale(locale.LC_ALL)
self.localeconv = {}
for code, name in languages:
locale_name = locale.locale_alias[code].split('.')[0]+'.UTF-8'
locale.setlocale(locale.LC_ALL, str(locale_name))
self.localeconv[code] = locale.localeconv()
locale.setlocale(locale.LC_ALL, original_locale_name)
def __call__(self, language=None):
return self.localeconv[language or get_language()]
localeconv = LocaleConv(settings.LANGUAGES)
# This function is inspired by python's standard locale.currency().
def currency(val, international=False):
"""Formats val according to the currency settings for current language."""
conv = localeconv()
digits = settings.PRICE_DECIMAL_PLACES
# grouping
groups = []
s = str(abs(int(val)))
for interval in locale._grouping_intervals(conv['mon_grouping']):
if not s:
break
groups.append(s[-interval:])
s = s[:-interval]
if s:
groups.append(s)
groups.reverse()
s = smart_text(conv['mon_thousands_sep']).join(groups)
# display fraction for non integer values
if digits and not isinstance(val, int):
s += smart_text(conv['mon_decimal_point']) + '{{:.{}f}}'.format(digits).format(val).split('.')[1]
# '<' and '>' are markers if the sign must be inserted between symbol and value
s = '<' + s + '>'
smb = smart_text(conv[international and 'int_curr_symbol' or 'currency_symbol'])
precedes = conv[val<0 and 'n_cs_precedes' or 'p_cs_precedes']
separated = conv[val<0 and 'n_sep_by_space' or 'p_sep_by_space']
if precedes:
s = smb + (separated and ' ' or '') + s
else:
s = s + (separated and ' ' or '') + smb
sign_pos = conv[val<0 and 'n_sign_posn' or 'p_sign_posn']
sign = conv[val<0 and 'negative_sign' or 'positive_sign']
if sign_pos == 0:
s = '(' + s + ')'
elif sign_pos == 1:
s = sign + s
elif sign_pos == 2:
s = s + sign
elif sign_pos == 3:
s = s.replace('<', sign)
elif sign_pos == 4:
s = s.replace('>', sign)
else:
# the default if nothing specified;
# this should be the most fitting sign position
s = sign + s
return s.replace('<', '').replace('>', '').replace(' ', '\u00A0')
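# Worked example (the exact output depends on the active language's
# localeconv; shown for an en_US-style locale with 2 decimal places):
#
#     currency(1234.5) # -> u'$1,234.50'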
def comma_separated(l):
l = map(smart_text, l)
if len(l) > 2:
return _(', and ').join([', '.join(l[:-1]), l[-1]])
else:
return _(', and ').join(l)
def get_rand_hash(length=32, stringset=string.ascii_letters+string.digits):
return ''.join([stringset[i%len(stringset)] for i in [ord(x) for x in os.urandom(length)]])
def current_url(request):
if request.META['QUERY_STRING']:
return '{}?{}'.format(request.path, request.META['QUERY_STRING'])
else:
return request.path
def url_back(request):
return request.POST.get(
settings.DOMECEK_PARAM_BACK,
request.GET.get(
settings.DOMECEK_PARAM_BACK,
reverse('domecek:summary'),
),
)
def url_with_back(url, url_back):
return '{}?{}'.format(url, urlencode({settings.DOMECEK_PARAM_BACK: iri_to_uri(url_back)}))
def reverse_with_back(request, *args, **kwargs):
return url_with_back(reverse(*args, **kwargs), current_url(request))
| {
"repo_name": "misli/django-domecek",
"path": "domecek/utils.py",
"copies": "1",
"size": "4619",
"license": "bsd-3-clause",
"hash": 5342300339243587000,
"line_mean": 29.7933333333,
"line_max": 125,
"alpha_frac": 0.6267590388,
"autogenerated": false,
"ratio": 3.544896392939371,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.46716554317393705,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement
import locale
import os
import string
from decimal import Decimal
from django.db.models import Model
from django.utils.encoding import smart_text
from django.utils.translation import get_language
from . import settings
try:
from django.utils.module_loading import import_string
except ImportError:
from django.utils.module_loading import import_by_path as import_string
class EmptyMixin(Model):
class Meta:
abstract = True
def get_admin(name):
return import_string(getattr(settings.settings,
'CMSPLUGIN_SHOP_{}_ADMIN'.format(name.upper()),
'cmsplugin_shop.admins.{}Admin'.format(name),
))
def get_form(name):
return import_string(getattr(settings.settings,
'CMSPLUGIN_SHOP_{}_FORM'.format(name.upper()),
'cmsplugin_shop.forms.{}Form'.format(name),
))
def get_html_field():
return import_string(getattr(settings.settings,
'CMSPLUGIN_SHOP_HTML_FIELD',
'djangocms_text_ckeditor.fields.HTMLField',
))
def get_plugin(name):
return import_string(getattr(settings.settings,
'CMSPLUGIN_SHOP_{}_PLUGIN'.format(name.upper()),
'cmsplugin_shop.plugins.{}Plugin'.format(name),
))
def get_toolbar(name):
return import_string(getattr(settings.settings,
'CMSPLUGIN_SHOP_{}_TOOLBAR'.format(name.upper()),
'cmsplugin_shop.cms_toolbars.{}Toolbar'.format(name),
))
def get_view(name):
view = import_string(getattr(
settings.settings,
'CMSPLUGIN_SHOP_{}_VIEW'.format(name.upper()),
'cmsplugin_shop.views.{}'.format(name),
))
return hasattr(view, 'as_view') and view.as_view() or view
QUANTIZE = Decimal((0,(1,),-settings.DECIMAL_PLACES))
def quantize(price):
return price.quantize(QUANTIZE)
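# e.g. with DECIMAL_PLACES = 2:
#
#     quantize(Decimal('1.2345')) # -> Decimal('1.23')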
class LocaleConvCache(object):
def __init__(self, languages):
"""
This function loads localeconv for all languages during module load.
It is necessary, because using locale.setlocale later may be dangerous
(It is not thread-safe in most of the implementations.)
"""
self._conv = {}
original_locale_name = locale.setlocale(locale.LC_ALL)
for code, name in languages:
locale_name = locale.locale_alias[code].split('.')[0]+'.UTF-8'
locale.setlocale(locale.LC_ALL, str(locale_name))
self._conv[code] = locale.localeconv()
locale.setlocale(locale.LC_ALL, original_locale_name)
def getconv(self, language=None):
return self._conv[language or get_language()].copy()
localeconv_cache = LocaleConvCache(settings.settings.LANGUAGES)
# This function is inspired by python's standard locale.currency().
def currency(val, localeconv=None, international=False):
"""Formats val according to the currency settings for current language."""
val = Decimal(val)
conv = localeconv_cache.getconv()
conv.update(localeconv or settings.LOCALECONV)
# split integer part and fraction
parts = str(abs(val)).split('.')
# grouping
groups = []
s = parts[0]
for interval in locale._grouping_intervals(conv['mon_grouping']):
if not s:
break
groups.append(s[-interval:])
s = s[:-interval]
if s:
groups.append(s)
groups.reverse()
s = smart_text(conv['mon_thousands_sep']).join(groups)
# display fraction for non integer values
if len(parts) > 1:
s += smart_text(conv['mon_decimal_point']) + parts[1]
# '<' and '>' are markers if the sign must be inserted between symbol and value
s = '<' + s + '>'
smb = smart_text(conv[international and 'int_curr_symbol' or 'currency_symbol'])
precedes = conv[val<0 and 'n_cs_precedes' or 'p_cs_precedes']
separated = conv[val<0 and 'n_sep_by_space' or 'p_sep_by_space']
if precedes:
s = smb + (separated and ' ' or '') + s
else:
s = s + (separated and ' ' or '') + smb
sign_pos = conv[val<0 and 'n_sign_posn' or 'p_sign_posn']
sign = conv[val<0 and 'negative_sign' or 'positive_sign']
if sign_pos == 0:
s = '(' + s + ')'
elif sign_pos == 1:
s = sign + s
elif sign_pos == 2:
s = s + sign
elif sign_pos == 3:
s = s.replace('<', sign)
elif sign_pos == 4:
s = s.replace('>', sign)
else:
# the default if nothing specified;
# this should be the most fitting sign position
s = sign + s
return s.replace('<', '').replace('>', '')
def get_rand_hash(length=32, stringset=string.ascii_letters+string.digits):
return ''.join([stringset[i%len(stringset)] for i in [ord(x) for x in os.urandom(length)]])
| {
"repo_name": "misli/cmsplugin-shop",
"path": "cmsplugin_shop/utils.py",
"copies": "1",
"size": "4790",
"license": "bsd-3-clause",
"hash": 2825058325914769400,
"line_mean": 29.5095541401,
"line_max": 125,
"alpha_frac": 0.6384133612,
"autogenerated": false,
"ratio": 3.593398349587397,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4731811710787397,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement
import locale
import os
import string
from django.conf import settings
from django.utils.encoding import smart_text
from django.utils.translation import get_language, ugettext_lazy as _
try:
from django.utils.module_loading import import_string
except ImportError:
from django.utils.module_loading import import_by_path as import_string
def get_admin(name):
return import_string(getattr(settings,
'CMSPLUGIN_ARTICLES_{}_ADMIN'.format(name.upper()),
'cmsplugin_articles.admins.{}Admin'.format(name),
))
def get_form(name):
return import_string(getattr(settings,
'CMSPLUGIN_ARTICLES_{}_FORM'.format(name.upper()),
'cmsplugin_articles.forms.{}Form'.format(name),
))
def get_html_field():
return import_string(getattr(settings,
'CMSPLUGIN_ARTICLES_HTML_FIELD',
'djangocms_text_ckeditor.fields.HTMLField',
))
def get_menu(name):
return import_string(getattr(settings,
'CMSPLUGIN_ARTICLES_{}_MENU'.format(name.upper()),
'cmsplugin_articles.cms_menus.{}Menu'.format(name),
))
def get_model(name):
return import_string(getattr(settings,
'CMSPLUGIN_ARTICLES_{}_MODEL'.format(name.upper()),
'cmsplugin_articles.models.{}'.format(name),
))
def get_plugin(name):
return import_string(getattr(settings,
'CMSPLUGIN_ARTICLES_{}_PLUGIN'.format(name.upper()),
'cmsplugin_articles.plugins.{}Plugin'.format(name),
))
def get_toolbar(name):
return import_string(getattr(settings,
'CMSPLUGIN_ARTICLES_{}_TOOLBAR'.format(name.upper()),
'cmsplugin_articles.cms_toolbars.{}Toolbar'.format(name),
))
def get_view(name):
view = import_string(getattr(
settings,
'CMSPLUGIN_ARTICLES_{}_VIEW'.format(name.upper()),
'cmsplugin_articles.views.{}'.format(name),
))
return hasattr(view, 'as_view') and view.as_view() or view
# this is used to get all these names translated
WEEKDAYS = {
0:_('Monday'), 1:_('Tuesday'), 2:_('Wednesday'), 3:_('Thursday'), 4:_('Friday'),
5:_('Saturday'), 6:_('Sunday')
}
WEEKDAYS_ABBR = {
0:_('Mon'), 1:_('Tue'), 2:_('Wed'), 3:_('Thu'), 4:_('Fri'),
5:_('Sat'), 6:_('Sun')
}
MONTHS = {
1:_('January'), 2:_('February'), 3:_('March'), 4:_('April'), 5:_('May'), 6:_('June'),
7:_('July'), 8:_('August'), 9:_('September'), 10:_('October'), 11:_('November'),
12:_('December')
}
MONTHS_ABBR = {
1:_('jan'), 2:_('feb'), 3:_('mar'), 4:_('apr'), 5:_('may'), 6:_('jun'),
7:_('jul'), 8:_('aug'), 9:_('sep'), 10:_('oct'), 11:_('nov'), 12:_('dec')
}
| {
"repo_name": "misli/cmsplugin-articles",
"path": "cmsplugin_articles/utils.py",
"copies": "1",
"size": "2722",
"license": "bsd-3-clause",
"hash": -984223443796419600,
"line_mean": 30.2873563218,
"line_max": 125,
"alpha_frac": 0.6204996326,
"autogenerated": false,
"ratio": 3.2099056603773586,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9142751611993891,
"avg_score": 0.03753073619669335,
"num_lines": 87
} |
from __future__ import absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement
import logging
from datetime import datetime
from ..models import Task, Dockerfile, Image
from ..utils import chain_dict_get
logger = logging.getLogger(__name__)
def new_image_callback(task_id, build_results):
build_logs = getattr(build_results, 'build_logs', None)
t = Task.objects.get(id=task_id)
t.date_finished = datetime.now()
if build_logs:
t.log = '\n'.join(build_logs)
if build_results:
image_id = getattr(build_results, "built_img_info", {}).get("Id", None)
logger.debug("image_id = %s", image_id)
parent_image_id = getattr(build_results, "base_img_info", {}).get("Id", None)
logger.debug("parent_image_id = %s", parent_image_id)
image_tags = getattr(build_results, "built_img_info", {}).get("RepoTags", None)
logger.debug("image_tags = %s", image_tags)
parent_tags = getattr(build_results, "base_img_info", {}).get("RepoTags", None)
logger.debug("parent_tags = %s", parent_tags)
df = getattr(build_results, "dockerfile", None)
if image_id and parent_image_id:
parent_image = Image.create(parent_image_id, Image.STATUS_BASE, tags=parent_tags)
image = Image.create(image_id, Image.STATUS_BUILD, tags=image_tags,
task=t, parent=parent_image)
if df:
df_model = Dockerfile(content=df)
df_model.save()
image.dockerfile = df_model
image.save()
            rpm_list = getattr(build_results, "built_img_plugins_output", {}).get("all_packages", None)
            base_rpm_list = getattr(build_results, "base_plugins_output", {}).get("all_packages", None)
            if rpm_list:
                image.add_rpms_list(rpm_list)
            if base_rpm_list:
                image.add_rpms_list(base_rpm_list)
            t.status = Task.STATUS_SUCCESS
        else:
            t.status = Task.STATUS_FAILED
else:
t.status = Task.STATUS_FAILED
t.save()
def move_image_callback(task_id, response):
logger.debug("move callback: %s %s", task_id, response)
t = Task.objects.get(id=task_id)
t.date_finished = datetime.now()
if response and response.get("error", False):
t.status = Task.STATUS_FAILED
else:
t.status = Task.STATUS_SUCCESS
t.save()
| {
"repo_name": "DBuildService/dbs-server",
"path": "dbs/api/core.py",
"copies": "1",
"size": "2469",
"license": "bsd-3-clause",
"hash": -4537258175124355000,
"line_mean": 38.1904761905,
"line_max": 125,
"alpha_frac": 0.6059133252,
"autogenerated": false,
"ratio": 3.5423242467718796,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.46482375719718794,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement
import logging
from celery import shared_task
from dock.core import DockerTasker
from dock.api import build_image_in_privileged_container, build_image_using_hosts_docker
logger = logging.getLogger(__name__)
@shared_task
def build_image_hostdocker(
build_image, git_url, local_tag, git_dockerfile_path=None,
git_commit=None, parent_registry=None, target_registries=None,
tag=None, repos=None, store_results=True):
"""
build docker image, image is built inside container using docker instance
from host (mount socket inside container)
:param build_image: name of the build image (supplied docker image is built inside this image)
:param git_url: url to git repo
:param local_tag: image is known within the service with this tag
:param git_dockerfile_path: path to dockerfile within git repo (default is ./Dockerfile)
:param git_commit: which commit to checkout (master by default)
:param parent_registry: pull base image from this registry
:param target_registries: list of urls where built image will be pushed
:param tag: tag image with this tag (and push it to target_repo if specified)
:param repos: list of yum repos to enable in image
:param store_results: if set to True, store built image and associated buildroot
in local docker registry
:return: dict with data from docker inspect
"""
logger.info("build image using hostdocker method")
target_registries = target_registries or []
push_buildroot_to = None
if store_results:
target_registries.append('172.17.42.1:5000')
push_buildroot_to = "172.17.42.1:5000"
results = build_image_using_hosts_docker(
build_image,
git_url=git_url,
image=local_tag,
git_dockerfile_path=git_dockerfile_path,
git_commit=git_commit,
parent_registry=parent_registry,
target_registries=target_registries,
repos=repos,
push_buildroot_to=push_buildroot_to,
)
return results
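# Hedged invocation sketch (the URL, image name and tag are placeholders); as
# a Celery shared task this is normally queued with .delay() or .apply_async():
#
#     build_image_hostdocker.delay(
#         'buildroot-fedora',
#         git_url='https://example.com/repo.git',
#         local_tag='testuser.myimage',
#     )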
@shared_task
def build_image(build_image, git_url, local_tag, git_dockerfile_path=None,
git_commit=None, parent_registry=None, target_registries=None,
tag=None, repos=None, store_results=True):
"""
build docker image from provided arguments inside privileged container
:param build_image: name of the build image (supplied docker image is built inside this image)
:param git_url: url to git repo
:param local_tag: image is known within the service with this tag
:param git_dockerfile_path: path to dockerfile within git repo (default is ./Dockerfile)
:param git_commit: which commit to checkout (master by default)
:param parent_registry: pull base image from this registry
:param target_registries: list of urls where built image will be pushed
:param tag: tag image with this tag (and push it to target_repo if specified)
:param repos: list of yum repos to enable in image
:param store_results: if set to True, store built image and associated buildroot
in local docker registry
:return: dict with data from docker inspect
"""
logger.info("build image in privileged container")
target_registries = target_registries or []
push_buildroot_to = None
if store_results:
target_registries.append('172.17.42.1:5000')
push_buildroot_to = "172.17.42.1:5000"
results = build_image_in_privileged_container(
build_image,
git_url=git_url,
image=local_tag,
git_dockerfile_path=git_dockerfile_path,
git_commit=git_commit,
parent_registry=parent_registry,
target_registries=target_registries,
repos=repos,
push_buildroot_to=push_buildroot_to,
)
return results
@shared_task
def push_image(image_id, source_registry, target_registry, tags):
"""
pull image from source_registry and push it to target_registry (with provided tags)
:param image_id: image to pull
:param source_registry: registry to pull image from
:param target_registry: registry to push image to
:param tags: list of tags to tag image with before pushing it to target registry
:return: None
"""
if not hasattr(tags, '__iter__'):
raise RuntimeError("argument tags is not iterable")
d = DockerTasker()
try:
final_tag = d.pull_image(image_id, source_registry)
for tag in tags:
d.tag_and_push_image(final_tag, tag, reg_uri=target_registry)
except Exception as ex:
return {"error": repr(ex.message)}
else:
return {"error": None}
@shared_task
def submit_results(result):
"""
TODO: implement this
"""
# 2 requests, one for 'finished', other for data
print(result)
| {
"repo_name": "DBuildService/dbs-server",
"path": "dbs/tasks.py",
"copies": "1",
"size": "4933",
"license": "bsd-3-clause",
"hash": 7735021961795390000,
"line_mean": 38.464,
"line_max": 125,
"alpha_frac": 0.6851814312,
"autogenerated": false,
"ratio": 3.930677290836653,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5115858722036654,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement
import re
from django import forms
from django.db import models
from django.core.validators import RegexValidator
from django.utils.translation import ugettext_lazy as _
from localflavor.cz.forms import CZBirthNumberField, CZPostalCodeField
from ..conf import settings
from ..utils import import_string
class ColorInput(forms.TextInput):
input_type = 'color'
class ColorField(models.CharField):
default_validators = [RegexValidator(
re.compile('^#([A-Fa-f0-9]{6}|[A-Fa-f0-9]{3})$'),
_('Enter a valid hex color.'),
'invalid',
)]
def __init__(self, *args, **kwargs):
kwargs['max_length'] = 10
super(ColorField, self).__init__(*args, **kwargs)
def formfield(self, **kwargs):
kwargs['widget'] = ColorInput
return super(ColorField, self).formfield(**kwargs)
class PriceField(models.DecimalField):
def __init__(self, *args, **kwargs):
kwargs.setdefault('decimal_places', settings.PRICE_DECIMAL_PLACES)
kwargs.setdefault('max_digits', settings.PRICE_MAX_DIGITS)
super(PriceField, self).__init__(*args, **kwargs)
DAY_OF_WEEK = {
1: _('Monday'),
2: _('Tuesday'),
3: _('Wednesday'),
4: _('Thursday'),
5: _('Friday'),
6: _('Saturday'),
7: _('Sunday'),
}
class DayOfWeekField(models.IntegerField):
def __init__(self, *args, **kwargs):
defaults = {
'choices' : tuple(sorted(DAY_OF_WEEK.items())),
}
defaults.update(kwargs)
super(DayOfWeekField,self).__init__(*args, **defaults)
class _BirthNumberField(CZBirthNumberField, forms.CharField):
'''
CZBirthNumberField derived from CharField instead of just Field
to support max_length
'''
class BirthNumberField(models.CharField):
def __init__(self, *args, **kwargs):
kwargs['max_length'] = 11
super(BirthNumberField, self).__init__(*args, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super(BirthNumberField, self).deconstruct()
del kwargs["max_length"]
return name, path, args, kwargs
def formfield(self, **kwargs):
defaults = {'form_class': _BirthNumberField}
defaults.update(kwargs)
return super(BirthNumberField, self).formfield(**defaults)
def clean(self, value, model_instance):
return super(BirthNumberField, self).clean(
value[6]=='/' and value or '{}/{}'.format(value[:6], value[6:]),
model_instance)
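# Worked example of the normalization in clean() above -- both accepted input
# forms end up with the slash inserted before validation:
#
#     '7401040020'  -> '740104/0020'
#     '740104/0020' -> '740104/0020'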
class _PostalCodeField(CZPostalCodeField, forms.CharField):
'''
CZPostalCodeField derived from CharField instead of just Field
to support max_length
'''
class PostalCodeField(models.CharField):
def __init__(self, *args, **kwargs):
defaults = {'max_length': 6}
defaults.update(kwargs)
super(PostalCodeField, self).__init__(*args, **defaults)
def deconstruct(self):
name, path, args, kwargs = super(PostalCodeField, self).deconstruct()
del kwargs["max_length"]
return name, path, args, kwargs
def formfield(self, **kwargs):
defaults = {'form_class': _PostalCodeField}
defaults.update(kwargs)
return super(PostalCodeField, self).formfield(**defaults)
def clean(self, value, model_instance):
return super(PostalCodeField, self).clean(
value[3]==' ' and value or '{} {}'.format(value[:3], value[3:]),
model_instance)
| {
"repo_name": "misli/django-domecek",
"path": "domecek/models/fields.py",
"copies": "1",
"size": "3577",
"license": "bsd-3-clause",
"hash": -4109636664914488000,
"line_mean": 29.0588235294,
"line_max": 125,
"alpha_frac": 0.6346100084,
"autogenerated": false,
"ratio": 3.79723991507431,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.49318499234743096,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division, generators, print_function
from bisect import bisect_right
import sys
import inspect
import tokenize
import py
cpy_compile = compile
try:
import _ast
from _ast import PyCF_ONLY_AST as _AST_FLAG
except ImportError:
_AST_FLAG = 0
_ast = None
class Source(object):
""" a immutable object holding a source code fragment,
possibly deindenting it.
"""
_compilecounter = 0
def __init__(self, *parts, **kwargs):
self.lines = lines = []
de = kwargs.get('deindent', True)
rstrip = kwargs.get('rstrip', True)
for part in parts:
if not part:
partlines = []
if isinstance(part, Source):
partlines = part.lines
elif isinstance(part, (tuple, list)):
partlines = [x.rstrip("\n") for x in part]
elif isinstance(part, py.builtin._basestring):
partlines = part.split('\n')
if rstrip:
while partlines:
if partlines[-1].strip():
break
partlines.pop()
else:
partlines = getsource(part, deindent=de).lines
if de:
partlines = deindent(partlines)
lines.extend(partlines)
def __eq__(self, other):
try:
return self.lines == other.lines
except AttributeError:
if isinstance(other, str):
return str(self) == other
return False
__hash__ = None
def __getitem__(self, key):
if isinstance(key, int):
return self.lines[key]
else:
if key.step not in (None, 1):
raise IndexError("cannot slice a Source with a step")
newsource = Source()
newsource.lines = self.lines[key.start:key.stop]
return newsource
def __len__(self):
return len(self.lines)
def strip(self):
""" return new source object with trailing
and leading blank lines removed.
"""
start, end = 0, len(self)
while start < end and not self.lines[start].strip():
start += 1
while end > start and not self.lines[end - 1].strip():
end -= 1
source = Source()
source.lines[:] = self.lines[start:end]
return source
def putaround(self, before='', after='', indent=' ' * 4):
""" return a copy of the source object with
'before' and 'after' wrapped around it.
"""
before = Source(before)
after = Source(after)
newsource = Source()
lines = [(indent + line) for line in self.lines]
newsource.lines = before.lines + lines + after.lines
return newsource
def indent(self, indent=' ' * 4):
""" return a copy of the source object with
all lines indented by the given indent-string.
"""
newsource = Source()
newsource.lines = [(indent + line) for line in self.lines]
return newsource
def getstatement(self, lineno, assertion=False):
""" return Source statement which contains the
given linenumber (counted from 0).
"""
start, end = self.getstatementrange(lineno, assertion)
return self[start:end]
def getstatementrange(self, lineno, assertion=False):
""" return (start, end) tuple which spans the minimal
        statement region containing the given lineno.
"""
if not (0 <= lineno < len(self)):
raise IndexError("lineno out of range")
ast, start, end = getstatementrange_ast(lineno, self)
return start, end
def deindent(self, offset=None):
""" return a new source object deindented by offset.
If offset is None then guess an indentation offset from
the first non-blank line. Subsequent lines which have a
lower indentation offset will be copied verbatim as
they are assumed to be part of multilines.
"""
# XXX maybe use the tokenizer to properly handle multiline
# strings etc.pp?
newsource = Source()
newsource.lines[:] = deindent(self.lines, offset)
return newsource
def isparseable(self, deindent=True):
""" return True if source is parseable, heuristically
deindenting it by default.
"""
try:
import parser
except ImportError:
def syntax_checker(x):
return compile(x, 'asd', 'exec')
else:
syntax_checker = parser.suite
if deindent:
source = str(self.deindent())
else:
source = str(self)
try:
# compile(source+'\n', "x", "exec")
syntax_checker(source + '\n')
except KeyboardInterrupt:
raise
except Exception:
return False
else:
return True
def __str__(self):
return "\n".join(self.lines)
def compile(self, filename=None, mode='exec',
flag=generators.compiler_flag,
dont_inherit=0, _genframe=None):
""" return compiled code object. if filename is None
invent an artificial filename which displays
the source/line position of the caller frame.
"""
if not filename or py.path.local(filename).check(file=0):
if _genframe is None:
_genframe = sys._getframe(1) # the caller
fn, lineno = _genframe.f_code.co_filename, _genframe.f_lineno
base = "<%d-codegen " % self._compilecounter
self.__class__._compilecounter += 1
if not filename:
filename = base + '%s:%d>' % (fn, lineno)
else:
filename = base + '%r %s:%d>' % (filename, fn, lineno)
source = "\n".join(self.lines) + '\n'
try:
co = cpy_compile(source, filename, mode, flag)
except SyntaxError:
ex = sys.exc_info()[1]
# re-represent syntax errors from parsing python strings
msglines = self.lines[:ex.lineno]
if ex.offset:
msglines.append(" " * ex.offset + '^')
msglines.append("(code was compiled probably from here: %s)" % filename)
newex = SyntaxError('\n'.join(msglines))
newex.offset = ex.offset
newex.lineno = ex.lineno
newex.text = ex.text
raise newex
else:
if flag & _AST_FLAG:
return co
lines = [(x + "\n") for x in self.lines]
py.std.linecache.cache[filename] = (1, None, lines, filename)
return co
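# Editor's sketch (not part of the original file): a minimal tour of the Source
# API defined above; the snippet literal is invented for illustration.
def _demo_source_basics():
    s = Source("if x:\n    pass")           # the constructor deindents by default
    assert s.lines == ["if x:", "    pass"]
    assert str(s[1:2]) == "    pass"        # slicing returns a new Source
    assert s.getstatement(1).lines == ["    pass"]  # statement containing line 1
    assert s.isparseable()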
#
# public API shortcut functions
#
def compile_(source, filename=None, mode='exec', flags=generators.compiler_flag, dont_inherit=0):
""" compile the given source to a raw code object,
and maintain an internal cache which allows later
retrieval of the source code for the code object
and any recursively created code objects.
"""
if _ast is not None and isinstance(source, _ast.AST):
# XXX should Source support having AST?
return cpy_compile(source, filename, mode, flags, dont_inherit)
_genframe = sys._getframe(1) # the caller
s = Source(source)
co = s.compile(filename, mode, flags, _genframe=_genframe)
return co
def getfslineno(obj):
""" Return source location (path, lineno) for the given object.
If the source cannot be determined return ("", -1)
"""
import _pytest._code
try:
code = _pytest._code.Code(obj)
except TypeError:
try:
fn = (py.std.inspect.getsourcefile(obj) or
py.std.inspect.getfile(obj))
except TypeError:
return "", -1
fspath = fn and py.path.local(fn) or None
lineno = -1
if fspath:
try:
_, lineno = findsource(obj)
except IOError:
pass
else:
fspath = code.path
lineno = code.firstlineno
assert isinstance(lineno, int)
return fspath, lineno
#
# helper functions
#
def findsource(obj):
try:
sourcelines, lineno = py.std.inspect.findsource(obj)
except py.builtin._sysex:
raise
except:
return None, -1
source = Source()
source.lines = [line.rstrip() for line in sourcelines]
return source, lineno
def getsource(obj, **kwargs):
import _pytest._code
obj = _pytest._code.getrawcode(obj)
try:
strsrc = inspect.getsource(obj)
except IndentationError:
strsrc = "\"Buggy python version consider upgrading, cannot get source\""
assert isinstance(strsrc, str)
return Source(strsrc, **kwargs)
def deindent(lines, offset=None):
if offset is None:
for line in lines:
line = line.expandtabs()
s = line.lstrip()
if s:
offset = len(line) - len(s)
break
else:
offset = 0
if offset == 0:
return list(lines)
newlines = []
def readline_generator(lines):
for line in lines:
yield line + '\n'
while True:
yield ''
it = readline_generator(lines)
try:
for _, _, (sline, _), (eline, _), _ in tokenize.generate_tokens(lambda: next(it)):
if sline > len(lines):
break # End of input reached
if sline > len(newlines):
line = lines[sline - 1].expandtabs()
if line.lstrip() and line[:offset].isspace():
line = line[offset:] # Deindent
newlines.append(line)
for i in range(sline, eline):
# Don't deindent continuing lines of
# multiline tokens (i.e. multiline strings)
newlines.append(lines[i])
except (IndentationError, tokenize.TokenError):
pass
# Add any lines we didn't see. E.g. if an exception was raised.
newlines.extend(lines[len(newlines):])
return newlines
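# Editor's sketch (not part of the original file): deindent() strips the offset
# guessed from the first non-blank line but keeps multiline-string bodies verbatim.
def _demo_deindent():
    lines = ["    def f():", "        return '''", "  raw", "'''"]
    assert deindent(lines)[0] == "def f():"   # 4-space offset removed
    assert deindent(lines)[2] == "  raw"      # string continuation untouched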
def get_statement_startend2(lineno, node):
import ast
# flatten all statements and except handlers into one lineno-list
# AST's line numbers start indexing at 1
l = []
for x in ast.walk(node):
if isinstance(x, _ast.stmt) or isinstance(x, _ast.ExceptHandler):
l.append(x.lineno - 1)
for name in "finalbody", "orelse":
val = getattr(x, name, None)
if val:
# treat the finally/orelse part as its own statement
l.append(val[0].lineno - 1 - 1)
l.sort()
insert_index = bisect_right(l, lineno)
start = l[insert_index - 1]
if insert_index >= len(l):
end = None
else:
end = l[insert_index]
return start, end
def getstatementrange_ast(lineno, source, assertion=False, astnode=None):
if astnode is None:
content = str(source)
if sys.version_info < (2, 7):
content += "\n"
try:
astnode = compile(content, "source", "exec", 1024) # 1024 for AST
except ValueError:
start, end = getstatementrange_old(lineno, source, assertion)
return None, start, end
start, end = get_statement_startend2(lineno, astnode)
# we need to correct the end:
# - ast-parsing strips comments
# - there might be empty lines
# - we might have lesser indented code blocks at the end
if end is None:
end = len(source.lines)
if end > start + 1:
# make sure we don't span differently indented code blocks
        # by using the BlockFinder helper which inspect.getsource() itself uses
block_finder = inspect.BlockFinder()
# if we start with an indented line, put blockfinder to "started" mode
block_finder.started = source.lines[start][0].isspace()
it = ((x + "\n") for x in source.lines[start:end])
try:
for tok in tokenize.generate_tokens(lambda: next(it)):
block_finder.tokeneater(*tok)
except (inspect.EndOfBlock, IndentationError):
end = block_finder.last + start
except Exception:
pass
# the end might still point to a comment or empty line, correct it
while end:
line = source.lines[end - 1].lstrip()
if line.startswith("#") or not line:
end -= 1
else:
break
return astnode, start, end
def getstatementrange_old(lineno, source, assertion=False):
""" return (start, end) tuple which spans the minimal
    statement region containing the given lineno.
raise an IndexError if no such statementrange can be found.
"""
# XXX this logic is only used on python2.4 and below
# 1. find the start of the statement
from codeop import compile_command
for start in range(lineno, -1, -1):
if assertion:
line = source.lines[start]
# the following lines are not fully tested, change with care
if 'super' in line and 'self' in line and '__init__' in line:
raise IndexError("likely a subclass")
if "assert" not in line and "raise" not in line:
continue
trylines = source.lines[start:lineno + 1]
# quick hack to prepare parsing an indented line with
# compile_command() (which errors on "return" outside defs)
trylines.insert(0, 'def xxx():')
trysource = '\n '.join(trylines)
# ^ space here
try:
compile_command(trysource)
except (SyntaxError, OverflowError, ValueError):
continue
# 2. find the end of the statement
for end in range(lineno + 1, len(source) + 1):
trysource = source[start:end]
if trysource.isparseable():
return start, end
raise SyntaxError("no valid source range around line %d " % (lineno,))
| {
"repo_name": "ryanmockabee/golfr",
"path": "flask/lib/python3.6/site-packages/_pytest/_code/source.py",
"copies": "3",
"size": "14170",
"license": "mit",
"hash": -893975535543502500,
"line_mean": 33.0625,
"line_max": 97,
"alpha_frac": 0.5643613267,
"autogenerated": false,
"ratio": 4.275799637899819,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0005556731985096483,
"num_lines": 416
} |
from __future__ import absolute_import, division, generators, print_function
from bisect import bisect_right
import sys
import inspect
import tokenize
import py
cpy_compile = compile
try:
import _ast
from _ast import PyCF_ONLY_AST as _AST_FLAG
except ImportError:
_AST_FLAG = 0
_ast = None
class Source(object):
""" a immutable object holding a source code fragment,
possibly deindenting it.
"""
_compilecounter = 0
def __init__(self, *parts, **kwargs):
self.lines = lines = []
de = kwargs.get('deindent', True)
rstrip = kwargs.get('rstrip', True)
for part in parts:
if not part:
partlines = []
if isinstance(part, Source):
partlines = part.lines
elif isinstance(part, (tuple, list)):
partlines = [x.rstrip("\n") for x in part]
elif isinstance(part, py.builtin._basestring):
partlines = part.split('\n')
if rstrip:
while partlines:
if partlines[-1].strip():
break
partlines.pop()
else:
partlines = getsource(part, deindent=de).lines
if de:
partlines = deindent(partlines)
lines.extend(partlines)
def __eq__(self, other):
try:
return self.lines == other.lines
except AttributeError:
if isinstance(other, str):
return str(self) == other
return False
__hash__ = None
def __getitem__(self, key):
if isinstance(key, int):
return self.lines[key]
else:
if key.step not in (None, 1):
raise IndexError("cannot slice a Source with a step")
newsource = Source()
newsource.lines = self.lines[key.start:key.stop]
return newsource
def __len__(self):
return len(self.lines)
def strip(self):
""" return new source object with trailing
and leading blank lines removed.
"""
start, end = 0, len(self)
while start < end and not self.lines[start].strip():
start += 1
while end > start and not self.lines[end-1].strip():
end -= 1
source = Source()
source.lines[:] = self.lines[start:end]
return source
def putaround(self, before='', after='', indent=' ' * 4):
""" return a copy of the source object with
'before' and 'after' wrapped around it.
"""
before = Source(before)
after = Source(after)
newsource = Source()
lines = [ (indent + line) for line in self.lines]
newsource.lines = before.lines + lines + after.lines
return newsource
def indent(self, indent=' ' * 4):
""" return a copy of the source object with
all lines indented by the given indent-string.
"""
newsource = Source()
newsource.lines = [(indent+line) for line in self.lines]
return newsource
def getstatement(self, lineno, assertion=False):
""" return Source statement which contains the
given linenumber (counted from 0).
"""
start, end = self.getstatementrange(lineno, assertion)
return self[start:end]
def getstatementrange(self, lineno, assertion=False):
""" return (start, end) tuple which spans the minimal
        statement region containing the given lineno.
"""
if not (0 <= lineno < len(self)):
raise IndexError("lineno out of range")
ast, start, end = getstatementrange_ast(lineno, self)
return start, end
def deindent(self, offset=None):
""" return a new source object deindented by offset.
If offset is None then guess an indentation offset from
the first non-blank line. Subsequent lines which have a
lower indentation offset will be copied verbatim as
they are assumed to be part of multilines.
"""
# XXX maybe use the tokenizer to properly handle multiline
# strings etc.pp?
newsource = Source()
newsource.lines[:] = deindent(self.lines, offset)
return newsource
def isparseable(self, deindent=True):
""" return True if source is parseable, heuristically
deindenting it by default.
"""
try:
import parser
except ImportError:
            def syntax_checker(x):
                return compile(x, 'asd', 'exec')
else:
syntax_checker = parser.suite
if deindent:
source = str(self.deindent())
else:
source = str(self)
try:
#compile(source+'\n', "x", "exec")
syntax_checker(source+'\n')
except KeyboardInterrupt:
raise
except Exception:
return False
else:
return True
def __str__(self):
return "\n".join(self.lines)
def compile(self, filename=None, mode='exec',
flag=generators.compiler_flag,
dont_inherit=0, _genframe=None):
""" return compiled code object. if filename is None
invent an artificial filename which displays
the source/line position of the caller frame.
"""
if not filename or py.path.local(filename).check(file=0):
if _genframe is None:
_genframe = sys._getframe(1) # the caller
fn,lineno = _genframe.f_code.co_filename, _genframe.f_lineno
base = "<%d-codegen " % self._compilecounter
self.__class__._compilecounter += 1
if not filename:
filename = base + '%s:%d>' % (fn, lineno)
else:
filename = base + '%r %s:%d>' % (filename, fn, lineno)
source = "\n".join(self.lines) + '\n'
try:
co = cpy_compile(source, filename, mode, flag)
except SyntaxError:
ex = sys.exc_info()[1]
# re-represent syntax errors from parsing python strings
msglines = self.lines[:ex.lineno]
if ex.offset:
msglines.append(" "*ex.offset + '^')
msglines.append("(code was compiled probably from here: %s)" % filename)
newex = SyntaxError('\n'.join(msglines))
newex.offset = ex.offset
newex.lineno = ex.lineno
newex.text = ex.text
raise newex
else:
if flag & _AST_FLAG:
return co
lines = [(x + "\n") for x in self.lines]
py.std.linecache.cache[filename] = (1, None, lines, filename)
return co
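# Editor's sketch (not part of the original file): putaround()/indent() wrap and
# shift a fragment, e.g. to turn a bare snippet into a compilable function body.
def _demo_putaround():
    body = Source("x = 1")
    wrapped = body.putaround(before="def f():", indent=" " * 4)
    assert wrapped.lines == ["def f():", "    x = 1"]
    assert body.indent("  ").lines == ["  x = 1"]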
#
# public API shortcut functions
#
def compile_(source, filename=None, mode='exec', flags=generators.compiler_flag, dont_inherit=0):
""" compile the given source to a raw code object,
and maintain an internal cache which allows later
retrieval of the source code for the code object
and any recursively created code objects.
"""
if _ast is not None and isinstance(source, _ast.AST):
# XXX should Source support having AST?
return cpy_compile(source, filename, mode, flags, dont_inherit)
_genframe = sys._getframe(1) # the caller
s = Source(source)
co = s.compile(filename, mode, flags, _genframe=_genframe)
return co
def getfslineno(obj):
""" Return source location (path, lineno) for the given object.
If the source cannot be determined return ("", -1)
"""
import _pytest._code
try:
code = _pytest._code.Code(obj)
except TypeError:
try:
fn = (py.std.inspect.getsourcefile(obj) or
py.std.inspect.getfile(obj))
except TypeError:
return "", -1
fspath = fn and py.path.local(fn) or None
lineno = -1
if fspath:
try:
_, lineno = findsource(obj)
except IOError:
pass
else:
fspath = code.path
lineno = code.firstlineno
assert isinstance(lineno, int)
return fspath, lineno
#
# helper functions
#
def findsource(obj):
try:
sourcelines, lineno = py.std.inspect.findsource(obj)
except py.builtin._sysex:
raise
except:
return None, -1
source = Source()
source.lines = [line.rstrip() for line in sourcelines]
return source, lineno
def getsource(obj, **kwargs):
import _pytest._code
obj = _pytest._code.getrawcode(obj)
try:
strsrc = inspect.getsource(obj)
except IndentationError:
strsrc = "\"Buggy python version consider upgrading, cannot get source\""
assert isinstance(strsrc, str)
return Source(strsrc, **kwargs)
def deindent(lines, offset=None):
if offset is None:
for line in lines:
line = line.expandtabs()
s = line.lstrip()
if s:
offset = len(line)-len(s)
break
else:
offset = 0
if offset == 0:
return list(lines)
newlines = []
def readline_generator(lines):
for line in lines:
yield line + '\n'
while True:
yield ''
it = readline_generator(lines)
try:
for _, _, (sline, _), (eline, _), _ in tokenize.generate_tokens(lambda: next(it)):
if sline > len(lines):
break # End of input reached
if sline > len(newlines):
line = lines[sline - 1].expandtabs()
if line.lstrip() and line[:offset].isspace():
line = line[offset:] # Deindent
newlines.append(line)
for i in range(sline, eline):
# Don't deindent continuing lines of
# multiline tokens (i.e. multiline strings)
newlines.append(lines[i])
except (IndentationError, tokenize.TokenError):
pass
# Add any lines we didn't see. E.g. if an exception was raised.
newlines.extend(lines[len(newlines):])
return newlines
def get_statement_startend2(lineno, node):
import ast
# flatten all statements and except handlers into one lineno-list
# AST's line numbers start indexing at 1
l = []
for x in ast.walk(node):
if isinstance(x, _ast.stmt) or isinstance(x, _ast.ExceptHandler):
l.append(x.lineno - 1)
for name in "finalbody", "orelse":
val = getattr(x, name, None)
if val:
# treat the finally/orelse part as its own statement
l.append(val[0].lineno - 1 - 1)
l.sort()
insert_index = bisect_right(l, lineno)
start = l[insert_index - 1]
if insert_index >= len(l):
end = None
else:
end = l[insert_index]
return start, end
def getstatementrange_ast(lineno, source, assertion=False, astnode=None):
if astnode is None:
content = str(source)
if sys.version_info < (2,7):
content += "\n"
try:
astnode = compile(content, "source", "exec", 1024) # 1024 for AST
except ValueError:
start, end = getstatementrange_old(lineno, source, assertion)
return None, start, end
start, end = get_statement_startend2(lineno, astnode)
# we need to correct the end:
# - ast-parsing strips comments
# - there might be empty lines
# - we might have lesser indented code blocks at the end
if end is None:
end = len(source.lines)
if end > start + 1:
# make sure we don't span differently indented code blocks
        # by using the BlockFinder helper which inspect.getsource() itself uses
block_finder = inspect.BlockFinder()
# if we start with an indented line, put blockfinder to "started" mode
block_finder.started = source.lines[start][0].isspace()
it = ((x + "\n") for x in source.lines[start:end])
try:
for tok in tokenize.generate_tokens(lambda: next(it)):
block_finder.tokeneater(*tok)
except (inspect.EndOfBlock, IndentationError):
end = block_finder.last + start
except Exception:
pass
# the end might still point to a comment or empty line, correct it
while end:
line = source.lines[end - 1].lstrip()
if line.startswith("#") or not line:
end -= 1
else:
break
return astnode, start, end
def getstatementrange_old(lineno, source, assertion=False):
""" return (start, end) tuple which spans the minimal
    statement region containing the given lineno.
raise an IndexError if no such statementrange can be found.
"""
# XXX this logic is only used on python2.4 and below
# 1. find the start of the statement
from codeop import compile_command
for start in range(lineno, -1, -1):
if assertion:
line = source.lines[start]
# the following lines are not fully tested, change with care
if 'super' in line and 'self' in line and '__init__' in line:
raise IndexError("likely a subclass")
if "assert" not in line and "raise" not in line:
continue
trylines = source.lines[start:lineno+1]
# quick hack to prepare parsing an indented line with
# compile_command() (which errors on "return" outside defs)
trylines.insert(0, 'def xxx():')
trysource = '\n '.join(trylines)
# ^ space here
try:
compile_command(trysource)
except (SyntaxError, OverflowError, ValueError):
continue
# 2. find the end of the statement
for end in range(lineno+1, len(source)+1):
trysource = source[start:end]
if trysource.isparseable():
return start, end
raise SyntaxError("no valid source range around line %d " % (lineno,))
| {
"repo_name": "alexzoo/python",
"path": "selenium_tests/env/lib/python3.6/site-packages/_pytest/_code/source.py",
"copies": "2",
"size": "14136",
"license": "apache-2.0",
"hash": -1103510755923955700,
"line_mean": 33.1449275362,
"line_max": 90,
"alpha_frac": 0.56508206,
"autogenerated": false,
"ratio": 4.275862068965517,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5840944128965517,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division, generators, print_function
from bisect import bisect_right
import sys
import six
import inspect
import tokenize
import py
cpy_compile = compile
try:
import _ast
from _ast import PyCF_ONLY_AST as _AST_FLAG
except ImportError:
_AST_FLAG = 0
_ast = None
class Source(object):
""" a immutable object holding a source code fragment,
possibly deindenting it.
"""
_compilecounter = 0
def __init__(self, *parts, **kwargs):
self.lines = lines = []
de = kwargs.get('deindent', True)
rstrip = kwargs.get('rstrip', True)
for part in parts:
if not part:
partlines = []
if isinstance(part, Source):
partlines = part.lines
elif isinstance(part, (tuple, list)):
partlines = [x.rstrip("\n") for x in part]
elif isinstance(part, six.string_types):
partlines = part.split('\n')
if rstrip:
while partlines:
if partlines[-1].strip():
break
partlines.pop()
else:
partlines = getsource(part, deindent=de).lines
if de:
partlines = deindent(partlines)
lines.extend(partlines)
def __eq__(self, other):
try:
return self.lines == other.lines
except AttributeError:
if isinstance(other, str):
return str(self) == other
return False
__hash__ = None
def __getitem__(self, key):
if isinstance(key, int):
return self.lines[key]
else:
if key.step not in (None, 1):
raise IndexError("cannot slice a Source with a step")
newsource = Source()
newsource.lines = self.lines[key.start:key.stop]
return newsource
def __len__(self):
return len(self.lines)
def strip(self):
""" return new source object with trailing
and leading blank lines removed.
"""
start, end = 0, len(self)
while start < end and not self.lines[start].strip():
start += 1
while end > start and not self.lines[end - 1].strip():
end -= 1
source = Source()
source.lines[:] = self.lines[start:end]
return source
def putaround(self, before='', after='', indent=' ' * 4):
""" return a copy of the source object with
'before' and 'after' wrapped around it.
"""
before = Source(before)
after = Source(after)
newsource = Source()
lines = [(indent + line) for line in self.lines]
newsource.lines = before.lines + lines + after.lines
return newsource
def indent(self, indent=' ' * 4):
""" return a copy of the source object with
all lines indented by the given indent-string.
"""
newsource = Source()
newsource.lines = [(indent + line) for line in self.lines]
return newsource
def getstatement(self, lineno, assertion=False):
""" return Source statement which contains the
given linenumber (counted from 0).
"""
start, end = self.getstatementrange(lineno, assertion)
return self[start:end]
def getstatementrange(self, lineno, assertion=False):
""" return (start, end) tuple which spans the minimal
        statement region containing the given lineno.
"""
if not (0 <= lineno < len(self)):
raise IndexError("lineno out of range")
ast, start, end = getstatementrange_ast(lineno, self)
return start, end
def deindent(self, offset=None):
""" return a new source object deindented by offset.
If offset is None then guess an indentation offset from
the first non-blank line. Subsequent lines which have a
lower indentation offset will be copied verbatim as
they are assumed to be part of multilines.
"""
# XXX maybe use the tokenizer to properly handle multiline
# strings etc.pp?
newsource = Source()
newsource.lines[:] = deindent(self.lines, offset)
return newsource
def isparseable(self, deindent=True):
""" return True if source is parseable, heuristically
deindenting it by default.
"""
try:
import parser
except ImportError:
def syntax_checker(x):
return compile(x, 'asd', 'exec')
else:
syntax_checker = parser.suite
if deindent:
source = str(self.deindent())
else:
source = str(self)
try:
# compile(source+'\n', "x", "exec")
syntax_checker(source + '\n')
except KeyboardInterrupt:
raise
except Exception:
return False
else:
return True
def __str__(self):
return "\n".join(self.lines)
def compile(self, filename=None, mode='exec',
flag=generators.compiler_flag,
dont_inherit=0, _genframe=None):
""" return compiled code object. if filename is None
invent an artificial filename which displays
the source/line position of the caller frame.
"""
if not filename or py.path.local(filename).check(file=0):
if _genframe is None:
_genframe = sys._getframe(1) # the caller
fn, lineno = _genframe.f_code.co_filename, _genframe.f_lineno
base = "<%d-codegen " % self._compilecounter
self.__class__._compilecounter += 1
if not filename:
filename = base + '%s:%d>' % (fn, lineno)
else:
filename = base + '%r %s:%d>' % (filename, fn, lineno)
source = "\n".join(self.lines) + '\n'
try:
co = cpy_compile(source, filename, mode, flag)
except SyntaxError:
ex = sys.exc_info()[1]
# re-represent syntax errors from parsing python strings
msglines = self.lines[:ex.lineno]
if ex.offset:
msglines.append(" " * ex.offset + '^')
msglines.append("(code was compiled probably from here: %s)" % filename)
newex = SyntaxError('\n'.join(msglines))
newex.offset = ex.offset
newex.lineno = ex.lineno
newex.text = ex.text
raise newex
else:
if flag & _AST_FLAG:
return co
lines = [(x + "\n") for x in self.lines]
py.std.linecache.cache[filename] = (1, None, lines, filename)
return co
#
# public API shortcut functions
#
def compile_(source, filename=None, mode='exec', flags=generators.compiler_flag, dont_inherit=0):
""" compile the given source to a raw code object,
and maintain an internal cache which allows later
retrieval of the source code for the code object
and any recursively created code objects.
"""
if _ast is not None and isinstance(source, _ast.AST):
# XXX should Source support having AST?
return cpy_compile(source, filename, mode, flags, dont_inherit)
_genframe = sys._getframe(1) # the caller
s = Source(source)
co = s.compile(filename, mode, flags, _genframe=_genframe)
return co
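# Editor's sketch (not part of the original file): compile_() yields an ordinary
# code object whose invented '<N-codegen ...>' filename is cached for linecache.
def _demo_compile_():
    co = compile_("A = 40 + 2\n")
    ns = {}
    exec(co, ns)
    assert ns["A"] == 42
    assert co.co_filename.startswith("<")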
def getfslineno(obj):
""" Return source location (path, lineno) for the given object.
If the source cannot be determined return ("", -1)
"""
import _pytest._code
try:
code = _pytest._code.Code(obj)
except TypeError:
try:
fn = (py.std.inspect.getsourcefile(obj) or
py.std.inspect.getfile(obj))
except TypeError:
return "", -1
fspath = fn and py.path.local(fn) or None
lineno = -1
if fspath:
try:
_, lineno = findsource(obj)
except IOError:
pass
else:
fspath = code.path
lineno = code.firstlineno
assert isinstance(lineno, int)
return fspath, lineno
#
# helper functions
#
def findsource(obj):
try:
sourcelines, lineno = py.std.inspect.findsource(obj)
except py.builtin._sysex:
raise
except: # noqa
return None, -1
source = Source()
source.lines = [line.rstrip() for line in sourcelines]
return source, lineno
def getsource(obj, **kwargs):
import _pytest._code
obj = _pytest._code.getrawcode(obj)
try:
strsrc = inspect.getsource(obj)
except IndentationError:
strsrc = "\"Buggy python version consider upgrading, cannot get source\""
assert isinstance(strsrc, str)
return Source(strsrc, **kwargs)
def deindent(lines, offset=None):
if offset is None:
for line in lines:
line = line.expandtabs()
s = line.lstrip()
if s:
offset = len(line) - len(s)
break
else:
offset = 0
if offset == 0:
return list(lines)
newlines = []
def readline_generator(lines):
for line in lines:
yield line + '\n'
while True:
yield ''
it = readline_generator(lines)
try:
for _, _, (sline, _), (eline, _), _ in tokenize.generate_tokens(lambda: next(it)):
if sline > len(lines):
break # End of input reached
if sline > len(newlines):
line = lines[sline - 1].expandtabs()
if line.lstrip() and line[:offset].isspace():
line = line[offset:] # Deindent
newlines.append(line)
for i in range(sline, eline):
# Don't deindent continuing lines of
# multiline tokens (i.e. multiline strings)
newlines.append(lines[i])
except (IndentationError, tokenize.TokenError):
pass
# Add any lines we didn't see. E.g. if an exception was raised.
newlines.extend(lines[len(newlines):])
return newlines
def get_statement_startend2(lineno, node):
import ast
# flatten all statements and except handlers into one lineno-list
# AST's line numbers start indexing at 1
values = []
for x in ast.walk(node):
if isinstance(x, _ast.stmt) or isinstance(x, _ast.ExceptHandler):
values.append(x.lineno - 1)
for name in "finalbody", "orelse":
val = getattr(x, name, None)
if val:
# treat the finally/orelse part as its own statement
values.append(val[0].lineno - 1 - 1)
values.sort()
insert_index = bisect_right(values, lineno)
start = values[insert_index - 1]
if insert_index >= len(values):
end = None
else:
end = values[insert_index]
return start, end
def getstatementrange_ast(lineno, source, assertion=False, astnode=None):
if astnode is None:
content = str(source)
try:
astnode = compile(content, "source", "exec", 1024) # 1024 for AST
except ValueError:
start, end = getstatementrange_old(lineno, source, assertion)
return None, start, end
start, end = get_statement_startend2(lineno, astnode)
# we need to correct the end:
# - ast-parsing strips comments
# - there might be empty lines
# - we might have lesser indented code blocks at the end
if end is None:
end = len(source.lines)
if end > start + 1:
# make sure we don't span differently indented code blocks
        # by using the BlockFinder helper which inspect.getsource() itself uses
block_finder = inspect.BlockFinder()
# if we start with an indented line, put blockfinder to "started" mode
block_finder.started = source.lines[start][0].isspace()
it = ((x + "\n") for x in source.lines[start:end])
try:
for tok in tokenize.generate_tokens(lambda: next(it)):
block_finder.tokeneater(*tok)
except (inspect.EndOfBlock, IndentationError):
end = block_finder.last + start
except Exception:
pass
# the end might still point to a comment or empty line, correct it
while end:
line = source.lines[end - 1].lstrip()
if line.startswith("#") or not line:
end -= 1
else:
break
return astnode, start, end
def getstatementrange_old(lineno, source, assertion=False):
""" return (start, end) tuple which spans the minimal
    statement region containing the given lineno.
raise an IndexError if no such statementrange can be found.
"""
# XXX this logic is only used on python2.4 and below
# 1. find the start of the statement
from codeop import compile_command
for start in range(lineno, -1, -1):
if assertion:
line = source.lines[start]
# the following lines are not fully tested, change with care
if 'super' in line and 'self' in line and '__init__' in line:
raise IndexError("likely a subclass")
if "assert" not in line and "raise" not in line:
continue
trylines = source.lines[start:lineno + 1]
# quick hack to prepare parsing an indented line with
# compile_command() (which errors on "return" outside defs)
trylines.insert(0, 'def xxx():')
trysource = '\n '.join(trylines)
# ^ space here
try:
compile_command(trysource)
except (SyntaxError, OverflowError, ValueError):
continue
# 2. find the end of the statement
for end in range(lineno + 1, len(source) + 1):
trysource = source[start:end]
if trysource.isparseable():
return start, end
raise SyntaxError("no valid source range around line %d " % (lineno,))
| {
"repo_name": "askeing/servo",
"path": "tests/wpt/web-platform-tests/tools/third_party/pytest/_pytest/_code/source.py",
"copies": "14",
"size": "14157",
"license": "mpl-2.0",
"hash": 8220667108794155000,
"line_mean": 33.113253012,
"line_max": 97,
"alpha_frac": 0.5664335664,
"autogenerated": false,
"ratio": 4.288700393820054,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division, generators, print_function
import ast
from ast import PyCF_ONLY_AST as _AST_FLAG
from bisect import bisect_right
import linecache
import sys
import six
import inspect
import tokenize
import py
cpy_compile = compile
class Source(object):
""" a immutable object holding a source code fragment,
possibly deindenting it.
"""
_compilecounter = 0
def __init__(self, *parts, **kwargs):
self.lines = lines = []
de = kwargs.get('deindent', True)
rstrip = kwargs.get('rstrip', True)
for part in parts:
if not part:
partlines = []
if isinstance(part, Source):
partlines = part.lines
elif isinstance(part, (tuple, list)):
partlines = [x.rstrip("\n") for x in part]
elif isinstance(part, six.string_types):
partlines = part.split('\n')
if rstrip:
while partlines:
if partlines[-1].strip():
break
partlines.pop()
else:
partlines = getsource(part, deindent=de).lines
if de:
partlines = deindent(partlines)
lines.extend(partlines)
def __eq__(self, other):
try:
return self.lines == other.lines
except AttributeError:
if isinstance(other, str):
return str(self) == other
return False
__hash__ = None
def __getitem__(self, key):
if isinstance(key, int):
return self.lines[key]
else:
if key.step not in (None, 1):
raise IndexError("cannot slice a Source with a step")
newsource = Source()
newsource.lines = self.lines[key.start:key.stop]
return newsource
def __len__(self):
return len(self.lines)
def strip(self):
""" return new source object with trailing
and leading blank lines removed.
"""
start, end = 0, len(self)
while start < end and not self.lines[start].strip():
start += 1
while end > start and not self.lines[end - 1].strip():
end -= 1
source = Source()
source.lines[:] = self.lines[start:end]
return source
def putaround(self, before='', after='', indent=' ' * 4):
""" return a copy of the source object with
'before' and 'after' wrapped around it.
"""
before = Source(before)
after = Source(after)
newsource = Source()
lines = [(indent + line) for line in self.lines]
newsource.lines = before.lines + lines + after.lines
return newsource
def indent(self, indent=' ' * 4):
""" return a copy of the source object with
all lines indented by the given indent-string.
"""
newsource = Source()
newsource.lines = [(indent + line) for line in self.lines]
return newsource
def getstatement(self, lineno, assertion=False):
""" return Source statement which contains the
given linenumber (counted from 0).
"""
start, end = self.getstatementrange(lineno, assertion)
return self[start:end]
def getstatementrange(self, lineno, assertion=False):
""" return (start, end) tuple which spans the minimal
        statement region containing the given lineno.
"""
if not (0 <= lineno < len(self)):
raise IndexError("lineno out of range")
ast, start, end = getstatementrange_ast(lineno, self)
return start, end
def deindent(self, offset=None):
""" return a new source object deindented by offset.
If offset is None then guess an indentation offset from
the first non-blank line. Subsequent lines which have a
lower indentation offset will be copied verbatim as
they are assumed to be part of multilines.
"""
# XXX maybe use the tokenizer to properly handle multiline
# strings etc.pp?
newsource = Source()
newsource.lines[:] = deindent(self.lines, offset)
return newsource
def isparseable(self, deindent=True):
""" return True if source is parseable, heuristically
deindenting it by default.
"""
try:
import parser
except ImportError:
def syntax_checker(x):
return compile(x, 'asd', 'exec')
else:
syntax_checker = parser.suite
if deindent:
source = str(self.deindent())
else:
source = str(self)
try:
# compile(source+'\n', "x", "exec")
syntax_checker(source + '\n')
except KeyboardInterrupt:
raise
except Exception:
return False
else:
return True
def __str__(self):
return "\n".join(self.lines)
def compile(self, filename=None, mode='exec',
flag=generators.compiler_flag,
dont_inherit=0, _genframe=None):
""" return compiled code object. if filename is None
invent an artificial filename which displays
the source/line position of the caller frame.
"""
if not filename or py.path.local(filename).check(file=0):
if _genframe is None:
_genframe = sys._getframe(1) # the caller
fn, lineno = _genframe.f_code.co_filename, _genframe.f_lineno
base = "<%d-codegen " % self._compilecounter
self.__class__._compilecounter += 1
if not filename:
filename = base + '%s:%d>' % (fn, lineno)
else:
filename = base + '%r %s:%d>' % (filename, fn, lineno)
source = "\n".join(self.lines) + '\n'
try:
co = cpy_compile(source, filename, mode, flag)
except SyntaxError:
ex = sys.exc_info()[1]
# re-represent syntax errors from parsing python strings
msglines = self.lines[:ex.lineno]
if ex.offset:
msglines.append(" " * ex.offset + '^')
msglines.append("(code was compiled probably from here: %s)" % filename)
newex = SyntaxError('\n'.join(msglines))
newex.offset = ex.offset
newex.lineno = ex.lineno
newex.text = ex.text
raise newex
else:
if flag & _AST_FLAG:
return co
lines = [(x + "\n") for x in self.lines]
linecache.cache[filename] = (1, None, lines, filename)
return co
#
# public API shortcut functions
#
def compile_(source, filename=None, mode='exec', flags=generators.compiler_flag, dont_inherit=0):
""" compile the given source to a raw code object,
and maintain an internal cache which allows later
retrieval of the source code for the code object
and any recursively created code objects.
"""
if isinstance(source, ast.AST):
# XXX should Source support having AST?
return cpy_compile(source, filename, mode, flags, dont_inherit)
_genframe = sys._getframe(1) # the caller
s = Source(source)
co = s.compile(filename, mode, flags, _genframe=_genframe)
return co
def getfslineno(obj):
""" Return source location (path, lineno) for the given object.
If the source cannot be determined return ("", -1)
"""
import _pytest._code
try:
code = _pytest._code.Code(obj)
except TypeError:
try:
fn = inspect.getsourcefile(obj) or inspect.getfile(obj)
except TypeError:
return "", -1
fspath = fn and py.path.local(fn) or None
lineno = -1
if fspath:
try:
_, lineno = findsource(obj)
except IOError:
pass
else:
fspath = code.path
lineno = code.firstlineno
assert isinstance(lineno, int)
return fspath, lineno
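# Editor's sketch (not part of the original file): getfslineno() maps an object
# to its defining file and first line (counted from 0 for code objects).
def _demo_getfslineno():
    import tokenize as _tok
    fspath, lineno = getfslineno(_tok.generate_tokens)
    assert str(fspath).endswith("tokenize.py")
    assert lineno >= 0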
#
# helper functions
#
def findsource(obj):
try:
sourcelines, lineno = inspect.findsource(obj)
except py.builtin._sysex:
raise
except: # noqa
return None, -1
source = Source()
source.lines = [line.rstrip() for line in sourcelines]
return source, lineno
def getsource(obj, **kwargs):
import _pytest._code
obj = _pytest._code.getrawcode(obj)
try:
strsrc = inspect.getsource(obj)
except IndentationError:
strsrc = "\"Buggy python version consider upgrading, cannot get source\""
assert isinstance(strsrc, str)
return Source(strsrc, **kwargs)
def deindent(lines, offset=None):
if offset is None:
for line in lines:
line = line.expandtabs()
s = line.lstrip()
if s:
offset = len(line) - len(s)
break
else:
offset = 0
if offset == 0:
return list(lines)
newlines = []
def readline_generator(lines):
for line in lines:
yield line + '\n'
while True:
yield ''
it = readline_generator(lines)
try:
for _, _, (sline, _), (eline, _), _ in tokenize.generate_tokens(lambda: next(it)):
if sline > len(lines):
break # End of input reached
if sline > len(newlines):
line = lines[sline - 1].expandtabs()
if line.lstrip() and line[:offset].isspace():
line = line[offset:] # Deindent
newlines.append(line)
for i in range(sline, eline):
# Don't deindent continuing lines of
# multiline tokens (i.e. multiline strings)
newlines.append(lines[i])
except (IndentationError, tokenize.TokenError):
pass
# Add any lines we didn't see. E.g. if an exception was raised.
newlines.extend(lines[len(newlines):])
return newlines
def get_statement_startend2(lineno, node):
import ast
# flatten all statements and except handlers into one lineno-list
# AST's line numbers start indexing at 1
values = []
for x in ast.walk(node):
if isinstance(x, ast.stmt) or isinstance(x, ast.ExceptHandler):
values.append(x.lineno - 1)
for name in "finalbody", "orelse":
val = getattr(x, name, None)
if val:
# treat the finally/orelse part as its own statement
values.append(val[0].lineno - 1 - 1)
values.sort()
insert_index = bisect_right(values, lineno)
start = values[insert_index - 1]
if insert_index >= len(values):
end = None
else:
end = values[insert_index]
return start, end
def getstatementrange_ast(lineno, source, assertion=False, astnode=None):
if astnode is None:
content = str(source)
try:
astnode = compile(content, "source", "exec", 1024) # 1024 for AST
except ValueError:
start, end = getstatementrange_old(lineno, source, assertion)
return None, start, end
start, end = get_statement_startend2(lineno, astnode)
# we need to correct the end:
# - ast-parsing strips comments
# - there might be empty lines
# - we might have lesser indented code blocks at the end
if end is None:
end = len(source.lines)
if end > start + 1:
# make sure we don't span differently indented code blocks
        # by using the BlockFinder helper which inspect.getsource() itself uses
block_finder = inspect.BlockFinder()
# if we start with an indented line, put blockfinder to "started" mode
block_finder.started = source.lines[start][0].isspace()
it = ((x + "\n") for x in source.lines[start:end])
try:
for tok in tokenize.generate_tokens(lambda: next(it)):
block_finder.tokeneater(*tok)
except (inspect.EndOfBlock, IndentationError):
end = block_finder.last + start
except Exception:
pass
# the end might still point to a comment or empty line, correct it
while end:
line = source.lines[end - 1].lstrip()
if line.startswith("#") or not line:
end -= 1
else:
break
return astnode, start, end
def getstatementrange_old(lineno, source, assertion=False):
""" return (start, end) tuple which spans the minimal
    statement region containing the given lineno.
raise an IndexError if no such statementrange can be found.
"""
# XXX this logic is only used on python2.4 and below
# 1. find the start of the statement
from codeop import compile_command
for start in range(lineno, -1, -1):
if assertion:
line = source.lines[start]
# the following lines are not fully tested, change with care
if 'super' in line and 'self' in line and '__init__' in line:
raise IndexError("likely a subclass")
if "assert" not in line and "raise" not in line:
continue
trylines = source.lines[start:lineno + 1]
# quick hack to prepare parsing an indented line with
# compile_command() (which errors on "return" outside defs)
trylines.insert(0, 'def xxx():')
trysource = '\n '.join(trylines)
# ^ space here
try:
compile_command(trysource)
except (SyntaxError, OverflowError, ValueError):
continue
# 2. find the end of the statement
for end in range(lineno + 1, len(source) + 1):
trysource = source[start:end]
if trysource.isparseable():
return start, end
raise SyntaxError("no valid source range around line %d " % (lineno,))
| {
"repo_name": "tareqalayan/pytest",
"path": "_pytest/_code/source.py",
"copies": "1",
"size": "14033",
"license": "mit",
"hash": 8975180351506563000,
"line_mean": 33.1435523114,
"line_max": 97,
"alpha_frac": 0.5675194185,
"autogenerated": false,
"ratio": 4.311213517665131,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.537873293616513,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division, generators, print_function
import ast
from ast import PyCF_ONLY_AST as _AST_FLAG
from bisect import bisect_right
import sys
import six
import inspect
import tokenize
import py
cpy_compile = compile
class Source(object):
""" a immutable object holding a source code fragment,
possibly deindenting it.
"""
_compilecounter = 0
def __init__(self, *parts, **kwargs):
self.lines = lines = []
de = kwargs.get('deindent', True)
rstrip = kwargs.get('rstrip', True)
for part in parts:
if not part:
partlines = []
if isinstance(part, Source):
partlines = part.lines
elif isinstance(part, (tuple, list)):
partlines = [x.rstrip("\n") for x in part]
elif isinstance(part, six.string_types):
partlines = part.split('\n')
if rstrip:
while partlines:
if partlines[-1].strip():
break
partlines.pop()
else:
partlines = getsource(part, deindent=de).lines
if de:
partlines = deindent(partlines)
lines.extend(partlines)
def __eq__(self, other):
try:
return self.lines == other.lines
except AttributeError:
if isinstance(other, str):
return str(self) == other
return False
__hash__ = None
def __getitem__(self, key):
if isinstance(key, int):
return self.lines[key]
else:
if key.step not in (None, 1):
raise IndexError("cannot slice a Source with a step")
newsource = Source()
newsource.lines = self.lines[key.start:key.stop]
return newsource
def __len__(self):
return len(self.lines)
def strip(self):
""" return new source object with trailing
and leading blank lines removed.
"""
start, end = 0, len(self)
while start < end and not self.lines[start].strip():
start += 1
while end > start and not self.lines[end - 1].strip():
end -= 1
source = Source()
source.lines[:] = self.lines[start:end]
return source
def putaround(self, before='', after='', indent=' ' * 4):
""" return a copy of the source object with
'before' and 'after' wrapped around it.
"""
before = Source(before)
after = Source(after)
newsource = Source()
lines = [(indent + line) for line in self.lines]
newsource.lines = before.lines + lines + after.lines
return newsource
def indent(self, indent=' ' * 4):
""" return a copy of the source object with
all lines indented by the given indent-string.
"""
newsource = Source()
newsource.lines = [(indent + line) for line in self.lines]
return newsource
def getstatement(self, lineno, assertion=False):
""" return Source statement which contains the
given linenumber (counted from 0).
"""
start, end = self.getstatementrange(lineno, assertion)
return self[start:end]
def getstatementrange(self, lineno, assertion=False):
""" return (start, end) tuple which spans the minimal
        statement region containing the given lineno.
"""
if not (0 <= lineno < len(self)):
raise IndexError("lineno out of range")
ast, start, end = getstatementrange_ast(lineno, self)
return start, end
def deindent(self, offset=None):
""" return a new source object deindented by offset.
If offset is None then guess an indentation offset from
the first non-blank line. Subsequent lines which have a
lower indentation offset will be copied verbatim as
they are assumed to be part of multilines.
"""
# XXX maybe use the tokenizer to properly handle multiline
# strings etc.pp?
newsource = Source()
newsource.lines[:] = deindent(self.lines, offset)
return newsource
def isparseable(self, deindent=True):
""" return True if source is parseable, heuristically
deindenting it by default.
"""
try:
import parser
except ImportError:
def syntax_checker(x):
return compile(x, 'asd', 'exec')
else:
syntax_checker = parser.suite
if deindent:
source = str(self.deindent())
else:
source = str(self)
try:
# compile(source+'\n', "x", "exec")
syntax_checker(source + '\n')
except KeyboardInterrupt:
raise
except Exception:
return False
else:
return True
def __str__(self):
return "\n".join(self.lines)
def compile(self, filename=None, mode='exec',
flag=generators.compiler_flag,
dont_inherit=0, _genframe=None):
""" return compiled code object. if filename is None
invent an artificial filename which displays
the source/line position of the caller frame.
"""
if not filename or py.path.local(filename).check(file=0):
if _genframe is None:
_genframe = sys._getframe(1) # the caller
fn, lineno = _genframe.f_code.co_filename, _genframe.f_lineno
base = "<%d-codegen " % self._compilecounter
self.__class__._compilecounter += 1
if not filename:
filename = base + '%s:%d>' % (fn, lineno)
else:
filename = base + '%r %s:%d>' % (filename, fn, lineno)
source = "\n".join(self.lines) + '\n'
try:
co = cpy_compile(source, filename, mode, flag)
except SyntaxError:
ex = sys.exc_info()[1]
# re-represent syntax errors from parsing python strings
msglines = self.lines[:ex.lineno]
if ex.offset:
msglines.append(" " * ex.offset + '^')
msglines.append("(code was compiled probably from here: %s)" % filename)
newex = SyntaxError('\n'.join(msglines))
newex.offset = ex.offset
newex.lineno = ex.lineno
newex.text = ex.text
raise newex
else:
if flag & _AST_FLAG:
return co
lines = [(x + "\n") for x in self.lines]
py.std.linecache.cache[filename] = (1, None, lines, filename)
return co
#
# public API shortcut functions
#
def compile_(source, filename=None, mode='exec', flags=generators.compiler_flag, dont_inherit=0):
""" compile the given source to a raw code object,
and maintain an internal cache which allows later
retrieval of the source code for the code object
and any recursively created code objects.
"""
if isinstance(source, ast.AST):
# XXX should Source support having AST?
return cpy_compile(source, filename, mode, flags, dont_inherit)
_genframe = sys._getframe(1) # the caller
s = Source(source)
co = s.compile(filename, mode, flags, _genframe=_genframe)
return co
def getfslineno(obj):
""" Return source location (path, lineno) for the given object.
If the source cannot be determined return ("", -1)
"""
import _pytest._code
try:
code = _pytest._code.Code(obj)
except TypeError:
try:
fn = (py.std.inspect.getsourcefile(obj) or
py.std.inspect.getfile(obj))
except TypeError:
return "", -1
fspath = fn and py.path.local(fn) or None
lineno = -1
if fspath:
try:
_, lineno = findsource(obj)
except IOError:
pass
else:
fspath = code.path
lineno = code.firstlineno
assert isinstance(lineno, int)
return fspath, lineno
#
# helper functions
#
def findsource(obj):
try:
sourcelines, lineno = py.std.inspect.findsource(obj)
except py.builtin._sysex:
raise
except: # noqa
return None, -1
source = Source()
source.lines = [line.rstrip() for line in sourcelines]
return source, lineno
def getsource(obj, **kwargs):
import _pytest._code
obj = _pytest._code.getrawcode(obj)
try:
strsrc = inspect.getsource(obj)
except IndentationError:
strsrc = "\"Buggy python version consider upgrading, cannot get source\""
assert isinstance(strsrc, str)
return Source(strsrc, **kwargs)
def deindent(lines, offset=None):
if offset is None:
for line in lines:
line = line.expandtabs()
s = line.lstrip()
if s:
offset = len(line) - len(s)
break
else:
offset = 0
if offset == 0:
return list(lines)
newlines = []
def readline_generator(lines):
for line in lines:
yield line + '\n'
while True:
yield ''
it = readline_generator(lines)
try:
for _, _, (sline, _), (eline, _), _ in tokenize.generate_tokens(lambda: next(it)):
if sline > len(lines):
break # End of input reached
if sline > len(newlines):
line = lines[sline - 1].expandtabs()
if line.lstrip() and line[:offset].isspace():
line = line[offset:] # Deindent
newlines.append(line)
for i in range(sline, eline):
# Don't deindent continuing lines of
# multiline tokens (i.e. multiline strings)
newlines.append(lines[i])
except (IndentationError, tokenize.TokenError):
pass
# Add any lines we didn't see. E.g. if an exception was raised.
newlines.extend(lines[len(newlines):])
return newlines
def get_statement_startend2(lineno, node):
import ast
# flatten all statements and except handlers into one lineno-list
# AST's line numbers start indexing at 1
values = []
for x in ast.walk(node):
if isinstance(x, ast.stmt) or isinstance(x, ast.ExceptHandler):
values.append(x.lineno - 1)
for name in "finalbody", "orelse":
val = getattr(x, name, None)
if val:
# treat the finally/orelse part as its own statement
values.append(val[0].lineno - 1 - 1)
values.sort()
insert_index = bisect_right(values, lineno)
start = values[insert_index - 1]
if insert_index >= len(values):
end = None
else:
end = values[insert_index]
return start, end
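# Editor's sketch (not part of the original file): get_statement_startend2()
# brackets a 0-based line number between neighbouring statement starts.
def _demo_statement_startend():
    import ast
    tree = ast.parse("a = 1\nif a:\n    a += 1\nb = 2\n")
    assert get_statement_startend2(0, tree) == (0, 1)  # spans 'a = 1'
    assert get_statement_startend2(2, tree) == (2, 3)  # spans '    a += 1'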
def getstatementrange_ast(lineno, source, assertion=False, astnode=None):
if astnode is None:
content = str(source)
try:
astnode = compile(content, "source", "exec", 1024) # 1024 for AST
except ValueError:
start, end = getstatementrange_old(lineno, source, assertion)
return None, start, end
start, end = get_statement_startend2(lineno, astnode)
# we need to correct the end:
# - ast-parsing strips comments
# - there might be empty lines
# - we might have lesser indented code blocks at the end
if end is None:
end = len(source.lines)
if end > start + 1:
# make sure we don't span differently indented code blocks
        # by using the BlockFinder helper which inspect.getsource() itself uses
block_finder = inspect.BlockFinder()
# if we start with an indented line, put blockfinder to "started" mode
block_finder.started = source.lines[start][0].isspace()
it = ((x + "\n") for x in source.lines[start:end])
try:
for tok in tokenize.generate_tokens(lambda: next(it)):
block_finder.tokeneater(*tok)
except (inspect.EndOfBlock, IndentationError):
end = block_finder.last + start
except Exception:
pass
# the end might still point to a comment or empty line, correct it
while end:
line = source.lines[end - 1].lstrip()
if line.startswith("#") or not line:
end -= 1
else:
break
return astnode, start, end
def getstatementrange_old(lineno, source, assertion=False):
""" return (start, end) tuple which spans the minimal
    statement region containing the given lineno.
raise an IndexError if no such statementrange can be found.
"""
# XXX this logic is only used on python2.4 and below
# 1. find the start of the statement
from codeop import compile_command
for start in range(lineno, -1, -1):
if assertion:
line = source.lines[start]
# the following lines are not fully tested, change with care
if 'super' in line and 'self' in line and '__init__' in line:
raise IndexError("likely a subclass")
if "assert" not in line and "raise" not in line:
continue
trylines = source.lines[start:lineno + 1]
# quick hack to prepare parsing an indented line with
# compile_command() (which errors on "return" outside defs)
trylines.insert(0, 'def xxx():')
trysource = '\n '.join(trylines)
# ^ space here
try:
compile_command(trysource)
except (SyntaxError, OverflowError, ValueError):
continue
# 2. find the end of the statement
for end in range(lineno + 1, len(source) + 1):
trysource = source[start:end]
if trysource.isparseable():
return start, end
raise SyntaxError("no valid source range around line %d " % (lineno,))
| {
"repo_name": "rmfitzpatrick/pytest",
"path": "_pytest/_code/source.py",
"copies": "1",
"size": "14064",
"license": "mit",
"hash": -8636831744001722000,
"line_mean": 33.2189781022,
"line_max": 97,
"alpha_frac": 0.5666240046,
"autogenerated": false,
"ratio": 4.299602568022012,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0001384857088505826,
"num_lines": 411
} |
from __future__ import absolute_import, division
__author__ = 'katharine'
from six import indexbytes
from six.moves import range
from libpebble2.events.mixin import EventSourceMixin
from libpebble2.exceptions import ScreenshotError
from libpebble2.protocol.screenshots import *
class Screenshot(EventSourceMixin):
"""
Takes a screenshot from the watch.
:param pebble: The pebble of which to take a screenshot.
:type pebble: .PebbleConnection
"""
def __init__(self, pebble):
self._pebble = pebble
super(Screenshot, self).__init__()
def grab_image(self):
"""
Takes a screenshot. Blocks until completion, or raises a :exc:`.ScreenshotError` on failure.
While this method is executing, "progress" events will periodically be emitted with the following signature: ::
(downloaded_so_far, total_size)
:return: A list of bytearrays in RGB8 format, where each bytearray is one row of the image.
"""
# We have to open this queue before we make the request, to ensure we don't miss the response.
queue = self._pebble.get_endpoint_queue(ScreenshotResponse)
self._pebble.send_packet(ScreenshotRequest())
return self._read_screenshot(queue)
def _read_screenshot(self, queue):
data = queue.get().data
header = ScreenshotHeader.parse(data)[0]
if header.response_code != ScreenshotHeader.ResponseCode.OK:
queue.close()
raise ScreenshotError("Screenshot failed: {!s}".format(header.response_code))
data = header.data
expected_size = self._get_expected_bytes(header)
while len(data) < expected_size:
data += queue.get().data
self._broadcast_event("progress", len(data), expected_size)
queue.close()
return self._decode_image(header, data)
@classmethod
def _get_expected_bytes(cls, header):
if header.version == 1:
return (header.width * header.height) // 8
elif header.version == 2:
return header.width * header.height
else:
raise ScreenshotError("Unknown screenshot version: {}".format(header.version))
@classmethod
def _decode_image(cls, header, data):
if header.version == 1:
return cls._decode_1bit(header, data)
elif header.version == 2:
return cls._decode_8bit(header, data)
@classmethod
def _decode_1bit(cls, header, data):
output = []
row_bytes = header.width // 8
for row in range(header.height):
row_values = []
for column in range(header.width):
pixel = (indexbytes(data, row*row_bytes + column//8) >> (column % 8)) & 1
row_values.extend([pixel * 255] * 3)
output.append(bytearray(row_values))
return output
@classmethod
def _decode_8bit(cls, header, data):
output = []
for row in range(header.height):
row_values = []
for column in range(header.width):
pixel = indexbytes(data, row*header.width + column)
row_values.extend([
((pixel >> 4) & 0b11) * 85,
((pixel >> 2) & 0b11) * 85,
((pixel >> 0) & 0b11) * 85,
])
output.append(bytearray(row_values))
return output
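# Hedged, hardware-free sketch (not part of the original module): _decode_1bit
# only reads width/height from the header object, so a stub header plus two
# bytes of bitmap data are enough to see the row-major RGB8 output format.
def _demo_decode_1bit():
    class FakeHeader(object):
        version = 1
        width = 8
        height = 2
    # row 0 byte 0x01 -> leftmost pixel white; row 1 byte 0x80 -> rightmost white
    return Screenshot._decode_1bit(FakeHeader, b"\x01\x80")
# Against real hardware the assumed wiring would be roughly (transport and
# address are placeholders, not from this module):
# pebble = PebbleConnection(QemuTransport("localhost", 12344)); pebble.connect()
# shot = Screenshot(pebble)
# shot.register_handler("progress", lambda done, total: print(done, total))
# rows = shot.grab_image()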
| {
"repo_name": "pebble/libpebble2",
"path": "libpebble2/services/screenshot.py",
"copies": "1",
"size": "3413",
"license": "mit",
"hash": -8456470222026615000,
"line_mean": 35.6989247312,
"line_max": 119,
"alpha_frac": 0.6003515968,
"autogenerated": false,
"ratio": 4.077658303464755,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0007650999409425175,
"num_lines": 93
} |
from __future__ import absolute_import, division
# Copyright (c) 2010-2019 openpyxl
from openpyxl.descriptors import (
Float,
Set,
Alias,
NoneSet,
Sequence,
Integer,
MinMax,
)
from openpyxl.descriptors.serialisable import Serialisable
from openpyxl.compat import safe_string
from .colors import ColorDescriptor, Color
from openpyxl.xml.functions import Element, localname
from openpyxl.xml.constants import SHEET_MAIN_NS
FILL_NONE = 'none'
FILL_SOLID = 'solid'
FILL_PATTERN_DARKDOWN = 'darkDown'
FILL_PATTERN_DARKGRAY = 'darkGray'
FILL_PATTERN_DARKGRID = 'darkGrid'
FILL_PATTERN_DARKHORIZONTAL = 'darkHorizontal'
FILL_PATTERN_DARKTRELLIS = 'darkTrellis'
FILL_PATTERN_DARKUP = 'darkUp'
FILL_PATTERN_DARKVERTICAL = 'darkVertical'
FILL_PATTERN_GRAY0625 = 'gray0625'
FILL_PATTERN_GRAY125 = 'gray125'
FILL_PATTERN_LIGHTDOWN = 'lightDown'
FILL_PATTERN_LIGHTGRAY = 'lightGray'
FILL_PATTERN_LIGHTGRID = 'lightGrid'
FILL_PATTERN_LIGHTHORIZONTAL = 'lightHorizontal'
FILL_PATTERN_LIGHTTRELLIS = 'lightTrellis'
FILL_PATTERN_LIGHTUP = 'lightUp'
FILL_PATTERN_LIGHTVERTICAL = 'lightVertical'
FILL_PATTERN_MEDIUMGRAY = 'mediumGray'
fills = (FILL_SOLID, FILL_PATTERN_DARKDOWN, FILL_PATTERN_DARKGRAY,
FILL_PATTERN_DARKGRID, FILL_PATTERN_DARKHORIZONTAL, FILL_PATTERN_DARKTRELLIS,
FILL_PATTERN_DARKUP, FILL_PATTERN_DARKVERTICAL, FILL_PATTERN_GRAY0625,
FILL_PATTERN_GRAY125, FILL_PATTERN_LIGHTDOWN, FILL_PATTERN_LIGHTGRAY,
FILL_PATTERN_LIGHTGRID, FILL_PATTERN_LIGHTHORIZONTAL,
FILL_PATTERN_LIGHTTRELLIS, FILL_PATTERN_LIGHTUP, FILL_PATTERN_LIGHTVERTICAL,
FILL_PATTERN_MEDIUMGRAY)
class Fill(Serialisable):
"""Base class"""
tagname = "fill"
@classmethod
def from_tree(cls, el):
children = [c for c in el]
if not children:
return
child = children[0]
if "patternFill" in child.tag:
return PatternFill._from_tree(child)
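        # otherwise fall back to Serialisable.from_tree bound to GradientFill,
        # so gradient fills are deserialised by the generic machinery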
return super(Fill, GradientFill).from_tree(child)
class PatternFill(Fill):
"""Area fill patterns for use in styles.
Caution: if you do not specify a fill_type, other attributes will have
no effect !"""
tagname = "patternFill"
__elements__ = ('fgColor', 'bgColor')
patternType = NoneSet(values=fills)
fill_type = Alias("patternType")
fgColor = ColorDescriptor()
start_color = Alias("fgColor")
bgColor = ColorDescriptor()
end_color = Alias("bgColor")
def __init__(self, patternType=None, fgColor=Color(), bgColor=Color(),
fill_type=None, start_color=None, end_color=None):
if fill_type is not None:
patternType = fill_type
self.patternType = patternType
if start_color is not None:
fgColor = start_color
self.fgColor = fgColor
if end_color is not None:
bgColor = end_color
self.bgColor = bgColor
@classmethod
def _from_tree(cls, el):
attrib = dict(el.attrib)
for child in el:
desc = localname(child)
attrib[desc] = Color.from_tree(child)
return cls(**attrib)
def to_tree(self, tagname=None, idx=None):
parent = Element("fill")
el = Element(self.tagname)
if self.patternType is not None:
el.set('patternType', self.patternType)
for c in self.__elements__:
value = getattr(self, c)
if value != Color():
el.append(value.to_tree(c))
parent.append(el)
return parent
DEFAULT_EMPTY_FILL = PatternFill()
DEFAULT_GRAY_FILL = PatternFill(patternType='gray125')
class Stop(Serialisable):
tagname = "stop"
position = MinMax(min=0, max=1)
color = ColorDescriptor()
def __init__(self, color, position):
self.position = position
self.color = color
def _assign_position(values):
"""
Automatically assign positions if a list of colours is provided.
It is not permitted to mix colours and stops
"""
n_values = len(values)
n_stops = sum(isinstance(value, Stop) for value in values)
if n_stops == 0:
interval = 1
if n_values > 2:
interval = 1 / (n_values - 1)
values = [Stop(value, i * interval)
for i, value in enumerate(values)]
elif n_stops < n_values:
raise ValueError('Cannot interpret mix of Stops and Colors in GradientFill')
pos = set()
for stop in values:
if stop.position in pos:
raise ValueError("Duplicate position {0}".format(stop.position))
pos.add(stop.position)
return values
class StopList(Sequence):
expected_type = Stop
def __set__(self, obj, values):
values = _assign_position(values)
super(StopList, self).__set__(obj, values)
class GradientFill(Fill):
"""Fill areas with gradient
Two types of gradient fill are supported:
- A type='linear' gradient interpolates colours between
a set of specified Stops, across the length of an area.
The gradient is left-to-right by default, but this
orientation can be modified with the degree
attribute. A list of Colors can be provided instead
and they will be positioned with equal distance between them.
- A type='path' gradient applies a linear gradient from each
edge of the area. Attributes top, right, bottom, left specify
the extent of fill from the respective borders. Thus top="0.2"
will fill the top 20% of the cell.
"""
tagname = "gradientFill"
type = Set(values=('linear', 'path'))
fill_type = Alias("type")
degree = Float()
left = Float()
right = Float()
top = Float()
bottom = Float()
stop = StopList()
def __init__(self, type="linear", degree=0, left=0, right=0, top=0,
bottom=0, stop=()):
self.degree = degree
self.left = left
self.right = right
self.top = top
self.bottom = bottom
self.stop = stop
self.type = type
def __iter__(self):
for attr in self.__attrs__:
value = getattr(self, attr)
if value:
yield attr, safe_string(value)
def to_tree(self, tagname=None, namespace=None, idx=None):
parent = Element("fill")
el = super(GradientFill, self).to_tree()
parent.append(el)
return parent
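# Hedged usage sketch (not part of the original module; colour values are
# arbitrary ARGB strings): the three fill styles this module provides.
_DEMO_SOLID = PatternFill(fill_type=FILL_SOLID, start_color=Color(rgb="FFCC0000"))
_DEMO_LINEAR = GradientFill(type="linear", degree=90,
                            stop=[Color(rgb="FF000000"), Color(rgb="FFFFFFFF")])
_DEMO_PATH = GradientFill(type="path", top=0.2, bottom=0.2,
                          stop=[Stop(Color(rgb="FF000000"), 0),
                                Stop(Color(rgb="FFFFFFFF"), 1)])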
| {
"repo_name": "kawamon/hue",
"path": "desktop/core/ext-py/openpyxl-2.6.4/openpyxl/styles/fills.py",
"copies": "2",
"size": "6460",
"license": "apache-2.0",
"hash": 7208573675097643000,
"line_mean": 27.8392857143,
"line_max": 86,
"alpha_frac": 0.6351393189,
"autogenerated": false,
"ratio": 3.641488162344983,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5276627481244983,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division
# %env CUDA_VISIBLE_DEVICES=0
import numpy as np
import torch
import torch.nn.functional as F
import torch.optim as optim
from torch.autograd import Variable
from torch_deform_conv.layers import ConvOffset2D
from torch_deform_conv.cnn import get_cnn, get_deform_cnn
from torch_deform_conv.mnist import get_gen
from torch_deform_conv.utils import transfer_weights
batch_size = 32
n_train = 60000
n_test = 10000
steps_per_epoch = int(np.ceil(n_train / batch_size))
validation_steps = int(np.ceil(n_test / batch_size))
train_gen = get_gen(
'train', batch_size=batch_size,
scale=(1.0, 1.0), translate=0.0,
shuffle=True
)
test_gen = get_gen(
'test', batch_size=batch_size,
scale=(1.0, 1.0), translate=0.0,
shuffle=False
)
train_scaled_gen = get_gen(
'train', batch_size=batch_size,
scale=(1.0, 2.5), translate=0.2,
shuffle=True
)
test_scaled_gen = get_gen(
'test', batch_size=batch_size,
scale=(1.0, 2.5), translate=0.2,
shuffle=False
)
def train(model, generator, batch_num, epoch):
model.train()
for batch_idx in range(batch_num):
data, target = next(generator)
data, target = torch.from_numpy(data), torch.from_numpy(target)
# convert BHWC to BCHW
data = data.permute(0, 3, 1, 2)
data, target = data.float().cuda(), target.long().cuda()
data, target = Variable(data), Variable(target)
optimizer.zero_grad()
output = model(data)
loss = F.cross_entropy(output, target)
loss.backward()
optimizer.step()
print('Train Epoch: {}\tLoss: {:.6f}'.format(epoch, loss.data[0]))
def test(model, generator, batch_num, epoch):
model.eval()
test_loss = 0
correct = 0
for batch_idx in range(batch_num):
data, target = next(generator)
data, target = torch.from_numpy(data), torch.from_numpy(target)
# convert BHWC to BCHW
data = data.permute(0, 3, 1, 2)
data, target = data.float().cuda(), target.long().cuda()
data, target = Variable(data), Variable(target)
output = model(data)
test_loss += F.cross_entropy(output, target).data[0]
pred = output.data.max(1)[1] # get the index of the max log-probability
correct += pred.eq(target.data).cpu().sum()
    test_loss /= batch_num  # loss function already averages over batch size
print('\nTest set: Average loss: {:.4f}, Accuracy: {}/{} ({:.2f}%)\n'.format(
test_loss, correct, n_test, 100. * correct / n_test))
# ---
# Normal CNN
model = get_cnn()
model = model.cuda()
optimizer = optim.Adam(model.parameters(), lr=1e-3)
for epoch in range(10):
test(model, test_gen, validation_steps, epoch)
train(model, train_gen, steps_per_epoch, epoch)
torch.save(model, 'models/cnn.th')
# ---
# Evaluate normal CNN
print('Evaluate normal CNN')
model_cnn = torch.load('models/cnn.th')
test(model_cnn, test_gen, validation_steps, epoch)
# 99.27%
test(model_cnn, test_scaled_gen, validation_steps, epoch)
# 58.83%
# ---
# Deformable CNN
print('Finetune deformable CNN (ConvOffset2D and BatchNorm)')
model = get_deform_cnn(trainable=False)
model = model.cuda()
transfer_weights(model_cnn, model)
optimizer = optim.Adam(model.parameters(), lr=1e-3)
for epoch in range(20):
test(model, test_scaled_gen, validation_steps, epoch)
train(model, train_scaled_gen, steps_per_epoch, epoch)
torch.save(model, 'models/deform_cnn.th')
# ---
# Evaluate deformable CNN
print('Evaluate deformable CNN')
model = torch.load('models/deform_cnn.th')
test(model, test_gen, validation_steps, epoch)
# xx%
test(model, test_scaled_gen, validation_steps, epoch)
# xx% | {
"repo_name": "oeway/pytorch-deform-conv",
"path": "scaled_mnist.py",
"copies": "1",
"size": "3702",
"license": "mit",
"hash": -1828525015868989000,
"line_mean": 27.053030303,
"line_max": 81,
"alpha_frac": 0.6615343058,
"autogenerated": false,
"ratio": 3.0344262295081967,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4195960535308197,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division
from copy import copy
from sfepy.base.base import output, get_default, Struct
from sfepy.applications import PDESolverApp, Application
from .coefs_base import MiniAppBase, CoefEval
from sfepy.discrete.evaluate import eval_equations
import sfepy.base.multiproc as multi
import numpy as nm
import six
from six.moves import range
def insert_sub_reqs(reqs, levels, req_info):
"""Recursively build all requirements in correct order."""
all_reqs = []
for _, req in enumerate(reqs):
# Coefficients are referenced as 'c.<name>'...
areq = req[2:] if req.startswith('c.') else req
try:
rargs = req_info[areq]
except KeyError:
raise ValueError('requirement "%s" is not defined!' % req)
sub_reqs = rargs.get('requires', [])
if req in levels:
raise ValueError('circular requirement "%s"!' % (req))
if sub_reqs:
levels.append(req)
sreqs = insert_sub_reqs(sub_reqs, levels, req_info)
all_reqs += [ii for ii in sreqs if ii not in all_reqs]
levels.pop()
if req not in all_reqs:
all_reqs.append(req)
return all_reqs
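# Hedged illustration (requirement names are made up): given
#   req_info = {'pis': {}, 'corrs_rs': {}, 'corrs_a': {'requires': ['pis']}}
# the call insert_sub_reqs(['corrs_a', 'corrs_rs'], [], req_info) returns
# ['pis', 'corrs_a', 'corrs_rs'] - dependencies always precede dependents.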
def get_dict_idxval(dict_array, idx):
return {k: v[idx] for k, v in six.iteritems(dict_array)}
def rm_multi(s):
idx = s.rfind('|multiprocessing_')
return s[:idx] if idx > 0 else s
class CoefVolume(MiniAppBase):
def __call__(self, volume, problem=None, data=None):
problem = get_default(problem, self.problem)
term_mode = self.term_mode
equations, variables = problem.create_evaluable(self.expression,
term_mode=term_mode)
return eval_equations(equations, variables, term_mode=term_mode)
class HomogenizationWorker(object):
def __call__(self, problem, options, post_process_hook,
req_info, coef_info,
micro_coors, store_micro_idxs, time_tag=''):
"""Calculate homogenized correctors and coefficients.
Parameters
----------
problem : problem
The problem definition - microscopic problem.
        options : struct
The options of the homogenization application.
post_process_hook : function
The postprocessing hook.
req_info : dict
The definition of correctors.
coef_info : dict
The definition of homogenized coefficients.
micro_coors : array
The configurations of multiple microstructures.
store_micro_idxs : list of int
The indices of microstructures whose results are to be stored.
time_tag : str
The label corresponding to the actual time step and iteration,
used in the corrector file names.
Returns
-------
dependencies : dict
The computed correctors and coefficients.
sd_names : list
The names of computed dependencies.
"""
dependencies = {}
sd_names = {}
sorted_names = self.get_sorted_dependencies(req_info, coef_info,
options.compute_only)
for name in sorted_names:
if not name.startswith('c.'):
if micro_coors is not None:
req_info[name]['store_idxs'] = (store_micro_idxs, 0)
val = self.calculate_req(problem, options, post_process_hook,
name, req_info, coef_info, sd_names,
dependencies, micro_coors,
time_tag)
dependencies[name] = val
return dependencies, sd_names
@staticmethod
def get_sorted_dependencies(req_info, coef_info, compute_only):
"Make corrs and coefs list sorted according to the dependencies."
reqcoef_info = copy(coef_info)
reqcoef_info.update(req_info)
compute_names = set(get_default(compute_only, list(coef_info.keys())))
compute_names = ['c.' + key for key in compute_names]
dep_names = []
for coef_name in compute_names:
requires = coef_info[coef_name[2:]].get('requires', [])
deps = insert_sub_reqs(copy(requires), [], reqcoef_info)\
+ [coef_name]
for dep in deps:
if dep not in dep_names:
dep_names.append(dep)
return dep_names
@staticmethod
def calculate(mini_app, problem, dependencies, dep_requires,
sd_names, micro_coors, chunk_tab, mode, proc_id):
if micro_coors is None:
data = {key: dependencies[key] for key in dep_requires
if 'Volume_' not in key}
volume = {key[9:]: dependencies[key]
for key in dep_requires if 'Volume_' in key}
mini_app.requires = [ii for ii in mini_app.requires
if 'c.Volume_' not in ii]
if mode == 'coefs':
val = mini_app(volume, data=data)
else:
if mini_app.save_name is not None:
sd_names['s.' + mini_app.name] =\
mini_app.get_save_name_base()
if mini_app.dump_name is not None:
sd_names['d.' + mini_app.name] =\
mini_app.get_dump_name_base()
val = mini_app(data=data)
else:
data = {rm_multi(key): dependencies[key]
for key in dep_requires if 'Volume_' not in key}
volume = {rm_multi(key[9:]): dependencies[key]
for key in dep_requires if 'Volume_' in key}
mini_app.requires = [ii for ii in mini_app.requires
if 'c.Volume_' not in ii]
if '|multiprocessing_' in mini_app.name\
and chunk_tab is not None:
chunk_id = int(mini_app.name[-3:])
chunk_tag = '-%d' % (chunk_id + 1)
micro_coors = micro_coors[chunk_tab[chunk_id]]
else:
chunk_tag = ''
val = []
if hasattr(mini_app, 'store_idxs') and mode == 'reqs':
save_name = mini_app.save_name
dump_name = mini_app.dump_name
for im in range(micro_coors.shape[0]):
output('== micro %s%s-%d =='
% (proc_id, chunk_tag, im + 1))
problem.set_mesh_coors(micro_coors[im], update_fields=True,
clear_all=False, actual=True)
if mode == 'coefs':
val.append(mini_app(get_dict_idxval(volume, im),
data=get_dict_idxval(data, im)))
else:
if hasattr(mini_app, 'store_idxs')\
and im in mini_app.store_idxs[0]:
store_id = '_%04d'\
% (mini_app.store_idxs[1] + im)
if save_name is not None:
mini_app.save_name = save_name + store_id
key = 's.' + mini_app.name
if key in sd_names:
sd_names[key].append(
mini_app.get_save_name_base())
else:
sd_names[key] =\
[mini_app.get_save_name_base()]
if dump_name is not None:
mini_app.dump_name = dump_name + store_id
key = 'd.' + mini_app.name
if key in sd_names:
sd_names[key].append(
mini_app.get_dump_name_base())
else:
sd_names[key] =\
[mini_app.get_dump_name_base()]
else:
mini_app.save_name = None
mini_app.dump_name = None
val.append(mini_app(data=get_dict_idxval(data, im)))
return val
@staticmethod
def calculate_req(problem, opts, post_process_hook,
name, req_info, coef_info, sd_names, dependencies,
micro_coors, time_tag='', chunk_tab=None, proc_id='0'):
"""Calculate a requirement, i.e. correctors or coefficients.
Parameters
----------
problem : problem
The problem definition related to the microstructure.
opts : struct
The options of the homogenization application.
post_process_hook : function
The postprocessing hook.
name : str
The name of the requirement.
req_info : dict
The definition of correctors.
coef_info : dict
The definition of homogenized coefficients.
sd_names : dict
The dictionary containing names of saved/dumped correctors.
dependencies : dict
The dependencies required by the correctors/coefficients.
micro_coors : array
The configurations of multiple microstructures.
time_tag : str
The label corresponding to the actual time step and iteration,
used in the corrector file names.
chunk_tab : list
In the case of multiprocessing the requirements are divided into
several chunks that are solved in parallel.
proc_id : int
The id number of the processor (core) which is solving the actual
chunk.
Returns
-------
val : coefficient/corrector or list of coefficients/correctors
The resulting homogenized coefficients or correctors.
"""
# compute coefficient
if name.startswith('c.'):
coef_name = name[2:]
output('computing %s...' % coef_name)
cargs = coef_info[coef_name]
mini_app = MiniAppBase.any_from_conf(coef_name, problem, cargs)
problem.clear_equations()
# Pass only the direct dependencies, not the indirect ones.
dep_requires = cargs.get('requires', [])
val = HomogenizationWorker.calculate(mini_app, problem,
dependencies, dep_requires,
sd_names, micro_coors,
chunk_tab, 'coefs', proc_id)
output('...done')
# compute corrector(s)
else:
output('computing dependency %s...' % name)
rargs = req_info[name]
mini_app = MiniAppBase.any_from_conf(name, problem, rargs)
mini_app.setup_output(save_format=opts.save_format,
dump_format=opts.dump_format,
post_process_hook=post_process_hook,
file_per_var=opts.file_per_var)
if mini_app.save_name is not None:
mini_app.save_name += time_tag
if mini_app.dump_name is not None:
mini_app.dump_name += time_tag
problem.clear_equations()
# Pass only the direct dependencies, not the indirect ones.
dep_requires = rargs.get('requires', [])
val = HomogenizationWorker.calculate(mini_app, problem,
dependencies, dep_requires,
sd_names, micro_coors,
chunk_tab, 'reqs', proc_id)
output('...done')
return val
class HomogenizationWorkerMulti(HomogenizationWorker):
def __init__(self, num_workers):
self.num_workers = num_workers
def __call__(self, problem, options, post_process_hook,
req_info, coef_info,
micro_coors, store_micro_idxs, chunks_per_worker,
time_tag=''):
"""Calculate homogenized correctors and coefficients.
Parameters
----------
The same parameters as :class:`HomogenizationWorker`, extended by:
chunks_per_worker : int
The number of chunks per one worker.
Returns
-------
The same returns as :class:`HomogenizationWorker`.
"""
multiproc = multi.multiproc_proc
        dependencies = multiproc.get_dict('dependencies', clear=True)
sd_names = multiproc.get_dict('sd_names', clear=True)
numdeps = multiproc.get_dict('numdeps', clear=True)
remaining = multiproc.get_int_value('remaining', 0)
tasks = multiproc.get_queue('tasks')
lock = multiproc.get_lock('lock')
if micro_coors is not None:
micro_chunk_tab, req_info, coef_info = \
self.chunk_micro_coors(self.num_workers, micro_coors.shape[0],
req_info, coef_info,
chunks_per_worker, store_micro_idxs)
else:
micro_chunk_tab = None
sorted_names = self.get_sorted_dependencies(req_info, coef_info,
options.compute_only)
remaining.value = len(sorted_names)
# calculate number of dependencies and inverse map
inverse_deps = {}
for name in sorted_names:
if name.startswith('c.'):
reqs = coef_info[name[2:]].get('requires', [])
else:
reqs = req_info[name].get('requires', [])
numdeps[name] = len(reqs)
if len(reqs) > 0:
for req in reqs:
if req in inverse_deps:
inverse_deps[req].append(name)
else:
inverse_deps[req] = [name]
for name in sorted_names:
if numdeps[name] == 0:
tasks.put(name)
workers = []
for ii in range(self.num_workers):
args = (tasks, lock, remaining, numdeps, inverse_deps,
problem, options, post_process_hook, req_info,
coef_info, sd_names, dependencies, micro_coors,
time_tag, micro_chunk_tab, str(ii + 1))
w = multiproc.Process(target=self.calculate_req_multi,
args=args)
w.start()
workers.append(w)
        # block until all workers have terminated
for w in workers:
w.join()
if micro_coors is not None:
dependencies = self.dechunk_reqs_coefs(dependencies,
len(micro_chunk_tab))
return dependencies, sd_names
@staticmethod
def calculate_req_multi(tasks, lock, remaining, numdeps, inverse_deps,
problem, opts, post_process_hook,
req_info, coef_info, sd_names, dependencies,
micro_coors, time_tag, chunk_tab, proc_id):
"""Calculate a requirement in parallel.
Parameters
----------
tasks : queue
The queue of requirements to be solved.
lock : lock
            The multiprocessing lock used to ensure safe access to the global
variables.
remaining : int
The number of remaining requirements.
numdeps : dict
            The number of dependencies for each requirement.
inverse_deps : dict
The inverse dependencies - which requirements depend
on a given one.
For the definition of other parameters see 'calculate_req'.
"""
while remaining.value > 0:
name = tasks.get()
if name is None:
continue
sd_names_loc = {}
val = HomogenizationWorker.calculate_req(problem, opts,
post_process_hook, name, req_info, coef_info, sd_names_loc,
dependencies, micro_coors, time_tag, chunk_tab, proc_id)
lock.acquire()
dependencies[name] = val
remaining.value -= 1
if name in inverse_deps:
for iname in inverse_deps[name]:
numdeps[iname] -= 1 # iname depends on name
                    if numdeps[iname] == 0:  # computed all direct dependencies?
tasks.put(iname) # yes, put iname to queue
sd_names.update(sd_names_loc)
lock.release()
@staticmethod
def process_reqs_coefs(old, num_workers, store_idxs=[]):
new = {}
for k, v in six.iteritems(old):
if k == 'filenames':
new[k] = v.copy()
continue
for ii in range(num_workers):
lab = '|multiprocessing_%03d' % ii
key = k + lab
new[key] = v.copy()
val = new[key]
if 'requires' in val:
val['requires'] = [jj + lab for jj in val['requires']]
if len(store_idxs) > 0:
if len(store_idxs[ii][0]) > 0:
val['store_idxs'] = store_idxs[ii]
else:
val['save_name'] = None
val['dump_name'] = None
return new
@staticmethod
def chunk_micro_coors(num_workers, num_micro, reqs, coefs,
chunks_per_worker=1, store_micro_idxs=[]):
"""
Split multiple microproblems into several chunks
that can be processed in parallel.
Parameters
----------
num_workers : int
The number of available CPUs.
num_micro : int
The number of microstructures.
reqs : dict
The requirement definitions.
coefs : dict
The coefficient definitions.
chunks_per_worker : int
The number of chunks per one worker.
store_micro_idxs : list of int
The indices of microstructures whose results are to be stored.
Returns
-------
micro_tab : list of slices
The indices of microproblems contained in each chunk.
new_reqs : dict
The new requirement definitions.
new_coefs : dict
The new coefficient definitions.
"""
chsize = int(nm.ceil(float(num_micro)
/ (num_workers * chunks_per_worker)))
micro_tab = []
store_idxs = []
for ii in range(0, num_micro, chsize):
jj = chsize + ii
chunk_end = num_micro if jj > num_micro else jj
micro_tab.append(slice(ii, chunk_end))
if len(store_micro_idxs) > 0:
store_idxs.append(([k - ii for k in store_micro_idxs
if k >= ii and k < jj], ii))
nw = len(micro_tab)
self = HomogenizationWorkerMulti
new_reqs = self.process_reqs_coefs(reqs, nw, store_idxs)
new_coefs = self.process_reqs_coefs(coefs, nw)
return micro_tab, new_reqs, new_coefs
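        # Hedged worked example (numbers are made up): with num_workers=2,
        # num_micro=10 and chunks_per_worker=2, chsize = ceil(10 / 4) = 3 and
        # micro_tab becomes [slice(0, 3), slice(3, 6), slice(6, 9), slice(9, 10)].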
@staticmethod
def dechunk_reqs_coefs(deps, num_chunks):
"""
Merge the results related to the multiple microproblems.
Parameters
----------
deps : dict
The calculated dependencies.
num_chunks : int
The number of chunks.
Returns
-------
new_deps : dict
The merged dependencies.
"""
new_deps = {}
for ii in range(num_chunks):
ilab = '_%03d' % ii
for k in deps.keys():
idx = k.rfind('|multiprocessing_')
if idx > 0:
                    if k[-4:] != ilab:
continue
key = k[:idx]
if key in new_deps:
new_deps[key] += deps[k]
else:
new_deps[key] = deps[k]
else:
new_deps[k] = deps[k]
return new_deps
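        # Hedged illustration (keys are made up): with num_chunks=2,
        #   {'corrs|multiprocessing_000': [a], 'corrs|multiprocessing_001': [b]}
        # is merged back into {'corrs': [a, b]}.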
class HomogenizationWorkerMultiMPI(HomogenizationWorkerMulti):
def __call__(self, problem, options, post_process_hook,
req_info, coef_info,
micro_coors, store_micro_idxs, chunks_per_worker,
time_tag=''):
"""Calculate homogenized correctors and coefficients.
Parameters and Returns
----------------------
The same parameters and returns as :class:`HomogenizationWorkerMulti`.
"""
multiproc = multi.multiproc_mpi
        dependencies = multiproc.get_dict('dependencies', clear=True)
sd_names = multiproc.get_dict('sd_names', clear=True)
numdeps = multiproc.get_dict('numdeps', mutable=True, clear=True)
remaining = multiproc.get_int_value('remaining', 0)
tasks = multiproc.get_queue('tasks')
if micro_coors is not None:
micro_chunk_tab, req_info, coef_info = \
self.chunk_micro_coors(self.num_workers,
micro_coors.shape[0],
req_info, coef_info,
chunks_per_worker, store_micro_idxs)
else:
micro_chunk_tab = None
sorted_names = self.get_sorted_dependencies(req_info, coef_info,
options.compute_only)
# calculate number of dependencies and inverse map
inverse_deps = {}
loc_numdeps = {}
for name in sorted_names:
if name.startswith('c.'):
reqs = coef_info[name[2:]].get('requires', [])
else:
reqs = req_info[name].get('requires', [])
loc_numdeps[name] = len(reqs)
if len(reqs) > 0:
for req in reqs:
if req in inverse_deps:
inverse_deps[req].append(name)
else:
inverse_deps[req] = [name]
if multiproc.mpi_rank == multiproc.mpi_master: # master node
for k, v in six.iteritems(loc_numdeps):
numdeps[k] = v
remaining.value = len(sorted_names)
for name in sorted_names:
if numdeps[name] == 0:
tasks.put(name)
multiproc.master_loop()
multiproc.master_send_continue()
if micro_coors is not None:
dependencies = self.dechunk_reqs_coefs(dependencies,
len(micro_chunk_tab))
multiproc.master_send_task('deps', dependencies)
multiproc.master_send_continue()
return dependencies, sd_names
else: # slave node
lock = multiproc.RemoteLock()
multiproc.slave_get_task('engine')
self.calculate_req_multi(tasks, lock, remaining, numdeps,
inverse_deps, problem, options,
post_process_hook, req_info,
coef_info, sd_names, dependencies,
micro_coors,
time_tag, micro_chunk_tab,
str(multiproc.mpi_rank + 1))
multiproc.slave_task_done('engine')
multiproc.wait_for_tag(multiproc.tags.CONTINUE)
task, deps = multiproc.slave_get_task('get_deps')
multiproc.wait_for_tag(multiproc.tags.CONTINUE)
return deps, None
class HomogenizationEngine(PDESolverApp):
@staticmethod
def process_options(options):
get = options.get
return Struct(coefs=get('coefs', None,
'missing "coefs" in options!'),
requirements=get('requirements', None,
'missing "requirements" in options!'),
compute_only=get('compute_only', None),
multiprocessing=get('multiprocessing', True),
use_mpi=get('use_mpi', False),
store_micro_idxs=get('store_micro_idxs', []),
chunks_per_worker=get('chunks_per_worker', 1),
save_format=get('save_format', 'vtk'),
dump_format=get('dump_format', 'h5'),
coefs_info=get('coefs_info', None))
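    # Hedged sketch (keys are the ones read above; values are made up): a
    # minimal 'options' dict in a problem description file could look like
    #   {'coefs': 'coefs', 'requirements': 'requirements',
    #    'multiprocessing': True, 'store_micro_idxs': [0]}.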
def __init__(self, problem, options, app_options=None,
volumes=None, output_prefix='he:', **kwargs):
"""Bypasses PDESolverApp.__init__()!"""
Application.__init__(self, problem.conf, options, output_prefix,
**kwargs)
self.problem = problem
self.setup_options(app_options=app_options)
self.setup_output_info(self.problem, self.options)
self.volumes = volumes
self.micro_coors = None
def setup_options(self, app_options=None):
PDESolverApp.setup_options(self)
app_options = get_default(app_options, self.conf.options)
po = HomogenizationEngine.process_options
self.app_options += po(app_options)
def set_micro_coors(self, ncoors):
self.micro_coors = ncoors
@staticmethod
def define_volume_coef(coef_info, volumes):
"""
Define volume coefficients and make all other dependent on them.
Parameters
----------
coef_info : dict
The coefficient definitions.
volumes : dict
The definitions of volumes.
Returns
-------
coef_info : dict
The coefficient definitions extended by the volume coefficients.
"""
vcfkeys = []
cf_vols = {}
for vk, vv in six.iteritems(volumes):
cfkey = 'Volume_%s' % vk
vcfkeys.append('c.' + cfkey)
if 'value' in vv:
cf_vols[cfkey] = {'expression': '%e' % float(vv['value']),
'class': CoefEval}
else:
cf_vols[cfkey] = {'expression': vv['expression'],
'class': CoefVolume}
for cf in six.itervalues(coef_info):
if 'requires' in cf:
cf['requires'] += vcfkeys
else:
cf['requires'] = vcfkeys
coef_info.update(cf_vols)
return coef_info
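    # Hedged illustration (names/expressions are made up): volumes such as
    #   {'total': {'expression': 'ev_volume.i.Y(u)'}, 'one': {'value': 1.0}}
    # become coefficients 'Volume_total' and 'Volume_one', and every other
    # coefficient gets ['c.Volume_total', 'c.Volume_one'] appended to its
    # 'requires' list, so the volumes are always computed first.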
def call(self, ret_all=False, time_tag=''):
problem = self.problem
opts = self.app_options
# Some coefficients can require other coefficients - resolve their
# order here.
req_info = getattr(self.conf, opts.requirements, {})
coef_info = getattr(self.conf, opts.coefs, {})
coef_info = self.define_volume_coef(coef_info, self.volumes)
is_store_filenames = coef_info.pop('filenames', None) is not None
multiproc_mode = None
if opts.multiprocessing and multi.use_multiprocessing:
multiproc, multiproc_mode = multi.get_multiproc(mpi=opts.use_mpi)
if multiproc_mode == 'mpi':
HomogWorkerMulti = HomogenizationWorkerMultiMPI
elif multiproc_mode == 'proc':
HomogWorkerMulti = HomogenizationWorkerMulti
else:
multiproc_mode = None
if multiproc_mode is not None:
num_workers = multi.get_num_workers()
worker = HomogWorkerMulti(num_workers)
dependencies, sd_names = worker(problem, opts,
self.post_process_hook,
req_info, coef_info,
self.micro_coors,
self.app_options.store_micro_idxs,
self.app_options.chunks_per_worker,
time_tag)
else: # no multiprocessing
worker = HomogenizationWorker()
dependencies, sd_names = worker(problem, opts,
self.post_process_hook,
req_info, coef_info,
self.micro_coors,
self.app_options.store_micro_idxs,
time_tag)
deps = {}
if sd_names is None and dependencies is not None: # slave mode
coefs = None
for name in dependencies.keys():
data = dependencies[name]
if not name.startswith('c.'):
deps[name] = data
else:
coefs = Struct()
for name in dependencies.keys():
data = dependencies[name]
if name.startswith('c.'):
coef_name = name[2:]
cstat = coef_info[coef_name].get('status', 'main')
# remove "auxiliary" coefs
if not cstat == 'auxiliary':
setattr(coefs, coef_name, data)
else:
deps[name] = data
# Store filenames of all requirements as a "coefficient".
if is_store_filenames:
for name in sd_names.keys():
if '|multiprocessing_' in name:
mname = rm_multi(name)
if mname in sd_names:
sd_names[mname] += sd_names[name]
else:
sd_names[mname] = sd_names[name]
del(sd_names[name])
save_names = {}
dump_names = {}
for name in sd_names.keys():
val = sd_names[name]
if name.startswith('s.'):
save_names[name[2:]] = val
elif name.startswith('d.'):
dump_names[name[2:]] = val
coefs.save_names = save_names
coefs.dump_names = dump_names
if opts.coefs_info is not None:
coefs.info = opts.coefs_info
if ret_all:
return coefs, deps
else:
return coefs
| {
"repo_name": "lokik/sfepy",
"path": "sfepy/homogenization/engine.py",
"copies": "1",
"size": "30697",
"license": "bsd-3-clause",
"hash": 8691757098780742000,
"line_mean": 37.1803482587,
"line_max": 79,
"alpha_frac": 0.4992996058,
"autogenerated": false,
"ratio": 4.5396332446021885,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.000033403637902771646,
"num_lines": 804
} |
from __future__ import absolute_import, division
from __future__ import print_function, unicode_literals
from math import pi, fabs, sin, e
from Model import Model, Decisions, Objectives
from sys import stdout, maxint
class DTLZ7(Model):
def __init__(self, decisionSpace=10, objectiveSpace=2):
self.decisions = []
self.candidates = []
self.objectives = []
self.decisionSpace = decisionSpace
self.objectiveSpace = objectiveSpace
for i in xrange(self.decisionSpace):
self.decisions.append(Decisions(0, 1))
self.objectives.append(Objectives(0, 1))
self.objectives.append(Objectives(5, 20))
self.any()
# self.findMinMax()
def copy(self, other):
self.decisions = other.decisions[:]
self.candidates = other.candidates[:]
self.decisionSpace = other.decisionSpace
self.objectiveSpace = other.objectiveSpace
def score(self):
# use sum of objectives as score
res = self.fi()
val = 0.0
for i in xrange(self.objectiveSpace - 1):
val += self.energy(res[i], self.objectives[i].lo, self.objectives[i].hi)
# print(val)
return fabs(val/self.objectiveSpace)
def fm(self, objectives):
g = 1 + 9 / (self.decisionSpace - self.objectiveSpace + 1) * sum(self.candidates[self.objectiveSpace : ])
h = self.objectiveSpace
for x in range(self.objectiveSpace - 1):
            # standard DTLZ7: h = M - sum(f_i/(1+g) * (1 + sin(3*pi*f_i)))
            h -= (objectives[x] / (1 + g)) * (1 + sin(3 * pi * objectives[x]))
objectives.append((1 + g) * h)
def fi(self):
objectives = []
# for fis before the last one
for i in xrange(self.objectiveSpace - 1):
objectives.append(self.candidates[i])
# calculate and append the last f
self.fm(objectives)
# return
return objectives
def cdom(self, other):
def loss(xl, yl):
n = len(xl)
# allloss = [pow((xi-yi)/n,2) for xi,yi in zip(xl,yl)]
allloss = [-1 * e**(-1 * (xi - yi) / n) for xi,yi in zip(xl,yl)]
return sum(allloss)/n
x_objs = self.fi()
y_objs = other.fi()
# print(x_objs)
# print(y_objs)
l1 = loss(x_objs, y_objs)
l2 = loss(y_objs, x_objs)
return l2 - l1
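    # Hedged note (added): loss() is a Zitzler-style continuous-domination
    # indicator, so cdom() > 0 means `self` loses less than `other`, i.e.
    # `self` is preferred under minimisation; callers are assumed to compare
    # the returned value against 0.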
def findMinMax(self):
for i in xrange(self.objectiveSpace):
self.objectives.append(Objectives())
for i in xrange(1000):
self.any()
res = self.fi()
# print(res)
for j in xrange(self.objectiveSpace):
if (self.objectives[j].hi < res[j]):
self.objectives[j].hi = res[j]
if (self.objectives[j].lo > res[j]):
self.objectives[j].lo = res[j]
def energy(self, eval, min, max):
# print(min, max)
return (eval - min) / (max - min)
if __name__ == "__main__":
DTLZ7 = DTLZ7()
print(DTLZ7.candidates)
print(DTLZ7.fi())
DTLZ7.findMinMax()
print(DTLZ7.score())
print(DTLZ7.objectives[0].lo,DTLZ7.objectives[0].hi, DTLZ7.objectives[1].lo, DTLZ7.objectives[1].hi)
| {
"repo_name": "gbtimmon/ase16GBT",
"path": "code/7/DTLZ7.py",
"copies": "1",
"size": "3185",
"license": "unlicense",
"hash": -3704237717351534000,
"line_mean": 30.85,
"line_max": 113,
"alpha_frac": 0.5635792779,
"autogenerated": false,
"ratio": 3.3350785340314135,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.43986578119314135,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division
from __future__ import print_function, unicode_literals
import os.path
import subprocess
import sys
def get_bash_path():
try:
return subprocess.check_output(['which', 'bash']).strip()
except subprocess.CalledProcessError:
sys.stderr.write('Bash not found, please install bash')
sys.exit(1)
def is_sysegg_in_buildout():
return (
'recipe= syseggrecipe' in
subprocess.check_output(['bin/buildout', 'annotate']))
def link():
result_file = 'buildout.cfg'
if not os.path.exists(result_file):
subprocess.call(['ln', '-sf', 'development.cfg', result_file])
def bootstrap():
result_file = 'bin/buildout'
if not os.path.exists(result_file):
subprocess.call(['python', 'bootstrap.py'])
def check_sysegg():
if is_sysegg_in_buildout():
subprocess.call(['bin/buildout', 'sysegg:force-sysegg=false',
'install', 'sysegg'])
def buildout():
bash = get_bash_path()
subprocess.call([bash, '-c', 'bin/buildout'])
def main():
"""Run all commands."""
link()
bootstrap()
check_sysegg()
buildout()
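# Hedged note (an assumption, not in the original): main() is typically wired
# up as a console_scripts entry point, so running it is roughly equivalent to
#   $ ln -sf development.cfg buildout.cfg && python bootstrap.py && bin/buildout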
| {
"repo_name": "nens/nensbuild",
"path": "nensbuild/build.py",
"copies": "1",
"size": "1188",
"license": "bsd-2-clause",
"hash": -9182661981137374000,
"line_mean": 21.4150943396,
"line_max": 70,
"alpha_frac": 0.6228956229,
"autogenerated": false,
"ratio": 3.5357142857142856,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4658609908614285,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import,division
from geode import *
def force_test(force,X,dx_scale=1e-5,tolerance=1e-5,iterations=10,verbose=False,ignore_hessian=False,single_input=None,definite=True):
'''Test a force's methods against numerical differentiation'''
try:
structure = SolidMatrixStructure(len(X))
force.structure(structure)
matrix = SolidMatrix[X.shape[1]](structure)
except NotImplementedError:
matrix = None
def rand(scale):
U = scale*random.randn(*X.shape).astype(real)
if single_input is not None:
U[:single_input] = U[single_input:][1:] = 0
return U
def elastic_energy(X):
force.update_position(X,False)
return force.elastic_energy()
def elastic_force(X):
F = zeros_like(X)
force.update_position(X,False)
force.add_elastic_force(F)
return F
def elastic_differential(X,dX):
dF = zeros_like(X)
force.update_position(X,False)
force.add_elastic_differential(dF,dX)
return dF
def elastic_gradient_block_diagonal_times(X,dX):
force.update_position(X,False)
return force.elastic_gradient_block_diagonal_times(dX)
def damping_energy(X,V):
force.update_position(X,False)
return force.damping_energy(V)
def damping_force(X,V):
F = zeros_like(X)
force.update_position(X,False)
force.add_damping_force(F,V)
return F
def update_elastic_gradient(X,definite=False):
matrix.zero()
force.update_position(X,definite)
force.add_elastic_gradient(matrix)
def elastic_gradient_times(X,dX):
update_elastic_gradient(X)
F = empty_like(X)
matrix.multiply(dX,F)
return F
def damping_gradient_times(X,dV):
matrix.zero()
force.update_position(X,False)
force.add_damping_gradient(matrix)
F = empty_like(X)
matrix.multiply(dV,F)
return F
V = rand(1)
U0 = elastic_energy(X)
Fe0 = elastic_force(X)
K0 = damping_energy(X,V)
Fd0 = damping_force(X,V)
for _ in xrange(iterations):
# Test elastic force
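    # (Added note, not in the original comments: this is a midpoint-rule check
    # of F = -dU/dX, i.e. the work Fe1 . dX of the force evaluated at X + dX/2
    # should match the energy drop U0 - U2 to second order in |dX|.)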
dX = rand(dx_scale)
U2 = elastic_energy(X+dX)
Fe1 = elastic_force(X+dX/2)
e = relative_error(tensordot(Fe1,dX,axes=2),U0-U2)
if verbose:
print '|Fe1| %r, U0 %r, U2 %r'%(maxabs(Fe1),U0,U2)
print 'elastic force error =',e
assert e < tolerance
# Test elastic differentials
if not ignore_hessian:
Fe2 = elastic_force(X+dX)
dFe = elastic_differential(X+dX/2,dX)
e = relative_error(dFe,Fe2-Fe0)
if verbose:
print '|dFe| %r, |Fe2| %r, |Fe0| %r, |Fe2-Fe0| %r'%(maxabs(dFe),maxabs(Fe2),maxabs(Fe0),maxabs(Fe2-Fe0))
print 'elastic differential error =',e
assert e < tolerance
# Test elastic gradient
if matrix:
dF2 = elastic_gradient_times(X,dX)
dF = elastic_differential(X,dX)
e = relative_error(dF,dF2)
if verbose:
if len(dF)<10:
print 'elastic gradient force errors = %s'%magnitudes(dF-dF2)
print 'elastic gradient error =',e
assert e < tolerance
# Test definiteness
if definite:
try:
update_elastic_gradient(X,definite=True)
A = matrix.dense()
w = linalg.eigvalsh(matrix.dense())
if verbose:
set_printoptions(linewidth=250)
if 0:
print 'A =\n%s'%A
print 'eigenvalues = %s'%sort(w)
assert w.max()<=tolerance*maxabs(w)
except NotImplementedError:
pass
# Test elastic gradient block diagonal
i = random.randint(len(X))
dXi = zeros_like(dX)
dXi[i] = dX[i]
dFi = elastic_differential(X,dXi)
try:
dFi2 = elastic_gradient_block_diagonal_times(X,dXi)
e = relative_error(dFi[i],dFi2[i])
if verbose:
print 'elastic gradient block diagonal error =',e
assert e < tolerance
except NotImplementedError:
pass
# Test damping force
dV = rand(tolerance)
K2 = damping_energy(X,V+dV)
Fd1 = damping_force(X,V+dV/2)
e = relative_error(tensordot(Fd1,dV,axes=2),K0-K2)
if verbose:
print 'damping force error =',e
assert e < tolerance
# Test damping linearity
V2 = rand(1)
Fd2 = damping_force(X,V2)
Fd3 = damping_force(X,V+V2)
e = relative_error(Fd0+Fd2,Fd3)
if verbose:
print 'damping linearity error =',e
assert e < tolerance
# Test damping gradient
if matrix:
Fd2 = damping_gradient_times(X,V)
e = relative_error(Fd0,Fd2)
if verbose:
print 'damping gradient error =',e
assert e < tolerance
| {
"repo_name": "mikest/geode",
"path": "geode/force/force_test.py",
"copies": "3",
"size": "4520",
"license": "bsd-3-clause",
"hash": -8357758889183860000,
"line_mean": 29.9589041096,
"line_max": 134,
"alpha_frac": 0.6331858407,
"autogenerated": false,
"ratio": 3.108665749656121,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.027050030313980733,
"num_lines": 146
} |
from __future__ import absolute_import, division
from httplib import BadStatusLine
from .FiltrackerNotifications import FiltrackerMsgDict, FiltrackerPrinterProcessDict, FiltrackerPrintingStatusDict, FiltrackerPrinterStatusDict, FiltrackerSlicingStatusDict
import octoprint.plugin
from octoprint.slicing import SlicingManager, UnknownProfile
from octoprint.server import printerProfileManager
from octoprint.settings import settings
import requests
import flask
from flask import request
import json
import hashlib
import os
from shutil import copyfile
import urllib
from urlparse import urlsplit
Layer = 0
uid = "55de667a295efb62093205e4"
# url = "http://192.168.0.34:3000"
#url = "http://api.locbit.com:8888/endpoint"
url = "http://0.0.0.0:8001/event"
status_url = 'https://test-api.locbit.com/statusByLid'
HTTP_REQUEST_TIMEOUT=50
LAYER_HEIGHT_THRESHOLD=0.1
class FiltrackerPlugin(octoprint.plugin.StartupPlugin,
octoprint.plugin.BlueprintPlugin,
octoprint.plugin.TemplatePlugin,
octoprint.plugin.SettingsPlugin,
octoprint.plugin.EventHandlerPlugin,
octoprint.plugin.AssetPlugin,
octoprint.plugin.SimpleApiPlugin,
octoprint.plugin.WizardPlugin):
def get_api_commands(self):
return dict(
command1=[],
command2=["some_parameter"]
)
def on_api_command(self, command, data):
import flask
if command == "command1":
parameter = "unset"
if "parameter" in data:
parameter = "set"
self._logger.info("command1 called, parameter is {parameter}".format(**locals()))
elif command == "command2":
self._logger.info("command2 called, some_parameter is {some_parameter}".format(**data))
#_post_spool_data
    #overrides the current length of the spool on the server.
#self, spool_data
def _post_spool_data(self, spool_data):
self._send_printer_status()
post_data = {"MUID":spool_data['muid'],
"Material":spool_data['material'],
"Color":spool_data['color'],
"Diameter":spool_data['diameter'],
"Length":spool_data['length']}
post_result = requests.post(url, json=post_data, timeout=HTTP_REQUEST_TIMEOUT)
post_result.raise_for_status()
post_result_data = post_result.json()
if not post_result_data['success']:
raise Exception("Post data: {}, response data: {}".format(str(post_data), str(post_result_data)))
#_get_spool_length
#grabs the current length of the spool.
#self, muid
def _get_spool_length(self, muid):
SD3D_api_key = self._settings.get(['SD3DAPIKey'])
SD3D_access_id = self._settings.get(['SD3DAccessID'])
if len(SD3D_api_key) == 0 or len(SD3D_access_id) == 0:
raise Exception("Cannot get stored spool length, either Filtracker api key or access ID is missing from settings")
request_uri = "{}/{}/SD3DPrinter".format(status_url, muid)
query_params = {'api': SD3D_api_key, 'access': SD3D_access_id}
response = requests.get(request_uri, params=query_params, timeout=HTTP_REQUEST_TIMEOUT)
response.raise_for_status()
response_data = response.json()
if 'measurements' in response_data and 'Length' in response_data['measurements']:
length = response_data['measurements']['Length'].get('status')
return length
elif 'success' in response_data and \
not response_data['success'] and \
response_data['message'] == 'Device is not found':
return None
else:
return None
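    # Hedged example of the status-endpoint payload the parser above expects
    # (values are made up):
    #   {"measurements": {"Length": {"status": 27.5}}}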
#_get_spool_settings
    #grabs the current spool data from the settings: muid, material, color, diameter, length, initial_length, jobProgress
#settings dict
def _get_spool_settings(self):
setting_keys = ['muid', 'material', 'color', 'diameter', 'length', 'initial_length', 'jobProgress']
setting_dict = {}
for setting_key in setting_keys:
setting_value = self._settings.get([setting_key])
setting_dict[setting_key] = setting_value
return setting_dict
#_get_printer_job_info
#checks the localhost for information on the current job.
#self
#response.json
def _get_printer_job_info(self):
job_uri = 'http://localhost/api/job'
octoprint_api_key = settings().get(['api', 'key'])
assert octoprint_api_key is not None and len(octoprint_api_key) > 0
response = requests.get(job_uri, headers = { "X-Api-Key" : octoprint_api_key }, timeout=HTTP_REQUEST_TIMEOUT)
response.raise_for_status()
return response.json()
#_get_slice_profile
#retrieves the slicing profile.
#self, slicer, slice_profile_name
#response.json
def _get_slice_profile(self, slicer, slice_profile_name):
profile_uri = "http://localhost/api/slicing/{}/profiles/{}".format(slicer, slice_profile_name)
octoprint_api_key = settings().get(['api', 'key'])
assert octoprint_api_key is not None and len(octoprint_api_key) > 0
response = requests.get(profile_uri, headers = { "X-Api-Key" : octoprint_api_key }, timeout=HTTP_REQUEST_TIMEOUT)
response.raise_for_status()
return response.json()
#_get_printer_profile
#retrieves the current printer profile.
#self, printer_profile_id
#json_response['profiles'][printer_profile_id]
def _get_printer_profile(self, printer_profile_id):
profile_uri = "http://localhost/api/printerprofiles"
octoprint_api_key = settings().get(['api', 'key'])
assert octoprint_api_key is not None and len(octoprint_api_key) > 0
response = requests.get(profile_uri, headers = { "X-Api-Key" : octoprint_api_key }, timeout=HTTP_REQUEST_TIMEOUT)
response.raise_for_status()
json_response = response.json()
return json_response['profiles'][printer_profile_id]
#_get_current_printer_profile
#grabs the current profile from the localhost.
#self
def _get_current_printer_profile(self):
profile_uri = "http://localhost/api/printerprofiles"
octoprint_api_key = settings().get(['api', 'key'])
assert octoprint_api_key is not None and len(octoprint_api_key) > 0
response = requests.get(profile_uri, headers = { "X-Api-Key" : octoprint_api_key }, timeout=HTTP_REQUEST_TIMEOUT)
response.raise_for_status()
printers = response.json()['profiles']
for printer in printers:
if printers[printer]['current']:
return printers[printer]
#_get_default_slice_profile
#grabs the default slicing profile
#self, slicer
def _get_default_slice_profile(self, slicer):
profile_uri = "http://localhost/api/slicing/{}/profiles".format(slicer)
octoprint_api_key = settings().get(['api', 'key'])
assert octoprint_api_key is not None and len(octoprint_api_key) > 0
response = requests.get(profile_uri, headers = { "X-Api-Key" : octoprint_api_key }, timeout=HTTP_REQUEST_TIMEOUT)
response.raise_for_status()
profiles = response.json()
for profile in profiles:
if profiles[profile]['default']:
return profile
#_get_local_file_metadata
#gathers metadata from the local host
#self, local_file_name
def _get_local_file_metadata(self, local_file_name):
local_file_uri = "http://localhost/api/files/local/{}".format(urllib.quote_plus(local_file_name))
octoprint_api_key = settings().get(['api', 'key'])
assert octoprint_api_key is not None and len(octoprint_api_key) > 0
response = requests.get(local_file_uri, headers = { "X-Api-Key" : octoprint_api_key }, timeout=HTTP_REQUEST_TIMEOUT)
response.raise_for_status()
json_response = response.json()
return json_response
#_get_current_job
#get the current job info from the localhost
#self
def _get_current_job(self):
job_uri = "http://localhost/api/job"
octoprint_api_key = settings().get(['api', 'key'])
assert octoprint_api_key is not None and len(octoprint_api_key) > 0
response = requests.get(job_uri, headers = { "X-Api-Key" : octoprint_api_key }, timeout=HTTP_REQUEST_TIMEOUT)
response.raise_for_status()
job = response.json()
return job
#_update_spool_length
    #Alters the spool length throughout the different events.
#self, update_remote=False
def _update_spool_length(self, update_remote=False):
try:
current_spool_settings = self._get_spool_settings()
printer_job_info = self._get_printer_job_info()
initial_length = float(current_spool_settings['initial_length'])
job_obj = printer_job_info.get('job')
filament_obj = None
tool0_obj = None
estimated_job_length = None
job_completion_percent = None
if job_obj is not None:
filament_obj = job_obj.get('filament')
if filament_obj is not None:
tool0_obj = filament_obj.get('tool0')
if tool0_obj is not None:
estimated_job_length = tool0_obj['length']
progress_obj = printer_job_info.get('progress')
if progress_obj is not None:
job_completion_percent = progress_obj['completion']
internal_progress = current_spool_settings.get('jobProgress')
if internal_progress != '':
internal_progress = float(internal_progress)
if job_completion_percent is not None:
# If a job reset has been detected, set initial length to length
if internal_progress != '' and internal_progress > job_completion_percent:
initial_length = float(current_spool_settings['length'])
current_spool_settings['initial_length'] = str(current_spool_settings['length'])
# Job filament length is in millimeters, so must convert to meters
length_job_used = (job_completion_percent / 100) * (float(estimated_job_length) / 1000)
new_length = initial_length - length_job_used
current_spool_settings['length'] = new_length
current_spool_settings['length'] = str(current_spool_settings['length'])
current_spool_settings['jobProgress'] = job_completion_percent
octoprint.plugin.SettingsPlugin.on_settings_save(self, current_spool_settings)
# If a job reset has been detected, set initial length to length
elif job_completion_percent is None and internal_progress != '':
current_spool_settings['initial_length'] = str(current_spool_settings['length'])
current_spool_settings['jobProgress'] = ''
octoprint.plugin.SettingsPlugin.on_settings_save(self, current_spool_settings)
if update_remote:
current_spool_settings['length'] = float(current_spool_settings['length'])
self._post_spool_data(current_spool_settings)
except Exception as e:
self._logger.error("Could not update length: {}".format(str(e)))
#_set_default_slice_profile
    #sets the named slicing profile as the default.
#self, profile_name
def _set_default_slice_profile(self, profile_name):
slice_profile_path = settings().get(['folder', 'slicingProfiles'])
slice_manager = SlicingManager(slice_profile_path, printerProfileManager)
slice_manager.reload_slicers()
default_slicer = slice_manager.default_slicer
slice_manager.set_default_profile(default_slicer, profile_name, require_exists=True)
#on_api_get
    #Handles the install, settings, and auto-print setting queries, then makes the QR reader script executable and runs it.
#self, request
def on_api_get(self, request):
if request.args.get('install') == '1':
try:
fill_percent = request.args.get('fill')
self.install_dependencies(fill_percent)
return flask.jsonify(result='')
except Exception as e:
return flask.jsonify(error=str(e))
if request.args.get('settings') == '1':
return_result = {}
for qr_data_key in ['material', 'diameter', 'color', 'length', 'muid']:
return_result[qr_data_key] = self._settings.get([qr_data_key])
try:
return_result['length'] = "{0:.3f}".format(float(return_result['length']))
except Exception as e:
self._logger.info('Could not return length')
return flask.jsonify(result=return_result)
if request.args.get('autoprint_setting') == '1':
return flask.jsonify(result=self._settings.get(['autoPrintMode']))
        # make the QR script executable before running it
commands = ['/bin/chmod +x /home/pi/oprint/lib/python2.7/site-packages/octoprint_Filtracker/qr.py']
import subprocess
for command in commands:
subprocess.check_call("/bin/bash -c 'sudo {}'".format(command), shell=True)
qr_script_path = '/home/pi/oprint/lib/python2.7/site-packages/octoprint_Filtracker/qr.py'
subprocess_args = [qr_script_path]
output = ''
subprocess_args.append('-u')
current_url = request.url
split_url = urlsplit(current_url)
split_url_port = ''
if split_url.port is not None:
split_url_port = ":{}".format(split_url.port)
subprocess_args.append("{}://{}{}".format(split_url.scheme, split_url.hostname, split_url_port))
output = subprocess.check_output(subprocess_args)
json_output = json.loads(output)
if 'error' in json_output:
return flask.jsonify(error=json_output['error'])
else:
qr_result = json_output.get('result')
if qr_result is None:
return flask.jsonify(error="QR code read failure. Uknown error.")
qr_result = qr_result.split(",")
if len(qr_result) == 5:
return_result = {'material': qr_result[0],
'diameter': qr_result[1],
'color': qr_result[2],
'length': qr_result[3],
'muid': qr_result[4]}
# Initialize plugin settings with data from QR code
octoprint.plugin.SettingsPlugin.on_settings_save(self, return_result)
octoprint.plugin.SettingsPlugin.on_settings_save(self, {'initial_length': return_result['length']})
try:
stored_length = self._get_spool_length(return_result['muid'])
# If the length of the spool already exists, update the settings,
# otherwise, post the initial spool data
if stored_length is not None:
return_result['length'] = stored_length
octoprint.plugin.SettingsPlugin.on_settings_save(self, return_result)
octoprint.plugin.SettingsPlugin.on_settings_save(self, {'initial_length': return_result['length']})
octoprint.plugin.SettingsPlugin.on_settings_save(self, {'printProgress': ''})
else:
self._post_spool_data(return_result)
except Exception as e:
return flask.jsonify(result=return_result, Filtracker_error=str(e))
try:
self._set_default_slice_profile(return_result['muid'][0:7])
except Exception as e:
return flask.jsonify(result=return_result, Filtracker_error="Setting profile {} as default failed, check to see if it exists".format(return_result['muid']))
return_result['length'] = "{0:.3f}".format(float(return_result['length']))
return flask.jsonify(result=return_result)
else:
return flask.jsonify(error="Invalid QR code")
#_update_profile_event_stats
    #when printer events happen, this updates the stats and sends them to the cloud profile.
#self, printer_event
def _update_profile_event_stats(self, printer_event):
sharing_mode = self._settings.get(['sharingMode'])
if not sharing_mode:
self._logger.info('Sharing Mode turned off, skipping profile stat update')
return
current_printer = self._get_current_printer_profile()
printer_make = current_printer['id']
printer_model = current_printer['model']
nozzle_size = current_printer['extruder']['nozzleDiameter']
muid = self._settings.get(['muid'])[0:7]
current_job = self._get_current_job()
gcode_file_name = current_job['job']['file']['name']
gcode_file_metadata = self._get_local_file_metadata(gcode_file_name)
gcode_identifier = gcode_file_metadata['hash']
#layer_height = None
#try:
# layer_height = int( self._settings.get(['layerHeight']) )
# assert layer_height > 0
#except Exception as e:
# self._logger.error('Cannot make profile stat request, layer height must be non-zero positive integer')
# return
tmp_json = {
'printer_event': printer_event,
'muid': muid,
'gcode_identifier': gcode_identifier,
'printer_make': printer_make,
'printer_model': printer_model,
'nozzle_size': nozzle_size
}
if len(str(self._settings.get(['diameter']))) > 0:
tmp_json["diameter"] = float("{0:.3f}".format(float(self._settings.get(['diameter']))))
        profile_update_data = json.dumps(tmp_json)
self._logger.info('WORKING UPDATE' * 5 + str(profile_update_data))
Filtracker_info_share_event_uri = 'https://sd3d.locbit.com/event'
SD3D_api_key = self._settings.get(['SD3DAPIKey'])
SD3D_access_id = self._settings.get(['SD3DAccessID'])
if len(SD3D_api_key) == 0 or len(SD3D_access_id) == 0:
self._logger.error("No API key or access key in settings. Skipping stat update")
return
query_params = {'api': SD3D_api_key, 'access': SD3D_access_id}
        # wrap in try/except so an unhandled error cannot break the event flow
try:
response = requests.post(Filtracker_info_share_event_uri, params=query_params, headers={'Content-Type': 'application/json'}, data=profile_update_data).json()
self._logger.info('EVENT STAT RESPONSE' * 3 + str(response))
# TODO: check if response has the key called data
if printer_event == 'PrintStarted' and not response['success']:
self._logger.error("Profile stats update failed: %s".format(response['data']))
self._send_client_alert("Could not update profile stats on PrintStart: %s" % response['data'])
        except Exception:
            self._logger.info('Error sending event to sd3d.locbit.com')
    #_download_best_profile
    #If cloud mode is active, downloads the best profile available.
    #self
def _download_best_profile(self):
cloud_mode = self._settings.get(['cloudMode'])
if not cloud_mode:
self._logger.info('Cloud Mode turned off, skipping best profile download')
return
current_printer = self._get_current_printer_profile()
printer_make = current_printer['id']
printer_model = current_printer['model']
nozzle_size = current_printer['extruder']['nozzleDiameter']
muid = self._settings.get(['muid'])[0:7]
material_diameter = float("{0:.3f}".format(float(self._settings.get(['diameter']))))
layer_height = None
layer_height_threshold = LAYER_HEIGHT_THRESHOLD
try:
layer_height = float(self._settings.get(['layerHeight']))
except Exception as e:
self._logger.error("Could not parse layer height {}, skipping best profile download".format(layer_height))
return
best_profile = self._get_best_profile(printer_make, printer_model, nozzle_size, muid, layer_height, layer_height_threshold, material_diameter)
if best_profile['success']:
print("best profile data:" + str(best_profile))
best_profile['data']['slicing_profile']['key'] = 'Filtracker' + best_profile['data']['slicing_profile']['key']
best_profile['data']['slicing_profile']['default'] = False
self._upload_new_profile(best_profile['data']['slicing_profile'])
self._set_best_or_default_profile(best_profile['data']['slicing_profile']['key'])
else:
self._logger.error("Error getting best profile, skipping best profile download")
muid_prefix = self._settings.get(['muid'])[0:7]
try:
self._set_default_slice_profile(muid_prefix)
except Exception as e:
self._logger.error("Could not set default muid profile %s".format(muid_prefix))
self._send_client_alert("Could not get best profile and setting default slice profile for muid %s failed" % muid_prefix)
def _send_client_alert(self, message):
self._plugin_manager.send_plugin_message(self._identifier, message)
    #_set_best_or_default_profile
    #Chooses the best profile, or falls back to the default.
    #self, best_profile_name
def _set_best_or_default_profile(self, best_profile_name):
muid_prefix = self._settings.get(['muid'])[0:7]
try:
self._set_default_slice_profile(best_profile_name)
except Exception as e:
try:
self._set_default_slice_profile(muid_prefix)
except Exception as e:
self._logger.error("Could not set best profile %s, nor default muid profile %s, check if either one exists".format(best_profile_name, muid_prefix))
self._send_client_alert("Could not set best profile %s, nor default muid profile %s, check if either one exists" % (best_profile_name, muid_prefix))
    #_get_best_profile
    #Compares the cloud profiles based on completion percentage and returns the best one.
    #self, printer_make, printer_model, nozzle_size, muid, layer_height, layer_height_threshold, material_diameter
    #response.json
def _get_best_profile(self, printer_make, printer_model, nozzle_size, muid, layer_height, layer_height_threshold, material_diameter):
#printer_make = urllib.quote(printer_make)
#printer_model = urllib.quote(printer_model)
SD3D_api_key = self._settings.get(['SD3DAPIKey'])
SD3D_access_id = self._settings.get(['SD3DAccessID'])
query_data = {
'printer_make': printer_make,
'printer_model': printer_model,
'nozzle_size': nozzle_size,
'muid': muid,
'layer_height': layer_height,
'layer_height_threshold': layer_height_threshold,
'material_diameter': material_diameter,
'api': SD3D_api_key,
'access': SD3D_access_id
}
if len(SD3D_api_key) == 0 or len(SD3D_access_id) == 0:
self._logger.error("No API key or access key in settings. Skipping getting best profile")
return
Filtracker_uri = 'https://sd3d.locbit.com/slicing_profile'
self._logger.info('GET BEST PROFILE REQUEST' * 3 + str(query_data))
response = requests.get(Filtracker_uri, params=query_data)
self._logger.info('GET BEST PROFILE RESPONSE' * 3 + str(response.json()) + str(response.url))
return response.json()
    #_upload_new_profile
    #Uploads a new slicing profile into the local OctoPrint instance.
    #self, profile
    #response.json() --> json object that contains the created profile resource.
def _upload_new_profile(self, profile):
profile_uri = "http://localhost/api/slicing/cura/profiles/{}".format(profile['key'])
octoprint_api_key = settings().get(['api', 'key'])
assert octoprint_api_key is not None and len(octoprint_api_key) > 0
response = requests.put(profile_uri, headers = { "X-Api-Key" : octoprint_api_key}, json=profile, timeout=HTTP_REQUEST_TIMEOUT)
response.raise_for_status()
return response.json()
    #_associate_profile_gcode
    #Associates the generated gcode with the slicing profile used to produce it.
    #self, gcode_identifier, slicer, slicing_profile_name, printer_profile_id
def _associate_profile_gcode(self, gcode_identifier, slicer, slicing_profile_name, printer_profile_id):
self._logger.info('ASSOCIATE PROFILE' * 4 + slicing_profile_name)
slicing_profile = self._get_slice_profile(slicer, slicing_profile_name)
printer_profile = {}
printer_profile = self._get_printer_profile(printer_profile_id)
#layer_height = None
#try:
# layer_height = int( self._settings.get(['layerHeight']) )
# assert layer_height > 0
#except Exception as e:
# self._logger.error('Cannot make gcode association request, layer height must be non-zero positive integer')
# return
request_data = json.dumps({'muid': self._settings.get(['muid'])[0:7], 'gcode_identifier': gcode_identifier,
'slicing_profile': slicing_profile, 'printer_make': printer_profile_id,
'printer_model': printer_profile['model'], 'nozzle_size': printer_profile['extruder']['nozzleDiameter'],
'material_diameter': float("{0:.3f}".format(float(self._settings.get(['diameter'])))) })
#'layer_height': layer_height})
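        # Illustrative request body (hypothetical values):
        # {"muid": "MUID123", "gcode_identifier": "<file hash>",
        #  "slicing_profile": {...}, "printer_make": "some_printer",
        #  "printer_model": "Some Model", "nozzle_size": 0.4, "material_diameter": 1.75}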
self._logger.info('PROFILE ASSOCIATION REQUEST' * 3 + str(request_data))
Filtracker_info_share_uri = 'https://sd3d.locbit.com/profile'
SD3D_api_key = self._settings.get(['SD3DAPIKey'])
SD3D_access_id = self._settings.get(['SD3DAccessID'])
if len(SD3D_api_key) == 0 or len(SD3D_access_id) == 0:
self._logger.error("No API key or access key in settings. Skipping profile update")
return
query_params = {'api': SD3D_api_key, 'access': SD3D_access_id}
response = requests.post(Filtracker_info_share_uri, params=query_params, headers={'Content-Type': 'application/json'}, data=request_data).json()
self._logger.info('PROFILE ASSOCIATION RESPONSE' * 3 + str(response))
#_auto_provision_printer
#Automatically provisions a printer to the locbit platform.
#self
def _auto_provision_printer(self):
from uuid import getnode as get_mac
SD3D_api_key = self._settings.get(['SD3DAPIKey'])
SD3D_access_id = self._settings.get(['SD3DAccessID'])
query_params = {'api': SD3D_api_key, 'access': SD3D_access_id}
did = self._settings.get(['did'])
lid = get_mac()
printer_oem = self._get_current_printer_profile()['name']
printer_model = self._get_current_printer_profile()['model']
        printer_dname = "Printer: %s %s (%s)" % (printer_oem, printer_model, lid)
provision_post_data = json.dumps({
'translator': 'SD3DPrinter',
'DeviceName': printer_dname,
'lid': lid,
'deviceDescriptionId': '56db96454a7a901f59815541',
'locationId': '13',
'userId': '116'})
self._logger.info('PRINTER AUTO PROVISION REQUEST' * 3 + str(provision_post_data))
response = requests.post('https://api.locbit.com/provision', params=query_params, headers={'Content-Type': 'application/json'}, data=provision_post_data).json()
self._logger.info('PRINTER AUTO PROVISION RESPONSE' * 3 + str(response))
if 'success' in response and response['success']:
provision_did = response['message']['did']
activation_post_data = json.dumps({'did': provision_did,
'connectivity': True,
'services': True
})
self._logger.info('PRINTER ACTIVATION REQUEST' * 3 + str(activation_post_data))
activate_response = requests.post('https://billing.locbit.com/charge', params=query_params, headers={'Content-Type': 'application/json'}, data=activation_post_data).json()
self._logger.info('PRINTER ACTIVATION RESPONSE' * 3 + str(activate_response))
    #install_dependencies
    #When a printer installs the plugin for the first time, this series of commands sets it up automatically.
    #self, fill_density
def install_dependencies(self, fill_density):
import subprocess, sys, os
from uuid import getnode as get_mac
settings().set(['folder', 'slicingProfiles'], '/home/pi/.octoprint/slicingProfiles')
settings().set(['slicing', 'defaultSlicer'], 'cura', force=True)
octoprint.plugin.SettingsPlugin.on_settings_save(self, {'macAddress': get_mac()})
try:
fill_density_percentage = int(fill_density)
assert fill_density_percentage > 0 and fill_density_percentage <= 100
octoprint.plugin.SettingsPlugin.on_settings_save(self, {'fillDensity': fill_density})
except Exception as e:
raise Exception("Fill density setting {} is invalid, must be percentage (integer)".format(fill_density))
commands = [
'/usr/bin/apt-get update',
'/usr/bin/apt-get install -y ipython python-opencv python-scipy python-numpy python-setuptools python-pip python-pygame python-zbar',
'/bin/chmod +x /home/pi/oprint/lib/python2.7/site-packages/octoprint_Filtracker/qr.py',
'/usr/bin/pip install --upgrade pip'
]
if not os.path.exists("/usr/local/lib/python2.7/dist-packages/SimpleCV"):
commands.append('/usr/local/bin/pip --no-cache-dir install timeout-decorator svgwrite https://github.com/sightmachine/SimpleCV/zipball/master')
if os.path.exists('/home/pi/oprint/lib/python2.7/site-packages/octoprint_Filtracker/edge_set.py'):
commands.append('/bin/mv /home/pi/oprint/lib/python2.7/site-packages/octoprint_Filtracker/edge_set.py /home/pi/oprint/lib/python2.7/site-packages/octoprint_Filtracker/edge_set.sh')
commands.append('/bin/chmod 755 ~/oprint/lib/python2.7/site-packages/octoprint_Filtracker/edge_set.sh')
commands.append('/home/pi/oprint/lib/python2.7/site-packages/octoprint_Filtracker/edge_set.sh')
if os.path.exists('/home/pi/oprint/lib/python2.7/site-packages/octoprint_Filtracker/zip_check.py'):
commands.append('/bin/mv /home/pi/oprint/lib/python2.7/site-packages/octoprint_Filtracker/zip_check.py /home/pi/oprint/lib/python2.7/site-packages/octoprint_Filtracker/zip_check.sh')
commands.append('/bin/chmod 755 ~/oprint/lib/python2.7/site-packages/octoprint_Filtracker/zip_check.sh')
commands.append('/home/pi/oprint/lib/python2.7/site-packages/octoprint_Filtracker/zip_check.sh')
commands.append('/usr/bin/wget -P ~/oprint/lib/python2.7/site-packages/octoprint_Filtracker https://github.com/Locbit/locbit-edge/archive/master.zip')
commands.append('/usr/bin/unzip ~/oprint/lib/python2.7/site-packages/octoprint_Filtracker/master.zip -d ~/oprint/lib/python2.7/site-packages/octoprint_Filtracker')
commands.append('/bin/mv /home/pi/oprint/lib/python2.7/site-packages/octoprint_Filtracker/locbit-edge-master/config.js.default /home/pi/oprint/lib/python2.7/site-packages/octoprint_Filtracker/locbit-edge-master/config.js')
if os.path.exists('/home/pi/oprint/lib/python2.7/site-packages/octoprint_Filtracker/shell.py'):
commands.append('/bin/mv /home/pi/oprint/lib/python2.7/site-packages/octoprint_Filtracker/shell.py /home/pi/oprint/lib/python2.7/site-packages/octoprint_Filtracker/locbit-edge-master/shell.sh')
commands.append('/bin/chmod 755 ~/oprint/lib/python2.7/site-packages/octoprint_Filtracker/locbit-edge-master/shell.sh')
commands.append('/home/pi/oprint/lib/python2.7/site-packages/octoprint_Filtracker/locbit-edge-master/shell.sh')
if not os.path.exists('/etc/init.d/pm_check.sh'):
commands.append('/bin/cp /home/pi/oprint/lib/python2.7/site-packages/octoprint_Filtracker/pm_check.py /etc/init.d/pm_check.sh')
commands.append('/bin/chmod 755 /etc/init.d/pm_check.sh')
commands.append('update-rc.d pm_check.sh defaults')
# commands.append('/etc/init.d/pm_check.sh')
for command in commands:
subprocess.check_call("/bin/bash -c 'sudo {}'".format(command), shell=True)
#on_after_startup
#A series of commands and instructions to execute after the server starts up.
#self
#slice monkey patch
def on_after_startup(self):
import subprocess, sys, os
from uuid import getnode as get_mac
self._logger.info("MAC: {}".format(get_mac()))
current_printer_name = self._get_current_printer_profile()['id']
octoprint.plugin.SettingsPlugin.on_settings_save(self, {'did': current_printer_name})
current_printer_oem = self._get_current_printer_profile()['name']
octoprint.plugin.SettingsPlugin.on_settings_save(self, {'oem': current_printer_oem})
current_printer_model = self._get_current_printer_profile()['model']
octoprint.plugin.SettingsPlugin.on_settings_save(self, {'model': current_printer_model})
self._logger.info("Hello world! I am: %s" % self._settings.get(["did"]))
self._auto_provision_printer()
# commands = [
# ]
# if os.path.exists('/home/pi/oprint/lib/python2.7/site-packages/octoprint_Filtracker/pm_check.sh'):
# commands.append('/home/pi/oprint/lib/python2.7/site-packages/octoprint_Filtracker/pm_check.sh')
# if not os.path.exists('/home/pi/oprint/lib/python2.7/site-packages/octoprint_Filtracker/pm_check.sh'):
# commands.append('/bin/cp /home/pi/oprint/lib/python2.7/site-packages/octoprint_Filtracker/pm_check.py /home/pi/oprint/lib/python2.7/site-packages/octoprint_Filtracker/pm_check.sh')
# commands.append('/bin/chmod 755 ~/oprint/lib/python2.7/site-packages/octoprint_Filtracker/pm_check.sh')
# commands.append('/home/pi/oprint/lib/python2.7/site-packages/octoprint_Filtracker/pm_check.sh')
# for command in commands:
# subprocess.check_call("/bin/bash -c 'sudo {}'".format(command), shell=True)
self._send_printer_status_with_timer()
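        # The nested functions below monkey-patch octoprint.slicing.SlicingManager.slice:
        # when sharing mode is on, the slicer's completion callback is wrapped so that the
        # produced .gco file is hashed and associated with the slicing profile via
        # _associate_profile_gcode. Positional assumptions (from the SlicingManager.slice
        # signature this plugin was written against): args[1] is the slicer name, args[3]
        # the slice output path, args[4] the profile name, args[5] the callback.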
def slice_monkey_patch_gen(slice_func):
def slice_monkey_patch(*args, **kwargs):
original_callback = args[5]
def patched_callback(*callbackargs, **callbackkwargs):
if 'callback_args' in kwargs and 'callback_kwargs' in kwargs:
original_callback(*kwargs['callback_args'], **kwargs['callback_kwargs'])
elif 'callback_args' in kwargs and 'callback_kwargs' not in kwargs:
gco_file = None
for arg in kwargs['callback_args']:
if arg.endswith('gco') and arg != args[3]:
gco_file = arg
break
original_callback(*kwargs['callback_args'])
if gco_file is not None:
gco_hash = self._get_local_file_metadata(gco_file)['hash']
self._associate_profile_gcode(gco_hash, args[1], args[4], kwargs['printer_profile_id'])
elif 'callback_args' not in kwargs and 'callback_kwargs' in kwargs:
original_callback(*kwargs['callback_kwargs'])
elif 'callback_args' not in kwargs and 'callback_kwargs' not in kwargs:
original_callback()
sharing_mode = self._settings.get(['sharingMode'])
if sharing_mode:
arg_list = list(args)
arg_list[5] = patched_callback
args = tuple(arg_list)
slice_func(*args, **kwargs)
return slice_monkey_patch
octoprint.slicing.SlicingManager.slice = slice_monkey_patch_gen(octoprint.slicing.SlicingManager.slice)
    #get_settings_defaults
    #Declares the settings variables
    #self
def get_settings_defaults(self):
return dict(did='',
oem='',
model='',
material='',
diameter='',
color='',
initial_length='',
length='',
muid='',
SD3DAPIKey='yCX9PgjsvzGuaKTT9yuUIJFehPHjMknU',
SD3DAccessID='DxM7QlAsDo43Z0SJW1qwLh4FBXGQlaGU',
jobProgress='',
layerHeight='0.25',
sharingMode=True,
cloudMode=True,
autoPrintMode=True,
macAddress='',
fillDensity='20',
                    updateInterval=5,
                    PrintingStatus='Unknown',
                    PrinterStatus='Unknown',
                    PrinterProcess='Unknown',
                    SlicingStatus='Unknown'
)
def get_template_configs(self):
return [
dict(type="navbar", custom_bindings=False),
dict(type="settings", custom_bindings=False)
]
def get_assets(self):
return dict(js=["js/Filtracker.js"])
#_auto_print
#Exclusive to the "upload" event. Automatically slices stl --> GCODE and starts printing.
#self, file_info --> data from the uploaded file that will be printed.
    #JSON response
def _auto_print(self, file_info):
if not self._settings.get(['autoPrintMode']):
return
fill_density_percentage = self._settings.get(['fillDensity'])
try:
fill_density_percentage = int(fill_density_percentage)
assert fill_density_percentage > 0 and fill_density_percentage <= 100
except Exception as e:
self._logger.error("Fill density setting {} is invalid, must be percentage (integer)".format(str(fill_density_percentage)))
fill_density_percentage = None
file_name = file_info['name']
file_path = file_info['path']
file_target = file_info['target']
#This is where the slice happens
if file_name.lower().endswith('.stl') and file_target == 'local':
octoprint_api_key = settings().get(['api', 'key'])
layerHeight = float(self._settings.get(['layerHeight']))
auto_print_uri = "http://localhost/api/files/local/{}".format(urllib.quote_plus(file_path))
#default_slice_profile_name = self._get_default_slice_profile('cura')['key']
default_slice_profile_name = self._get_default_slice_profile('cura')
print('&' * 30 + str(default_slice_profile_name))
printer_profile_name = self._get_current_printer_profile()['id']
print('Q' * 30 + str(printer_profile_name))
slice_data = {
'command': 'slice',
'profile': default_slice_profile_name,
'printerProfile': printer_profile_name,
'profile.layer_height': layerHeight
}
if fill_density_percentage is not None:
slice_data['profile.infill'] = fill_density_percentage
slice_data['print'] = True
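            # Resulting body for OctoPrint's slice command on the file API, e.g.
            # (illustrative values): {"command": "slice", "profile": "<default>",
            #  "printerProfile": "<printer id>", "profile.layer_height": 0.25,
            #  "profile.infill": 20, "print": true}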
assert octoprint_api_key is not None and len(octoprint_api_key) > 0
response = requests.post(auto_print_uri, headers = { "X-Api-Key" : octoprint_api_key }, json=slice_data, timeout=HTTP_REQUEST_TIMEOUT)
response.raise_for_status()
json_response = response.json()
return json_response
    #on_event
    #A series of if statements containing the different events Filtracker listens for.
    #self, event, payload, **kwargs
    #return
def on_event(self, event, payload, **kwargs):
global Layer
global uid
global url
did = self._settings.get(["did"])
self.checkPrinterStatus()
self._logger.info("event change123:")
self._logger.info(event)
self._logger.info(payload)
event_body = {}
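        # Bookkeeping order below: spool/profile updates per print event first, then
        # the printing status/process dictionaries, then the locbit-edge event post.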
if event == "PrintStarted":
Layer = 0
self.sendLayerStatus(Layer)
self._update_spool_length(update_remote=True)
self._update_profile_event_stats(event)
self._download_best_profile()
self._send_printer_status()
elif event == "PrintFailed":
Layer = 0
self.sendLayerStatus(Layer)
self._update_spool_length(update_remote=True)
self._update_profile_event_stats(event)
self._send_printer_status()
elif event == "PrintCancelled":
Layer = 0
self.sendLayerStatus(Layer)
self._update_spool_length(update_remote=True)
self._update_profile_event_stats(event)
self._send_printer_status()
elif event == "PrintDone":
self._update_spool_length(update_remote=True)
self._update_profile_event_stats(event)
self._send_printer_status()
elif event == "PrintPaused":
self._update_spool_length(update_remote=True)
self._update_profile_event_stats(event)
self._send_printer_status()
elif event == "PrintResumed":
self._update_profile_event_stats(event)
self._send_printer_status()
elif event == "Upload":
self._auto_print(payload)
self._download_best_profile()
if event in FiltrackerPrintingStatusDict:
self._logger.info("saving printing status event", FiltrackerPrintingStatusDict[event])
octoprint.plugin.SettingsPlugin.on_settings_save(self, FiltrackerPrintingStatusDict[event])
self._send_printer_status()
if event == "Startup" and self._printer.is_printing() is not True:
octoprint.plugin.SettingsPlugin.on_settings_save(self, FiltrackerPrintingStatusDict["Idle"])
self._send_printer_status()
if event == "PrinterStateChanged" and "state_id" in payload.keys() and FiltrackerPrintingStatusDict.has_key(payload["state_id"]):
octoprint.plugin.SettingsPlugin.on_settings_save(self, FiltrackerPrintingStatusDict[payload["state_id"]])
self._send_printer_status()
if event in FiltrackerPrinterProcessDict:
self._logger.info("saving printing process event", FiltrackerPrinterProcessDict[event])
octoprint.plugin.SettingsPlugin.on_settings_save(self, FiltrackerPrinterProcessDict[event])
self._send_printer_status()
if event == "PrinterStateChanged" and "state_id" in payload.keys() and FiltrackerPrinterProcessDict.has_key(payload["state_id"]):
octoprint.plugin.SettingsPlugin.on_settings_save(self, FiltrackerPrinterProcessDict[payload["state_id"]])
self._send_printer_status()
if event in FiltrackerMsgDict:
event_body = {
'uid' : uid,
'did' : did,
'event' : FiltrackerMsgDict[event]['name'],
'status' : FiltrackerMsgDict[event]['value']
}
try:
requests.post(url, data = event_body)
except BadStatusLine:
self._logger.info("Filtracker: Bad Status")
elif event == 'FileSelected':
event_body = {
'uid' : uid,
'did' : did,
'event' : 'File',
'status' : payload['filename']
}
try:
requests.post(url, data = event_body)
except BadStatusLine:
self._logger.info("Filtracker: Bad Status")
elif event == 'ZChange':
Layer += 1
event_body = {
'uid' : uid,
'did' : did,
'event' : 'Layer',
'status' : Layer
}
self._update_spool_length(update_remote=True)
try:
requests.post(url, data = event_body)
except BadStatusLine:
self._logger.info("Filtracker: Bad Status")
else:
event_body = {
'uid' : uid,
'did' : did,
'event': event
}
self._update_spool_length(update_remote=False)
try:
requests.post(url, data = event_body)
except BadStatusLine:
self._logger.info("Filtracker: Bad Status")
return
self._logger.info("Filtracker: Recording event " + event)
#sendLayerStatus
#A post to the locbit-edge url with the current layer information
#self, layer
def sendLayerStatus(self, layer):
global uid
global url
did = self._settings.get(["did"])
event_body = {
'uid' : uid,
'did' : did,
'event' : 'Layer',
'status' : layer
}
try:
requests.post(url, data = event_body)
except BadStatusLine:
self._logger.info("Filtracker: Bad Status")
    #checkPrinterStatus
    #Sends a GET request to localhost/api/printer for the current status of the printer.
    #self
def checkPrinterStatus(self):
url = "http://localhost/api/printer"
apiKey = settings().get(['api', 'key'])
try:
r = requests.get(url, headers = { "X-Api-Key" : apiKey })
self._logger.info(r.text)
except BadStatusLine:
self._logger.info("Filtracker: Bad Status")
    #is_wizard_required
    #Checks the config.yaml file for the mac address. Runs the setup wizard if it is missing.
    #self
    #True if no mac address is stored in config.yaml.
def is_wizard_required(self):
mac_address = self._settings.get(['macAddress'])
if not mac_address:
return True
print('5' * 20 + "{}".format(mac_address))
    #_set_events
    #Sets the events and their data to be transmitted through locbit-edge.
    #self
    #Event_dict(muidname, material, color, diameter, length)
def _set_events(self):
from datetime import datetime
from pytz import timezone
import time
        #Embed the local UTC offset in the timestamp format string.
        #time.timezone is seconds WEST of UTC, so a positive value means a
        #negative UTC offset (e.g. US Pacific: time.timezone == 28800 -> UTC-8.0).
        offset_hours = -time.timezone / 3600.0
        if offset_hours >= 0:
            hold = "%a, %m-%d-%y %H:%M:%S UTC+" + str(offset_hours)
        else:
            hold = "%a, %m-%d-%y %H:%M:%S UTC-" + str(abs(offset_hours))
        #Initializes the variables that will be reported to the platform.
datetime_str = datetime.now().strftime(hold)
        muidName = str(self._settings.get(["muid"]))
        spool_settings = self._get_spool_settings()
        material = str(spool_settings["material"])
        color = str(spool_settings["color"])
        diameter = spool_settings["diameter"]
        length = str(spool_settings["length"])
        jobProgress = str(spool_settings["jobProgress"])
from uuid import getnode as get_mac
did = get_mac()
printer_status = "Disconnected"
printer_connection = self._printer.get_current_connection()
        if printer_connection[0] != "Closed":
printer_status = "Connected"
        # Each time this function triggers, it checks the latest printing status
        if self._printer.get_state_id() in FiltrackerPrintingStatusDict:
octoprint.plugin.SettingsPlugin.on_settings_save(self, FiltrackerPrintingStatusDict[self._printer.get_state_id()])
event_dict = {
"did" : did,
"PrinterStatus" : printer_status,
"PrintingStatus" : self._settings.get(["PrintingStatus"]),
"PrinterProcess" : self._settings.get(["PrinterProcess"]),
"Message" : "",
"LastPingTime" : datetime_str
}
if len(muidName) > 0:
event_dict["MUIDName"] = muidName
if len(material) > 0:
event_dict["Material"] = material
if len(color) > 0:
event_dict["Color"] = color
if len(diameter) > 0:
event_dict["Diameter"] = diameter
        # Compare the length as a float, since the stored value may not be an int
if len(length) > 0 and float(length) >= 0:
event_dict['Length'] = length
return event_dict
    #_send_printer_status
    #Reports the current state of the printer to locbit-edge.
    #self
def _send_printer_status(self):
from uuid import getnode as get_mac
global url
current_event = self._set_events()
headers = {'protocol': 'octoprint','protocol-identifier':str(get_mac()),'protocol-payload':settings().get(['api', 'key'])}
try:
response = requests.post(url, data = current_event, headers=headers)
except BadStatusLine:
self._logger.info("Filtracker: Bad Status")
    #_send_printer_status_with_timer
    #Resends the printer status on a timer at the configured update interval.
    #self
def _send_printer_status_with_timer(self):
import threading
global url
self._send_printer_status()
int_time = float(self._settings.get(["updateInterval"]))
try:
t = threading.Timer(int_time, self._send_printer_status_with_timer)
t.start()
except BadStatusLine:
self._logger.info("Filtracker: Bad Status")
#action
    #Allows the Locbit cloud to trigger commands on Filtracker.
#self
#flask response('command sent')
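    #e.g. (route prefix assumed from OctoPrint's BlueprintPlugin): GET /plugin/Filtracker/action?command=PausePrint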
@octoprint.plugin.BlueprintPlugin.route("/action", methods=["GET"])
def action(self):
if request.args.get('command') == 'ping':
self._send_printer_status()
if request.args.get('command') == 'StartPrint':
self._printer.start_print()
if request.args.get('command') == 'ResumePrint':
self._printer.resume_print()
if request.args.get('command') == 'PausePrint':
self._printer.pause_print()
if request.args.get('command') == 'CancelPrint':
self._printer.cancel_print()
return flask.make_response("Command Sent.", 200)
__plugin_name__ = "Filtracker"
__plugin_implementation__ = FiltrackerPlugin()
| {
"repo_name": "SD3D/Filtracker",
"path": "octoprint_Filtracker/__init__.py",
"copies": "1",
"size": "59519",
"license": "mit",
"hash": -4329257607207053300,
"line_mean": 48.8901927913,
"line_max": 246,
"alpha_frac": 0.5242023556,
"autogenerated": false,
"ratio": 4.389306784660767,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.02289519484332415,
"num_lines": 1193
} |
from __future__ import absolute_import, division
from httplib import BadStatusLine
from .sd3dNotifications import sd3dMsgDict
import octoprint.plugin
from octoprint.slicing import SlicingManager, UnknownProfile
from octoprint.server import printerProfileManager
from octoprint.settings import settings
import requests
import flask
from flask import request
import json
import hashlib
import os
from shutil import copyfile
import urllib
from urlparse import urlsplit
Layer = 0
uid = "55de667a295efb62093205e4"
# url = "http://192.168.0.34:3000"
#url = "http://api.locbit.com:8888/endpoint"
url = "https://test-api.locbit.com/endpoint"
status_url = 'https://test-api.locbit.com/statusByLid'
HTTP_REQUEST_TIMEOUT=50
LAYER_HEIGHT_THRESHOLD=0.25
class SD3DPlugin(octoprint.plugin.StartupPlugin,
octoprint.plugin.TemplatePlugin,
octoprint.plugin.SettingsPlugin,
octoprint.plugin.EventHandlerPlugin,
octoprint.plugin.AssetPlugin,
octoprint.plugin.SimpleApiPlugin,
octoprint.plugin.WizardPlugin):
def get_api_commands(self):
return dict(
command1=[],
command2=["some_parameter"]
)
def on_api_command(self, command, data):
import flask
if command == "command1":
parameter = "unset"
if "parameter" in data:
parameter = "set"
self._logger.info("command1 called, parameter is {parameter}".format(**locals()))
elif command == "command2":
self._logger.info("command2 called, some_parameter is {some_parameter}".format(**data))
def _post_spool_data(self, spool_data):
post_data = {"MUID":spool_data['muid'],
"Material":spool_data['material'],
"Color":spool_data['color'],
"Diameter":spool_data['diameter'],
"Length":spool_data['length']}
post_result = requests.post(url, json=post_data, timeout=HTTP_REQUEST_TIMEOUT)
post_result.raise_for_status()
post_result_data = post_result.json()
if not post_result_data['success']:
raise Exception("Post data: {}, response data: {}".format(str(post_data), str(post_result_data)))
def _get_spool_length(self, muid):
sd3d_api_key = self._settings.get(['sd3dAPIKey'])
sd3d_access_id = self._settings.get(['sd3dAccessID'])
if len(sd3d_api_key) == 0 or len(sd3d_access_id) == 0:
raise Exception("Cannot get stored spool length, either sd3d api key or access ID is missing from settings")
request_uri = "{}/{}/SD3DPrinter".format(status_url, muid)
query_params = {'api': sd3d_api_key, 'access': sd3d_access_id}
response = requests.get(request_uri, params=query_params, timeout=HTTP_REQUEST_TIMEOUT)
response.raise_for_status()
response_data = response.json()
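        # Assumed response shape when the spool is known (illustrative):
        # {"measurements": {"Length": {"status": 330.0}}}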
if 'measurements' in response_data and 'Length' in response_data['measurements']:
length = response_data['measurements']['Length'].get('status')
return length
elif 'success' in response_data and \
not response_data['success'] and \
response_data['message'] == 'Device is not found':
return None
else:
return None
def _get_spool_settings(self):
setting_keys = ['muid', 'material', 'color', 'diameter', 'length', 'initial_length', 'jobProgress']
setting_dict = {}
for setting_key in setting_keys:
setting_value = self._settings.get([setting_key])
setting_dict[setting_key] = setting_value
return setting_dict
def _get_printer_job_info(self):
job_uri = 'http://localhost/api/job'
octoprint_api_key = settings().get(['api', 'key'])
assert octoprint_api_key is not None and len(octoprint_api_key) > 0
response = requests.get(job_uri, headers = { "X-Api-Key" : octoprint_api_key }, timeout=HTTP_REQUEST_TIMEOUT)
response.raise_for_status()
return response.json()
def _get_slice_profile(self, slicer, slice_profile_name):
profile_uri = "http://localhost/api/slicing/{}/profiles/{}".format(slicer, slice_profile_name)
octoprint_api_key = settings().get(['api', 'key'])
assert octoprint_api_key is not None and len(octoprint_api_key) > 0
response = requests.get(profile_uri, headers = { "X-Api-Key" : octoprint_api_key }, timeout=HTTP_REQUEST_TIMEOUT)
response.raise_for_status()
return response.json()
def _get_printer_profile(self, printer_profile_id):
profile_uri = "http://localhost/api/printerprofiles"
octoprint_api_key = settings().get(['api', 'key'])
assert octoprint_api_key is not None and len(octoprint_api_key) > 0
response = requests.get(profile_uri, headers = { "X-Api-Key" : octoprint_api_key }, timeout=HTTP_REQUEST_TIMEOUT)
response.raise_for_status()
json_response = response.json()
return json_response['profiles'][printer_profile_id]
def _get_current_printer_profile(self):
profile_uri = "http://localhost/api/printerprofiles"
octoprint_api_key = settings().get(['api', 'key'])
assert octoprint_api_key is not None and len(octoprint_api_key) > 0
response = requests.get(profile_uri, headers = { "X-Api-Key" : octoprint_api_key }, timeout=HTTP_REQUEST_TIMEOUT)
response.raise_for_status()
printers = response.json()['profiles']
for printer in printers:
if printers[printer]['current']:
return printers[printer]
def _get_default_slice_profile(self, slicer):
profile_uri = "http://localhost/api/slicing/{}/profiles".format(slicer)
octoprint_api_key = settings().get(['api', 'key'])
assert octoprint_api_key is not None and len(octoprint_api_key) > 0
response = requests.get(profile_uri, headers = { "X-Api-Key" : octoprint_api_key }, timeout=HTTP_REQUEST_TIMEOUT)
response.raise_for_status()
profiles = response.json()
for profile in profiles:
if profiles[profile]['default']:
return profile
def _get_local_file_metadata(self, local_file_name):
local_file_uri = "http://localhost/api/files/local/{}".format(urllib.quote_plus(local_file_name))
octoprint_api_key = settings().get(['api', 'key'])
assert octoprint_api_key is not None and len(octoprint_api_key) > 0
response = requests.get(local_file_uri, headers = { "X-Api-Key" : octoprint_api_key }, timeout=HTTP_REQUEST_TIMEOUT)
response.raise_for_status()
json_response = response.json()
return json_response
def _get_current_job(self):
job_uri = "http://localhost/api/job"
octoprint_api_key = settings().get(['api', 'key'])
assert octoprint_api_key is not None and len(octoprint_api_key) > 0
response = requests.get(job_uri, headers = { "X-Api-Key" : octoprint_api_key }, timeout=HTTP_REQUEST_TIMEOUT)
response.raise_for_status()
job = response.json()
return job
def _update_spool_length(self, update_remote=False):
try:
current_spool_settings = self._get_spool_settings()
printer_job_info = self._get_printer_job_info()
initial_length = float(current_spool_settings['initial_length'])
job_obj = printer_job_info.get('job')
filament_obj = None
tool0_obj = None
estimated_job_length = None
job_completion_percent = None
if job_obj is not None:
filament_obj = job_obj.get('filament')
if filament_obj is not None:
tool0_obj = filament_obj.get('tool0')
if tool0_obj is not None:
estimated_job_length = tool0_obj['length']
progress_obj = printer_job_info.get('progress')
if progress_obj is not None:
job_completion_percent = progress_obj['completion']
internal_progress = current_spool_settings.get('jobProgress')
if internal_progress != '':
internal_progress = float(internal_progress)
if job_completion_percent is not None:
# If a job reset has been detected, set initial length to length
if internal_progress != '' and internal_progress > job_completion_percent:
initial_length = float(current_spool_settings['length'])
current_spool_settings['initial_length'] = str(current_spool_settings['length'])
# Job filament length is in millimeters, so must convert to meters
length_job_used = (job_completion_percent / 100) * (estimated_job_length / 1000)
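                    # e.g. 50% completion of a job estimated at 2000 mm of filament:
                    # (50 / 100) * (2000 / 1000) = 1.0 m consumed from the spool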
new_length = initial_length - length_job_used
current_spool_settings['length'] = new_length
current_spool_settings['length'] = str(current_spool_settings['length'])
current_spool_settings['jobProgress'] = job_completion_percent
octoprint.plugin.SettingsPlugin.on_settings_save(self, current_spool_settings)
# If a job reset has been detected, set initial length to length
elif job_completion_percent is None and internal_progress != '':
current_spool_settings['initial_length'] = str(current_spool_settings['length'])
current_spool_settings['jobProgress'] = ''
octoprint.plugin.SettingsPlugin.on_settings_save(self, current_spool_settings)
if update_remote:
current_spool_settings['length'] = float(current_spool_settings['length'])
self._post_spool_data(current_spool_settings)
except Exception as e:
self._logger.error("Could not update length: {}".format(str(e)))
def _set_default_slice_profile(self, profile_name):
slice_profile_path = settings().get(['folder', 'slicingProfiles'])
slice_manager = SlicingManager(slice_profile_path, printerProfileManager)
slice_manager.reload_slicers()
default_slicer = slice_manager.default_slicer
slice_manager.set_default_profile(default_slicer, profile_name, require_exists=True)
def on_api_get(self, request):
if request.args.get('install') == '1':
try:
fill_percent = request.args.get('fill')
self.install_dependencies(fill_percent)
return flask.jsonify(result='')
except Exception as e:
return flask.jsonify(error=str(e))
if request.args.get('settings') == '1':
return_result = {}
for qr_data_key in ['material', 'diameter', 'color', 'length', 'muid']:
return_result[qr_data_key] = self._settings.get([qr_data_key])
try:
return_result['length'] = "{0:.3f}".format(float(return_result['length']))
except Exception as e:
self._logger.info('Could not return length')
return flask.jsonify(result=return_result)
if request.args.get('autoprint_setting') == '1':
return flask.jsonify(result=self._settings.get(['autoPrintMode']))
import subprocess
qr_script_path = '/home/pi/oprint/lib/python2.7/site-packages/octoprint_SD3D/qr.py'
subprocess_args = [qr_script_path]
output = ''
subprocess_args.append('-u')
current_url = request.url
split_url = urlsplit(current_url)
split_url_port = ''
if split_url.port is not None:
split_url_port = ":{}".format(split_url.port)
subprocess_args.append("{}://{}{}".format(split_url.scheme, split_url.hostname, split_url_port))
output = subprocess.check_output(subprocess_args)
json_output = json.loads(output)
if 'error' in json_output:
return flask.jsonify(error=json_output['error'])
else:
qr_result = json_output.get('result')
if qr_result is None:
return flask.jsonify(error="QR code read failure. Uknown error.")
qr_result = qr_result.split(",")
if len(qr_result) == 5:
return_result = {'material': qr_result[0],
'diameter': qr_result[1],
'color': qr_result[2],
'length': qr_result[3],
'muid': qr_result[4]}
# Initialize plugin settings with data from QR code
octoprint.plugin.SettingsPlugin.on_settings_save(self, return_result)
octoprint.plugin.SettingsPlugin.on_settings_save(self, {'initial_length': return_result['length']})
try:
stored_length = self._get_spool_length(return_result['muid'])
# If the length of the spool already exists, update the settings,
# otherwise, post the initial spool data
if stored_length is not None:
return_result['length'] = stored_length
octoprint.plugin.SettingsPlugin.on_settings_save(self, return_result)
octoprint.plugin.SettingsPlugin.on_settings_save(self, {'initial_length': return_result['length']})
octoprint.plugin.SettingsPlugin.on_settings_save(self, {'printProgress': ''})
else:
self._post_spool_data(return_result)
except Exception as e:
return flask.jsonify(result=return_result, sd3d_error=str(e))
try:
self._set_default_slice_profile(return_result['muid'][0:7])
except Exception as e:
return flask.jsonify(result=return_result, sd3d_error="Setting profile {} as default failed, check to see if it exists".format(return_result['muid']))
return_result['length'] = "{0:.3f}".format(float(return_result['length']))
return flask.jsonify(result=return_result)
else:
return flask.jsonify(error="Invalid QR code")
def _update_profile_event_stats(self, printer_event):
sharing_mode = self._settings.get(['sharingMode'])
if not sharing_mode:
self._logger.info('Sharing Mode turned off, skipping profile stat update')
return
current_printer = self._get_current_printer_profile()
printer_make = current_printer['id']
printer_model = current_printer['model']
nozzle_size = current_printer['extruder']['nozzleDiameter']
muid = self._settings.get(['muid'])[0:7]
current_job = self._get_current_job()
gcode_file_name = current_job['job']['file']['name']
gcode_file_metadata = self._get_local_file_metadata(gcode_file_name)
gcode_identifier = gcode_file_metadata['hash']
#layer_height = None
#try:
# layer_height = int( self._settings.get(['layerHeight']) )
# assert layer_height > 0
#except Exception as e:
# self._logger.error('Cannot make profile stat request, layer height must be non-zero positive integer')
# return
        profile_update_data = json.dumps({
'printer_event': printer_event,
'muid': muid,
'gcode_identifier': gcode_identifier,
'printer_make': printer_make,
'printer_model': printer_model,
'nozzle_size': nozzle_size,
'material_diameter': float("{0:.3f}".format(float(self._settings.get(['diameter']))))
#'layer_height': layer_height
}
)
self._logger.info('UPDATE' * 5 + str(profile_update_data))
sd3d_info_share_event_uri = 'https://sd3d.locbit.com/event'
sd3d_api_key = self._settings.get(['sd3dAPIKey'])
sd3d_access_id = self._settings.get(['sd3dAccessID'])
if len(sd3d_api_key) == 0 or len(sd3d_access_id) == 0:
self._logger.error("No API key or access key in settings. Skipping stat update")
return
query_params = {'api': sd3d_api_key, 'access': sd3d_access_id}
response = requests.post(sd3d_info_share_event_uri, params=query_params, headers={'Content-Type': 'application/json'}, data=profile_update_data).json()
self._logger.info('EVENT STAT RESPONSE' * 3 + str(response))
if printer_event == 'PrintStarted' and not response['success']:
self._logger.error("Profile stats update failed: %s".format(response['data']))
self._send_client_alert("Could not update profile stats on PrintStart: %s" % response['data'])
def _download_best_profile(self):
cloud_mode = self._settings.get(['cloudMode'])
if not cloud_mode:
self._logger.info('Cloud Mode turned off, skipping best profile download')
return
current_printer = self._get_current_printer_profile()
printer_make = current_printer['id']
printer_model = current_printer['model']
nozzle_size = current_printer['extruder']['nozzleDiameter']
muid = self._settings.get(['muid'])[0:7]
material_diameter = float("{0:.3f}".format(float(self._settings.get(['diameter']))))
layer_height = None
layer_height_threshold = LAYER_HEIGHT_THRESHOLD
try:
layer_height = float(self._settings.get(['layerHeight']))
except Exception as e:
self._logger.error("Could not parse layer height {}, skipping best profile download".format(layer_height))
return
best_profile = self._get_best_profile(printer_make, printer_model, nozzle_size, muid, layer_height, layer_height_threshold, material_diameter)
if best_profile['success']:
print("best profile data:" + str(best_profile))
best_profile['data']['slicing_profile']['key'] = 'SD3D' + best_profile['data']['slicing_profile']['key']
best_profile['data']['slicing_profile']['default'] = False
self._upload_new_profile(best_profile['data']['slicing_profile'])
self._set_best_or_default_profile(best_profile['data']['slicing_profile']['key'])
else:
self._logger.error("Error getting best profile, skipping best profile download")
muid_prefix = self._settings.get(['muid'])[0:7]
try:
self._set_default_slice_profile(muid_prefix)
except Exception as e:
self._logger.error("Could not set default muid profile %s".format(muid_prefix))
self._send_client_alert("Could not get best profile and setting default slice profile for muid %s failed" % muid_prefix)
def _send_client_alert(self, message):
self._plugin_manager.send_plugin_message(self._identifier, message)
def _set_best_or_default_profile(self, best_profile_name):
muid_prefix = self._settings.get(['muid'])[0:7]
try:
self._set_default_slice_profile(best_profile_name)
except Exception as e:
try:
self._set_default_slice_profile(muid_prefix)
except Exception as e:
self._logger.error("Could not set best profile %s, nor default muid profile %s, check if either one exists".format(best_profile_name, muid_prefix))
self._send_client_alert("Could not set best profile %s, nor default muid profile %s, check if either one exists" % (best_profile_name, muid_prefix))
def _get_best_profile(self, printer_make, printer_model, nozzle_size, muid, layer_height, layer_height_threshold, material_diameter):
#printer_make = urllib.quote(printer_make)
#printer_model = urllib.quote(printer_model)
sd3d_api_key = self._settings.get(['sd3dAPIKey'])
sd3d_access_id = self._settings.get(['sd3dAccessID'])
query_data = {
'printer_make': printer_make,
'printer_model': printer_model,
'nozzle_size': nozzle_size,
'muid': muid,
'layer_height': layer_height,
'layer_height_threshold': layer_height_threshold,
'material_diameter': material_diameter,
'api': sd3d_api_key,
'access': sd3d_access_id
}
if len(sd3d_api_key) == 0 or len(sd3d_access_id) == 0:
self._logger.error("No API key or access key in settings. Skipping getting best profile")
return
sd3d_uri = 'https://sd3d.locbit.com/slicing_profile'
self._logger.info('GET BEST PROFILE REQUEST' * 3 + str(query_data))
response = requests.get(sd3d_uri, params=query_data)
self._logger.info('GET BEST PROFILE RESPONSE' * 3 + str(response.json()) + str(response.url))
return response.json()
def _upload_new_profile(self, profile):
profile_uri = "http://localhost/api/slicing/cura/profiles/{}".format(profile['key'])
octoprint_api_key = settings().get(['api', 'key'])
assert octoprint_api_key is not None and len(octoprint_api_key) > 0
response = requests.put(profile_uri, headers = { "X-Api-Key" : octoprint_api_key}, json=profile, timeout=HTTP_REQUEST_TIMEOUT)
response.raise_for_status()
return response.json()
def _associate_profile_gcode(self, gcode_identifier, slicer, slicing_profile_name, printer_profile_id):
self._logger.info('ASSOCIATE PROFILE' * 4 + slicing_profile_name)
slicing_profile = self._get_slice_profile(slicer, slicing_profile_name)
printer_profile = {}
printer_profile = self._get_printer_profile(printer_profile_id)
#layer_height = None
#try:
# layer_height = int( self._settings.get(['layerHeight']) )
# assert layer_height > 0
#except Exception as e:
# self._logger.error('Cannot make gcode association request, layer height must be non-zero positive integer')
# return
request_data = json.dumps({'muid': self._settings.get(['muid'])[0:7], 'gcode_identifier': gcode_identifier,
'slicing_profile': slicing_profile, 'printer_make': printer_profile_id,
'printer_model': printer_profile['model'], 'nozzle_size': printer_profile['extruder']['nozzleDiameter'],
'material_diameter': float("{0:.3f}".format(float(self._settings.get(['diameter'])))) })
#'layer_height': layer_height})
self._logger.info('PROFILE ASSOCIATION REQUEST' * 3 + str(request_data))
sd3d_info_share_uri = 'https://sd3d.locbit.com/profile'
sd3d_api_key = self._settings.get(['sd3dAPIKey'])
sd3d_access_id = self._settings.get(['sd3dAccessID'])
if len(sd3d_api_key) == 0 or len(sd3d_access_id) == 0:
self._logger.error("No API key or access key in settings. Skipping profile update")
return
query_params = {'api': sd3d_api_key, 'access': sd3d_access_id}
response = requests.post(sd3d_info_share_uri, params=query_params, headers={'Content-Type': 'application/json'}, data=request_data).json()
self._logger.info('PROFILE ASSOCIATION RESPONSE' * 3 + str(response))
def _auto_provision_printer(self):
sd3d_api_key = self._settings.get(['sd3dAPIKey'])
sd3d_access_id = self._settings.get(['sd3dAccessID'])
query_params = {'api': sd3d_api_key, 'access': sd3d_access_id}
did = self._settings.get(['did'])
lid = self._settings.get(['macAddress'])
provision_post_data = json.dumps({
'translator': 'SD3DPrinter',
'DeviceName': did,
'lid': lid,
'deviceDescriptionId': '559aeaf5d763cb2a02bb196d',
'locationId': '13',
'userId': '116'})
self._logger.info('PRINTER AUTO PROVISION REQUEST' * 3 + str(provision_post_data))
response = requests.post('https://test-api.locbit.com/provision', params=query_params, headers={'Content-Type': 'application/json'}, data=provision_post_data).json()
self._logger.info('PRINTER AUTO PROVISION RESPONSE' * 3 + str(response))
if 'success' in response and response['success']:
provision_did = response['message']['did']
activation_post_data = json.dumps({'did': provision_did,
'connectivity': True,
'services': True
})
self._logger.info('PRINTER ACTIVATION REQUEST' * 3 + str(activation_post_data))
activate_response = requests.post('https://dev-billing.locbit.com/charge', params=query_params, headers={'Content-Type': 'application/json'}, data=activation_post_data).json()
self._logger.info('PRINTER ACTIVATION RESPONSE' * 3 + str(activate_response))
def install_dependencies(self, fill_density):
import subprocess
from uuid import getnode as get_mac
settings().set(['folder', 'slicingProfiles'], '/home/pi/.octoprint/slicingProfiles')
settings().set(['slicing', 'defaultSlicer'], 'cura', force=True)
octoprint.plugin.SettingsPlugin.on_settings_save(self, {'macAddress': get_mac()})
try:
fill_density_percentage = int(fill_density)
assert fill_density_percentage > 0 and fill_density_percentage <= 100
octoprint.plugin.SettingsPlugin.on_settings_save(self, {'fillDensity': fill_density})
except Exception as e:
raise Exception("Fill density setting {} is invalid, must be percentage (integer)".format(fill_density))
commands = ['/usr/bin/apt-get update',
'/usr/bin/apt-get install -y ipython python-opencv python-scipy python-numpy python-setuptools python-pip python-pygame python-zbar',
'/bin/chmod +x /home/pi/oprint/lib/python2.7/site-packages/octoprint_SD3D/qr.py',
'/usr/bin/pip install --upgrade pip',
'/usr/local/bin/pip --no-cache-dir install timeout-decorator svgwrite https://github.com/sightmachine/SimpleCV/zipball/master'
]
for command in commands:
subprocess.check_call("/bin/bash -c 'sudo {}'".format(command), shell=True)
def on_after_startup(self):
from uuid import getnode as get_mac
self._logger.info("MAC: {}".format(get_mac()))
current_printer_name = self._get_current_printer_profile()['id']
octoprint.plugin.SettingsPlugin.on_settings_save(self, {'did': current_printer_name})
self._logger.info("Hello world! I am: %s" % self._settings.get(["did"]))
self._auto_provision_printer()
def slice_monkey_patch_gen(slice_func):
def slice_monkey_patch(*args, **kwargs):
original_callback = args[5]
def patched_callback(*callbackargs, **callbackkwargs):
if 'callback_args' in kwargs and 'callback_kwargs' in kwargs:
original_callback(*kwargs['callback_args'], **kwargs['callback_kwargs'])
elif 'callback_args' in kwargs and 'callback_kwargs' not in kwargs:
gco_file = None
for arg in kwargs['callback_args']:
if arg.endswith('gco') and arg != args[3]:
gco_file = arg
break
original_callback(*kwargs['callback_args'])
if gco_file is not None:
gco_hash = self._get_local_file_metadata(gco_file)['hash']
self._associate_profile_gcode(gco_hash, args[1], args[4], kwargs['printer_profile_id'])
elif 'callback_args' not in kwargs and 'callback_kwargs' in kwargs:
original_callback(*kwargs['callback_kwargs'])
elif 'callback_args' not in kwargs and 'callback_kwargs' not in kwargs:
original_callback()
sharing_mode = self._settings.get(['sharingMode'])
if sharing_mode:
arg_list = list(args)
arg_list[5] = patched_callback
args = tuple(arg_list)
slice_func(*args, **kwargs)
return slice_monkey_patch
octoprint.slicing.SlicingManager.slice = slice_monkey_patch_gen(octoprint.slicing.SlicingManager.slice)
def get_settings_defaults(self):
return dict(did='',
material='',
diameter='',
color='',
initial_length='',
length='',
muid='',
sd3dAPIKey='yCX9PgjsvzGuaKTT9yuUIJFehPHjMknU',
sd3dAccessID='DxM7QlAsDo43Z0SJW1qwLh4FBXGQlaGU',
jobProgress='',
layerHeight='',
sharingMode=True,
cloudMode=False,
autoPrintMode=False,
macAddress='',
fillDensity='20'
)
def get_template_configs(self):
return [
dict(type="navbar", custom_bindings=False),
dict(type="settings", custom_bindings=False)
]
def get_assets(self):
return dict(js=["js/SD3D.js"])
def _auto_print(self, file_info):
if not self._settings.get(['autoPrintMode']):
return
fill_density_percentage = self._settings.get(['fillDensity'])
try:
fill_density_percentage = int(fill_density_percentage)
assert fill_density_percentage > 0 and fill_density_percentage <= 100
except Exception as e:
self._logger.error("Fill density setting {} is invalid, must be percentage (integer)".format(str(fill_density_percentage)))
fill_density_percentage = None
file_name = file_info['name']
file_path = file_info['path']
file_target = file_info['target']
if file_name.lower().endswith('.stl') and file_target == 'local':
auto_print_uri = "http://localhost/api/files/local/{}".format(urllib.quote_plus(file_path))
octoprint_api_key = settings().get(['api', 'key'])
#default_slice_profile_name = self._get_default_slice_profile('cura')['key']
default_slice_profile_name = self._get_default_slice_profile('cura')
print('&' * 30 + str(default_slice_profile_name))
printer_profile_name = self._get_current_printer_profile()['id']
print('Q' * 30 + str(printer_profile_name))
slice_data = {
'command': 'slice',
'print': True,
'profile': default_slice_profile_name,
'printerProfile': printer_profile_name
}
if fill_density_percentage is not None:
slice_data['profile.fill_density'] = fill_density_percentage
assert octoprint_api_key is not None and len(octoprint_api_key) > 0
response = requests.post(auto_print_uri, headers = { "X-Api-Key" : octoprint_api_key }, json=slice_data, timeout=HTTP_REQUEST_TIMEOUT)
response.raise_for_status()
json_response = response.json()
return json_response
def on_event(self, event, payload, **kwargs):
global Layer
global uid
global url
did = self._settings.get(["did"])
self.checkPrinterStatus()
if event == "PrintStarted":
Layer = 0
self.sendLayerStatus(Layer)
self._update_spool_length(update_remote=True)
self._update_profile_event_stats(event)
self._download_best_profile()
elif event == "PrintFailed":
Layer = 0
self.sendLayerStatus(Layer)
self._update_spool_length(update_remote=True)
self._update_profile_event_stats(event)
elif event == "PrintCancelled":
Layer = 0
self.sendLayerStatus(Layer)
self._update_spool_length(update_remote=True)
self._update_profile_event_stats(event)
elif event == "PrintDone":
self._update_spool_length(update_remote=True)
self._update_profile_event_stats(event)
elif event == "PrintPaused":
self._update_spool_length(update_remote=True)
self._update_profile_event_stats(event)
elif event == "PrintResumed":
self._update_profile_event_stats(event)
elif event == "Upload":
self._auto_print(payload)
self._download_best_profile()
if event in sd3dMsgDict:
event_body = {
'uid' : uid,
'did' : did,
'event' : sd3dMsgDict[event]['name'],
'status' : sd3dMsgDict[event]['value']
}
elif event == 'FileSelected':
event_body = {
'uid' : uid,
'did' : did,
'event' : 'File',
'status' : payload['filename']
}
elif event == 'ZChange':
Layer += 1
event_body = {
'uid' : uid,
'did' : did,
'event' : 'Layer',
'status' : Layer
}
self._update_spool_length(update_remote=True)
else:
event_body = {
'uid' : uid,
'did' : did,
'event': event
}
self._update_spool_length(update_remote=False)
try:
requests.post(url, data = event_body)
except BadStatusLine:
self._logger.info("SD3D: Bad Status")
self._logger.info("SD3D: Recording event " + event)
def sendLayerStatus(self, layer):
global uid
global url
did = self._settings.get(["did"])
event_body = {
'uid' : uid,
'did' : did,
'event' : 'Layer',
'status' : layer
}
try:
requests.post(url, data = event_body)
except BadStatusLine:
self._logger.info("SD3D: Bad Status")
def checkPrinterStatus(self):
url = "http://localhost/api/printer"
apiKey = settings().get(['api', 'key'])
try:
r = requests.get(url, headers = { "X-Api-Key" : apiKey })
self._logger.info(r.text)
except BadStatusLine:
self._logger.info("SD3D: Bad Status")
def is_wizard_required(self):
mac_address = self._settings.get(['macAddress'])
if not mac_address:
return True
print('5' * 20 + "{}".format(self._settings.get(['macAddress'])))
__plugin_name__ = "SD3D"
__plugin_implementation__ = SD3DPlugin()
| {
"repo_name": "dissipate/Octoprint-SD3D",
"path": "octoprint_SD3D/__init__.py",
"copies": "1",
"size": "41514",
"license": "mit",
"hash": 2949741954952106000,
"line_mean": 45.4362416107,
"line_max": 199,
"alpha_frac": 0.4966758202,
"autogenerated": false,
"ratio": 4.533085826599694,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.02161091863716146,
"num_lines": 894
} |
from __future__ import absolute_import, division
# from io import StringIO
from glob import glob
import os.path as op
import tempfile
import simplejson as json
from pandas.api import types
import numpy as np
import pandas as pd
# from _common import cooler_cmp
from click.testing import CliRunner
import cooler
import pytest
### INGEST AND AGGREGATION ###
from cooler.cli.cload import pairs as cload_pairs
from cooler.cli.load import load
tmp = tempfile.gettempdir()
testdir = op.realpath(op.dirname(__file__))
datadir = op.join(testdir, "data")
def _run_cload_pairs(runner, binsize, extra_args):
args = [
op.join(datadir, "toy.chrom.sizes") + ":" + str(binsize),
op.join(datadir, "toy.pairs"),
"toy.{}.cool".format(binsize),
"-c1", "2",
"-p1", "3",
"-c2", "4",
"-p2", "5",
"--assembly", "toy",
"--chunksize", "10",
] + extra_args
return runner.invoke(cload_pairs, args)
def _cmp_pixels_2_bg(f_out, f_ref, one_based_ref=True):
# output, 1-based starts
out_df = cooler.Cooler(f_out).pixels(join=True)[:]
if one_based_ref:
out_df["start1"] += 1
out_df["start2"] += 1
# reference
ref_df = pd.read_csv(
f_ref,
sep="\t",
names=["chrom1", "start1", "end1", "chrom2", "start2", "end2", "count"],
)
assert np.all(out_df == ref_df)
# '--no-symmetric-upper'
# '--input-copy-status', 'unique|duplex',
@pytest.mark.parametrize(
"ref,extra_args",
[
("symm.upper", []), # reflect triu pairs
("symm.upper", ["--input-copy-status", "unique"]), # reflect triu pairs
("asymm", ["--no-symmetric-upper"]),
],
)
def test_cload_symm_asymm(ref, extra_args):
runner = CliRunner()
with runner.isolated_filesystem():
result = _run_cload_pairs(runner, 2, extra_args)
assert result.exit_code == 0
_cmp_pixels_2_bg("toy.2.cool", op.join(datadir, "toy.{}.2.bg2".format(ref)))
# '--temp-dir', '',
# '--no-delete-temp',
# '--max-merge', '',
@pytest.mark.parametrize(
"ref,extra_args", [("symm.upper", ["--temp-dir", ".", "--no-delete-temp"])]
)
def test_cload_mergepass(ref, extra_args):
runner = CliRunner()
with runner.isolated_filesystem():
result = _run_cload_pairs(runner, 2, extra_args)
assert result.exit_code == 0
_cmp_pixels_2_bg("toy.2.cool", op.join(datadir, "toy.{}.2.bg2".format(ref)))
assert len(cooler.fileops.list_coolers(glob("*.cool")[0])) > 0
# '--field', '',
# '--no-count', '',
def test_cload_field():
runner = CliRunner()
with runner.isolated_filesystem():
extra_args = ["--field", "score=8"]
result = _run_cload_pairs(runner, 2, extra_args)
assert result.exit_code == 0
pixels = cooler.Cooler("toy.2.cool").pixels()[:]
assert "count" in pixels.columns and types.is_integer_dtype(
pixels.dtypes["count"]
)
assert "score" in pixels.columns and types.is_float_dtype(
pixels.dtypes["score"]
)
extra_args = ["--field", "count=8"]
result = _run_cload_pairs(runner, 2, extra_args)
assert result.exit_code == 0
pixels = cooler.Cooler("toy.2.cool").pixels()[:]
assert "count" in pixels.columns and types.is_integer_dtype(
pixels.dtypes["count"]
)
assert np.allclose(pixels["count"][:], 0)
extra_args = ["--field", "count=8:dtype=float"]
result = _run_cload_pairs(runner, 2, extra_args)
assert result.exit_code == 0
pixels = cooler.Cooler("toy.2.cool").pixels()[:]
assert "count" in pixels.columns and types.is_float_dtype(
pixels.dtypes["count"]
)
assert np.allclose(pixels["count"][:], 0.2)
extra_args = ["--field", "count=8:agg=min,dtype=float"]
result = _run_cload_pairs(runner, 2, extra_args)
assert result.exit_code == 0
pixels = cooler.Cooler("toy.2.cool").pixels()[:]
assert "count" in pixels.columns and types.is_float_dtype(
pixels.dtypes["count"]
)
assert np.allclose(pixels["count"][:], 0.1)
## don't implement the --no-count for now
# extra_args = ['--field', 'score=7:dtype=float', '--no-count']
# result = _run_cload_pairs(runner, 2, extra_args)
# assert result.exit_code == 0
# pixels = cooler.Cooler('toy.2.cool').pixels()[:]
# assert 'count' not in pixels.columns
# assert 'score' in pixels.columns and types.is_float_dtype(pixels.dtypes['score'])
# '--metadata', '',
# '--zero-based',
# '--comment-char', '',
# '--storage-options', '',
def test_cload_other_options():
runner = CliRunner()
with runner.isolated_filesystem():
meta = {"foo": "bar", "number": 42}
with open("meta.json", "w") as f:
json.dump(meta, f)
extra_args = [
"--metadata",
"meta.json",
"--zero-based",
"--storage-options",
"shuffle=True,fletcher32=True,compression=lzf",
]
result = _run_cload_pairs(runner, 2, extra_args)
assert result.exit_code == 0
c = cooler.Cooler("toy.2.cool")
assert c.info["metadata"] == meta
with c.open("r") as h5:
dset = h5["bins/start"]
assert dset.shuffle
assert dset.fletcher32
assert dset.compression == "lzf"
def _run_load(runner, matrix_file, format, binsize, extra_args):
args = [
"-f",
format,
op.join(datadir, "toy.chrom.sizes") + ":" + str(binsize),
op.join(datadir, matrix_file),
"toy.{}.cool".format(binsize),
"--assembly",
"toy",
"--chunksize",
"10",
] + extra_args
return runner.invoke(load, args)
# '--no-symmetric-upper'
# '--input-copy-status', 'unique|duplex',
@pytest.mark.parametrize(
"ref,extra_args",
[
("symm.upper", []), # reflect tril pairs
("symm.upper", ["--one-based", "--input-copy-status", "unique"]), # reflect tril pairs
("asymm", ["--one-based", "--no-symmetric-upper"]),
],
)
def test_load_symm_asymm(ref, extra_args):
runner = CliRunner()
with runner.isolated_filesystem():
ref = op.join(datadir, "toy.{}.2.bg2".format(ref))
result = _run_load(runner, ref, "bg2", 2, extra_args)
assert result.exit_code == 0
_cmp_pixels_2_bg("toy.2.cool", ref)
# '--field', '',
def test_load_field():
runner = CliRunner()
with runner.isolated_filesystem():
extra_args = ["--field", "count=7:dtype=float"]
result = _run_load(runner, "toy.symm.upper.2.bg2", "bg2", 2, extra_args)
assert result.exit_code == 0
pixels1 = cooler.Cooler(op.join(datadir, "toy.symm.upper.2.cool")).pixels()[:]
pixels2 = cooler.Cooler("toy.2.cool").pixels()[:]
assert "count" in pixels2.columns and types.is_float_dtype(
pixels2.dtypes["count"]
)
assert np.allclose(pixels1["count"][:], pixels2["count"][:])
def test_load_field2():
runner = CliRunner()
with runner.isolated_filesystem():
extra_args = ["--count-as-float"]
result = _run_load(runner, "toy.symm.upper.2.bg2", "bg2", 2, extra_args)
assert result.exit_code == 0
pixels1 = cooler.Cooler(op.join(datadir, "toy.symm.upper.2.cool")).pixels()[:]
pixels2 = cooler.Cooler("toy.2.cool").pixels()[:]
assert "count" in pixels2.columns and types.is_float_dtype(
pixels2.dtypes["count"]
)
assert np.allclose(pixels1["count"][:], pixels2["count"][:])
| {
"repo_name": "mirnylab/cooler",
"path": "tests/test_cli_ingest.py",
"copies": "1",
"size": "7671",
"license": "bsd-3-clause",
"hash": 9050571740419038000,
"line_mean": 32.352173913,
"line_max": 95,
"alpha_frac": 0.5716334246,
"autogenerated": false,
"ratio": 3.253180661577608,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9321130500743664,
"avg_score": 0.0007367170867887045,
"num_lines": 230
} |
from __future__ import absolute_import, division
from itertools import combinations
import numpy as np
from scipy.spatial.distance import pdist
from six.moves import map
def sq_to_dist(i, j, n):
"""Convert coordinate of square distance matrix to condensed matrix index.
The condensed version of a squareform, pairwise distance matrix is
a linearisation of the upper triangular, non-diagonal coordinates
of the squareform distance matrix. This function returns the [i, j]-th
coordinate of the condensed array.
eg. given a squareform matrix,
array([[ 0. , 10. , 22.36067977],
[ 10. , 0. , 14.14213562],
[ 22.36067977, 14.14213562, 0. ]])
The condensed version of this matrix is:
array([ 10. , 22.36067977, 14.14213562])
Parameters
----------
i : int
i-th coordinate.
j : int
j-th coordinate.
n : int
Dimension n of n*n distance matrix.
Returns
-------
index : int
Position of pairwise distance [i, j] in
condensed distance matrix.
References
----------
In the scipy.spatial.squareform documentation, it is shown that the
index in the condensed array is given by
{n choose 2} - {(n - i) choose 2} + (j - i - 1).
Some simple arithmetic shows that this can be expanded to the formula below.
The documentation can be found in the following link:
http://docs.scipy.org/doc/scipy-0.14.0/reference/generated/scipy.spatial.distance.squareform.html
Examples
--------
>>> sq_to_dist(0, 1, 4)
0
>>> sq_to_dist(0, 3, 4)
2
>>> sq_to_dist(1, 2, 4)
3
"""
if i > j:
i, j = j, i
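# Expanding {n choose 2} - {(n - i) choose 2} + (j - i - 1) gives
# n(n-1)/2 - (n-i)(n-i-1)/2 + j - i - 1 = i*n - i*(i+1)/2 + j - i - 1,
# which is the formula used below.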
index = i * n + j - i * (i + 1) / 2 - i - 1
return int(index)
def mongo_group_by(collection, group_by):
"""
Group MongoDB collection according to specified field.
Sends aggregate query to MongoDB collection to group
all documents by a given field and returns dictionary
mapping the field to the corresponding (plate, well)
co-ordinate(s).
Parameters
----------
collection : pymongo collection
Pymongo object directing to collection.
group_by : string
Field to group collection by.
Returns
-------
query_dict : dict { string : list of tuple }
Query dictionary mapping the specified group_by field to a list of
(plate, well) co-ordinates.
"""
mongo_query = collection.aggregate([{
'$group' : {
# groups all documents according to specified field
'_id': '$' + group_by,
'coords': {
'$addToSet': {
# add plate and well for each document
# belonging to the group
'plate': '$plate',
'well': '$well'
}
}
}
}])
query_dict = {}
for doc in mongo_query:
query_dict[doc['_id']] = []
for coord in doc['coords']:
try:
new_coord = (int(coord['plate']), str(coord['well']))
query_dict[doc['_id']].append(new_coord)
except KeyError:
pass
return query_dict
def gene_distance_score(X, collection, metric='euclidean'):
"""Find intra/inter gene distance scores between samples.
Parameters
----------
X : Data frame, shape (n_samples, n_features)
Feature data frame.
metric : string, optional
Which distance measure to use when calculating distances.
Must be one of the options allowable in
scipy.spatial.distance.pdist. Default is euclidean distance.
Returns
-------
all_intragene_data : array
An 1D array with intra-gene distances (i.e. distances
between samples with the same gene knocked down).
all_intergene_data : array
An 1D array with inter-gene distances (i.e. distances
between samples with different gene knocked down).
"""
gene_dict = mongo_group_by(collection, 'gene_name')
nsamples = X.shape[0]
npairs = int(nsamples * (nsamples - 1) / 2)
all_intragene_index = []
for key in gene_dict:
if len(gene_dict[key]) > 1:
indices = (X.index.get_loc(coord) for coord in gene_dict[key]
if coord in X.index)
for i, j in combinations(indices, 2):
all_intragene_index.append(sq_to_dist(i, j, X.shape[0]))
all_intragene_index.sort()
all_intergene_index = np.setdiff1d(np.arange(npairs), all_intragene_index,
assume_unique=True)
distance = pdist(X, metric)
all_intragene_data = distance[all_intragene_index]
all_intergene_data = distance[all_intergene_index]
return all_intragene_data, all_intergene_data
def _partition_range(values1, values2, n):
"""Build a partition of bins over the entire range of values1 and values2.
Parameters
----------
values1, values2 : arrays
arrays to be concatenated
n : int
number of bins
Returns
-------
partition : array
A 1D array of bin edges, of length n
Examples
--------
>>> d1 = np.array([3, 3, 4, 5, 6])
>>> d2 = np.array([5, 5, 5, 6, 7])
>>> _partition_range(d1, d2, 5)
array([ 3., 4., 5., 6., 7.])
"""
eps = 1e-30
d_max = max(np.max(values1), np.max(values2)) + eps
d_min = min(np.min(values1), np.min(values2))
partition = np.linspace(d_min, d_max, n) # n evenly spaced edges spanning both arrays
return partition
def _empirical_distribution(values, bins):
"""Return an EDF of an input array over a given array of bin edges
Note: returns a PDF, not a CDF
Parameters
----------
values : array of float
Values of distribution to be modelled
bins : array of float
Array of bin right edge values
Returns
-------
edf : array
A probability distribution over the range of bins
"""
ind = np.digitize(values, bins)
#Note: np.digitize bin indices start from 1
#np.bincount returns the number of times each bin index occurs
edf = np.bincount(ind, minlength = len(bins) + 1)
#normalize
edf = edf / np.sum(edf)
return edf
def bhattacharyya_distance(values0, values1, n):
"""Return the Bhattacharyya coefficient of 2 input arrays
The BC of two distributions, f(x) and g(x), is given by [1]_:
$\sum_{i=1}^n \sqrt{f(x_i) g(x_i)}$
Parameters
----------
values0, values1 : arrays
Return BC of these 2 arrays
n : int
number of bins to partition values0 and values1 over
Returns
-------
bc : real
Bhattacharyya coefficient of values0 and values1
References
----------
..[1] Bhattacharyya, A. (1943). "On a measure of divergence between two
statistical populations defined by their probability distributions"
Bulletin of the Calcutta Mathematical Society
Examples
--------
>>> d1 = np.array([3, 3, 4, 5, 6])
>>> d2 = np.array([5, 5, 5, 6, 7])
>>> d = bhattacharyya_distance(d1, d2, 5)
>>> abs(d - 0.546) < 1e-3
True
See Also
--------
_partition_range : function
_empirical_distribution : function
"""
bins = _partition_range(values0, values1, n)
d0 = _empirical_distribution(values0, bins)
d1 = _empirical_distribution(values1, bins)
bc = np.sum(np.sqrt(d0*d1))
return bc
| {
"repo_name": "starcalibre/microscopium",
"path": "microscopium/metrics.py",
"copies": "1",
"size": "7546",
"license": "bsd-3-clause",
"hash": 2517315052632505300,
"line_mean": 27.6920152091,
"line_max": 101,
"alpha_frac": 0.5832228995,
"autogenerated": false,
"ratio": 3.7374938088162457,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.48207167083162455,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division
from pylab import plot, show, xlabel, ylabel, text, xlim, ylim, sign
from si import MakesNoSense
class Plot(object):
"""Glue-code wrapper between pylab (matplotlib) and si. One plot can only have one SI unit per dimension, so you can plot without having to worry about dimensions.
Does not yet enforce anything about to which window you plot. (You can have two objects simultaneously and show them at the same time.)
>>> from si.common import *
>>> p = Plot(s, m)
>>> t = [ms*_x for _x in xrange(200)] # numeric arrays not supported yet.
>>> g = 10*N/kg
>>> y = [90*cm - (_t**2 * g / 2) for _t in t]
>>> p.plot(t, y)
>>> # p.show()
"""
def __init__(self, xunit, yunit):
self.xunit = xunit
self.yunit = yunit
self.xlim = None
self.ylim = None
def _x2number(self, x):
if hasattr(self.xunit, "using"):
return x.using(self.xunit)
else:
return x/self.xunit
def _y2number(self, y):
if hasattr(self.yunit, "using"):
return y.using(self.yunit)
else:
return y/self.yunit
def plot(self, x, y, *args, **kwords):
"""Like pylab.plot()."""
try:
x = [self._x2number(_x) for _x in x]
y = [self._y2number(_y) for _y in y]
except MakesNoSense:
raise Exception, "units don't match."
plot(x,y,*args,**kwords)
def show(self):
"""Like pylab.show()."""
self.lim()
show()
def xlabel(self, label, unit = None):
"""Set x label; use unit to state that label contains an assertion about the unit.
>>> from si.common import *
>>> p = Plot(s, m)
>>> p.xlabel("Time (in hours)",h)
Traceback (most recent call last):
...
AssertionError: Plot unit does not match label unit.
"""
if unit: assert unit == self.xunit, "Plot unit does not match label unit."
xlabel(label)
def ylabel(self, label, unit = None):
"""Set y label; use unit to state that label contains an assertion about the unit.
>>> from si.common import *
>>> p = Plot(s, m)
>>> p.ylabel("Length (in metre)",m)
"""
if unit: assert unit == self.yunit, "Plot unit does not match label unit."
ylabel(label)
def text(self, x, y, label):
"""Like pylab.text()."""
try:
x = self._x2number(x)
y = self._y2number(y)
except MakesNoSense:
raise Exception, "units don't match."
text(x,y,label)
def arrowhead(self, (x,y), (dirx,diry)):
"""Plot an arrowhead with its tip in (x,y) coming from the direction of (dirx,diry) (without units, only direction).""" # FIXME: provide an interface for the non-pylab matplotlib api
self.lim()
xsize = (xlim()[1] - xlim()[0]) / 200
ysize = (ylim()[1] - ylim()[0]) / 200
if diry:
raise NotImplementedError, "only left/right supported for now"
x,y = self._x2number(x), self._y2number(y)
plot([x,x+xsize*sign(dirx)],[y,y+ysize], 'k-')
plot([x,x+xsize*sign(dirx)],[y,y-ysize], 'k-')
def lim(self, x=None, y=None):
"""Set the plotting range (like xlim and ylim together).
>>> from si.common import *
>>> p = Plot(s,m)
>>> p.lim(x=(1*min,5*min))"""
if x: self.xlim = (self._x2number(x[0]), self._x2number(x[1]))
if y: self.ylim = (self._y2number(y[0]), self._y2number(y[1]))
xlim(self.xlim)
ylim(self.ylim)
def xc0(self):
"""Return coordinate of left plot side (for drawing lines without changing the automatic scale)"""
self.lim()
return self.xunit * xlim()[0]
def yc0(self):
"""Return coordinate of bottom plot side (for drawing lines without changing the automatic scale)"""
self.lim()
return self.yunit * ylim()[0]
| {
"repo_name": "chrysn/si",
"path": "si/pylab/__init__.py",
"copies": "1",
"size": "4057",
"license": "bsd-2-clause",
"hash": 5942968282962505000,
"line_mean": 33.3813559322,
"line_max": 190,
"alpha_frac": 0.5536110426,
"autogenerated": false,
"ratio": 3.521701388888889,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4575312431488889,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division
from sys import stdout as _stdout
from time import time as _time
import numpy as np
from numpy.fft import rfftn as np_rfftn, irfftn as np_irfftn
try:
from pyfftw import zeros_aligned
from pyfftw.builders import rfftn as rfftn_builder, irfftn as irfftn_builder
PYFFTW = True
except ImportError:
PYFFTW = False
try:
import pyopencl as cl
import pyopencl.array as cl_array
from . import pyclfft
from .kernels import Kernels
PYOPENCL = True
except ImportError:
PYOPENCL = False
from disvis import volume
from .pdb import PDB
from .libdisvis import (
dilate_points, distance_restraint, count_violations, count_interactions
)
from ._extensions import rotate_grid3d
class DisVis(object):
def __init__(self, fftw=True, print_callback=True):
# parameters to be defined
self.receptor = None
self.ligand = None
self.distance_restraints = []
# parameters with standard values that can be set by the user
self.rotations = np.asarray([[[1, 0, 0], [0, 1, 0], [0, 0, 1]]], dtype=np.float64)
self.weights = None
self.voxelspacing = 1.0
self.interaction_radius = 2.5
self.max_clash = 100
self.min_interaction = 300
self.receptor_interaction_selection = None
self.ligand_interaction_selection = None
self.interaction_distance = 10
self.interaction_restraints_cutoff = None
self.occupancy_analysis = False
self.queue = None
self._fftw = fftw and PYFFTW
self.print_callback = print_callback
# Output attributes
# Array containing number of complexes consistent with EXACTLY n
# restraints, no more, no less.
self.accessible_complexes = None
self.accessible_interaction_space = None
self.violations = None
self.occupancy_grids = None
self.interaction_matrix = None
def add_distance_restraint(self, receptor_selection, ligand_selection, mindis, maxdis):
distance_restraint = [receptor_selection, ligand_selection, mindis, maxdis]
self.distance_restraints.append(distance_restraint)
def search(self):
self._initialize()
if self.queue is None:
self._cpu_init()
self._cpu_search()
else:
self._gpu_init()
self._gpu_search()
# Set the results
self.accessible_interaction_space = volume.Volume(
self._access_interspace, self.voxelspacing,
self._origin)
self.accessible_complexes = self._accessible_complexes
self.violations = self._violations
if self.occupancy_analysis:
self.occupancy_grids = {}
for i in xrange(self._nrestraints + 1):
try:
occ_grid = self._occ_grid[i]
except KeyError:
occ_grid = None
if occ_grid is not None:
self.occupancy_grids[i] = volume.Volume(occ_grid,
self.voxelspacing, self._origin)
if self._interaction_analysis:
self.interaction_matrix = self._interaction_matrix
@staticmethod
def _minimal_volume_parameters(fixed_coor, scanning_coor, offset, voxelspacing):
# the minimal grid shape is the size of the fixed protein in
# each dimension and the longest diameter of the scanning chain
offset += np.linalg.norm(scanning_coor - scanning_coor.mean(axis=0), axis=1).max()
mindist = fixed_coor.min(axis=0) - offset
maxdist = fixed_coor.max(axis=0) + offset
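# radix235 presumably rounds each dimension up to a 2/3/5-smooth integer,
# which keeps the subsequent FFT sizes efficient.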
shape = [volume.radix235(int(np.ceil(x)))
for x in (maxdist - mindist) / voxelspacing][::-1]
origin = mindist
return shape, origin
def _initialize(self):
# check if requirements are set
if any(x is None for x in (self.receptor, self.ligand)) or not self.distance_restraints:
raise ValueError("Not all requirements are met for a search")
if self.weights is None:
self.weights = np.ones(self.rotations.shape[0], dtype=np.float64)
if self.weights.size != self.rotations.shape[0]:
raise ValueError("Weight array has incorrect size.")
# Determine size for volume to hold the receptor and ligand densities
vdw_radii = self.receptor.vdw_radius
self._shape, self._origin = self._minimal_volume_parameters(self.receptor.coor,
self.ligand.coor, self.interaction_radius + vdw_radii.max(), self.voxelspacing)
# Calculate the interaction surface and core of the receptor
# Move the coordinates to the grid-frame
self._rgridcoor = (self.receptor.coor - self._origin) / self.voxelspacing
self._rcore = np.zeros(self._shape, dtype=np.float64)
self._rsurf = np.zeros(self._shape, dtype=np.float64)
radii = vdw_radii / self.voxelspacing
dilate_points(self._rgridcoor, radii, self._rcore)
radii += self.interaction_radius / self.voxelspacing
dilate_points(self._rgridcoor, radii, self._rsurf)
# Set ligand center to the origin of the grid and calculate the core
# shape. The coordinates are wrapped around in the density.
self._lgridcoor = (self.ligand.coor - self.ligand.center) / self.voxelspacing
radii = self.ligand.vdw_radius / self.voxelspacing
self._lcore = np.zeros(self._shape, dtype=np.float64)
dilate_points(self._lgridcoor, radii, self._lcore)
# Normalize the requirements for a complex in grid quantities
self._grid_max_clash = self.max_clash / self.voxelspacing**3
self._grid_min_interaction = self.min_interaction / self.voxelspacing**3
# Setup the distance restraints
self._nrestraints = len(self.distance_restraints)
self._grid_restraints = grid_restraints(self.distance_restraints,
self.voxelspacing, self._origin, self.ligand.center)
self._rrestraints = self._grid_restraints[:, 0:3]
self._lrestraints = self._grid_restraints[:, 3:6]
self._mindis = self._grid_restraints[:,6]
self._maxdis = self._grid_restraints[:,7]
self._accessible_complexes = np.zeros(self._nrestraints + 1, dtype=np.float64)
self._access_interspace = np.zeros(self._shape, dtype=np.int32)
self._violations = np.zeros((self._nrestraints, self._nrestraints), dtype=np.float64)
# Calculate the average occupancy grid only for complexes consistent
# with interaction_restraints_cutoff and more. By default, only
# analyze solutions that violate at most 3 restraints
if self.interaction_restraints_cutoff is None:
# Do not calculate the interactions for complexes consistent
# with 0 restraints
cutoff = min(3, self._nrestraints - 1)
self.interaction_restraints_cutoff = self._nrestraints - cutoff
# Allocate an occupancy grid for all restraints that are being investigated
self._occ_grid = {}
if self.occupancy_analysis:
for i in xrange(self.interaction_restraints_cutoff, self._nrestraints + 1):
self._occ_grid[i] = np.zeros(self._shape, np.float64)
# Check if we want to do an interaction analysis, i.e. whether
# interface residues are given for both the ligand and receptor.
selection = (self.ligand_interaction_selection, self.receptor_interaction_selection)
self._interaction_analysis = any(x is not None for x in selection)
if self._interaction_analysis:
# Since calculating all interactions is costly, only analyze
# solutions that are consistent with more than N restraints.
self._lselect = (self.ligand_interaction_selection.coor -
self.ligand_interaction_selection.center) / self.voxelspacing
self._rselect = (self.receptor_interaction_selection.coor -
self._origin) / self.voxelspacing
shape = (self._nrestraints + 1 - self.interaction_restraints_cutoff,
self._lselect.shape[0], self._rselect.shape[0])
self._interaction_matrix = np.zeros(shape, dtype=np.float64)
self._sub_interaction_matrix = np.zeros(shape, dtype=np.int64)
# Calculate the longest distance in the lcore. This helps in making the
# grid rotation faster, as fewer points need to be considered for
# rotation
self._llength = int(np.ceil(
np.linalg.norm(self._lgridcoor, axis=1).max() +
self.ligand.vdw_radius.max() / self.voxelspacing
)) + 1
@staticmethod
def _allocate_array(shape, dtype, fftw):
if fftw:
arr = zeros_aligned(shape, dtype)
else:
arr = np.zeros(shape, dtype)
return arr
def rfftn(self, in_arr, out_arr):
"""Provide a similar interface to PyFFTW and numpy interface"""
if self._fftw:
out_arr = self._rfftn(in_arr, out_arr)
else:
out_arr = self._rfftn(in_arr)
return out_arr
def irfftn(self, in_arr, out_arr):
"""Provide a similar interface to PyFFTW and numpy interface"""
if self._fftw:
out_arr = self._irfftn(in_arr, out_arr)
else:
out_arr = self._irfftn(in_arr, s=self._shape)
return out_arr
def _cpu_init(self):
# Allocate arrays for FFT's
# Real arrays
for arr in 'rot_lcore clashvol intervol tmp'.split():
setattr(self, '_' + arr, self._allocate_array(self._shape, np.float64, self._fftw))
# Complex arrays
self._ft_shape = list(self._shape)[:-1] + [self._shape[-1] // 2 + 1]
for arr in 'lcore lcore_conj rcore rsurf tmp'.split():
setattr(self, '_ft_' + arr,
self._allocate_array(self._ft_shape, np.complex128, self._fftw))
# Integer arrays
for arr in 'interspace red_interspace access_interspace restspace'.split():
setattr(self, "_" + arr, np.zeros(self._shape, np.int32))
# Boolean arrays
for arr in 'not_clashing interacting'.split():
setattr(self, "_" + arr, np.zeros(self._shape, np.bool))
# Array for rotating points and restraint coordinates
self._restraints_center = np.zeros_like(self._grid_restraints[:,3:6])
self._rot_lrestraints = np.zeros_like(self._lrestraints)
if self._interaction_analysis:
self._rot_lselect = np.zeros_like(self._lselect)
# Build the FFT's if we are using pyfftw
if self._fftw:
self._rfftn = rfftn_builder(self._rcore)
self._irfftn = irfftn_builder(self._ft_rcore, s=self._shape)
else:
self._rfftn = np_rfftn
self._irfftn = np_irfftn
# initial calculations
self._ft_rcore = self.rfftn(self._rcore, self._ft_rcore)
self._ft_rsurf = self.rfftn(self._rsurf, self._ft_rsurf)
# Keep track of number of consistent complexes
self._tot_complex = 0
self._consistent_complexes = np.zeros(self._nrestraints + 1, dtype=np.float64)
def _rotate_lcore(self, rotmat):
rotate_grid3d(self._lcore, rotmat, self._llength,
self._rot_lcore, True)
def _get_interaction_space(self):
# Calculate the clashing and interaction volume
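# conj(FT(ligand)) * FT(receptor) is the Fourier-space form of their
# cross-correlation: a single inverse FFT yields the overlap volume for
# every possible translation at once.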
self._ft_lcore = self.rfftn(self._rot_lcore, self._ft_lcore)
np.conjugate(self._ft_lcore, self._ft_lcore_conj)
np.multiply(self._ft_lcore_conj, self._ft_rcore, self._ft_tmp)
self._clashvol = self.irfftn(self._ft_tmp, self._clashvol)
# Round up values, as they should be integers
np.round(self._clashvol, out=self._clashvol)
np.multiply(self._ft_lcore_conj, self._ft_rsurf, self._ft_tmp)
self._intervol = self.irfftn(self._ft_tmp, self._intervol)
np.round(self._intervol, out=self._intervol)
# Determine the interaction space, i.e. all the translations where
# the receptor and ligand are interacting and not clashing
np.less_equal(self._clashvol, self._grid_max_clash, self._not_clashing)
np.greater_equal(self._intervol, self._grid_min_interaction, self._interacting)
np.logical_and(self._not_clashing, self._interacting, self._interspace)
def _get_restraints_center(self, rotmat):
"""Rotate the restraints and determine the restraints center point"""
np.dot(self._lrestraints, rotmat.T, self._rot_lrestraints)
np.subtract(self._rrestraints, self._rot_lrestraints,
self._restraints_center)
def _get_restraint_space(self):
# Determine the consistent restraint space
self._restspace.fill(0)
distance_restraint(self._restraints_center, self._mindis, self._maxdis,
self._restspace)
def _get_reduced_interspace(self):
np.multiply(self._interspace, self._restspace, self._red_interspace)
def _count_complexes(self, weight):
self._tot_complex += weight * self._interspace.sum()
self._consistent_complexes += weight *\
np.bincount(self._red_interspace.ravel(),
minlength=(max(2, self._nrestraints + 1))
)
def _count_violations(self, weight):
# Count all sampled complexes
count_violations(self._restraints_center, self._mindis,
self._maxdis, self._red_interspace, weight,
self._violations)
def _get_access_interspace(self):
np.maximum(self._red_interspace, self._access_interspace,
self._access_interspace)
def _get_occupancy_grids(self, weight):
for i in xrange(self.interaction_restraints_cutoff, self._nrestraints + 1):
np.greater_equal(self._red_interspace, np.int32(i), self._tmp)
self._ft_tmp = self.rfftn(self._tmp, self._ft_tmp)
np.multiply(self._ft_tmp, self._ft_lcore, self._ft_tmp)
self._tmp = self.irfftn(self._ft_tmp, self._tmp)
np.round(self._tmp, out=self._tmp)
self._occ_grid[i] += weight * self._tmp
def _get_interaction_matrix(self, rotmat, weight):
# Rotate the ligand coordinates
self._rot_lselect = np.dot(self._lselect, rotmat.T)
count_interactions(self._red_interspace, self._rselect,
self._rot_lselect, np.float64(self.interaction_distance / self.voxelspacing),
weight, np.int32(self.interaction_restraints_cutoff),
self._interaction_matrix)
def _cpu_search(self):
time0 = _time()
for n in xrange(self.rotations.shape[0]):
rotmat = self.rotations[n]
weight = self.weights[n]
# Rotate the ligand grid
self._rotate_lcore(rotmat)
self._get_interaction_space()
# Rotate the restraints
self._get_restraints_center(rotmat)
# Determine the consistent restraint space
self._get_restraint_space()
# Calculate the reduced accessible interaction
self._get_reduced_interspace()
# Perform some statistics, such as the number of restraint
# violations and the number of accessible complexes consistent with
# exactly N restraints.
self._count_complexes(weight)
self._count_violations(weight)
# Calculate shapes for visual information, such as the highest
# number of consistent restraints found at each grid point
self._get_access_interspace()
# Calculate an occupancy grid for complexes consistent with at
# least i restraints
if self.occupancy_analysis:
self._get_occupancy_grids(weight)
# Perform interaction analysis if required
if self._interaction_analysis:
self._get_interaction_matrix(rotmat, weight)
if self.print_callback is not None:
#self.print_callback(n, total, time0)
self._print_progress(n, self.rotations.shape[0], time0)
# Get the number of accessible complexes consistent with exactly N
# restraints. We need to correct the total number of complexes sampled
# for this.
self._accessible_complexes[:] = self._consistent_complexes
self._accessible_complexes[0] = self._tot_complex - self._accessible_complexes[1:].sum()
@staticmethod
def _print_progress(n, total, time0):
m = n + 1
pdone = m/total
t = _time() - time0
_stdout.write('\r{:d}/{:d} ({:.2%}, ETA: {:d}s) '\
.format(m, total, pdone,
int(t/pdone - t)))
_stdout.flush()
def _gpu_init(self):
q = self.queue
# Move arrays to GPU
self._cl_rcore = cl_array.to_device(q, self._rcore.astype(np.float32))
self._cl_rsurf = cl_array.to_device(q, self._rsurf.astype(np.float32))
self._cl_lcore = cl_array.to_device(q, self._lcore.astype(np.float32))
# Pack each rotation matrix into a row of 16 float32 values
self._cl_rotations = np.zeros((self.rotations.shape[0], 16), dtype=np.float32)
self._cl_rotations[:, :9] = self.rotations.reshape(-1, 9)
# Allocate arrays
# Float32
self._cl_shape = tuple(self._shape)
arr_names = 'rot_lcore clashvol intervol tmp'.split()
for arr_name in arr_names:
setattr(self, '_cl_' + arr_name,
cl_array.zeros(q, self._cl_shape, dtype=np.float32)
)
# Int32
arr_names = 'interspace red_interspace restspace access_interspace'.split()
for arr_name in arr_names:
setattr(self, '_cl_' + arr_name,
cl_array.zeros(q, self._cl_shape, dtype=np.int32)
)
# Boolean
arr_names = 'not_clashing interacting'.split()
for arr_name in arr_names:
setattr(self, '_cl_' + arr_name,
cl_array.zeros(q, self._cl_shape, dtype=np.int32)
)
# Complex64
self._ft_shape = tuple([self._shape[0] // 2 + 1] + list(self._shape)[1:])
arr_names = 'lcore lcore_conj rcore rsurf tmp'.split()
for arr_name in arr_names:
setattr(self, '_cl_ft_' + arr_name,
cl_array.empty(q, self._ft_shape, dtype=np.complex64)
)
# Restraints arrays
self._cl_rrestraints = np.zeros((self._nrestraints, 4), dtype=np.float32)
self._cl_rrestraints[:, :3] = self._rrestraints
self._cl_rrestraints = cl_array.to_device(q, self._cl_rrestraints)
self._cl_lrestraints = np.zeros((self._nrestraints, 4), dtype=np.float32)
self._cl_lrestraints[:, :3] = self._lrestraints
self._cl_lrestraints = cl_array.to_device(q, self._cl_lrestraints)
self._cl_mindis = cl_array.to_device(q, self._mindis.astype(np.float32))
self._cl_maxdis = cl_array.to_device(q, self._maxdis.astype(np.float32))
self._cl_mindis2 = cl_array.to_device(q, self._mindis.astype(np.float32) ** 2)
self._cl_maxdis2 = cl_array.to_device(q, self._maxdis.astype(np.float32) ** 2)
self._cl_rot_lrestraints = cl_array.zeros_like(self._cl_rrestraints)
self._cl_restraints_center = cl_array.zeros_like(self._cl_rrestraints)
# kernels
self._kernel_constants = {'interaction_cutoff': 10,
'nrestraints': self._nrestraints,
'shape_x': self._shape[2],
'shape_y': self._shape[1],
'shape_z': self._shape[0],
'llength': self._llength,
'nreceptor_coor': 0,
'nligand_coor': 0,
}
# Counting arrays
self._cl_hist = cl_array.zeros(self.queue, self._nrestraints, dtype=np.int32)
self._cl_consistent_complexes = cl_array.zeros(self.queue,
self._nrestraints, dtype=np.float32)
self._cl_viol_hist = cl_array.zeros(self.queue, (self._nrestraints,
self._nrestraints), dtype=np.int32)
self._cl_violations = cl_array.zeros(self.queue, (self._nrestraints,
self._nrestraints), dtype=np.float32)
# Conversions
self._cl_grid_max_clash = np.float32(self._grid_max_clash)
self._cl_grid_min_interaction = np.float32(self._grid_min_interaction)
self._CL_ZERO = np.int32(0)
# Occupancy analysis
self._cl_occ_grid = {}
if self.occupancy_analysis:
for i in xrange(self.interaction_restraints_cutoff, self._nrestraints + 1):
self._cl_occ_grid[i] = cl_array.zeros(self.queue,
self._cl_shape, dtype=np.float32)
# Interaction analysis
if self._interaction_analysis:
shape = (self._lselect.shape[0], self._rselect.shape[0])
self._cl_interaction_hist = cl_array.zeros(self.queue, shape,
dtype=np.int32)
self._cl_interaction_matrix = {}
for i in xrange(self._nrestraints + 1 - self.interaction_restraints_cutoff):
self._cl_interaction_matrix[i] = cl_array.zeros(self.queue, shape,
dtype=np.float32)
# Coordinate arrays
self._cl_rselect = np.zeros((self._rselect.shape[0], 4), dtype=np.float32)
self._cl_rselect[:, :3] = self._rselect
self._cl_rselect = cl_array.to_device(q, self._cl_rselect)
self._cl_lselect = np.zeros((self._lselect.shape[0], 4), dtype=np.float32)
self._cl_lselect[:, :3] = self._lselect
self._cl_lselect = cl_array.to_device(q, self._cl_lselect)
self._cl_rot_lselect = cl_array.zeros_like(self._cl_lselect)
# Update kernel constants
self._kernel_constants['nreceptor_coor'] = self._cl_rselect.shape[0]
self._kernel_constants['nligand_coor'] = self._cl_lselect.shape[0]
self._cl_kernels = Kernels(q.context, self._kernel_constants)
self._cl_rfftn = pyclfft.RFFTn(q.context, self._shape)
self._cl_irfftn = pyclfft.iRFFTn(q.context, self._shape)
# Initial calculations
self._cl_rfftn(q, self._cl_rcore, self._cl_ft_rcore)
self._cl_rfftn(q, self._cl_rsurf, self._cl_ft_rsurf)
self._cl_tot_complex = cl_array.sum(self._cl_interspace, dtype=np.dtype(np.float32))
def _cl_rotate_lcore(self, rotmat):
self._cl_kernels.rotate_grid3d(self.queue, self._cl_lcore, rotmat,
self._cl_rot_lcore)
self.queue.finish()
def _cl_get_interaction_space(self):
k = self._cl_kernels
self._cl_rfftn(self.queue, self._cl_rot_lcore, self._cl_ft_lcore)
k.conj(self._cl_ft_lcore, self._cl_ft_lcore_conj)
k.cmultiply(self._cl_ft_lcore_conj, self._cl_ft_rcore, self._cl_ft_tmp)
self._cl_irfftn(self.queue, self._cl_ft_tmp, self._cl_clashvol)
k.round(self._cl_clashvol, self._cl_clashvol)
k.cmultiply(self._cl_ft_lcore_conj, self._cl_ft_rsurf, self._cl_ft_tmp)
self._cl_irfftn(self.queue, self._cl_ft_tmp, self._cl_intervol)
k.round(self._cl_intervol, self._cl_intervol)
k.less_equal(self._cl_clashvol, self._cl_grid_max_clash, self._cl_not_clashing)
k.greater_equal(self._cl_intervol, self._cl_grid_min_interaction, self._cl_interacting)
k.logical_and(self._cl_not_clashing, self._cl_interacting, self._cl_interspace)
self.queue.finish()
def _cl_get_restraints_center(self, rotmat):
k = self._cl_kernels
k.rotate_points3d(self.queue, self._cl_lrestraints, rotmat,
self._cl_rot_lrestraints)
k.subtract(self._cl_rrestraints, self._cl_rot_lrestraints,
self._cl_restraints_center)
self.queue.finish()
def _cl_get_restraint_space(self):
k = self._cl_kernels
k.set_to_i32(np.int32(0), self._cl_restspace)
k.dilate_point_add(self.queue, self._cl_restraints_center, self._cl_mindis,
self._cl_maxdis, self._cl_restspace)
self.queue.finish()
def _cl_get_reduced_interspace(self):
self._cl_kernels.multiply_int32(self._cl_restspace,
self._cl_interspace, self._cl_red_interspace)
self.queue.finish()
def _cl_count_complexes(self, weight):
# Count all sampled complexes
self._cl_tot_complex += cl_array.sum(self._cl_interspace,
dtype=np.dtype(np.float32)) * weight
self._cl_kernels.set_to_i32(np.int32(0), self._cl_hist)
self._cl_kernels.histogram(self.queue, self._cl_red_interspace, self._cl_hist)
self._cl_kernels.multiply_add(self._cl_hist, weight,
self._cl_consistent_complexes)
self.queue.finish()
def _cl_count_violations(self, weight):
self._cl_kernels.set_to_i32(np.int32(0), self._cl_viol_hist)
self._cl_kernels.count_violations(self.queue,
self._cl_restraints_center, self._cl_mindis2, self._cl_maxdis2,
self._cl_red_interspace, self._cl_viol_hist)
self._cl_kernels.multiply_add(self._cl_viol_hist, weight, self._cl_violations)
self.queue.finish()
def _cl_get_access_interspace(self):
cl_array.maximum(self._cl_red_interspace, self._cl_access_interspace,
self._cl_access_interspace)
self.queue.finish()
def _cl_get_interaction_matrix(self, rotmat, weight):
self._cl_kernels.rotate_points3d(self.queue, self._cl_lselect, rotmat,
self._cl_rot_lselect)
for nconsistent in np.arange(self.interaction_restraints_cutoff,
self._nrestraints + 1, dtype=np.int32):
self._cl_kernels.set_to_i32(np.int32(0), self._cl_interaction_hist)
self._cl_kernels.count_interactions(self.queue, self._cl_rselect,
self._cl_rot_lselect, self._cl_red_interspace, nconsistent,
self._cl_interaction_hist)
self._cl_kernels.multiply_add(self._cl_interaction_hist, weight,
self._cl_interaction_matrix[nconsistent - self.interaction_restraints_cutoff])
self.queue.finish()
def _cl_get_occupancy_grids(self, weight):
# Calculate an average occupancy grid to provide an average shape
# for several number of consistent restraints
k = self._cl_kernels
for i in xrange(self.interaction_restraints_cutoff, self._nrestraints + 1):
# Get a grid for all translations that are consistent with at least
# N restraints
k.greater_equal_iif(self._cl_red_interspace, np.int32(i), self._cl_tmp)
self._cl_rfftn(self.queue, self._cl_tmp, self._cl_ft_tmp)
k.cmultiply(self._cl_ft_tmp, self._cl_ft_lcore, self._cl_ft_tmp)
self._cl_irfftn(self.queue, self._cl_ft_tmp, self._cl_tmp)
k.round(self._cl_tmp, self._cl_tmp)
k.multiply_add2(self._cl_tmp, weight, self._cl_occ_grid[i])
self.queue.finish()
def _gpu_search(self):
time0 = _time()
for n in xrange(self.rotations.shape[0]):
rotmat = self._cl_rotations[n]
weight = np.float32(self.weights[n])
# Rotate the ligand
self._cl_rotate_lcore(rotmat)
# Calculate the clashing and interaction volume for each translation
self._cl_get_interaction_space()
# Rotate the restraints and determine the center point
self._cl_get_restraints_center(rotmat)
self._cl_get_restraint_space()
# Check for each complex how many restraints are consistent
self._cl_get_reduced_interspace()
# Do some analysis such as counting complexes and violations
self._cl_count_complexes(weight)
self._cl_count_violations(weight)
self._cl_get_access_interspace()
# Optional analyses
if self.occupancy_analysis:
self._cl_get_occupancy_grids(weight)
if self._interaction_analysis:
self._cl_get_interaction_matrix(rotmat, weight)
# Print progress
if _stdout.isatty():
self._print_progress(n, self.rotations.shape[0], time0)
self.queue.finish()
# Get the data from GPU
self._accessible_complexes = self._cl_consistent_complexes.get()
self._accessible_complexes = np.asarray([self._cl_tot_complex.get()] +
self._accessible_complexes.tolist(), dtype=np.float64)
self._accessible_complexes[0] -= self._accessible_complexes[1:].sum()
self._violations = self._cl_violations.get().astype(np.float64)
self._cl_access_interspace.get(ary=self._access_interspace)
if self.occupancy_analysis:
for i in xrange(self.interaction_restraints_cutoff, self._nrestraints + 1):
self._occ_grid[i] = self._cl_occ_grid[i].get().astype(np.float64)
if self._interaction_analysis:
for i in xrange(self._nrestraints + 1 - self.interaction_restraints_cutoff):
self._interaction_matrix[i] = self._cl_interaction_matrix[i].get().astype(np.float64)
def grid_restraints(restraints, voxelspacing, origin, lcenter):
nrestraints = len(restraints)
g_restraints = np.zeros((nrestraints, 8), dtype=np.float64)
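# Column layout per restraint: receptor grid position (0:3), ligand grid
# position (3:6), minimum distance (6), maximum distance (7), all in voxels.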
for n in range(nrestraints):
r_sel, l_sel, mindis, maxdis = restraints[n]
r_pos = (r_sel.center - origin)/voxelspacing
l_pos = (l_sel.center - lcenter)/voxelspacing
g_restraints[n, 0:3] = r_pos
g_restraints[n, 3:6] = l_pos
g_restraints[n, 6] = mindis/voxelspacing
g_restraints[n, 7] = maxdis/voxelspacing
return g_restraints
| {
"repo_name": "haddocking/disvis",
"path": "disvis/disvis.py",
"copies": "1",
"size": "30230",
"license": "apache-2.0",
"hash": 1490764736799924700,
"line_mean": 43.2606149341,
"line_max": 101,
"alpha_frac": 0.6078398941,
"autogenerated": false,
"ratio": 3.5053339517625233,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.959638596266881,
"avg_score": 0.0033575766387428604,
"num_lines": 683
} |
from __future__ import absolute_import, division
from twisted.python.compat import urllib_parse, urlquote
from twisted.internet.protocol import ClientFactory
from twisted.internet.endpoints import connectProtocol
from twisted.web.http import HTTPFactory, HTTPClient, HTTPChannel, Request
from twisted.web.proxy import ProxyClient, ProxyClientFactory
from twisted.internet import reactor
from pyi2ptunnel import util
class _I2PProxyRequest(Request):
protocols = {
b'http' : ProxyClientFactory,
}
ports = {
b'http' : 80,
}
def __init__(self, channel, queued, reactor=reactor):
Request.__init__(self, channel, queued)
self.reactor = reactor
self.endpointFactory = channel.createEndpoint
def process(self):
parsed = urllib_parse.urlparse(self.uri)
protocol = parsed[0]
host = parsed[1].decode('ascii')
if protocol in self.ports:
port = self.ports[protocol]
else:
# no default port known for this scheme; unsupported protocols are
# skipped below when the client factory is looked up
pass
if ':' in host:
host, port = host.split(':')
port = int(port)
rest = urllib_parse.urlunparse((b'', b'') + parsed[2:])
if not rest:
rest = rest + b'/'
if protocol in self.protocols:
factory = self.protocols[protocol]
headers = self.getAllHeaders().copy()
if b'host' not in headers:
headers[b'host'] = host.encode('ascii')
headers.pop(b'user-agent', None)
headers[b'user-agent'] = b'I2P'
self.content.seek(0, 0)
s = self.content.read()
client = factory(self.method, rest, self.clientproto, headers, s, self)
ep = self.endpointFactory(host, port)
connectProtocol(ep, client.buildProtocol(ep))
class _Proxy(HTTPChannel):
requestFactory = _I2PProxyRequest
def Proxy(createEndpoint, **kwargs):
factory = HTTPFactory()
factory.protocol = _Proxy
factory.protocol.createEndpoint = createEndpoint
return factory
| {
"repo_name": "str4d/i2p-tools",
"path": "pyi2ptunnel/pyi2ptunnel/tunnels/http.py",
"copies": "2",
"size": "2105",
"license": "mit",
"hash": -3816238259941584000,
"line_mean": 30.4179104478,
"line_max": 83,
"alpha_frac": 0.6057007126,
"autogenerated": false,
"ratio": 4.252525252525253,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5858225965125253,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division
from unittest import TestCase, SkipTest
import plotly.graph_objs as go
class TestGraphObjRepr(TestCase):
def test_trace_repr(self):
N = 100
scatt = go.Scatter(
y=list(range(N)),
marker={"color": "green", "opacity": [e / N for e in range(N)]},
)
expected = """\
Scatter({
'marker': {'color': 'green',
'opacity': [0.0, 0.01, 0.02, 0.03, 0.04, 0.05, 0.06, 0.07, 0.08,
0.09, 0.1, 0.11, 0.12, 0.13, 0.14, 0.15, 0.16, 0.17,
0.18, 0.19, 0.2, 0.21, 0.22, 0.23, 0.24, 0.25, 0.26,
0.27, 0.28, 0.29, 0.3, 0.31, 0.32, 0.33, 0.34, 0.35,
0.36, 0.37, 0.38, 0.39, 0.4, 0.41, 0.42, 0.43, 0.44,
0.45, 0.46, 0.47, 0.48, 0.49, 0.5, 0.51, 0.52, 0.53,
0.54, 0.55, 0.56, 0.57, 0.58, 0.59, 0.6, 0.61, 0.62,
0.63, 0.64, 0.65, 0.66, 0.67, 0.68, 0.69, 0.7, 0.71,
0.72, 0.73, 0.74, 0.75, 0.76, 0.77, 0.78, 0.79, 0.8,
0.81, 0.82, 0.83, 0.84, 0.85, 0.86, 0.87, 0.88, 0.89,
0.9, 0.91, 0.92, 0.93, 0.94, 0.95, 0.96, 0.97, 0.98,
0.99]},
'y': [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37,
38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55,
56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73,
74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91,
92, 93, 94, 95, 96, 97, 98, 99]
})"""
self.assertEqual(scatt.__repr__(), expected)
def test_trace_repr_elided(self):
N = 1000
scatt = go.Scatter(
y=list(range(N)),
marker={"color": "green", "opacity": [e / N for e in range(N)]},
)
expected = """\
Scatter({
'marker': {'color': 'green',
'opacity': [0.0, 0.001, 0.002, ..., 0.997, 0.998, 0.999]},
'y': [0, 1, 2, ..., 997, 998, 999]
})"""
self.assertEqual(scatt.__repr__(), expected)
| {
"repo_name": "plotly/plotly.py",
"path": "packages/python/plotly/plotly/tests/test_core/test_graph_objs/test_repr.py",
"copies": "2",
"size": "2282",
"license": "mit",
"hash": -576654546645670000,
"line_mean": 43.7450980392,
"line_max": 81,
"alpha_frac": 0.421121823,
"autogenerated": false,
"ratio": 2.544035674470457,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.39651574974704573,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division
import bisect
import numpy as np
from scipy import misc, ndimage, sparse
'''
misc houses scientific tools and helpers
'''
class distributions:
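# iter(int, 1) calls int() until it returns the sentinel 1, which never
# happens (int() is always 0), so this yields an endless stream of 1s.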
uniform = (1 for _ in iter(int,1))
def flux(adj, values):
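# adj is assumed to be a scipy.sparse COO matrix: each edge weight becomes
# the absolute difference of the endpoint values, then averaged per row.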
adj.data = abs(values[adj.col] - values[adj.row])
return adj.mean(axis=1).A1
def distances_to_neighbors(network):
output = [[] for i in range(network.order)]
for (t,h), d in zip(network.pairs, network.lengths):
output[h].append(d)
output[t].append(d)
return output
def filtered_distances(network, _filter, default):
all_distances = distances_to_neighbors(network)
distances = np.array([_filter(row) if row else default for row in all_distances])
return distances.squeeze()
distances_to_nearest_neighbors = lambda network: filtered_distances(network, min, 1E-6)
distances_to_furthest_neighbors = lambda network: filtered_distances(network, max, np.inf)
def match(p1, p2):
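# For each point in p1, find the index of its nearest (Euclidean) neighbour in p2.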
return [np.argmin(np.linalg.norm(p-p2, axis=1)) for p in p1]
| {
"repo_name": "RodericDay/MiniPNM",
"path": "minipnm/misc.py",
"copies": "1",
"size": "1044",
"license": "mit",
"hash": 5140158875070541000,
"line_mean": 31.625,
"line_max": 90,
"alpha_frac": 0.7049808429,
"autogenerated": false,
"ratio": 3.3037974683544302,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.450877831125443,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division
import gc
from copy import copy
from sfepy.base.base import output, get_default, Struct
from sfepy.applications import PDESolverApp, Application
from .coefs_base import MiniAppBase, CoefEval
from .utils import rm_multi
from sfepy.discrete.evaluate import eval_equations
import sfepy.base.multiproc as multi
import numpy as nm
import six
from six.moves import range
def insert_sub_reqs(reqs, levels, req_info):
"""Recursively build all requirements in correct order."""
all_reqs = []
for _, req in enumerate(reqs):
# Coefficients are referenced as 'c.<name>'...
areq = req[2:] if req.startswith('c.') else req
try:
rargs = req_info[areq]
except KeyError:
raise ValueError('requirement "%s" is not defined!' % req)
sub_reqs = rargs.get('requires', [])
if req in levels:
raise ValueError('circular requirement "%s"!' % (req))
if sub_reqs:
levels.append(req)
sreqs = insert_sub_reqs(sub_reqs, levels, req_info)
all_reqs += [ii for ii in sreqs if ii not in all_reqs]
levels.pop()
if req not in all_reqs:
all_reqs.append(req)
return all_reqs
def get_dict_idxval(dict_array, idx):
return {k: v[idx] for k, v in six.iteritems(dict_array)}
class CoefVolume(MiniAppBase):
def __call__(self, volume, problem=None, data=None):
problem = get_default(problem, self.problem)
term_mode = self.term_mode
equations, variables = problem.create_evaluable(self.expression,
term_mode=term_mode)
return eval_equations(equations, variables, term_mode=term_mode)
class HomogenizationWorker(object):
def __call__(self, problem, options, post_process_hook,
req_info, coef_info,
micro_states, store_micro_idxs, time_tag=''):
"""Calculate homogenized correctors and coefficients.
Parameters
----------
problem : problem
The problem definition - microscopic problem.
opts : struct
The options of the homogenization application.
post_process_hook : function
The postprocessing hook.
req_info : dict
The definition of correctors.
coef_info : dict
The definition of homogenized coefficients.
micro_states : array
The configurations of multiple microstructures.
store_micro_idxs : list of int
The indices of microstructures whose results are to be stored.
time_tag : str
The label corresponding to the actual time step and iteration,
used in the corrector file names.
Returns
-------
dependencies : dict
The computed correctors and coefficients.
save_names : list
The names of computed dependencies.
"""
dependencies = {}
save_names = {}
sorted_names = self.get_sorted_dependencies(req_info, coef_info,
options.compute_only)
for name in sorted_names:
if not name.startswith('c.'):
if micro_states is not None:
req_info[name]['store_idxs'] = (store_micro_idxs, 0)
val = self.calculate_req(problem, options, post_process_hook,
name, req_info, coef_info, save_names,
dependencies, micro_states,
time_tag)
dependencies[name] = val
gc.collect()
return dependencies, save_names
@staticmethod
def get_sorted_dependencies(req_info, coef_info, compute_only):
"Make corrs and coefs list sorted according to the dependencies."
reqcoef_info = copy(coef_info)
reqcoef_info.update(req_info)
compute_names = set(get_default(compute_only, list(coef_info.keys())))
compute_names = ['c.' + key for key in compute_names]
dep_names = []
for coef_name in compute_names:
requires = coef_info[coef_name[2:]].get('requires', [])
deps = insert_sub_reqs(copy(requires), [], reqcoef_info)\
+ [coef_name]
for dep in deps:
if dep not in dep_names:
dep_names.append(dep)
return dep_names
@staticmethod
def calculate(mini_app, problem, dependencies, dep_requires,
save_names, micro_states, chunk_tab, mode, proc_id):
if micro_states is None:
data = {key: dependencies[key] for key in dep_requires
if 'Volume_' not in key}
volume = {key[9:]: dependencies[key]
for key in dep_requires if 'Volume_' in key}
mini_app.requires = [ii for ii in mini_app.requires
if 'c.Volume_' not in ii]
if mode == 'coefs':
val = mini_app(volume, data=data)
else:
if mini_app.save_name is not None:
save_names[mini_app.name] = mini_app.get_save_name_base()
val = mini_app(data=data)
else:
data = {rm_multi(key): dependencies[key]
for key in dep_requires if 'Volume_' not in key}
volume = {rm_multi(key[9:]): dependencies[key]
for key in dep_requires if 'Volume_' in key}
mini_app.requires = [ii for ii in mini_app.requires
if 'c.Volume_' not in ii]
if '|multiprocessing_' in mini_app.name\
and chunk_tab is not None:
chunk_id = int(mini_app.name[-3:])
chunk_tag = '-%d' % (chunk_id + 1)
local_state = \
{k: v[chunk_tab[chunk_id]] if v is not None else None
for k, v in six.iteritems(micro_states)}
else:
chunk_tag = ''
local_state = micro_states
val = []
if hasattr(mini_app, 'store_idxs') and mode == 'reqs':
save_name = mini_app.save_name
local_coors = local_state['coors']
for im in range(len(local_coors)):
output('== micro %s%s-%d =='
% (proc_id, chunk_tag, im + 1))
problem.micro_state = (local_state, im)
problem.set_mesh_coors(local_coors[im], update_fields=True,
clear_all=False, actual=True)
if mode == 'coefs':
val.append(mini_app(get_dict_idxval(volume, im),
data=get_dict_idxval(data, im)))
else:
if hasattr(mini_app, 'store_idxs')\
and im in mini_app.store_idxs[0]:
store_id = '_%04d'\
% (mini_app.store_idxs[1] + im)
if save_name is not None:
mini_app.save_name = save_name + store_id
key = mini_app.name
if key in save_names:
save_names[key].append(
mini_app.get_save_name_base())
else:
save_names[key] =\
[mini_app.get_save_name_base()]
else:
mini_app.save_name = None
val.append(mini_app(data=get_dict_idxval(data, im)))
return val
@staticmethod
def calculate_req(problem, opts, post_process_hook,
name, req_info, coef_info, save_names, dependencies,
micro_states, time_tag='', chunk_tab=None, proc_id='0'):
"""Calculate a requirement, i.e. correctors or coefficients.
Parameters
----------
problem : problem
The problem definition related to the microstructure.
opts : struct
The options of the homogenization application.
post_process_hook : function
The postprocessing hook.
name : str
The name of the requirement.
req_info : dict
The definition of correctors.
coef_info : dict
The definition of homogenized coefficients.
save_names : dict
The dictionary containing names of saved correctors.
dependencies : dict
The dependencies required by the correctors/coefficients.
micro_states : array
The configurations of multiple microstructures.
time_tag : str
The label corresponding to the actual time step and iteration,
used in the corrector file names.
chunk_tab : list
In the case of multiprocessing the requirements are divided into
several chunks that are solved in parallel.
proc_id : int
The id number of the processor (core) which is solving the actual
chunk.
Returns
-------
val : coefficient/corrector or list of coefficients/correctors
The resulting homogenized coefficients or correctors.
"""
# compute coefficient
if name.startswith('c.'):
coef_name = name[2:]
output('computing %s...' % coef_name)
cargs = coef_info[coef_name]
mini_app = MiniAppBase.any_from_conf(coef_name, problem, cargs)
problem.clear_equations()
# Pass only the direct dependencies, not the indirect ones.
dep_requires = cargs.get('requires', [])
val = HomogenizationWorker.calculate(mini_app, problem,
dependencies, dep_requires,
save_names, micro_states,
chunk_tab, 'coefs', proc_id)
output('...done')
# compute corrector(s)
else:
output('computing dependency %s...' % name)
rargs = req_info[name]
mini_app = MiniAppBase.any_from_conf(name, problem, rargs)
mini_app.setup_output(save_formats=opts.save_formats,
post_process_hook=post_process_hook,
file_per_var=opts.file_per_var)
if mini_app.save_name is not None:
mini_app.save_name += time_tag
problem.clear_equations()
# Pass only the direct dependencies, not the indirect ones.
dep_requires = rargs.get('requires', [])
val = HomogenizationWorker.calculate(mini_app, problem,
dependencies, dep_requires,
save_names, micro_states,
chunk_tab, 'reqs', proc_id)
output('...done')
return val
class HomogenizationWorkerMulti(HomogenizationWorker):
def __init__(self, num_workers):
self.num_workers = num_workers
def __call__(self, problem, options, post_process_hook,
req_info, coef_info,
micro_states, store_micro_idxs, chunks_per_worker,
time_tag=''):
"""Calculate homogenized correctors and coefficients.
Parameters
----------
The same parameters as :class:`HomogenizationWorker`, extended by:
chunks_per_worker : int
The number of chunks per one worker.
Returns
-------
The same returns as :class:`HomogenizationWorker`.
"""
multiproc = multi.multiproc_proc
dependencies = multiproc.get_dict('dependencies', clear=True)
save_names = multiproc.get_dict('save_names', clear=True)
numdeps = multiproc.get_dict('numdeps', clear=True)
remaining = multiproc.get_int_value('remaining', 0)
tasks = multiproc.get_queue('tasks')
lock = multiproc.get_lock('lock')
if micro_states is not None:
micro_chunk_tab, req_info, coef_info = \
self.chunk_micro_tasks(self.num_workers,
len(micro_states['coors']),
req_info, coef_info,
chunks_per_worker, store_micro_idxs)
else:
micro_chunk_tab = None
sorted_names = self.get_sorted_dependencies(req_info, coef_info,
options.compute_only)
remaining.value = len(sorted_names)
# calculate number of dependencies and inverse map
inverse_deps = {}
for name in sorted_names:
if name.startswith('c.'):
reqs = coef_info[name[2:]].get('requires', [])
else:
reqs = req_info[name].get('requires', [])
numdeps[name] = len(reqs)
if len(reqs) > 0:
for req in reqs:
if req in inverse_deps:
inverse_deps[req].append(name)
else:
inverse_deps[req] = [name]
for name in sorted_names:
if numdeps[name] == 0:
tasks.put(name)
workers = []
for ii in range(self.num_workers):
args = (tasks, lock, remaining, numdeps, inverse_deps,
problem, options, post_process_hook, req_info,
coef_info, save_names, dependencies, micro_states,
time_tag, micro_chunk_tab, str(ii + 1))
w = multiproc.Process(target=self.calculate_req_multi,
args=args)
w.start()
workers.append(w)
        # block until all workers are terminated
for w in workers:
w.join()
if micro_states is not None:
dependencies = self.dechunk_reqs_coefs(dependencies,
len(micro_chunk_tab))
return dependencies, save_names
@staticmethod
def calculate_req_multi(tasks, lock, remaining, numdeps, inverse_deps,
problem, opts, post_process_hook,
req_info, coef_info, save_names, dependencies,
micro_states, time_tag, chunk_tab, proc_id):
"""Calculate a requirement in parallel.
Parameters
----------
tasks : queue
The queue of requirements to be solved.
lock : lock
            The multiprocessing lock used to ensure safe access to the global
variables.
remaining : int
The number of remaining requirements.
numdeps : dict
            The number of dependencies for each requirement.
inverse_deps : dict
The inverse dependencies - which requirements depend
on a given one.
For the definition of other parameters see 'calculate_req'.
"""
while remaining.value > 0:
name = tasks.get()
if name is None:
continue
save_names_loc = {}
val = HomogenizationWorker.calculate_req(problem, opts,
post_process_hook, name, req_info, coef_info, save_names_loc,
dependencies, micro_states, time_tag, chunk_tab, proc_id)
lock.acquire()
dependencies[name] = val
remaining.value -= 1
if name in inverse_deps:
for iname in inverse_deps[name]:
numdeps[iname] -= 1 # iname depends on name
                    if numdeps[iname] == 0:  # computed all direct dependencies?
tasks.put(iname) # yes, put iname to queue
save_names.update(save_names_loc)
lock.release()
@staticmethod
def process_reqs_coefs(old, num_workers, store_idxs=[]):
new = {}
for k, v in six.iteritems(old):
if k == 'filenames':
new[k] = v.copy()
continue
for ii in range(num_workers):
lab = '|multiprocessing_%03d' % ii
key = k + lab
new[key] = v.copy()
val = new[key]
if 'requires' in val:
val['requires'] = [jj + lab for jj in val['requires']]
if len(store_idxs) > 0:
if len(store_idxs[ii][0]) > 0:
val['store_idxs'] = store_idxs[ii]
else:
val['save_name'] = None
return new
@staticmethod
def chunk_micro_tasks(num_workers, num_micro, reqs, coefs,
chunks_per_worker=1, store_micro_idxs=[]):
"""
Split multiple microproblems into several chunks
that can be processed in parallel.
Parameters
----------
num_workers : int
The number of available CPUs.
num_micro : int
The number of microstructures.
reqs : dict
The requirement definitions.
coefs : dict
The coefficient definitions.
chunks_per_worker : int
            The number of chunks per worker.
store_micro_idxs : list of int
The indices of microstructures whose results are to be stored.
Returns
-------
micro_tab : list of slices
The indices of microproblems contained in each chunk.
new_reqs : dict
The new requirement definitions.
new_coefs : dict
The new coefficient definitions.
"""
chsize = int(nm.ceil(float(num_micro)
/ (num_workers * chunks_per_worker)))
micro_tab = []
store_idxs = []
for ii in range(0, num_micro, chsize):
jj = chsize + ii
chunk_end = num_micro if jj > num_micro else jj
micro_tab.append(slice(ii, chunk_end))
if len(store_micro_idxs) > 0:
store_idxs.append(([k - ii for k in store_micro_idxs
if k >= ii and k < jj], ii))
nw = len(micro_tab)
self = HomogenizationWorkerMulti
new_reqs = self.process_reqs_coefs(reqs, nw, store_idxs)
new_coefs = self.process_reqs_coefs(coefs, nw)
return micro_tab, new_reqs, new_coefs
@staticmethod
def dechunk_reqs_coefs(deps, num_chunks):
"""
Merge the results related to the multiple microproblems.
Parameters
----------
deps : dict
The calculated dependencies.
num_chunks : int
The number of chunks.
Returns
-------
new_deps : dict
The merged dependencies.
"""
new_deps = {}
for ii in range(num_chunks):
ilab = '_%03d' % ii
for k in deps.keys():
idx = k.rfind('|multiprocessing_')
if idx > 0:
                    if k[-4:] != ilab:
continue
key = k[:idx]
if key in new_deps:
new_deps[key] += deps[k]
else:
new_deps[key] = deps[k]
else:
new_deps[k] = deps[k]
return new_deps
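# A hedged, self-contained illustration of the chunk labelling round-trip
# used above: process_reqs_coefs() suffixes every key with
# '|multiprocessing_%03d' per chunk, and dechunk_reqs_coefs() merges the
# per-chunk results back under the bare key. The key and values are made up.
_deps_demo = {'corrs_rs|multiprocessing_000': [1.0],
              'corrs_rs|multiprocessing_001': [2.0]}
_merged_demo = HomogenizationWorkerMulti.dechunk_reqs_coefs(_deps_demo,
                                                            num_chunks=2)
assert _merged_demo == {'corrs_rs': [1.0, 2.0]}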
class HomogenizationWorkerMultiMPI(HomogenizationWorkerMulti):
def __call__(self, problem, options, post_process_hook,
req_info, coef_info,
micro_states, store_micro_idxs, chunks_per_worker,
time_tag=''):
"""Calculate homogenized correctors and coefficients.
Parameters and Returns
----------------------
The same parameters and returns as :class:`HomogenizationWorkerMulti`.
"""
multiproc = multi.multiproc_mpi
        dependencies = multiproc.get_dict('dependencies', clear=True)
save_names = multiproc.get_dict('save_names', clear=True)
numdeps = multiproc.get_dict('numdeps', mutable=True, clear=True)
remaining = multiproc.get_int_value('remaining', 0)
tasks = multiproc.get_queue('tasks')
if micro_states is not None:
micro_chunk_tab, req_info, coef_info = \
self.chunk_micro_tasks(self.num_workers,
len(micro_states['coors']),
req_info, coef_info,
chunks_per_worker, store_micro_idxs)
else:
micro_chunk_tab = None
sorted_names = self.get_sorted_dependencies(req_info, coef_info,
options.compute_only)
# calculate number of dependencies and inverse map
inverse_deps = {}
loc_numdeps = {}
for name in sorted_names:
if name.startswith('c.'):
reqs = coef_info[name[2:]].get('requires', [])
else:
reqs = req_info[name].get('requires', [])
loc_numdeps[name] = len(reqs)
if len(reqs) > 0:
for req in reqs:
if req in inverse_deps:
inverse_deps[req].append(name)
else:
inverse_deps[req] = [name]
if multiproc.mpi_rank == multiproc.mpi_master: # master node
for k, v in six.iteritems(loc_numdeps):
numdeps[k] = v
remaining.value = len(sorted_names)
for name in sorted_names:
if numdeps[name] == 0:
tasks.put(name)
multiproc.master_loop()
multiproc.master_send_continue()
if micro_states is not None:
dependencies = self.dechunk_reqs_coefs(dependencies,
len(micro_chunk_tab))
multiproc.master_send_task('deps', dependencies)
multiproc.master_send_continue()
return dependencies, save_names
else: # slave node
lock = multiproc.RemoteLock()
multiproc.slave_get_task('engine')
self.calculate_req_multi(tasks, lock, remaining, numdeps,
inverse_deps, problem, options,
post_process_hook, req_info,
coef_info, save_names, dependencies,
micro_states,
time_tag, micro_chunk_tab,
str(multiproc.mpi_rank + 1))
multiproc.slave_task_done('engine')
multiproc.wait_for_tag(multiproc.tags.CONTINUE)
task, deps = multiproc.slave_get_task('get_deps')
multiproc.wait_for_tag(multiproc.tags.CONTINUE)
return deps, None
class HomogenizationEngine(PDESolverApp):
@staticmethod
def process_options(options):
get = options.get
return Struct(coefs=get('coefs', None,
'missing "coefs" in options!'),
requirements=get('requirements', None,
'missing "requirements" in options!'),
compute_only=get('compute_only', None),
multiprocessing=get('multiprocessing', True),
use_mpi=get('use_mpi', False),
store_micro_idxs=get('store_micro_idxs', []),
chunks_per_worker=get('chunks_per_worker', 1),
save_formats=get('save_formats', ['vtk', 'h5']),
coefs_info=get('coefs_info', None))
def __init__(self, problem, options, app_options=None,
volumes=None, output_prefix='he:', **kwargs):
"""Bypasses PDESolverApp.__init__()!"""
Application.__init__(self, problem.conf, options, output_prefix,
**kwargs)
self.problem = problem
self.setup_options(app_options=app_options)
self.setup_output_info(self.problem, self.options)
self.volumes = volumes
self.micro_states = None
def setup_options(self, app_options=None):
PDESolverApp.setup_options(self)
app_options = get_default(app_options, self.conf.options)
po = HomogenizationEngine.process_options
self.app_options += po(app_options)
def set_micro_states(self, states):
self.micro_states = states
@staticmethod
def define_volume_coef(coef_info, volumes):
"""
Define volume coefficients and make all other dependent on them.
Parameters
----------
coef_info : dict
The coefficient definitions.
volumes : dict
The definitions of volumes.
Returns
-------
coef_info : dict
The coefficient definitions extended by the volume coefficients.
"""
vcfkeys = []
cf_vols = {}
for vk, vv in six.iteritems(volumes):
cfkey = 'Volume_%s' % vk
vcfkeys.append('c.' + cfkey)
if 'value' in vv:
cf_vols[cfkey] = {'expression': '%e' % float(vv['value']),
'class': CoefEval}
else:
cf_vols[cfkey] = {'expression': vv['expression'],
'class': CoefVolume}
for cf in six.itervalues(coef_info):
if 'requires' in cf:
cf['requires'] += vcfkeys
else:
cf['requires'] = vcfkeys
coef_info.update(cf_vols)
return coef_info
def call(self, ret_all=False, time_tag=''):
problem = self.problem
opts = self.app_options
# Some coefficients can require other coefficients - resolve their
# order here.
req_info = getattr(self.conf, opts.requirements, {})
coef_info = getattr(self.conf, opts.coefs, {})
coef_info = self.define_volume_coef(coef_info, self.volumes)
is_store_filenames = coef_info.pop('filenames', None) is not None
multiproc_mode = None
if opts.multiprocessing and multi.use_multiprocessing:
multiproc, multiproc_mode = multi.get_multiproc(mpi=opts.use_mpi)
if multiproc_mode == 'mpi':
HomogWorkerMulti = HomogenizationWorkerMultiMPI
elif multiproc_mode == 'proc':
HomogWorkerMulti = HomogenizationWorkerMulti
else:
multiproc_mode = None
if multiproc_mode is not None:
num_workers = multi.get_num_workers()
# if self.micro_states is not None:
# n_micro = len(self.micro_states['coors'])
# if num_workers > n_micro:
# num_workers = n_micro
worker = HomogWorkerMulti(num_workers)
dependencies, save_names = \
worker(problem, opts, self.post_process_hook,
req_info, coef_info, self.micro_states,
self.app_options.store_micro_idxs,
self.app_options.chunks_per_worker, time_tag)
else: # no multiprocessing
worker = HomogenizationWorker()
dependencies, save_names = \
worker(problem, opts, self.post_process_hook,
req_info, coef_info, self.micro_states,
self.app_options.store_micro_idxs, time_tag)
deps = {}
if save_names is None and dependencies is not None: # slave mode
coefs = None
for name in dependencies.keys():
data = dependencies[name]
if not name.startswith('c.'):
deps[name] = data
else:
coefs = Struct()
for name in dependencies.keys():
data = dependencies[name]
if name.startswith('c.'):
coef_name = name[2:]
cstat = coef_info[coef_name].get('status', 'main')
# remove "auxiliary" coefs
                    if cstat != 'auxiliary':
setattr(coefs, coef_name, data)
else:
deps[name] = data
# Store filenames of all requirements as a "coefficient".
if is_store_filenames:
            for name in list(save_names.keys()):
if '|multiprocessing_' in name:
mname = rm_multi(name)
if mname in save_names:
save_names[mname] += save_names[name]
else:
save_names[mname] = save_names[name]
                    del save_names[name]
if multiproc_mode == 'proc':
coefs.save_names = save_names._getvalue()
else:
coefs.save_names = save_names
if opts.coefs_info is not None:
coefs.info = opts.coefs_info
if ret_all:
return coefs, deps
else:
return coefs
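# A hedged toy of the dependency bookkeeping the workers above rely on:
# numdeps counts the direct 'requires' of every name, inverse_deps maps a
# name to the names waiting on it, and a name is queued once its count
# reaches zero. The two definitions below are illustrative assumptions.
_reqs_demo = {'corrs': {}}
_coefs_demo = {'E': {'requires': ['corrs']}}
_numdeps, _inverse_deps = {}, {}
for _name in ['corrs', 'c.E']:
    if _name.startswith('c.'):
        _rq = _coefs_demo[_name[2:]].get('requires', [])
    else:
        _rq = _reqs_demo[_name].get('requires', [])
    _numdeps[_name] = len(_rq)
    for _r in _rq:
        _inverse_deps.setdefault(_r, []).append(_name)
assert _numdeps == {'corrs': 0, 'c.E': 1}
assert _inverse_deps == {'corrs': ['c.E']}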
| {
"repo_name": "sfepy/sfepy",
"path": "sfepy/homogenization/engine.py",
"copies": "4",
"size": "29613",
"license": "bsd-3-clause",
"hash": -8855984736244281000,
"line_mean": 36.8199233716,
"line_max": 79,
"alpha_frac": 0.5115996353,
"autogenerated": false,
"ratio": 4.523140369634947,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00008359481696583638,
"num_lines": 783
} |
from __future__ import absolute_import, division
import json
import re
import datetime
import dateutil.parser
import dateutil.tz
import six
_urls_to_objects = {}
def timestamp(dt):
"""
Parses a HipChat datetime value.
    HipChat uses ISO 8601, optionally with the timezone attached, except when it uses a Unix timestamp.
"""
#'2013-12-05T22:42:18+00:00' <== History
#'2013-11-27T15:33:24' <== Rooms, Users
if dt is None:
return
if isinstance(dt, int):
rv = datetime.datetime.fromtimestamp(dt, dateutil.tz.tzutc())
elif dt.isdigit():
rv = datetime.datetime.fromtimestamp(int(dt), dateutil.tz.tzutc())
else:
rv = dateutil.parser.parse(dt)
if rv.tzinfo is None:
rv = rv.replace(tzinfo=dateutil.tz.tzutc())
return rv
def mktimestamp(dt):
"""
Prepares a datetime for sending to HipChat.
"""
if dt.tzinfo is None:
dt = dt.replace(tzinfo=dateutil.tz.tzutc())
return dt.isoformat(), dt.tzinfo.tzname(dt)
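# Hedged examples of the formats mentioned above (all values arbitrary):
assert timestamp('2013-12-05T22:42:18+00:00').year == 2013  # history style
assert timestamp('2013-11-27T15:33:24').tzinfo is not None  # naive -> UTC
assert timestamp(1386283338).tzinfo is not None             # unix timestamp
assert mktimestamp(datetime.datetime(2014, 1, 2))[1] == 'UTC'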
class Linker(object):
"""
Responsible for on-demand loading of JSON objects.
"""
url = None
def __init__(self, url, parent=None, _requests=None):
self.url = url
self.__parent = parent
self._requests = _requests or __import__('requests')
@staticmethod
def _obj_from_text(text, requests):
"""
Constructs objects (including our wrapper classes) from a JSON-formatted string
"""
def _object_hook(obj):
if 'links' in obj:
klass = RestObject
if 'self' in obj['links']:
for p, c in six.iteritems(_urls_to_objects):
if p.match(obj['links']['self']):
klass = c
break
rv = klass(obj)
rv._requests = requests
return rv
else:
return obj
return json.JSONDecoder(object_hook=_object_hook).decode(text)
def __call__(self, expand=None, **kwargs):
"""
Actually perform the request
"""
params = {}
if expand is not None:
if isinstance(expand, six.string_types):
params = {'expand': expand}
else:
params = {'expand': ','.join(expand)}
if kwargs:
merge_params = {}
for k, v in six.iteritems(kwargs):
merge_params[k.replace('_', '-')] = v
params.update(merge_params)
rv = self._obj_from_text(self._requests.get(self.url, params=params).text, self._requests)
rv._requests = self._requests
if self.__parent is not None:
rv.parent = self.__parent
return rv
def __repr__(self):
return "<%s url=%r>" % (type(self).__name__, self.url)
class RestObject(dict):
"""
Nice wrapper around the JSON objects and their links.
"""
def __getattr__(self, name):
if name in self.get('links', {}):
return Linker(self['links'][name], parent=self, _requests=self._requests)
elif name in self:
return self[name]
else:
raise AttributeError("%r object has no attribute %r" % (type(self).__name__, name))
@property
def url(self):
return self['links']['self']
def save(self):
return self._requests.put(self.url, data=self).json()
def delete(self):
self._requests.delete(self.url)
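# Hedged illustration of RestObject attribute access: plain keys resolve as
# attributes, while keys under 'links' would resolve to Linker objects
# (omitted here, since that path needs a requests session).
_demo_obj = RestObject({'id': 7, 'name': 'demo'})
assert _demo_obj.name == 'demo' and _demo_obj['id'] == 7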
_at_mention = re.compile(r'@[\w]+(?: |$)')
class Room(RestObject):
def __init__(self, *p, **kw):
super(Room, self).__init__(*p, **kw)
if 'last_active' in self:
self['last_active'] = timestamp(self['last_active'])
if 'created' in self:
self['created'] = timestamp(self['created'])
def reply(self, message, parent_message_id):
"""
Send a reply to a message
"""
data = {'message': message, 'parentMessageId': parent_message_id}
self._requests.post(self.url+'/reply', data=data)
def message(self, message):
"""
Redirects to the /reply URL with an empty parentMessageId
"""
return self.reply(message, parent_message_id='')
def notification(self, message, color=None, notify=False, format=None):
"""
Send a message to a room.
"""
if not format:
if len(_at_mention.findall(message)) > 0:
format = 'text'
else:
format = 'html'
data = {'message': message, 'notify': notify, 'message_format': format}
if color:
data['color'] = color
self._requests.post(self.url+'/notification', data=data)
def topic(self, text):
"""
Set a room's topic. Useful for displaying statistics, important links, server status, you name it!
"""
self._requests.put(self.url+'/topic', data={
'topic': text,
})
def history(self, date='recent', maxResults=200):
"""
Requests the room history.
Note that if date is 'recent' (the default), HipChat will not return the complete history.
"""
tz = 'UTC'
if date != 'recent':
date, tz = mktimestamp(date)
params = {
'date':date,
'timezone': tz,
'max-results': maxResults,
}
resp = self._requests.get(self.url+'/history', params=params)
return Linker._obj_from_text(resp.text, self._requests)
def latest(self, not_before=None, maxResults=200):
"""
Return the latest room history.
If ``not_before`` is provided, messages that precede the message id will not be returned
"""
params = {
"max-results": maxResults
}
if not_before is not None:
params["not-before"] = not_before
resp = self._requests.get(self.url+'/history/latest', params=params)
return Linker._obj_from_text(resp.text, self._requests)
def invite(self, user, reason):
self._requests.post(self.url+'/invite/%s' % user['id'], data={
'reason': reason,
})
def create_webhook(self, url, event, pattern=None, name=None):
"""
Creates a new webhook.
"""
data={
'url': url,
'event': event,
'pattern': pattern,
'name': name,
}
resp = self._requests.post(self.url+'/webhook', data=data)
return Linker._obj_from_text(resp.text, self._requests)
def save(self):
data = {}
for key in ('name', 'privacy', 'is_archived', 'is_guest_accessible', 'topic'):
data[key] = self[key]
data['owner'] = {'id': self['owner']['id']}
headers = {'content-type': 'application/json'}
return self._requests.put(self.url, data=json.dumps(data), headers=headers)
_urls_to_objects[re.compile(r'https://[^/]+/v2/room/[^/]+$')] = Room
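# Hedged illustration: _obj_from_text() matches a payload's 'self' link
# against the registry just populated, so this JSON decodes straight to a
# Room. The URL and room id below are made up.
_room_payload = ('{"name": "Dev", '
                 '"links": {"self": "https://api.hipchat.com/v2/room/42"}}')
assert type(Linker._obj_from_text(_room_payload, None)) is Room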
class User(RestObject):
def __init__(self, *p, **kw):
super(User, self).__init__(*p, **kw)
if 'last_active' in self:
self['last_active'] = timestamp(self['last_active'])
if 'created' in self:
self['created'] = timestamp(self['created'])
def message(self, message, message_format='text', notify=False):
"""
Sends a user a private message.
"""
self._requests.post(self.url+'/message', data={
'message': message,
'message_format': message_format,
'notify': notify,
})
def history(self, maxResults=200, notBefore=None):
"""
        Requests the user's private message history.
        :param notBefore: the oldest message id to be returned. If not set, the history is limited by maxResults only.
"""
tz = 'UTC'
params = {
'timezone': tz,
'max-results': maxResults,
}
if notBefore is not None:
params["not-before"] = notBefore
resp = self._requests.get(self.url+'/history/latest', params=params)
return Linker._obj_from_text(resp.text, self._requests)
def save(self):
data = {}
for key, value in six.iteritems(self):
if key == 'presence' and isinstance(value, dict):
p = {}
for k,v in six.iteritems(value):
if k in ('status', 'show'):
p[k] = v
if len(p) != 0:
data[key] = p
else:
data[key] = value
self._requests.put(self.url, data=data)
_urls_to_objects[re.compile(r'https://[^/]+/v2/user/[^/]+$')] = User
class Collection(object):
"""
Mixin for collections
"""
def contents(self, **kwargs):
page = self
ops = {}
if kwargs.get('expand'):
ops['expand'] = 'items'
while hasattr(page, 'next'):
for item in page['items']:
yield item
page = page.next(**ops)
# Last page handling
for item in page['items']:
yield item
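# Hedged sketch of the pagination above: contents() follows the 'next'
# link until it reaches a page without one, then drains that last page.
# _Page is a minimal, made-up stand-in for a paginated RestObject.
class _Page(dict, Collection):
    pass
_last_page = _Page(items=[3, 4])           # final page: no 'next' attribute
_first_page = _Page(items=[1, 2])
_first_page.next = lambda **ops: _last_page  # emulates the 'next' Linker
assert list(_first_page.contents()) == [1, 2, 3, 4]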
class MemberCollection(RestObject, Collection):
def add(self, user):
"""
Adds a member to a private room.
"""
self._requests.put(self.url+'/%s' % user['id'])
def remove(self, user):
"""
Removes a member from a private room.
"""
self._requests.delete(self.url+'/%s' % user['id'])
_urls_to_objects[re.compile(r'https://[^/]+/v2/room/[^/]+/member$')] = MemberCollection
class UserCollection(RestObject, Collection):
def create(self, name, email, title=None, mention_name=None, is_group_admin=False, timezone='UTC', password=None):
"""
Creates a new user.
"""
data={
'name': name,
'email': email,
'title': title,
'mention_name': mention_name,
'is_group_admin': is_group_admin,
'timezone': timezone, # TODO: Support timezone objects
'password': password,
}
resp = self._requests.post(self.url, data=data)
return Linker._obj_from_text(resp.text, self._requests)
_urls_to_objects[re.compile(r'https://[^/]+/v2/user$')] = UserCollection
class RoomCollection(RestObject, Collection):
def create(self, name, owner=Ellipsis, privacy='public', guest_access=True):
"""
Creates a new room.
"""
data={
'name': name,
'privacy': privacy,
'guest_access': guest_access,
}
if owner is not Ellipsis:
if owner is None:
data['owner_user_id'] = owner
else:
data['owner_user_id'] = owner['id']
resp = self._requests.post(self.url, data=data)
return Linker._obj_from_text(resp.text, self._requests)
_urls_to_objects[re.compile(r'https://[^/]+/v2/room$')] = RoomCollection
class WebhookCollection(RestObject, Collection):
def create(self, url, event, pattern=None, name=None):
"""
Creates a new webhook.
"""
        data={
            'url': url,
            'event': event,
            'pattern': pattern,
            'name': name,
        }
resp = self._requests.post(self.url, data=data)
return Linker._obj_from_text(resp.text, self._requests)
_urls_to_objects[re.compile(r'https://[^/]+/v2/room/[^/]+/webhook$')] = WebhookCollection
class EmoticonCollection(RestObject, Collection):
pass
_urls_to_objects[re.compile(r'https://[^/]+/v2/emoticon$')] = EmoticonCollection
class Webhook(RestObject):
def __init__(self, *p, **kw):
super(Webhook, self).__init__(*p, **kw)
if 'created' in self:
self['created'] = timestamp(self['created'])
_urls_to_objects[re.compile(r'https://[^/]+/v2/room/[^/]+/webhook/[^/]+$')] = Webhook
class HistoryCollection(RestObject, Collection):
def __init__(self, *p, **kw):
super(HistoryCollection, self).__init__(*p, **kw)
for item in self['items']:
if 'date' in item:
item['date'] = timestamp(item['date'])
_urls_to_objects[re.compile(r'https://[^/]+/v2/room/[^/]+/history$')] = HistoryCollection
| {
"repo_name": "dougkeen/HypChat",
"path": "hypchat/restobject.py",
"copies": "1",
"size": "10472",
"license": "mit",
"hash": -4708514733839157000,
"line_mean": 26.2,
"line_max": 115,
"alpha_frac": 0.6460084034,
"autogenerated": false,
"ratio": 2.995423340961098,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.8811026555526607,
"avg_score": 0.06608103776689833,
"num_lines": 385
} |
from __future__ import absolute_import, division
import json
import re
import datetime
import dateutil.parser
import dateutil.tz
import six
_urls_to_objects = {}
def timestamp(dt):
"""
Parses a HipChat datetime value.
    HipChat uses ISO 8601, optionally with the timezone attached, except when it uses a Unix timestamp.
"""
# '2013-12-05T22:42:18+00:00' <== History
#'2013-11-27T15:33:24' <== Rooms, Users
if dt is None:
return
if isinstance(dt, int):
rv = datetime.datetime.fromtimestamp(dt, dateutil.tz.tzutc())
elif dt.isdigit():
rv = datetime.datetime.fromtimestamp(int(dt), dateutil.tz.tzutc())
else:
rv = dateutil.parser.parse(dt)
if rv.tzinfo is None:
rv = rv.replace(tzinfo=dateutil.tz.tzutc())
return rv
def mktimestamp(dt):
"""
Prepares a datetime for sending to HipChat.
"""
if dt.tzinfo is None:
dt = dt.replace(tzinfo=dateutil.tz.tzutc())
return dt.isoformat(), dt.tzinfo.tzname(dt)
class Linker(object):
"""
Responsible for on-demand loading of JSON objects.
"""
url = None
def __init__(self, url, parent=None, _requests=None):
self.url = url
self.__parent = parent
self._requests = _requests or __import__('requests')
@staticmethod
def _obj_from_text(text, requests):
"""
Constructs objects (including our wrapper classes) from a JSON-formatted string
"""
def _object_hook(obj):
if 'links' in obj:
klass = RestObject
if 'self' in obj['links']:
for p, c in six.iteritems(_urls_to_objects):
if p.match(obj['links']['self']):
klass = c
break
rv = klass(obj)
rv._requests = requests
return rv
else:
return obj
return json.JSONDecoder(object_hook=_object_hook).decode(text)
def __call__(self, expand=None, **kwargs):
"""
Actually perform the request
"""
params = {}
if expand is not None:
if isinstance(expand, six.string_types):
params = {'expand': expand}
else:
params = {'expand': ','.join(expand)}
if kwargs:
merge_params = {}
for k, v in six.iteritems(kwargs):
merge_params[k.replace('_', '-')] = v
params.update(merge_params)
rv = self._obj_from_text(self._requests.get(self.url, params=params).text, self._requests)
rv._requests = self._requests
if self.__parent is not None:
rv.parent = self.__parent
return rv
def __repr__(self):
return "<%s url=%r>" % (type(self).__name__, self.url)
class RestObject(dict):
"""
Nice wrapper around the JSON objects and their links.
"""
def __getattr__(self, name):
if name in self.get('links', {}):
return Linker(self['links'][name], parent=self, _requests=self._requests)
elif name in self:
return self[name]
else:
raise AttributeError("%r object has no attribute %r" % (type(self).__name__, name))
@property
def url(self):
return self['links']['self']
def save(self):
return self._requests.put(self.url, data=self).json()
def delete(self):
self._requests.delete(self.url)
_at_mention = re.compile(r'@[\w]+(?: |$)')
class Room(RestObject):
def __init__(self, *p, **kw):
super(Room, self).__init__(*p, **kw)
if 'last_active' in self:
self['last_active'] = timestamp(self['last_active'])
if 'created' in self:
self['created'] = timestamp(self['created'])
def reply(self, message, parent_message_id):
"""
Send a reply to a message
"""
data = {'message': message, 'parentMessageId': parent_message_id}
self._requests.post(self.url + '/reply', data=data)
def message(self, message):
"""
Allows a user to send a message to a room.
"""
data = {'message': message}
self._requests.post(self.url + '/message', data=data)
def notification(self, message, color=None, notify=False, format=None):
"""
Send a message to a room.
"""
if not format:
if len(_at_mention.findall(message)) > 0:
format = 'text'
else:
format = 'html'
data = {'message': message, 'notify': notify, 'message_format': format}
if color:
data['color'] = color
self._requests.post(self.url + '/notification', data=data)
def topic(self, text):
"""
Set a room's topic. Useful for displaying statistics, important links, server status, you name it!
"""
self._requests.put(self.url + '/topic', data={
'topic': text,
})
def history(self, date='recent', maxResults=200):
"""
Requests the room history.
Note that if date is 'recent' (the default), HipChat will not return the complete history.
"""
tz = 'UTC'
if date != 'recent':
date, tz = mktimestamp(date)
params = {
'date': date,
'timezone': tz,
'max-results': maxResults,
}
resp = self._requests.get(self.url + '/history', params=params)
return Linker._obj_from_text(resp.text, self._requests)
def latest(self, not_before=None, maxResults=200):
"""
Return the latest room history.
If ``not_before`` is provided, messages that precede the message id will not be returned
"""
params = {
"max-results": maxResults
}
if not_before is not None:
params["not-before"] = not_before
resp = self._requests.get(self.url + '/history/latest', params=params)
return Linker._obj_from_text(resp.text, self._requests)
def invite(self, user, reason):
self._requests.post(self.url + '/invite/%s' % user['id'], data={
'reason': reason,
})
def create_webhook(self, url, event, pattern=None, name=None):
"""
Creates a new webhook.
"""
data = {
'url': url,
'event': event,
'pattern': pattern,
'name': name,
}
resp = self._requests.post(self.url + '/webhook', data=data)
return Linker._obj_from_text(resp.text, self._requests)
def save(self):
data = {}
for key in ('name', 'privacy', 'is_archived', 'is_guest_accessible', 'topic'):
data[key] = self[key]
data['owner'] = {'id': self['owner']['id']}
headers = {'content-type': 'application/json'}
return self._requests.put(self.url, data=json.dumps(data), headers=headers)
_urls_to_objects[re.compile(r'https://[^/]+/v2/room/[^/]+$')] = Room
class User(RestObject):
def __init__(self, *p, **kw):
super(User, self).__init__(*p, **kw)
if 'last_active' in self:
self['last_active'] = timestamp(self['last_active'])
if 'created' in self:
self['created'] = timestamp(self['created'])
def message(self, message, message_format='text', notify=False):
"""
Sends a user a private message.
"""
self._requests.post(self.url + '/message', data={
'message': message,
'message_format': message_format,
'notify': notify,
})
def history(self, maxResults=200, notBefore=None):
"""
        Requests the user's private message history.
        :param notBefore: the oldest message id to be returned. If not set, the history is limited by maxResults only.
"""
tz = 'UTC'
params = {
'timezone': tz,
'max-results': maxResults,
}
if notBefore is not None:
params["not-before"] = notBefore
resp = self._requests.get(self.url + '/history/latest', params=params)
return Linker._obj_from_text(resp.text, self._requests)
def save(self):
data = {}
for key, value in six.iteritems(self):
if key == 'presence' and isinstance(value, dict):
p = {}
for k, v in six.iteritems(value):
if k in ('status', 'show'):
p[k] = v
if len(p) != 0:
data[key] = p
else:
data[key] = value
self._requests.put(self.url, data=data)
_urls_to_objects[re.compile(r'https://[^/]+/v2/user/[^/]+$')] = User
class Collection(object):
"""
Mixin for collections
"""
def contents(self, **kwargs):
page = self
ops = {}
if kwargs.get('expand'):
ops['expand'] = 'items'
while hasattr(page, 'next'):
for item in page['items']:
yield item
page = page.next(**ops)
# Last page handling
for item in page['items']:
yield item
class MemberCollection(RestObject, Collection):
def add(self, user):
"""
Adds a member to a private room.
"""
self._requests.put(self.url + '/%s' % user['id'])
def remove(self, user):
"""
Removes a member from a private room.
"""
self._requests.delete(self.url + '/%s' % user['id'])
_urls_to_objects[re.compile(r'https://[^/]+/v2/room/[^/]+/member$')] = MemberCollection
class UserCollection(RestObject, Collection):
def create(self, name, email, title=None, mention_name=None, is_group_admin=False, timezone='UTC', password=None):
"""
Creates a new user.
"""
data = {
'name': name,
'email': email,
'title': title,
'mention_name': mention_name,
'is_group_admin': is_group_admin,
'timezone': timezone, # TODO: Support timezone objects
'password': password,
}
resp = self._requests.post(self.url, data=data)
return Linker._obj_from_text(resp.text, self._requests)
_urls_to_objects[re.compile(r'https://[^/]+/v2/user$')] = UserCollection
class RoomCollection(RestObject, Collection):
def create(self, name, owner=Ellipsis, privacy='public', guest_access=True):
"""
Creates a new room.
"""
data = {
'name': name,
'privacy': privacy,
'guest_access': guest_access,
}
if owner is not Ellipsis:
if owner is None:
data['owner_user_id'] = owner
else:
data['owner_user_id'] = owner['id']
resp = self._requests.post(self.url, data=data)
return Linker._obj_from_text(resp.text, self._requests)
_urls_to_objects[re.compile(r'https://[^/]+/v2/room$')] = RoomCollection
class WebhookCollection(RestObject, Collection):
def create(self, url, event, pattern=None, name=None):
"""
Creates a new webhook.
"""
        data = {
            'url': url,
            'event': event,
            'pattern': pattern,
            'name': name,
        }
resp = self._requests.post(self.url, data=data)
return Linker._obj_from_text(resp.text, self._requests)
_urls_to_objects[re.compile(r'https://[^/]+/v2/room/[^/]+/webhook$')] = WebhookCollection
class EmoticonCollection(RestObject, Collection):
pass
_urls_to_objects[re.compile(r'https://[^/]+/v2/emoticon$')] = EmoticonCollection
class Webhook(RestObject):
def __init__(self, *p, **kw):
super(Webhook, self).__init__(*p, **kw)
if 'created' in self:
self['created'] = timestamp(self['created'])
_urls_to_objects[re.compile(r'https://[^/]+/v2/room/[^/]+/webhook/[^/]+$')] = Webhook
class HistoryCollection(RestObject, Collection):
def __init__(self, *p, **kw):
super(HistoryCollection, self).__init__(*p, **kw)
for item in self['items']:
if 'date' in item:
item['date'] = timestamp(item['date'])
_urls_to_objects[re.compile(r'https://[^/]+/v2/room/[^/]+/history$')] = HistoryCollection
| {
"repo_name": "RidersDiscountCom/HypChat",
"path": "hypchat/restobject.py",
"copies": "1",
"size": "12419",
"license": "mit",
"hash": -7492818376375031000,
"line_mean": 29.0702179177,
"line_max": 118,
"alpha_frac": 0.5449714148,
"autogenerated": false,
"ratio": 3.8556348959950326,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.49006063107950326,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division
import json
import time
import warnings
import sys
import os
import datetime
import six
from .requests import Requests, BearerAuth, HttpTooManyRequests
from .restobject import Linker
class RateLimitWarning(Warning):
"""
This token has been rate limited. Waiting for the next reset.
"""
def jsonify(obj):
if isinstance(obj, datetime.datetime):
return obj.isoformat()
elif isinstance(obj, set):
return list(obj)
else:
raise TypeError("Can't JSONify objects of type %s" % type(obj).__name__)
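# Hedged example: jsonify() is intended as the `default` hook of
# json.dumps, converting datetimes and sets into JSON-friendly values.
assert (json.dumps({'when': datetime.datetime(2014, 1, 2)}, default=jsonify)
        == '{"when": "2014-01-02T00:00:00"}')
assert json.dumps(set(['a']), default=jsonify) == '["a"]'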
class _requests(Requests):
def __init__(self, *p, **kw):
super(_requests, self).__init__(*p, **kw)
self.rl_remaining = 99999
self.rl_reset = 0
self.dump_reqs = '__HYPCHAT_DEBUG_REQUESTS__' in os.environ
@staticmethod
def _data(data, kwargs):
if isinstance(data, six.string_types):
return data
elif data is not None:
kwargs.setdefault('headers',{})['Content-Type'] = 'application/json'
rv = json.dumps(data, default=jsonify)
return rv
def _rl_sleep(self, until):
t = until - time.time()
if t > 0:
warnings.warn("HipChat has been rate limited; Waiting %0.1fs for the next reset." % t, RateLimitWarning)
time.sleep(t)
def request(self, method, url, **kwargs):
if self.dump_reqs:
            sys.stderr.write("REQUEST %s %s\n" % (method, url))
while True:
try:
if self.rl_remaining <= 0:
# We're out of requests, chill
self._rl_sleep(self.rl_reset)
resp = super(_requests, self).request(method, url, **kwargs)
except HttpTooManyRequests as e:
self.rl_remaining = int(e.response.headers['x-ratelimit-remaining'])
if not self.rl_remaining:
self.rl_reset = float(e.response.headers['x-ratelimit-reset'])
continue # Try the request again
else:
raise
else:
self.rl_remaining = int(resp.headers['x-ratelimit-remaining'])
self.rl_reset = float(resp.headers['x-ratelimit-reset'])
return resp
def post(self, url, data=None, **kwargs):
data = self._data(data, kwargs)
return super(_requests, self).post(url, data=data, **kwargs)
def patch(self, url, data=None, **kwargs):
data = self._data(data, kwargs)
return super(_requests, self).patch(url, data=data, **kwargs)
def put(self, url, data=None, **kwargs):
data = self._data(data, kwargs)
return super(_requests, self).put(url, data=data, **kwargs)
__all__ = ('HypChat',)
class HypChat(object):
def __init__(self, token, endpoint='https://api.hipchat.com'):
self._requests = _requests(auth=BearerAuth(token))
self.capabilities = Linker('{0}/v2/capabilities'.format(endpoint), _requests=self._requests)
self.emoticons = Linker('{0}/v2/emoticon'.format(endpoint), _requests=self._requests)
self.rooms = Linker('{0}/v2/room'.format(endpoint), _requests=self._requests)
self.users_url = '{0}/v2/user'.format(endpoint)
self.endpoint = endpoint
def users(self, **ops):
"""users([guests=bool], [deleted=bool]) -> UserCollection
Returns a collection of users, with the following keyword options:
* guests: If True, return active guests
* deleted: If True, return deleted users
"""
params = {}
if ops.get('guests', False):
params['include-guests'] = 'true'
if ops.get('deleted', False):
params['include-deleted'] = 'true'
resp = self._requests.get(self.users_url, params=params)
return Linker._obj_from_text(resp.text, self._requests)
def fromurl(self, url, **kwargs):
return Linker(url, _requests=self._requests)(**kwargs)
def create_room(self, name, owner=Ellipsis, privacy='public', guest_access=True):
"""
Creates a new room.
"""
data={
'name': name,
'privacy': privacy,
'guest_access': guest_access,
}
if owner is not Ellipsis:
if owner is None:
data['owner_user_id'] = owner
else:
data['owner_user_id'] = owner['id']
resp = self._requests.post(self.rooms.url, data=data)
return Linker._obj_from_text(resp.text, self._requests)
def create_user(self, name, email, title='', mention_name='', is_group_admin=False, timezone='UTC', password=''):
"""
Creates a new user.
"""
data={
'name': name,
'email': email,
'title': title,
'mention_name': mention_name,
'is_group_admin': is_group_admin,
'timezone': timezone, # TODO: Support timezone objects
'password': password,
}
resp = self._requests.post(self.users_url, data=data)
return Linker._obj_from_text(resp.text, self._requests)
def get_room(self, id_or_name, **kwargs):
return self.fromurl('{0}/v2/room/{1}'.format(self.endpoint, id_or_name), **kwargs)
def get_user(self, id_or_email, **kwargs):
return self.fromurl('{0}/v2/user/{1}'.format(self.endpoint, id_or_email), **kwargs)
def get_emoticon(self, id_or_shortcut, **kwargs):
return self.fromurl('{0}/v2/emoticon/{1}'.format(self.endpoint, id_or_shortcut), **kwargs)
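# A hedged usage sketch; it runs only when a real API token is supplied
# via the environment, and the room name below is an assumption.
if __name__ == '__main__':
    import os
    _token = os.environ.get('HIPCHAT_TOKEN')
    if _token:
        hc = HypChat(_token)
        hc.get_room('Engineering').notification('build finished',
                                                color='green')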
| {
"repo_name": "nprapps/HypChat",
"path": "hypchat/__init__.py",
"copies": "1",
"size": "4808",
"license": "mit",
"hash": 4186828011098622000,
"line_mean": 30.8410596026,
"line_max": 114,
"alpha_frac": 0.6757487521,
"autogenerated": false,
"ratio": 3.006879299562226,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.897894349715862,
"avg_score": 0.04073691090072124,
"num_lines": 151
} |
from __future__ import absolute_import, division
import numpy as np
from astropy.io import fits
from scipy.signal import fftconvolve as conv
from os import getcwd, mkdir, extsep
from os.path import join, basename, dirname, isdir
import matplotlib.pyplot as plt
from matplotlib.gridspec import GridSpec
from matplotlib.widgets import Slider
__author__= "Juhyung Kang"
__email__ = "jhkang@astro.snu.ac.kr"
def runDAVE(data0, output=False, overwrite=False, fwhm=10, adv=1, source=0,
noise=1, winFunc='Gaussian', outSig=False):
"""
Differential Affine Velocity Estimator for all spatial points.
    This is a Python port of the dave_multi.pro IDL code written by J. Chae (2009).
Parameters
----------
data0: `~numpy.ndarray` or fits file.
Three-dimensional input array with shape (nt, ny, nx) or
fits file with same dimension and shape.
output: `str`, optional
        The name of the output fits file.
        If False, an 'ofe' directory is created next to the input and the *_dave.fits file is written there.
Default is `False`.
fwhm: `int`, optional
FWHM of the window function (should be even positive integer)
Returns
-------
    output : `str`
        The name of the written fits file.
"""
if type(data0) == str:
data = fits.getdata(data0)
dirn = join(dirname(data0), 'ofe')
if not isdir(dirn):
mkdir(dirn)
fname = f'{basename(data0).split(extsep)[0]}_dave.fits'
else:
data = data0
dirn = getcwd()
fname = 'dave.fits'
if data.ndim != 3:
raise ValueError('data must be 3-D array.')
if not output:
output = join(dirn, fname)
dnt, dny, dnx = data.shape
psw = adv
qsw = source
    # Constructing window function
winFunc = winFunc.capitalize()
h = int(fwhm/2)
if winFunc == 'Square':
mf = 1
else:
mf = 2
nx = 2*h*mf+1
ny = 2*h*mf+1
x = -(np.arange(nx)-nx//2)
y = -(np.arange(ny)-ny//2)
if winFunc == 'Square':
w = np.ones((ny, nx))
elif winFunc == 'Gaussian':
w = np.exp(-np.log(2)*((x/h)**2+(y[:,None]/h)**2))
elif winFunc == 'Hanning':
w = (1+np.cos(np.pi*x/h/2))*(1+np.cos(np.pi*y/h/2))/4
else:
raise ValueError("winFunc must be one of ('Square', 'Gaussian', "
"'Hanning')")
w /= noise**2
    # Constructing coefficient arrays
im = data
imT = (np.roll(im, -1, axis=0) - np.roll(im, 1, axis=0))/2
imY, imX = np.gradient(im, axis=(1, 2))
npar = 6+qsw
A = np.empty((npar, npar, dnt, dny, dnx))
A[0,0] = conv(imX*imX, w[None, :, :],
'same', axes=(1, 2)) # U0, U0
A[1,0] = A[0,1] = conv(imY*imX, w[None, :, :],
'same', axes=(1, 2)) # V0, U0
A[1,1] = conv(imY*imY, w[None, :, :],
'same', axes=(1, 2)) # V0, V0
A[2,0] = A[0,2] = conv(imX*imX, x*w[None, :, :],
'same', axes=(1, 2)) \
+ psw*conv(imX*im, w[None, :, :],
'same',axes=(1, 2)) # Ux, U0
A[2,1] = A[1,2] = conv(imX*imY, x*w[None, :, :],
'same', axes=(1, 2)) \
+ psw*conv(imY*im, w[None, :, :],
'same', axes=(1, 2)) # Ux, V0
A[2,2] = conv(imX*imX, x*x*w[None, :, :],
'same', axes=(1, 2)) \
+ 2*psw*conv(imX*im, x*w[None, :, :],
'same', axes=(1, 2)) \
+ psw**2*conv(im*im, w[None, :, :],
'same', axes=(1, 2)) # Ux, Ux
A[3,0] = A[0,3] = conv(imY*imX, y[None,:,None]*w[None, :, :],
'same', axes=(1, 2)) \
+ psw*conv(imX*im, w[None, :, :],
'same', axes=(1, 2)) # Vy, U0
A[3,1] = A[1,3] = conv(imY*imY, y[None,:,None]*w[None, :, :],
'same', axes=(1, 2)) \
+ psw*conv(imY*im, w[None, :, :],
'same', axes=(1, 2)) # Vy, V0
A[3,2] = A[2,3] = conv(imY*imX, y[None,:,None]*x*w[None, :, :],
'same', axes=(1, 2)) \
+ psw*conv(imY*im, y[None,:,None]*w[None, :, :],
                              'same', axes=(1, 2)) \
+ psw*conv(imX*im, x*w[None, :, :],
'same', axes=(1, 2)) \
+ psw**2*conv(im*im, w[None, :, :],
'same', axes=(1, 2)) # Vy, Ux
A[3,3] = conv(imY*imY, y[None,:,None]*y[None,:,None]*w[None, :, :],
'same', axes=(1, 2)) \
+ 2*psw*conv(imY*im, y[None,:,None]*w[None, :, :],
'same', axes=(1, 2)) \
+ psw**2*conv(im*im, w[None, :, :],
'same', axes=(1, 2)) # Vy, Vy
A[4,0] = A[0,4] = conv(imX*imX, y[None,:,None]*w[None, :, :],
'same', axes=(1, 2)) # Uy, U0
A[4,1] = A[1,4] = conv(imX*imY, y[None,:,None]*w[None, :, :],
'same', axes=(1, 2)) # Uy, V0
A[4,2] = A[2,4] = conv(imX*imX, y[None,:,None]*x*w[None, :, :],
'same', axes=(1, 2)) \
+ psw*conv(imX*im, y[None,:,None]*w[None, :, :],
'same', axes=(1, 2)) # Uy, Ux
A[4,3] = A[3,4] = conv(imX*imY,
y[None,:,None]*y[None,:,None]*w[None,:,:],
'same', axes=(1, 2)) \
+ psw*conv(imX*im, y[None,:,None]*w[None,:,:],
'same', axes=(1, 2)) # Uy, Vy
A[4,4] = conv(imX*imX, y[None,:,None]*y[None,:,None]*w[None,:,:],
'same', axes=(1, 2)) # Uy, Uy
A[5,0] = A[0,5] = conv(imY*imX, x*w[None,:,:],
'same', axes=(1, 2)) # Vx, U0
A[5,1] = A[1,5] = conv(imY*imY, x*w[None,:,:],
'same', axes=(1, 2)) # Vx, V0
A[5,2] = A[2,5] = conv(imY*imX, x*x*w[None,:,:],
'same', axes=(1, 2)) \
+ psw*conv(im*imY, x*w[None,:,:],
'same', axes=(1, 2)) # Vx, Ux
A[5,3] = A[3,5] = conv(imY*imY, x*y[None,:,None]*w[None,:,:],
'same', axes=(1, 2)) \
+ psw*conv(im*imY, x*w[None,:,:],
'same', axes=(1, 2)) # Vx, Vy
A[5,4] = A[4,5] = conv(imY*imX, x*y[None,:,None]*w[None,:,:],
'same', axes=(1, 2)) # Vx, Uy
A[5,5] = conv(imY*imY, x*x*w[None,:,:],
'same', axes=(1, 2)) #Vx, Vx
if qsw:
A[6,0] = A[0,6] = -qsw*conv(im*imX, w[None,:,:],
'same', axes=(1, 2)) # mu, U0
A[6,1] = A[1,6] = -qsw*conv(im*imY, w[None,:,:],
'same', axes=(1, 2)) # mu, V0
A[6,2] = A[2,6] = -qsw*conv(im*imX, x*w[None,:,:],
'same', axes=(1, 2)) \
- qsw*psw*conv(im*im, w[None,:,:],
'same', axes=(1, 2)) # mu, Ux
A[6,3] = A[3,6] = -qsw*conv(im*imY, y[None,:,None]*w[None,:,:],
'same', axes=(1, 2)) \
- qsw*psw*conv(im*im, w[None,:,:],
'same', axes=(1, 2)) # mu, Vy
A[6,4] = A[4,6] = -qsw*conv(im*imX, y[None,:,None]*w[None,:,:],
'same', axes=(1,2)) # mu, Uy
A[6,5] = A[5,6] = -qsw*conv(im*imY, x*w[None,:,:],
'same', axes=(1, 2)) # mu, Vx
A[6,6] = -qsw**2*conv(im*im, w[None,:,:],
'same', axes=(1,2)) # mu, mu
B = np.empty((npar, dnt, dny, dnx))
B[0] = conv(imT*imX, -w[None,:,:], 'same', axes=(1,2))
B[1] = conv(imT*imY, -w[None,:,:], 'same', axes=(1,2))
B[2] = conv(imT*imX, -x*w[None,:,:], 'same', axes=(1,2)) \
+ psw*conv(imT*im, -w[None,:,:], 'same', axes=(1,2))
B[3] = conv(imT*imY, -y[None,:,None]*w[None,:,:], 'same', axes=(1,2)) \
+ psw*conv(imT*im, -w[None,:,:], 'same', axes=(1,2))
B[4] = conv(imT*imX, -y[None,:,None]*w[None,:,:], 'same', axes=(1,2))
B[5] = conv(imT*imY, -x*w[None,:,:], 'same', axes=(1,2))
if qsw:
B[6] = qsw*conv(imT*(-im), -w[None,:,:], 'same', axes=(1,2))
dave = np.linalg.solve(A.T, B.T).T
if not outSig:
hdu = fits.PrimaryHDU(dave)
hdu.header['type'] = 'DAVE'
hdu.writeto(output, overwrite=overwrite)
# else: #TODO sigma and chisq calculation
return output
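# Hedged sanity check of the Gaussian window built above: with half-width
# h (fwhm=10 -> h=5) the window drops to one half of its peak exactly one
# h from the centre, before the 1/noise**2 scaling. Values are illustrative.
_h = 5
_x = -(np.arange(4*_h + 1) - 2*_h)
_w = np.exp(-np.log(2)*((_x/_h)**2 + (_x[:, None]/_h)**2))
assert abs(_w[2*_h, 2*_h] - 1.0) < 1e-12       # peak at the centre
assert abs(_w[2*_h, 2*_h + _h] - 0.5) < 1e-12  # half maximum one h away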
class readOFE:
def __init__(self, data0, ofeFile, scale=None, dt=None, gMethod=True):
"""
        Read the optical flow estimation (OFE) result file.
        Parameters
        ----------
        data0: `str` or `~numpy.ndarray`
            The original data (fits file name or 3-D array).
        ofeFile: `str`
            The fits file written by `runDAVE`.
        scale: `float`
            Spatial pixel scale (arcsec); required if data0 is an array.
        dt: `float`
            Time cadence; required if data0 is an array.
"""
if type(data0) == str:
data = fits.getdata(data0)
header = fits.getheader(data0)
nx = header['naxis1']
ny = header['naxis2']
cx = header['crval1']
cy = header['crval2']
cxp = header['crpix1']
cyp = header['crpix2']
dx = header['cdelt1']
dy = header['cdelt2']
dt = header['cdelt3']
l = -(cxp+0.5)*dx+cx
b = -(cyp+0.5)*dy+cy
r = (nx-(cxp+0.5))*dx+cx
t = (ny-(cyp+0.5))*dy+cy
scale = dx
        else:
            data = data0.copy()
            ny, nx = data.shape[1:]  # spatial shape of the cube
            l = -0.5
            b = -0.5
            r = nx-0.5
            t = ny-0.5
            dx = 1
            dy = 1
self.extent = [l, r, b, t]
        self.data = data
        if self.data.ndim != 3:
            raise ValueError("data must have 3 dimensions")
        self.nt, self.ny, self.nx = self.data.shape
self._xarr = np.linspace(self.extent[0]+dx*0.5,
self.extent[1]-dx*0.5,
self.nx)
self._yarr = np.linspace(self.extent[2]+dy*0.5,
self.extent[3]-dy*0.5,
self.ny)
if not scale or not dt:
raise KeyError("If data is an `~numpy.ndarray`, "
"'scale' and 'dt' must be given.")
unit = scale*725/dt # km/s
self.ofe = fits.getdata(ofeFile)
self.oheader = fits.getheader(ofeFile)
self.otype = self.oheader['type']
self.U0 = self.ofe[0]*unit
self.V0 = self.ofe[1]*unit
self.Ux = self.ofe[2]*unit
self.Vy = self.ofe[3]*unit
self.Uy = self.ofe[4]*unit
self.Vx = self.ofe[5]*unit
self.C = np.arctan2(self.V0, self.U0)
if gMethod:
Uy, Ux = np.gradient(self.U0, axis=(1,2))
Vy, Vx = np.gradient(self.V0, axis=(1,2))
else:
Ux = self.Ux
Uy = self.Uy
Vx = self.Vx
Vy = self.Vy
self.div = Ux + Vy
self.curl = Vx - Uy
def imshow(self, t=1, div=True, curl=True, **kwargs):
"""
Display an data with velocity vector field.
Parameters
----------
        t: int
            The time index of the frame to display.
            Default is 1.
div: bool
If True, display divergence map.
curl: bool
If True, display curl map.
"""
try:
plt.rcParams['keymap.back'].remove('left')
plt.rcParams['keymap.forward'].remove('right')
except:
pass
self.t = t
self._onDiv = div
self._onCurl = curl
kwargs['extent'] = kwargs.pop('extent', self.extent)
kwargs['origin'] = kwargs.pop('origin', 'lower')
width = kwargs.pop('width', 0.004)
scale = kwargs.pop('scale', 200)
if div or curl:
nw = div + curl + 1
else:
nw = 1
self.nw = nw
self.fig = plt.figure(self.otype, figsize=(6*nw,6), clear=True)
gs = GridSpec(11, nw, wspace=0, hspace=0)
self.axVec = self.fig.add_subplot(gs[:10, 0])
self.axSlider = self.fig.add_subplot(gs[10, :])
self.im = self.axVec.imshow(self.data[self.t], **kwargs)
self.vec = self.axVec.quiver(self._xarr, self._yarr,
self.U0[self.t],
self.V0[self.t],
self.C[self.t],
cmap=plt.cm.hsv,
width=width,
scale=scale)
self.axVec.set_xlabel('X')
self.axVec.set_ylabel('Y')
self.axVec.set_title(r'$\mathbf{v}$ field ' f'({self.otype})')
if div:
            self.axDiv = self.fig.add_subplot(gs[:10, 1], sharex=self.axVec,
sharey=self.axVec)
self.imDiv = self.axDiv.imshow(self.div[self.t],
                                           plt.cm.seismic,
**kwargs)
self.axDiv.tick_params(labelbottom=False, labelleft=False)
self.axDiv.set_title(r'$\mathbf{\nabla} \cdot$'
r'$\mathbf{v}$')
if curl:
self.axCurl = self.fig.add_subplot(gs[:10, -1], sharex=self.axVec,
sharey=self.axVec)
self.imCurl = self.axCurl.imshow(self.curl[self.t],
plt.cm.PiYG,
**kwargs)
self.axCurl.tick_params(labelbottom=False, labelleft=False)
            self.axCurl.set_title(r'$\mathbf{\nabla} \times \mathbf{v}$')
self.sT = Slider(self.axSlider, 'Time(pix)', 0, self.nt-1,
valinit=self.t, valstep=1, valfmt="%i")
self.sT.on_changed(self._chTime)
self.fig.tight_layout()
self.fig.canvas.mpl_connect('key_press_event', self._onKey)
def _chTime(self, val):
self.t = int(self.sT.val)
self.im.set_data(self.data[self.t])
self.vec.set_UVC(self.U0[self.t], self.V0[self.t], self.C[self.t])
if self._onDiv:
self.imDiv.set_data(self.div[self.t])
if self._onCurl:
self.imCurl.set_data(self.curl[self.t])
def _onKey(self, event):
if event.key == 'left':
if self.t > 0:
self.t -= 1
else:
self.t = self.nt-1
self.sT.set_val(self.t)
elif event.key == 'right':
if self.t < self.nt-1:
self.t += 1
else:
self.t = 0
self.sT.set_val(self.t)
# def runNAVE():
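# A hedged end-to-end sketch; the file name is a made-up placeholder and
# nothing runs unless it exists on disk.
from os.path import isfile
if isfile('sample_cube.fts'):
    _out = runDAVE('sample_cube.fts', overwrite=True, fwhm=10)
    _ofe = readOFE('sample_cube.fts', _out)
    _ofe.imshow(t=0)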
| {
"repo_name": "SNU-sunday/fisspy",
"path": "fisspy/analysis/ofe.py",
"copies": "1",
"size": "15190",
"license": "bsd-2-clause",
"hash": -1803362446908679200,
"line_mean": 38.8687664042,
"line_max": 85,
"alpha_frac": 0.4192231731,
"autogenerated": false,
"ratio": 3.1216605014385532,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.8996049523807366,
"avg_score": 0.008966830146237408,
"num_lines": 381
} |
from __future__ import absolute_import, division
import numpy as np
from astropy.io import fits
from scipy.signal import savgol_filter
from scipy.signal import fftconvolve as conv
from fisspy import cm
import matplotlib.pyplot as plt
from astropy.constants import c
from fisspy.analysis.doppler import lambdameter
from fisspy.image import interactive_image as IAI
from fisspy.read.readbase import getRaster, getHeader, readFrame
from fisspy.analysis.filter import FourierFilter
from astropy.time import Time
import astropy.units as u
from matplotlib import gridspec
from fisspy.analysis.wavelet import Wavelet
from matplotlib import ticker
from fisspy.analysis.tdmap import TDmap
#from mpl_toolkits.axes_grid1 import make_axes_locatable
__author__= "Juhyung Kang"
__email__ = "jhkang@astro.snu.ac.kr"
__all__ = ["rawData", "FISS", "FD"]
class rawData:
"""
Read a raw file of the FISS.
Parameters
----------
file : `str`
File name of the raw fts data file of the FISS.
Examples
--------
>>> from fisspy.read import rawData
>>> f = 'D:/fisspy_examples/raw_A.fts'
>>> raw = rawData(f)
>>> raw.imshow()
"""
def __init__(self, file):
if file.find('A.fts') != -1 or file.find('B.fts') != -1:
self.ftype = 'raw'
scale = 0.16
self.filename = file
self.xDelt = scale
self.yDelt = scale
self.header = fits.getheader(file)
self.data = fits.getdata(file)
self.data = self.data.transpose([1, 0, 2])
self.ndim = self.header['naxis']
self.cam = file.split('.fts')[0][-1]
if self.cam == 'A':
self.wvDelt = 0.019
elif self.cam == 'B':
self.wvDelt = -0.026
self.nwv = self.header['naxis1']
self.ny = self.header['naxis2']
self.nx = self.header['naxis3']
self.date = self.header['date']
self.band = self.header['wavelen'][:4]
#simple wavelength calibration
self.wave = (np.arange(self.nwv)-self.nwv//2)*self.wvDelt
self.centralWavelength = 0.
self.extentRaster = [0, self.nx*self.xDelt,
0, self.ny*self.yDelt]
self.extentSpectro = [self.wave.min()-self.wvDelt/2,
self.wave.max()+self.wvDelt/2,
0, self.ny*self.yDelt]
        if self.band == '6562' or self.band == '8542':
self.set = '1'
elif self.band == '5889' or self.band == '5434':
self.set = '2'
self.cmap = plt.cm.gray
def getRaster(self, wv, hw=0.05):
"""
        Make a raster image for a given wavelength within a width of 2*hw.
Parameters
----------
wv : float
Referenced wavelength.
hw : float
A half-width of wavelength to be integrated
Default is 0.05
Example
-------
>>> raster = raw.getRaster(0.5)
"""
self.wv = wv
return getRaster(self.data, self.wave, wv, self.wvDelt, hw=hw)
def imshow(self, x=None, y=None, wv=None, scale='minMax',
sigFactor=3, helpBox=True, **kwargs):
"""
Draw the interactive image for single band FISS raw data.
Parameters
----------
x : `float`
X position that you draw a spectral profile.
Default is image center.
y : `float`
Y position that you draw a spectral profile.
Default is image center.
wv : `float`
            Wavelength position at which you draw the raster image.
Default is central wavelength.
scale : `string`
            Scale method of the colorbar limits.
            Default is minMax.
            Options: 'minMax', 'std', 'log'
sigFactor : `float`
Factor of standard deviation.
            This is used if scale is set to 'std'.
helpBox : `bool`
            Show the interactive keys and a simple explanation.
Default is True
Other Parameters
----------------
**kwargs : `~matplotlib.pyplot` properties
"""
try:
plt.rcParams['keymap.back'].remove('left')
plt.rcParams['keymap.forward'].remove('right')
except:
pass
if not x:
x = self.nx//2*self.xDelt
if not y:
y = self.ny//2*self.yDelt
if not wv:
wv = self.centralWavelength
self.x = x
self.y = y
self.wv = wv
self.imInterp = kwargs.get('interpolation', 'bilinear')
kwargs['interpolation'] = self.imInterp
self.iIm = IAI.singleBand(self, x, y, wv,
scale=scale, sigFactor=sigFactor,
                                  helpBox=helpBox, **kwargs)  # interactive display built on fisspy.image.interactive_image.singleBand
plt.show()
def chRasterClim(self, cmin, cmax):
self.iIm.chRasterClim(cmin, cmax)
def chSpectroClim(self, cmin, cmax):
self.iIm.chSpectroClim(cmin, cmax)
def chcmap(self, cmap):
self.iIm.chcmap(cmap)
def chRaster(self, wv):
self.iIm.wv = wv
self.iIm._chRaster()
def chSpect(self, x, y):
self.iIm.x = x
self.iIm.y = y
self.iIm._chSpect()
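# Hedged numeric sketch of the simple wavelength axis used in rawData:
# pixel nwv//2 is the relative line centre and neighbouring pixels step
# by wvDelt. The numbers below are illustrative camera-A values.
_nwv, _wvDelt = 512, 0.019
_wave = (np.arange(_nwv) - _nwv//2) * _wvDelt
assert _wave[_nwv//2] == 0.0
assert abs(_wave[_nwv//2 + 1] - _wvDelt) < 1e-12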
class FISS:
"""
Read a FISS data file (proc or comp).
Parameters
----------
file : `str`
File name of the FISS fts data.
x1 : `int`, optional
A left limit index of the frame along the scan direction
x2 : `int`, optional
A right limit index of the frame along the scan direction
If None, read all data from x1 to the end of the scan direction.
    y1 : `int`, optional
        A lower limit index of the frame along the slit direction
    y2 : `int`, optional
        An upper limit index of the frame along the slit direction
        If None, read all data from y1 to the end of the slit direction.
    ncoeff : `int`, optional
        The number of coefficients to be used for
        the construction of the frame in a pca file.
    noiseSuppression : `bool`, optional
        If True, a Savitzky-Golay noise filter is applied along the wavelength axis.
        Default is False.
    simpleWaveCalib : `bool`, optional
        If True, the wavelength is simply calibrated by using the header parameters.
        Default is True.
    absScale : `bool`, optional
        If False, the central wavelength is set to zero.
        If True, the central wavelength is set to the laboratory-frame wavelength.
        It works only if simpleWaveCalib is True.
        Default is True.
Other Parameters
----------------
    **kwargs : `~scipy.signal.savgol_filter` properties
See also
--------
`~scipy.signal.savgol_filter`
Examples
--------
>>> from fisspy import read
>>> import fisspy.data.sample
>>> fiss = read.FISS(fisspy.data.sample.FISS_IMAGE)
"""
def __init__(self, file, x1=0, x2=None, y1=0, y2=None, ncoeff=False, noiseSuppression=False,
simpleWaveCalib=True, absScale=True, **kwargs):
        if file.find('1.fts') != -1:
            self.ftype = 'proc'
        elif file.find('c.fts') != -1:
            self.ftype = 'comp'
        else:
            raise ValueError("Input file is neither proc nor comp data")
self.x1 = x1
self.x2 = x2
self.y1 = y1
self.y2 = y2
self.filename = file
self.xDelt = 0.16
self.yDelt = 0.16
self.header = getHeader(file)
self.pfile = self.header.pop('pfile', False)
self.data = readFrame(file, self.pfile, x1=x1, x2=x2, y1=y1, y2=y2, ncoeff=ncoeff)
self.ndim = self.header['naxis']
self.ny, self.nx, self.nwv = self.data.shape
self.wvDelt = self.header['cdelt1']
self.date = self.header['date']
self.band = self.header['wavelen'][:4]
self.refProfile = self.data.mean((0,1))
self.wave = self._waveCalibration(simpleWaveCalib= simpleWaveCalib,
absScale= absScale, **kwargs)
self.noiseSuppression = noiseSuppression
if noiseSuppression:
self._noiseSuppression()
if self.band == '6562':
self.cam = 'A'
self.set = '1'
self.cmap = cm.ha
elif self.band == '8542':
self.cam = 'B'
self.set = '1'
self.cmap = cm.ca
elif self.band == '5889':
self.cam = 'A'
self.set = '2'
self.cmap = cm.na
elif self.band == '5434':
self.cam = 'B'
self.set = '2'
self.cmap = cm.fe
self.extentRaster = [0, self.nx*self.xDelt,
0, self.ny*self.yDelt]
self.extentSpectro = [self.wave.min()-self.wvDelt/2,
self.wave.max()+self.wvDelt/2,
0, self.ny*self.yDelt]
def reload(self, x1=0, x2=None, y1=0, y2=None, ncoeff=False, noiseSuppression=False):
"""
Reload the FISS data.
Parameters
----------
x1 : `int`, optional
A left limit index of the frame along the scan direction
x2 : `int`, optional
A right limit index of the frame along the scan direction
If None, read all data from x1 to the end of the scan direction.
        y1 : `int`, optional
            A lower limit index of the frame along the slit direction
        y2 : `int`, optional
            An upper limit index of the frame along the slit direction
            If None, read all data from y1 to the end of the slit direction.
        ncoeff : `int`, optional
            The number of coefficients to be used for
            the construction of the frame in a pca file.
        noiseSuppression : `bool`, optional
            If True, a Savitzky-Golay noise filter is applied along the wavelength axis.
            Default is False.
"""
self.data = readFrame(self.filename, self.pfile, x1=x1, x2=x2, y1=y1, y2=y2, ncoeff=ncoeff)
self.ny, self.nx, self.nwv = self.data.shape
self.x1 = x1
self.x2 = x2
self.y1 = y1
self.y2 = y2
self.extentRaster = [0, self.nx*self.xDelt,
0, self.ny*self.yDelt]
self.extentSpectro = [self.wave.min()-self.wvDelt/2,
self.wave.max()+self.wvDelt/2,
0, self.ny*self.yDelt]
if noiseSuppression:
self._noiseSuppression()
def getRaster(self, wv, hw=0.05):
"""
        Make a raster image for a given wavelength within a width of 2*hw.
Parameters
----------
wv : float
Referenced wavelength.
hw : float
A half-width of wavelength to be integrated
Default is 0.05
Example
-------
>>> from fisspy.read import FISS
>>> fiss = FISS(file)
>>> raster = fiss.getRaster(0.5)
"""
self.wv = wv
return getRaster(self.data, self.wave, wv, self.wvDelt, hw=hw)
def _waveCalibration(self, simpleWaveCalib= True, absScale= True,
**kwargs):
"""
Wavelength calibration
        If simpleWaveCalib is True, the wavelength is calibrated by using the information in the header.
        If absScale is True, the central wavelength is set to the laboratory-frame wavelength,
        but if absScale is False, the central wavelength is set to zero.
"""
method = kwargs.pop('method', True)
if simpleWaveCalib:
self.lamb0 = self.header['crval1']
if absScale:
self.centralWavelength = self.header['crval1']
return (np.arange(self.nwv) -
self.header['crpix1']) * self.header['cdelt1'] + self.header['crval1']
else:
self.centralWavelength = 0
return (np.arange(self.nwv) -
self.header['crpix1']) * self.header['cdelt1']
else:
if method:
if self.band == '6562':
line=np.array([6561.097,6564.206])
lamb0=6562.817
dldw=0.019182
elif self.band == '8542':
line=np.array([8540.817,8546.222])
lamb0=8542.090
dldw=-0.026252
elif self.band == '5889':
line=np.array([5889.951,5892.898])
lamb0=5889.9509
dldw=0.016847
elif self.band == '5434':
line=np.array([5434.524,5436.596])
lamb0=5434.5235
dldw=-0.016847
else:
if self.band == '6562':
line=np.array([6562.817,6559.580])
lamb0=6562.817
dldw=0.019182
elif self.band == '8542':
line=np.array([8542.089,8537.930])
lamb0=8542.090
dldw=-0.026252
w = np.arange(self.nwv)
wl = np.zeros(2)
wc = self.refProfile[20:self.nwv-20].argmin() + 20
lamb = (w - wc) * dldw + lamb0
for i in range(2):
mask = np.abs(lamb - line[i]) <= 0.3
wtmp = w[mask]
ptmp = conv(self.refProfile[mask], [-1, 2, -1], 'same')
mask2 = ptmp[1:-1].argmin() + 1
try:
wtmp = wtmp[mask2-3:mask2+4]
ptmp = ptmp[mask2-3:mask2+4]
except:
                    raise ValueError('Failed to calibrate the wavelength.\n'
                            'Please change the method %s to %s' %(repr(method), repr(not method)))
c = np.polyfit(wtmp - np.median(wtmp), ptmp, 2)
wl[i] = np.median(wtmp) - c[1]/(2*c[0])
dldw = (line[1] - line[0])/(wl[1] - wl[0])
wc = wl[0] - (line[0] - lamb0)/dldw
return (w - wc) * dldw
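    # Editor's note (illustrative): with simpleWaveCalib the axis is the
    # standard linear FITS dispersion relation
    #     wavelength = (pixel - CRPIX1) * CDELT1 + CRVAL1,
    # with CRVAL1 dropped when absScale is False (relative scale). For
    # example, CRPIX1=256, CDELT1=0.019, CRVAL1=6562.817 maps pixel 256 to
    # 6562.817 A.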
def _noiseSuppression(self, **kwargs):
window_length = kwargs.pop('window_length', 7)
polyorder = kwargs.pop('polyorder', 2)
deriv = kwargs.pop('deriv', 0)
delta = kwargs.pop('delta', 1.0)
mode = kwargs.pop('mode', 'interp')
cval = kwargs.pop('cval', 0.0)
self.data = savgol_filter(self.data, window_length, polyorder,
deriv= deriv, delta= delta, cval= cval,
mode= mode)
self.noiseSuppression = True
def lambdaMeter(self, hw= 0.03, sp= 5e3, wvRange= False,
wvinput= True, shift2velocity= True):
"""
        Calculate the Doppler shift by using the lambda-meter (bisector) method.
        Parameters
        ----------
        shift2velocity: `bool`
            Convert the Doppler shift value to a velocity (unit: km s^-1).
        wvinput : bool
            There are two cases.
        * Case wvinput==True
                hw : float
                    A half-width of the horizontal line segment.
        Returns
        -------
        wc : nd ndarray
            N-dimensional array of central wavelength values.
        intc : nd ndarray
            N-dimensional array of intensities of the line segment.
        * Case wvinput==False
                sp : float
                    An intensity of the horizontal line segment.
        Returns
        -------
        wc : nd ndarray
            N-dimensional array of central wavelength values.
        hwc : nd ndarray
            N-dimensional array of half-widths of the line segment.
"""
lineShift, intensity = lambdameter(self.wave, self.data,
ref_spectrum= self.refProfile,
wvRange= wvRange, hw= hw,
wvinput= wvinput)
if shift2velocity:
LOSvelocity = (lineShift-self.centralWavelength) * c.to('km/s').value/self.lamb0
return LOSvelocity, intensity
else:
return lineShift, intensity
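    # Editor's note (illustrative): the conversion above is the
    # non-relativistic Doppler formula v = c * (wc - lambda0) / lambda0 with
    # c in km/s; e.g. a +0.1 A shift at H-alpha (6562.8 A) gives
    # v ~ +4.6 km/s.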
def imshow(self, x=None, y=None, wv=None, scale='minMax',
sigFactor=3, helpBox=True, **kwargs):
"""
Draw interactive FISS raster, spectrogram and profile for single band.
Parameters
----------
x : `float`
            X position at which the spectral profile is drawn.
            Default is the image center.
        y : `float`
            Y position at which the spectral profile is drawn.
            Default is the image center.
        wv : `float`
            Wavelength position at which the raster image is drawn.
            Default is the central wavelength.
        scale : `string`
            Scale method of the colorbar limit.
            Default is minMax.
            option: 'minMax', 'std', 'log'
        sigFactor : `float`
            Factor of the standard deviation.
            This is used when scale is set to 'std'.
        helpBox : `bool`
            Show the interactive keys and a simple explanation.
Default is True
Other Parameters
----------------
**kwargs : `~matplotlib.pyplot` properties
"""
try:
plt.rcParams['keymap.back'].remove('left')
plt.rcParams['keymap.forward'].remove('right')
except:
pass
        if x is None:
            x = self.nx//2*self.xDelt
        if y is None:
            y = self.ny//2*self.yDelt
        if wv is None:
            wv = self.centralWavelength
self.x = x
self.y = y
self.wv = wv
self.imInterp = kwargs.get('interpolation', 'bilinear')
self.cmap = kwargs.pop('cmap', self.cmap)
kwargs['interpolation'] = self.imInterp
self.iIm = IAI.singleBand(self, x, y, wv,
scale=scale, sigFactor=sigFactor,
helpBox=helpBox, **kwargs) # Basic resource to make interactive image is `~fisspy.image.tdmap.TDmap`
def chRasterClim(self, cmin, cmax):
self.iIm.chRasterClim(cmin, cmax)
def chSpectroClim(self, cmin, cmax):
self.iIm.chSpectroClim(cmin, cmax)
def chcmap(self, cmap):
self.iIm.chcmap(cmap)
def chRaster(self, wv):
self.iIm.wv = wv
self.iIm._chRaster()
def chSpect(self, x, y):
self.iIm.x = x
self.iIm.y = y
self.iIm._chSpect()
class FD:
"""
Read the FISS Data (FD) file.
Parameters
----------
fdFile: `str`
File name of the FISS Data file.
maskFile: `str`
File name of the mask file.
timeFile: `str`
File name of the time file.
    maskValue: `float`
        Value filled into the masked-out pixels.
        Default is -1 (no masking).
    spatialAvg: `bool`
        Subtract the spatially averaged value from all pixels.
    timeAvg: `bool`
        Subtract the temporally averaged value from all pixels.
"""
def __init__(self, fdFile, maskFile, timeFile, maskValue=-1,
spatialAvg=False, timeAvg=False):
self.maskValue = maskValue
self._spAvg = spatialAvg
self._timeAvg = timeAvg
self.ftype = 'FD'
self.data = fits.getdata(fdFile).astype(float)
self.fdFile = fdFile
self.header = fits.getheader(fdFile)
self.time = fits.getdata(timeFile)
self.reftpix = np.abs(self.time-0).argmin()
self.xDelt = self.yDelt = 0.16
self.min0 = np.min(self.data, axis=(1,2))
self.max0 = np.max(self.data, axis=(1,2))
unit = fits.getheader(timeFile)['unit']
if unit == 'min':
self.time *= 60
self.mask = fits.getdata(maskFile).astype(bool)
self.dt = np.median(self.time-np.roll(self.time, 1))
self.nt, self.ny, self.nx, self.nid = self.data.shape
reftime = self.header['reftime']
self.reftime = _isoRefTime(reftime)
self.Time = self.reftime + self.time * u.second
self.timei = self.time-self.time[0]
self.header['sttime'] = self.Time[0].value
wid = self.header['ID1'][:2]
if wid == 'HI':
self.cmap = [cm.ha]*self.nid
elif wid == 'Ca':
self.cmap = [cm.ca]*self.nid
elif wid == 'Na':
self.cmap = [cm.na]*self.nid
elif wid == 'Fe':
self.cmap = [cm.fe]*self.nid
try:
xpos = self.header['xpos']
ypos = self.header['ypos']
except:
xpos = self.header.get('crval1', 0)
ypos = self.header.get('crval2', 0)
self.xpos = xpos
self.ypos = ypos
xm = xpos - self.nx/2*self.xDelt
xM = xpos + self.nx/2*self.xDelt
ym = ypos - self.ny/2*self.yDelt
yM = ypos + self.ny/2*self.yDelt
self.extent = [xm, xM, ym, yM]
self._xar = np.linspace(xm+self.xDelt/2,
xM-self.xDelt/2, self.nx)
self._yar = np.linspace(ym+self.yDelt/2,
yM-self.yDelt/2, self.ny)
if maskValue != -1:
self._mask(maskValue)
if spatialAvg:
self.spatialAverage()
if timeAvg:
self.timeAverage()
self.min = self.min0[self.reftpix]
self.max = self.max0[self.reftpix]
self.idh = self.header['ID*']
for i in range(self.nid):
if self.idh[i][-1] == 'V':
self.cmap[i] = plt.cm.RdBu_r
tmp = np.abs(self.max[i]-self.min[i])/2*0.7
if tmp > 15:
tmp = 0.8
self.min[i] = -tmp
self.max[i] = tmp
def _mask(self, val):
self.data[np.invert(self.mask),:] = val
def spatialAverage(self):
for i in range(self.nt):
med = np.median(self.data[i,self.mask[i]], 0)
self.data[i] -= med
self.min0[i] -= med
self.max0[i] -= med
def timeAverage(self):
med = np.median(self.data, 0)
self.data -= med
self.min0 -= np.median(med, (0,1))
self.max0 -= np.median(med, (0,1))
def originalData(self, maskValue=-1, spatialAvg=False, timeAvg=False):
self.data = fits.getdata(self.fdFile).astype(float)
self.min0 = np.min(self.data, axis=(1,2))
self.max0 = np.max(self.data, axis=(1,2))
if maskValue != -1:
self.maskValue = maskValue
self._mask(maskValue)
if spatialAvg:
self.spatialAverage()
if timeAvg:
self.timeAverage()
self.min = self.min0[self.reftpix]
self.max = self.max0[self.reftpix]
for i in range(self.nid):
if self.idh[i][-1] == 'V':
self.cmap[i] = plt.cm.RdBu_r
tmp = np.abs(self.max[i]-self.min[i])/2*0.7
if tmp > 15:
tmp = 0.8
self.min[i] = -tmp
self.max[i] = tmp
def bandpassFilter(self, filterRange):
for n, i in enumerate(filterRange):
filterRange[n] = i*1e-3
self.data = FourierFilter(self.data, self.nt, self.dt, filterRange)
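        # Editor's note: filterRange is given in mHz and converted to Hz
        # above (1 mHz = 1e-3 Hz), consistent with FourierFilter, which works
        # with the time step dt in seconds.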
if self.maskValue != -1:
self._mask(self.maskValue)
self.min0 = np.min(self.data, axis=(1,2))
self.max0 = np.max(self.data, axis=(1,2))
self.min = self.min0[self.reftpix]
self.max = self.max0[self.reftpix]
for i in range(self.nid):
if self.idh[i][-1] == 'V':
self.cmap[i] = plt.cm.RdBu_r
tmp = np.abs(self.max[i]-self.min[i])/2*0.7
if tmp > 15:
tmp = 0.8
self.min[i] = -tmp
self.max[i] = tmp
def imshow(self, x=0, y=0, t=0, cid=0,
levels=None, maxPeriod=32, helpBox=True, **kwargs):
self.kwargs = kwargs
try:
plt.rcParams['keymap.back'].remove('left')
plt.rcParams['keymap.forward'].remove('right')
except:
pass
# transpose to pixel position.
xpix, ypix, tpix = self._pixelPosition(x, y, t)
        self._x0 = self.x
        self._y0 = self.y
        self._t0 = self.t
        # Initialize the 'previous position' buffers used by the ctrl+b key.
        self._xb = self.x
        self._yb = self.y
        self._tb = self.t
        self._xh = self.x
        self._yh = self.y
        self._th = self.t
self.cid = cid
self._cidh = cid
self.maxPeriod = maxPeriod
#Keyboard helpBox
if helpBox:
helpFig = plt.figure('Key Help Box', figsize=[3.5, 3])
ax = helpFig.add_subplot(111)
ax.set_position([0,0,1,1])
ax.set_axis_off()
ax.text(0.05, 0.91, 'ctrl+h: Reset to original setting')
ax.text(0.05, 0.81, 'ctrl+num: Draw the plot ID = num')
ax.text(0.05, 0.71, 'ctrl+right: Move to right')
ax.text(0.05, 0.61, 'ctrl+left: Move to left')
ax.text(0.05, 0.51, 'ctrl+up: Move to up')
ax.text(0.05, 0.41, 'ctrl+down: Move to down')
ax.text(0.05, 0.31, 'right: Next time data')
            ax.text(0.05, 0.21, 'left: Previous time data')
ax.text(0.05, 0.11, 'spacebar: change to current mouse point')
ax.text(0.05, 0.01, 'ctrl+b: back to the previous image')
# Figure setting
figsize = kwargs.pop('figsize', [10, 8])
self.fig = plt.figure(figsize=figsize)
self.fig.canvas.set_window_title('FISS Data')
self.imInterp = kwargs.get('interpolation', 'bilinear')
gs = gridspec.GridSpec(5,5)
self.axRaster = self.fig.add_subplot(gs[0:3, :2]) # Raster
self.axRaster.set_xlabel('X (arcsec)')
self.axRaster.set_ylabel('Y (arcsec)')
self.axRaster.set_title(self.idh[0])
self.axTS = self.fig.add_subplot(gs[1:3, 2:]) # TimeSeries
self.axTS.set_xlabel('Time (sec)')
self.axTS.set_ylabel('Intensity (count)')
self.axTS.set_xlim(self.timei[0], self.timei[-1])
self.axTS.minorticks_on()
self.axTS.tick_params(which='both', direction='in')
self.axTS.set_title('Time series')
self.axWavelet = self.fig.add_subplot(gs[3:, 2:])
self.axWavelet.set_title('Wavelet Power Spectrum')
self.axWavelet.set_xlabel('Time (sec)')
self.axWavelet.set_ylabel('Period (minute)')
self.axWavelet.set_xlim(self.timei[0], self.timei[-1])
self.axWavelet.set_yscale('symlog', basey=2)
self.axWavelet.yaxis.set_major_formatter(ticker.ScalarFormatter())
self.axWavelet.ticklabel_format(axis='y',style='plain')
self.axWavelet.set_ylim(self.maxPeriod, 0.5)
self.axPower = self.fig.add_subplot(gs[3:, :2])
self.axPower.set_title('Power Spectrum')
self.axPower.set_ylabel('Period (minute)')
self.axPower.set_ylim(self.maxPeriod, 0.5)
self.axPower.set_yscale('symlog', basey=2)
self.axPower.yaxis.set_major_formatter(ticker.ScalarFormatter())
self.axPower.ticklabel_format(axis='x',style='sci', scilimits=(0,1))
self.axPower.minorticks_on()
self.axPower.tick_params(which='both', direction='in')
# Plot
data = self.data[:, ypix, xpix, self.cid]
self.imRaster = self.axRaster.imshow(self.data[tpix,:,:,cid],
self.cmap[cid],
origin='lower',
extent=self.extent,
clim=[self.min[cid],
self.max[cid]],
interpolation=self.imInterp)
self.timeseries = self.axTS.plot(self.timei,
data,
color='k')[0]
#wavelet
if not levels:
levels = [0.1, 0.25, 0.4,
0.55, 0.7, 1]
self.levels = levels
self._plotWavelet(xpix, ypix)
# divider = make_axes_locatable(self.axWavelet)
# cax = divider.append_axes('right', size='5%', pad=0.1)
# plt.colorbar(self.contourIm, cax=cax)
#gws
self.powerGWS = self.axPower.plot(self.gws, self.period, color='k',
label='GWS')[0]
#lws
self.lws = self.wavelet.power[:, tpix]
self.powerLWS = self.axPower.plot(self.lws, self.period,
color='r', label='LWS')[0]
self.axPower.legend()
# marker
self.point = self.axRaster.scatter(self.x, self.y, 50,
marker='x',
color='r')
self.vlineTS = self.axTS.axvline(self.t,
ls='dashed',
color='b')
self.vlineWavelet = self.axWavelet.axvline(self.t,
ls='dashed',
color='k')
peakPGWS = self.period[self.gws.argmax()]
peakPLWS = self.period[self.lws.argmax()]
self.hlineGWS = self.axPower.axhline(peakPGWS,
ls='dotted',
color='k')
self.hlineLWS = self.axPower.axhline(peakPLWS,
ls='dotted',
color='r')
#infoBox
self.axInfo = self.fig.add_subplot(gs[0, 2:])
self.axInfo.set_axis_off()
self.isotInfo = self.axInfo.text(0.05, 0.8,
'%s'%self.Time[self.tpix].value,
fontsize=12)
self.tInfo = self.axInfo.text(0.05, 0.55,
't=%i sec (tpix=%i)'%(self.t, self.tpix),
fontsize=12)
self.posiInfo = self.axInfo.text(0.05, 0.3,
"X=%.1f'', Y=%.1f'' (xpix=%i, ypix=%i)"%(self.x,
self.y,
xpix,
ypix),
fontsize=12)
self.peakPeriodGWS = self.axInfo.text(0.05, -0.1,
r'P$_{peak, GWS}$=%.2f min'%peakPGWS,
fontsize=12)
self.peakPeriodLWS = self.axInfo.text(0.05, -0.35,
r'P$_{peak, LWS}$=%.2f min'%peakPLWS,
fontsize=12)
#Axis limit
self.axTS.set_ylim(data.min(), data.max())
self.axPower.set_xlim(0, self.lpmax)
self.axWavelet.set_aspect(adjustable='box', aspect='auto')
self.fig.tight_layout()
self.fig.canvas.mpl_connect('key_press_event', self._on_key)
plt.show()
def _on_key(self, event):
if event.key == 'ctrl+right':
if self.x < self._xar[-1]:
self.x += self.xDelt
else:
self.x = self._xar[0]
self._xb = self._x0
self._yb = self._y0
self._tb = self._t0
elif event.key == 'ctrl+left':
if self.x > self._xar[0]:
self.x -= self.xDelt
else:
self.x = self._xar[-1]
self._xb = self._x0
self._yb = self._y0
self._tb = self._t0
elif event.key == 'ctrl+up':
if self.y < self._yar[-1]:
self.y += self.yDelt
else:
self.y = self._yar[0]
self._xb = self._x0
self._yb = self._y0
self._tb = self._t0
elif event.key == 'ctrl+down':
if self.y > self._yar[0]:
self.y -= self.yDelt
else:
self.y = self._yar[-1]
self._xb = self._x0
self._yb = self._y0
self._tb = self._t0
elif event.key == 'right':
if self.tpix < self.nt-1:
self.tpix += 1
else:
self.tpix = 0
self.t = self.timei[self.tpix]
self._xb = self._x0
self._yb = self._y0
self._tb = self._t0
elif event.key == 'left':
if self.tpix > 0:
self.tpix -= 1
else:
self.tpix = self.nt-1
self.t = self.timei[self.tpix]
self._xb = self._x0
self._yb = self._y0
self._tb = self._t0
elif event.key == ' ' and event.inaxes == self.axRaster:
self._xb = self._x0
self._yb = self._y0
self._tb = self._t0
self.x = event.xdata
self.y = event.ydata
elif event.key == ' ' and (event.inaxes == self.axTS or
event.inaxes == self.axWavelet):
self.t = event.xdata
self._xb = self._x0
self._yb = self._y0
self._tb = self._t0
self.tpix = np.abs(self.timei-self.t).argmin()
self.t = self.timei[self.tpix]
elif event.key == 'ctrl+b':
x = self.x
y = self.y
t = self.t
self.x = self._xb
self.y = self._yb
self.t = self._tb
self._xb = x
self._yb = y
self._tb = t
self.tpix = np.abs(self.timei-self.t).argmin()
elif event.key == 'ctrl+h':
self.x = self._xh
self.y = self._yh
self.t = self._th
self.tpix = np.abs(self.timei-self.t).argmin()
self.cid = self._cidh
self._changeID()
self.axRaster.set_title(self.idh[self.cid])
self.imRaster.set_cmap(self.cmap[self.cid])
for iid in range(self.nid):
if event.key == 'ctrl+%i'%iid:
self.cid = iid
self._changeID()
self.axRaster.set_title(self.idh[iid])
self.imRaster.set_cmap(self.cmap[self.cid])
if self.idh[iid][-1] == 'V':
self.axTS.set_ylabel('Velocity (km/s)')
else:
self.axTS.set_ylabel('Intensity (Count)')
if self.x != self._x0 or self.y != self._y0:
xpix, ypix, tpix = self._pixelPosition(self.x, self.y,
self.t)
self._changeWavelet(xpix, ypix)
self._changePlot(xpix, ypix)
self._x0 = self.x
self._y0 = self.y
self.posiInfo.set_text(
"X=%.1f'', Y=%.1f'' (xpix=%i, ypix=%i)"%(self.x,
self.y,
xpix,
ypix))
if self.t != self._t0:
self._changeRaster()
self.lws = self.wavelet.power[:, self.tpix]
self.powerLWS.set_xdata(self.lws)
self.vlineTS.set_xdata(self.t)
self.vlineWavelet.set_xdata(self.t)
peakPLWS = self.period[self.lws.argmax()]
self.hlineLWS.set_ydata(peakPLWS)
self._t0 = self.t
self.isotInfo.set_text('%s'%self.Time[self.tpix].value)
self.tInfo.set_text('t=%i sec (tpix=%i)'%(self.t, self.tpix))
self.peakPeriodLWS.set_text(
r'P$_{peak, LWS}$=%.2f min'%peakPLWS)
self.fig.canvas.draw_idle()
def _changeID(self):
xpix, ypix, tpix = self._pixelPosition(self.x, self.y,
self.t)
self._changeWavelet(xpix, ypix)
self._changePlot(xpix, ypix)
self._changeRaster()
self.imRaster.set_clim(self.min[self.cid],
self.max[self.cid])
def _changePlot(self, xpix, ypix):
data = self.data[:, ypix, xpix, self.cid]
self.timeseries.set_ydata(data)
self.axTS.set_ylim(data.min(), data.max())
self.powerGWS.set_xdata(self.gws)
self.lws = self.wavelet.power[:, self.tpix]
self.powerLWS.set_xdata(self.lws)
self.point.set_offsets([self.x, self.y])
peakPGWS = self.period[self.gws.argmax()]
peakPLWS = self.period[self.lws.argmax()]
self.hlineGWS.set_ydata(peakPGWS)
self.hlineLWS.set_ydata(peakPLWS)
self.peakPeriodGWS.set_text(
r'P$_{peak, GWS}$=%.2f min'%peakPGWS)
self.peakPeriodLWS.set_text(
                r'P$_{peak, LWS}$=%.2f min'%peakPLWS)
self.axPower.set_xlim(0, self.lpmax)
def _changeRaster(self):
self.imRaster.set_data(self.data[self.tpix, :, :, self.cid])
def _pixelPosition(self, x, y, t):
tpix = np.abs(self.timei-t).argmin()
xpix = np.abs(self._xar-x).argmin()
ypix = np.abs(self._yar-y).argmin()
self.x = self._xar[xpix]
self.y = self._yar[ypix]
self.t = self.timei[tpix]
self.tpix = tpix
return xpix, ypix, tpix
def _changeWavelet(self, xpix, ypix):
self.axWavelet.cla()
self._plotWavelet(xpix, ypix)
def _plotWavelet(self, xpix, ypix):
self.wavelet = Wavelet(self.data[:, ypix, xpix, self.cid],
self.dt, **self.kwargs)
self.lpmax = self.wavelet.power.max()
self.period = self.wavelet.period/60
self.gws = self.wavelet.gws
wpower = self.wavelet.power/self.wavelet.power.max()
self.contour = self.axWavelet.contourf(self.timei, self.period,
wpower, len(self.levels),
colors=['w'])
self.contourIm = self.axWavelet.contourf(self.contour,
levels=self.levels
)
self.axWavelet.fill_between(self.timei, self.wavelet.coi/60,
self.period.max(), color='grey',
alpha=0.4, hatch='x')
self.axWavelet.set_title('Wavelet Power Spectrum')
self.axWavelet.set_xlabel('Time (sec)')
self.axWavelet.set_ylabel('Period (minute)')
self.axWavelet.set_xlim(self.timei[0], self.timei[-1])
self.axWavelet.set_yscale('symlog', basey=2)
self.axWavelet.yaxis.set_major_formatter(ticker.ScalarFormatter())
self.axWavelet.ticklabel_format(axis='y',style='plain')
self.vlineWavelet = self.axWavelet.axvline(self.t,
ls='dashed',
color='k')
self.axWavelet.set_ylim(self.maxPeriod, 0.5)
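    # Editor's note (illustrative): the panel above shows the wavelet power
    # normalised by its maximum, contoured at self.levels, and hatches out
    # the cone of influence (coi, converted from seconds to minutes), where
    # edge effects make the power estimates unreliable.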
def chLevels(self, levels):
"""
"""
self.levels = levels
xpix, ypix, tpix = self._pixelPosition(self.x, self.y, self.t)
self._changeWavelet(xpix, ypix)
def chInterp(self, interp):
"""
"""
self.imInterp = interp
self.imRaster.set_interpolation(interp)
def chBPFilter(self, filterRange):
"""
"""
self.originalData(maskValue=self.maskValue, spatialAvg=self._spAvg,
timeAvg=self._timeAvg)
self.bandpassFilter(filterRange)
def chRasterClim(self, cmin, cmax):
"""
"""
self.imRaster.set_clim(cmin, cmax)
def chPosition(self, x, y):
"""
"""
self.x = x
self.y = y
self._x0 = x
self._y0 =y
xpix, ypix, tpix = self._pixelPosition(x, y, self.t)
self._changeWavelet(xpix, ypix)
self._changePlot(xpix, ypix)
self.posiInfo.set_text(
"X=%.1f'', Y=%.1f'' (xpix=%i, ypix=%i)"%(self.x,
self.y,
xpix,
ypix))
def chtime(self, t):
"""
"""
self.t = t
self._t0 = t
self._changeRaster()
self.lws = self.wavelet.power[:, self.tpix]
        self.powerLWS.set_xdata(self.lws)
self.vlineTS.set_xdata(self.t)
self.vlineWavelet.set_xdata(self.t)
peakPLWS = self.period[self.lws.argmax()]
self.hlineLWS.set_ydata(peakPLWS)
self._t0 = self.t
self.isotInfo.set_text('%s'%self.Time[self.tpix].value)
self.tInfo.set_text('t=%i sec (tpix=%i)'%(self.t, self.tpix))
self.peakPeriodLWS.set_text(
r'P$_{peak, LWS}$=%.2f min'%peakPLWS)
def TD(self, ID=0, filterRange=None):
hdu = fits.PrimaryHDU(self.data[:,:,:,ID])
h= hdu.header
h['cdelt1'] = self.xDelt
h['cdelt2'] = self.yDelt
h['cdelt3'] = self.dt
h['crval1'] = self.xpos
h['crval2'] = self.ypos
h['sttime'] = self.Time[0].value
return TDmap(self.data[:,:,:,ID], h, self.time,
filterRange=filterRange, cmap=self.cmap[ID])
def set_clim(self, cmin, cmax):
self.imRaster.set_clim(cmin, cmax)
class calibData:
"""
    Read a calibration file such as 'BiasDark', 'Flat', 'FLAT', or 'SLIT'.
    Parameters
    ----------
    file : str
        Name of the calibration file.
def __init__(self, file):
if file.find('BiasDark') != -1:
self.ftype = 'BiasDark'
elif file.find('Flat') != -1:
self.ftype = 'Flat'
elif file.find('FLAT') != -1:
self.ftype = 'FLAT'
elif file.find('SLIT') != -1:
self.ftype = 'SLIT'
self.data = fits.getdata(file)
self.header = fits.getheader(file)
self.nx = self.header['naxis1']
self.ny = self.header['naxis2']
if self.ftype == 'Flat':
self.nf = self.header['naxis3']
if file.find('_A') != -1:
self.cam = 'A'
elif file.find('_B') != -1:
self.cam = 'B'
def imshow(self):
"""
"""
try:
plt.rcParams['keymap.back'].remove('left')
plt.rcParams['keymap.forward'].remove('right')
except:
pass
self.fig, self.ax = plt.subplots(figsize=[10, 6])
if self.ftype != 'Flat':
self.image = self.ax.imshow(self.data, origin='lower',
cmap = plt.cm.gray)
self.fig.tight_layout()
else:
self.num = 0
self.num0 = self.num
self.image = self.ax.imshow(self.data[self.num], origin='lower',
cmap = plt.cm.gray)
self.fig.tight_layout()
self.fig.canvas.mpl_connect('key_press_event', self._onKey)
def _onKey(self, event):
if event.key == 'right':
if self.num < self.nf-1:
self.num += 1
else:
self.num = 0
elif event.key == 'left':
if self.num > 0:
self.num -= 1
else:
self.num = self.nf-1
if self.num != self.num0:
self.image.set_data(self.data[self.num])
self.num0 = self.num
self.fig.canvas.draw_idle()
def _isoRefTime(refTime):
year = refTime[:4]
month = refTime[4:6]
day = refTime[6:8]
hour = refTime[9:11]
minute = refTime[11:13]
sec = refTime[13:15]
isot = '%s-%s-%sT%s:%s:%s'%(year, month, day, hour, minute, sec)
return Time(isot)
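# Editor's note (illustrative): _isoRefTime expects a compact time string of
# the form 'YYYYMMDD_HHMMSS' (character 8 being a separator), e.g.
#     _isoRefTime('20170614_170200')  ->  Time('2017-06-14T17:02:00')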
| {
"repo_name": "SNU-sunday/fisspy",
"path": "fisspy/read/read_factory.py",
"copies": "1",
"size": "43860",
"license": "bsd-2-clause",
"hash": -5149991508080585000,
"line_mean": 35.368159204,
"line_max": 135,
"alpha_frac": 0.5004331965,
"autogenerated": false,
"ratio": 3.545961678389522,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9531682262547567,
"avg_score": 0.002942522468390998,
"num_lines": 1206
} |
from __future__ import absolute_import, division
import numpy as np
from keras import backend as K
#from keras.utils.generic_utils import get_from_module
from six.moves import zip
import six
from keras.utils.generic_utils import deserialize_keras_object
def identity_vstacked(shape, scale=1, name=None, dim_ordering='th'):
scale = shape[1]/float(shape[0])
a = np.identity(shape[1])
for i in range(1, int(1/scale)):
a = np.concatenate((a, np.identity(shape[1])),axis=0)
return K.variable(a, name=name)
def column_vstacked(shape, scale=1, name=None, dim_ordering='th'):
scale = shape[1]/float(shape[0])
b = np.zeros((1,shape[1]))
b[0,0] = 1
a = np.copy(b)
for i in range(1, int(1/scale)):
a = np.concatenate((a, b),axis=0)
for j in range(1, shape[1]):
b = np.zeros((1,shape[1]))
b[0,j] = 1
for i in range(0, int(1/scale)):
a = np.concatenate((a, b),axis=0)
return K.variable(a, name=name)
def column_vstacked_nullnode(shape, scale=1, name=None, dim_ordering='th'):
scale = (shape[1]-1)/float(shape[0])
b = np.zeros((1,shape[1]))
b[0,0] = 1
a = np.copy(b)
for i in range(1, int(1/scale)):
a = np.concatenate((a, b),axis=0)
for j in range(1, shape[1]-1):
b = np.zeros((1,shape[1]))
b[0,j] = 1
for i in range(0, int(1/scale)):
a = np.concatenate((a, b),axis=0)
return K.variable(a, name=name)
def identity_dstacked(shape, scale=1, name=None, dim_ordering='th'):
scale = shape[1]/float(shape[0])
a = np.identity(shape[1])
for i in range(1, int(1/scale)):
a = np.concatenate((a, np.identity(shape[1])),axis=0)
b = np.expand_dims(np.diag(a[:,0]), axis=2)
for i in range(1, shape[1]):
c = np.expand_dims(np.diag(a[:,i]), axis=2)
b = np.concatenate((b, c),axis=2)
return K.variable(b, name=name)
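# --- Editor's sketch (illustrative, not part of the original module) ---
# identity_vstacked builds a (shape[0], shape[1]) weight matrix consisting of
# shape[0]//shape[1] identity blocks stacked vertically (when shape[0] is a
# multiple of shape[1]). A numpy-only equivalent for inspection:
def _identity_vstacked_np(shape):
    reps = shape[0] // shape[1]  # number of stacked identity blocks
    return np.vstack([np.identity(shape[1])] * reps)
# e.g. _identity_vstacked_np((6, 3)) is two 3x3 identities stacked to (6, 3).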
'''def get(identifier, **kwargs):
return get_from_module(identifier, globals(),
'initialization', kwargs=kwargs)
'''
def deserialize(config, custom_objects=None):
return deserialize_keras_object(
config,
module_objects=globals(),
custom_objects=custom_objects,
printable_module_name='initializer')
def get(identifier):
if isinstance(identifier, dict):
return deserialize(identifier)
elif isinstance(identifier, six.string_types):
config = {'class_name': str(identifier), 'config': {}}
return deserialize(config)
elif callable(identifier):
return identifier
else:
        raise ValueError('Could not interpret initializer identifier:', identifier)
 | {
"repo_name": "yhalk/vw_challenge_ECR",
"path": "src/jetson/acol/initializations.py",
"copies": "1",
"size": "2664",
"license": "apache-2.0",
"hash": 9188425772889853000,
"line_mean": 33.1666666667,
"line_max": 83,
"alpha_frac": 0.6141141141,
"autogenerated": false,
"ratio": 3.1904191616766466,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9259790817394354,
"avg_score": 0.008948491676458368,
"num_lines": 78
} |
from __future__ import absolute_import, division
import numpy as np
from numpy.testing import assert_array_almost_equal, assert_allclose
from pytest import raises
from fatiando.seismic import conv
def test_impulse_response():
"""
    conv.convolutional_model returns the source wavelet as the result when the
    model is a centred spike, the dimension of the model being equal to that
    of the source wavelet.
"""
w = conv.rickerwave(30., 2.e-3)
rc_test = np.zeros((w.shape[0], 20))
rc_test[w.shape[0]//2, :] = 1.
spike = conv.convolutional_model(rc_test, 30., conv.rickerwave, dt=2.e-3)
for j in range(0, rc_test.shape[1]):
assert_array_almost_equal(spike[:, j], w, 9)
def test_rc_shorter_than_wavelet():
"""
    When the reflectivity series is shorter than the wavelet, the spike
    response is observed as in the opposite case. The difference is that
    the ricker wavelet (or other symmetric wavelet) is truncated in the result.
"""
w = conv.rickerwave(30., 2.e-3)
rc_test = np.zeros((21, 20))
rc_test[rc_test.shape[0]//2, :] = 1
spike = conv.convolutional_model(rc_test, 30., conv.rickerwave, dt=2.e-3)
for j in range(0, rc_test.shape[1]):
wmin = (w.shape[0] - rc_test.shape[0])//2
wmax = -(w.shape[0] - rc_test.shape[0])//2
assert_array_almost_equal(spike[:, j], w[wmin:wmax], 9)
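# Editor's note (illustrative): both tests above exploit the identity
# delta(t - t0) * w(t) = w(t - t0): convolving a reflectivity spike train
# with a wavelet reproduces the wavelet, centred at each spike position.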
def test_reflectivity_wrong_dimensions():
"""
    Velocity and density are provided as matrices or vectors to the
    reflectivity calculation, so they must have the same shape.
"""
vel = np.ones((10, 10))
dens = np.ones((11, 11))
raises(AssertionError, conv.reflectivity, vel, dens)
vel = np.ones((10))
dens = np.ones((11))
raises(AssertionError, conv.reflectivity, vel, dens)
def test_depth_2_time_wrong_dimensions():
"""
    Velocity and property are provided as matrices to the depth-to-time
    conversion, so they must have the same shape.
"""
vel = np.ones((10, 10))
dens = np.ones((11, 11))
dt = 2.e-3
dz = 1.
raises(AssertionError, conv.depth_2_time, vel, dens, dt, dz)
def test_ricker():
"""
    conv.rickerwave inputs must satisfy the condition for sampling and
    stability; otherwise an error is raised.
"""
f = 50.
dt = 2.e-3
raises(AssertionError, conv.rickerwave, f, dt)
| {
"repo_name": "rafaelmds/fatiando",
"path": "fatiando/seismic/tests/test_seismic_conv.py",
"copies": "6",
"size": "2358",
"license": "bsd-3-clause",
"hash": 6466810286666184000,
"line_mean": 32.2112676056,
"line_max": 79,
"alpha_frac": 0.6518235793,
"autogenerated": false,
"ratio": 3.1693548387096775,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 71
} |
from __future__ import absolute_import, division
import numpy as np
from scipy.ndimage.morphology import binary_erosion
from scipy.signal import convolve
from .common import iterative_threshold, Baseline
def dietrich_baseline(bands, intensities, half_window=16, num_erosions=10):
'''
Fast and precise automatic baseline correction of ... NMR spectra, 1991.
http://www.sciencedirect.com/science/article/pii/002223649190402F
http://www.inmr.net/articles/AutomaticBaseline.html
'''
Y = intensities.copy()
half_window = np.clip(half_window, 1, Y.shape[-1]//2)
# Step 1: moving-window smoothing
window_len = 2*half_window + 1
window = np.full(window_len, 1./window_len)
if Y.ndim == 2:
window = window[None]
Y[...,half_window:-half_window] = convolve(Y, window, mode='valid')
# Step 2: Derivative.
dY = np.diff(Y)**2
# Step 3: Iterative thresholding.
is_baseline = np.ones(Y.shape, dtype=bool)
is_baseline[...,1:] = iterative_threshold(dY)
  # Step 4: Binary erosion, to get rid of peak-tops.
mask = np.zeros_like(is_baseline)
mask[...,half_window:-half_window] = True
s = np.ones(3, dtype=bool)
if Y.ndim == 2:
s = s[None]
is_baseline = binary_erosion(is_baseline, structure=s,
iterations=num_erosions, mask=mask)
  # Step 5: Reconstruct baseline via interpolation.
if Y.ndim == 2:
return np.row_stack([np.interp(bands, bands[m], y[m])
for y, m in zip(intensities, is_baseline)])
return np.interp(bands, bands[is_baseline], intensities[is_baseline])
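# --- Editor's example (illustrative, not part of the original module) ---
# A minimal usage sketch of dietrich_baseline on a synthetic 1-D spectrum:
# one Gaussian peak riding on a slow linear background. Only numpy is needed.
def _example_dietrich_usage():
    bands = np.linspace(400., 1800., 1400)
    background = 1e-4 * (bands - 400.)                 # slow linear drift
    peak = np.exp(-0.5 * ((bands - 1000.) / 5.) ** 2)  # narrow Gaussian line
    intensities = background + peak
    baseline = dietrich_baseline(bands, intensities,
                                 half_window=16, num_erosions=10)
    return intensities - baseline                      # corrected spectrum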
class Dietrich(Baseline):
def __init__(self, half_window=16, num_erosions=10):
self.half_window_ = half_window
self.num_erosions_ = num_erosions
def _fit_many(self, bands, intensities):
return dietrich_baseline(bands, intensities,
half_window=self.half_window_,
num_erosions=self.num_erosions_)
def param_ranges(self):
return {
'half_window_': (1, 100, 'integer'),
'num_erosions_': (1, 20, 'integer')
}
| {
"repo_name": "all-umass/superman",
"path": "superman/baseline/dietrich.py",
"copies": "1",
"size": "2077",
"license": "mit",
"hash": -1980948186499790600,
"line_mean": 32.5,
"line_max": 75,
"alpha_frac": 0.6499759268,
"autogenerated": false,
"ratio": 3.1613394216133943,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4311315348413394,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import gridspec
from fisspy.analysis.filter import FourierFilter
from interpolation.splines import LinearSpline
from matplotlib.animation import FuncAnimation
import astropy.units as u
from astropy.time import Time
__author__= "Juhyung Kang"
__email__ = "jhkang@astro.snu.ac.kr"
class TDmap:
"""
    Make a time-distance map for a given slit position.
    Parameters
    ----------
    data : `~numpy.ndarray`
        3-dimensional data array (time, y, x).
    header : `~astropy.io.fits.header.Header`
        Header of the data.
    tarr : `~numpy.ndarray`, optional
        Array of time (unit: second).
    filterRange : `list`, optional
        List of frequency ranges (unit: mHz) for the Fourier bandpass filters.
    Returns
    -------
    td : `~fisspy.analysis.tdmap.TDmap`
        A new time-distance class object.
    Examples
    --------
    >>> td = TDmap(data, header, tarr)
    >>> td.imshow(R=5, xc=0, yc=0, angle=45)
"""
def __init__(self, data, header, tarr=None, filterRange=None, cmap=None):
self.data = data
self.header = header
self.nx = self.header['naxis1']
self.ny = self.header['naxis2']
self.nt = self.header['naxis3']
self.dx = self.header['cdelt1']
self.dy = self.header['cdelt2']
self.dt = self.header['cdelt3']
self.rx = self.header['crval1']
self.ry = self.header['crval2']
self.cmap = cmap
if not np.any(tarr):
tarr = np.arange(0, self.nt*self.dt, self.dt)
self._tarr = tarr
self.Time = Time(self.header['sttime']) + tarr*u.second
self.extent = [self.rx-self.nx/2*self.dx,
self.rx+self.nx/2*self.dx,
self.ry-self.ny/2*self.dy,
self.ry+self.ny/2*self.dy]
self._xarr = np.linspace(self.extent[0]+self.dx*0.5,
self.extent[1]-self.dx*0.5,
self.nx)
self._yarr = np.linspace(self.extent[2]+self.dy*0.5,
self.extent[3]-self.dy*0.5,
self.ny)
self.smin = [self._tarr[0],
self.extent[2]+0.5*self.dy,
self.extent[0]+0.5*self.dx]
self.smax = [self._tarr[-1],
self.extent[3]-0.5*self.dy,
self.extent[1]-0.5*self.dx]
self.order = [self.nt, self.ny, self.nx]
self._tname = ['ori']
if not filterRange:
self.nfilter = 1
self.fdata = np.empty([1, self.nt, self.ny, self.nx])
else:
self.nfilter = len(filterRange)+1
self.fdata = np.empty([self.nfilter, self.nt, self.ny, self.nx])
for n, fR in enumerate(filterRange):
                self._tname += ['%.1f - %.1f mHz'%(fR[0], fR[1])]
self.fdata[n+1] = FourierFilter(self.data, self.nt,
self.dt*1e-3, fR)
self.fdata[0] = self.data
self.interp = []
for data in self.fdata:
self.interp += [LinearSpline(self.smin, self.smax,
self.order, data)]
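    # Editor's note (illustrative): each LinearSpline above interpolates one
    # filtered (t, y, x) cube on the regular grid spanned by smin/smax with
    # `order` samples per axis, so that get_TD below can sample the cube at
    # arbitrary slit positions.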
def get_TD(self, R, xc, yc, angle):
self.R = R
self.xc = xc
self.yc = yc
self.angle = angle
ang = np.deg2rad(self.angle)
nl = int(np.ceil(2*R/self.dx))
self.x1 = -R*np.cos(ang) + xc
self.x2 = R*np.cos(ang) + xc
self.y1 = -R*np.sin(ang) + yc
self.y2 = R*np.sin(ang) + yc
x = np.linspace(self.x1, self.x2, nl)
y = np.linspace(self.y1, self.y2, nl)
oiarr = np.empty([nl, self.nt, 3])
oiarr[:,:,0] = self._tarr
oiarr[:,:,1] = y[:,None]
oiarr[:,:,2] = x[:,None]
iarr = oiarr.reshape([nl*self.nt, 3])
td = self.interp[self.filterNum-1](iarr)
return td.reshape([nl, self.nt])
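    # Editor's note (illustrative): the slit is the segment of half-length R
    # centred on (xc, yc) at position angle `angle` (degrees, measured from
    # the +x axis); e.g. R=5, xc=yc=0, angle=90 gives endpoints (0, -5) and
    # (0, +5).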
def imshow(self, R=5, xc=None, yc=None, angle=0, t=0,
filterNum=1, fps=10, cmap=plt.cm.gray,
interpolation='bilinear'):
try:
plt.rcParams['keymap.back'].remove('left')
plt.rcParams['keymap.forward'].remove('right')
except:
pass
        if xc is None:
            xc = self.rx
        if yc is None:
            yc = self.ry
self.R = self._R0 = R
self.angle = self._angle0 = angle
self.xc = self._xc0 = xc
self.yc = self._yc0 = yc
self.filterNum = self._filterNum0 = filterNum
self.t = self._t0 = t
self.fps = fps
self.pause = 'ini'
self.pos = []
self.mark = []
self.hlines = []
tpix = np.abs(self._tarr-self.t).argmin()
self.td = self.get_TD(R,xc,yc,angle)
self.tdextent = [self._tarr[0]-0.5*self.dt,
self._tarr[-1]+0.5*self.dt,
-self.R,
self.R]
if not self.cmap:
self.cmap = cmap
self.fig= plt.figure(figsize=[14,9])
self.fig.canvas.set_window_title('%s ~ %s'%(self.Time[0], self.Time[-1]))
gs = gridspec.GridSpec(5, self.nfilter)
self.axTD = self.fig.add_subplot(gs[3:, :])
self.axTD.set_xlabel('Time (sec)')
self.axTD.set_ylabel('Distance (arcsec)')
self.axTD.set_title('%i: %s, '
'Time: %s, '
'tpix: %i'%(filterNum, self._tname[filterNum-1],
self.Time[tpix].value,
tpix))
self.imTD = self.axTD.imshow(self.td,
extent=self.tdextent,
origin='lower',
cmap=self.cmap,
interpolation=interpolation)
self.axRaster = []
self.im = []
for i in range(self.nfilter):
if i == 0:
self.axRaster += [self.fig.add_subplot(gs[:3, i])]
self.axRaster[i].set_xlabel('X (arcsec)')
self.axRaster[i].set_ylabel('Y (arcsec)')
else:
self.axRaster += [self.fig.add_subplot(gs[:3, i],
sharex=self.axRaster[0],
sharey=self.axRaster[0])]
self.axRaster[i].tick_params(labelleft=False, labelbottom=False)
self.axRaster[i].set_title('%i: %s'%(i+1, self._tname[i]))
self.im += [self.axRaster[i].imshow(self.fdata[i, tpix],
extent=self.extent,
origin='lower',
cmap=self.cmap,
interpolation=interpolation)]
self.slit = self.axRaster[filterNum-1].plot([self.x1, self.x2],
[self.y1, self.y2],
color='k')[0]
self.center = self.axRaster[filterNum-1].scatter(self.xc, self.yc,
100, marker='+',
c='k')
self.top = self.axRaster[filterNum-1].scatter(self.x2, self.y2, 100,
marker='+', c='b', label='%.1f'%self.R)
self.bottom = self.axRaster[filterNum-1].scatter(self.x1, self.y1, 100,
marker='+', c='r',
label='-%.1f'%self.R)
self.tslit = self.axTD.axvline(self.t, ls='dashed', c='lime')
self.leg = self.axRaster[filterNum-1].legend()
self.axTD.set_aspect(adjustable='box', aspect='auto')
self.imTD.set_clim(self.fdata[filterNum-1,0].min(),
self.fdata[filterNum-1,0].max())
self.fig.tight_layout()
self.fig.canvas.mpl_connect('key_press_event', self._onKey)
plt.show()
def _onKey(self, event):
if event.key == 'up':
if self.angle < 360:
self.angle += 1
else:
self.angle = 1
elif event.key == 'down':
if self.angle > 0:
self.angle -=1
else:
self.angle = 359
elif event.key == 'right':
if self.t < self._tarr[-1]:
self.t += self.dt
else:
self.t = self._tarr[0]
elif event.key == 'left':
if self.t > self._tarr[0]:
self.t -= self.dt
else:
self.t = self._tarr[-1]
elif event.key == 'ctrl+right':
if self.xc < self._xarr[-1]:
self.xc += self.dx
else:
self.xc = self._xarr[0]
elif event.key == 'ctrl+left':
if self.xc > self._xarr[0]:
self.xc -= self.dx
else:
self.xc = self._xarr[-1]
elif event.key == 'ctrl+up':
if self.yc < self._yarr[-1]:
self.yc += self.dy
else:
self.yc = self._yarr[0]
elif event.key == 'ctrl+down':
if self.yc > self._yarr[0]:
self.yc -= self.dy
else:
self.yc = self._yarr[-1]
elif event.key == 'ctrl++':
self.R += self.dx
elif event.key == 'ctrl+-':
self.R -= self.dx
elif event.key == ' ' and event.inaxes in self.axRaster:
self.xc = event.xdata
self.yc = event.ydata
elif event.key == ' ' and event.inaxes == self.axTD:
self.t = event.xdata
elif event.key == 'x' and event.inaxes == self.axTD:
self.pos += [event.ydata]
ang = np.deg2rad(self.angle)
xp = self.pos[-1]*np.cos(ang) + self.xc
yp = self.pos[-1]*np.sin(ang) + self.yc
self.mark += [self.axRaster[self.filterNum-1].scatter(xp, yp, 100,
marker='+',
c='lime')]
self.hlines += [self.axTD.axhline(self.pos[-1], ls='dashed', c='lime')]
elif event.key == 'enter':
if self.pause == 'ini':
self.ani = FuncAnimation(self.fig, self._chTime,
frames=self._tarr,
blit=False,
interval=1e3/self.fps,
repeat=True)
# cache_frame_data=False)
self.pause = False
else:
self.pause ^= True
if self.pause:
self.ani.event_source.stop()
else:
self.ani.event_source.start(1e3/self.fps)
for iid in range(self.nfilter):
if event.key == 'ctrl+%i'%(iid+1):
self.filterNum = iid+1
tpix = np.abs(self._tarr-self.t).argmin()
self.changeSlit(self.R, self.xc, self.yc, self.angle)
self.axTD.set_title('%i: %s, '
'Time: %s, '
'tpix: %i'%(self.filterNum, self._tname[self.filterNum-1],
self.Time[tpix].value,
tpix))
self._filterNum0 = self.filterNum
self.imTD.set_clim(self.im[self.filterNum-1].get_clim())
if self.xc != self._xc0 or self.yc != self._yc0 or \
self.angle != self._angle0 or self.R != self._R0:
self.changeSlit(self.R, self.xc, self.yc, self.angle)
self._R0 = self.R
self._xc0 = self.xc
self._yc0 = self.yc
self._angle0 = self.angle
if self.t != self._t0:
self._chTime(self.t)
self._t0 = self.t
self.fig.canvas.draw_idle()
def changeSlit(self, R, xc, yc, angle):
td = self.get_TD(R, xc, yc, angle)
self.tdextent[2] = -R
self.tdextent[3] = R
self.axTD.set_ylim(-R, R)
ang = np.deg2rad(self.angle)
if self.filterNum != self._filterNum0:
self.leg.remove()
self.slit.remove()
self.bottom.remove()
self.center.remove()
self.top.remove()
self.slit = self.axRaster[self.filterNum-1].plot([self.x1, self.x2],
[self.y1, self.y2],
color='k')[0]
self.center = self.axRaster[self.filterNum-1].scatter(self.xc,
self.yc, 100, marker='+', c='k')
self.top = self.axRaster[self.filterNum-1].scatter(self.x2,
self.y2, 100,
marker='+', c='b', label='%.1f'%self.R)
self.bottom = self.axRaster[self.filterNum-1].scatter(self.x1,
self.y1, 100,
marker='+', c='r',
label='-%.1f'%self.R)
for n, pos in enumerate(self.pos):
self.mark[n].remove()
xp = pos*np.cos(ang) + self.xc
yp = pos*np.sin(ang) + self.yc
self.mark[n] = self.axRaster[self.filterNum-1].scatter(xp, yp, 100,
marker='+',
c='lime')
else:
self.slit.set_xdata([self.x1, self.x2])
self.slit.set_ydata([self.y1, self.y2])
self.bottom.set_offsets([self.x1, self.y1])
self.top.set_offsets([self.x2, self.y2])
self.center.set_offsets([self.xc, self.yc])
# change marker
for n, pos in enumerate(self.pos):
xp = pos*np.cos(ang) + self.xc
yp = pos*np.sin(ang) + self.yc
self.mark[n].set_offsets([xp, yp])
self.hlines[n].set_ydata(pos)
self.top.set_label('%.1f'%self.R)
self.bottom.set_label('-%.1f'%self.R)
self.imTD.set_data(td)
self.leg = self.axRaster[self.filterNum-1].legend()
def _chTime(self, t):
self.t = t
tpix = np.abs(self._tarr-t).argmin()
self.axTD.set_title('%i: %s, '
'Time: %s, '
'tpix: %i'%(self.filterNum, self._tname[self.filterNum-1],
self.Time[tpix].value,
tpix))
self.tslit.set_xdata(self.t)
for n, im in enumerate(self.im):
im.set_data(self.fdata[n, tpix])
def set_clim(self, cmin, cmax, frame):
self.im[frame-1].set_clim(cmin, cmax)
if self.filterNum == frame:
self.imTD.set_clim(cmin, cmax)
def remove_Mark(self):
for n in range(len(self.pos)):
self.mark[n].remove()
self.hlines[n].remove()
self.pos = []
self.mark = []
self.hlines = []
def savefig(self, filename, **kwargs):
        self.fig.savefig(filename, **kwargs)
def saveani(self, filename, **kwargs):
fps = kwargs.pop('fps', self.fps)
        self.ani.save(filename, fps=fps, **kwargs)
 | {
"repo_name": "SNU-sunday/fisspy",
"path": "fisspy/analysis/tdmap.py",
"copies": "1",
"size": "15873",
"license": "bsd-2-clause",
"hash": -7020525247197559000,
"line_mean": 40.0180878553,
"line_max": 86,
"alpha_frac": 0.4416304416,
"autogenerated": false,
"ratio": 3.6743055555555557,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4615935997155556,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import gridspec
from fisspy.read.readbase import getRaster as _getRaster
from fisspy.image.base import alignoffset, shift3d
__author__ = "Juhyung Kang"
__email__ = "jhkang@astro.snu.ac.kr"
class singleBand:
"""
Draw interactive FISS raster, spectrogram and profile for single band.
Parameters
----------
fiss: `fisspy.read.FISS`
FISS class.
x : `float`
        X position at which the spectral profile is drawn.
        Default is the image center.
    y : `float`
        Y position at which the spectral profile is drawn.
        Default is the image center.
    wv : `float`
        Wavelength position at which the raster image is drawn.
        Default is the central wavelength.
    scale : `string`
        Scale method of the colorbar limit.
        Default is minMax.
        option: 'minMax', 'std', 'log'
    sigFactor : `float`
        Factor of the standard deviation.
        This is used when scale is set to 'std'.
    helpBox : `bool`
        Show the interactive keys and a simple explanation.
Default is True
Other Parameters
----------------
**kwargs : `~matplotlib.pyplot` properties
"""
def __init__(self, fiss, x=None, y=None, wv=None, scale='minMax',
sigFactor=3, helpBox=True, **kwargs):
try:
plt.rcParams['keymap.back'].remove('left')
plt.rcParams['keymap.forward'].remove('right')
except:
pass
        if x is None:
            x = fiss.nx//2*fiss.xDelt
        if y is None:
            y = fiss.ny//2*fiss.yDelt
        if wv is None:
            wv = fiss.centralWavelength
self.extentRaster = fiss.extentRaster
self.extentSpectro = fiss.extentSpectro
self.scale = scale
self.sigFactor = sigFactor
self.hw = kwargs.pop('hw', 0.05)
self.xpix = round((x-fiss.xDelt/2)/fiss.xDelt)
self.x = self.xpix*fiss.xDelt+fiss.xDelt/2
self.ypix = round((y-fiss.yDelt/2)/fiss.yDelt)
self.y = self.ypix*fiss.yDelt+fiss.yDelt/2
self.wv = wv
        self.x0 = self.x
        self.y0 = self.y
        self.wv0 = self.wv
        # Initialize the 'previous position' buffers used by the ctrl+b key.
        self.xb = self.x
        self.yb = self.y
        self.wvb = self.wv
        self.xH = self.x
        self.yH = self.y
        self.wvH = self.wv
self.centralWavelength = fiss.centralWavelength
self.xDelt = fiss.xDelt
self.yDelt = fiss.yDelt
self.wvDelt = fiss.wvDelt
self.wave = fiss.wave
self.data = fiss.data
self.band = fiss.band
self.cam = fiss.cam
self._xMin = self.extentRaster[0]
self._xMax = self.extentRaster[1]
self._yMin = self.extentRaster[2]
self._yMax = self.extentRaster[3]
self._wvMin = self.extentSpectro[0]
self._wvMax = self.extentSpectro[1]
#Keyboard helpBox
if helpBox:
helpFig = plt.figure('Keyboard Help Box', figsize=[3.5,3])
ax = helpFig.add_subplot(111)
ax.set_position([0,0,1,1])
ax.set_axis_off()
ax.text(0.05,0.9,'ctrl+h: Reset to original setting')
ax.text(0.05,0.8,'ctrl+right: Move to right')
ax.text(0.05,0.7,'ctrl+left: Move to left')
ax.text(0.05,0.6,'ctrl+up: Move to up')
ax.text(0.05,0.5,'ctrl+down: Move to down')
ax.text(0.05,0.4,'right: Increase the wavelength')
ax.text(0.05,0.3,'left: Decrease the wavelength')
ax.text(0.05,0.2,'spacebar: Change to current mouse point')
#figure setting
figsize = kwargs.pop('figsize', [10, 6])
self.cmap = kwargs.pop('cmap', fiss.cmap)
self.fig = plt.figure(figsize=figsize)
self.fig.canvas.set_window_title(self.band)
self.imInterp = kwargs.get('interpolation', fiss.imInterp)
gs = gridspec.GridSpec(2, 3)
self.axRaster = self.fig.add_subplot(gs[:, 0])
self.axSpectro = self.fig.add_subplot(gs[0, 1:])
self.axProfile = self.fig.add_subplot(gs[1, 1:])
fiss.axRaster = self.axRaster
fiss.axSpectro = self.axSpectro
fiss.axProfile = self.axProfile
self.axRaster.set_xlabel('X (arcsec)')
self.axRaster.set_ylabel('Y (arcsec)')
self.axSpectro.set_xlabel(r'Wavelength ($\AA$)')
self.axSpectro.set_ylabel('Y (arcsec)')
self.axProfile.set_xlabel(r'Wavelength ($\AA$)')
self.axProfile.set_ylabel('Intensity (Count)')
self.axRaster.set_title(fiss.date)
self.axSpectro.set_title(r"X = %.2f'', Y = %.2f'' (X$_{pix}$ = %i, Y$_{pix}$ = %i)"%(self.x, self.y, self.xpix, self.ypix))
self.axRaster.set_xlim(fiss.extentRaster[0], fiss.extentRaster[1])
self.axRaster.set_ylim(fiss.extentRaster[2], fiss.extentRaster[3])
self.axSpectro.set_xlim(fiss.extentSpectro[0], fiss.extentSpectro[1])
self.axSpectro.set_ylim(fiss.extentSpectro[2], fiss.extentSpectro[3])
self.axProfile.set_title(r'%s Band (wv = %.2f $\AA$)'%(fiss.band, self.wv))
self.axProfile.set_xlim(fiss.wave.min(), fiss.wave.max())
self.axProfile.set_ylim(self.data[self.ypix, self.xpix].min()-100,
self.data[self.ypix, self.xpix].max()+100)
self.axProfile.minorticks_on()
self.axProfile.tick_params(which='both', direction='in')
# Draw
raster = _getRaster(self.data, self.wave, self.wv, self.wvDelt,
hw=self.hw)
if self.cam == 'A':
spectro = self.data[:, self.xpix]
elif self.cam == 'B':
spectro = self.data[:, self.xpix,::-1]
if self.scale == 'log':
raster = np.log10(raster)
spectro = np.log10(spectro)
self.imRaster = self.axRaster.imshow(raster,
fiss.cmap,
origin='lower',
extent=fiss.extentRaster,
**kwargs)
self.imSpectro = self.axSpectro.imshow(spectro,
fiss.cmap,
origin='lower',
extent=fiss.extentSpectro,
**kwargs)
self.plotProfile = self.axProfile.plot(self.wave,
self.data[self.ypix, self.xpix],
color='k')[0]
if self.scale == 'std':
self.imRaster.set_clim(np.median(raster)-raster.std()*self.sigFactor,
np.median(raster)+raster.std()*self.sigFactor)
self.imSpectro.set_clim(np.median(spectro)-spectro.std()*self.sigFactor,
np.median(spectro)+spectro.std()*self.sigFactor)
else:
self.imRaster.set_clim(raster.min(), raster.max())
self.imSpectro.set_clim(spectro.min(), spectro.max())
# Reference
self.vlineRaster = self.axRaster.axvline(self.x,
linestyle='dashed',
color='lime')
self.vlineProfile = self.axProfile.axvline(self.wv,
ls='dashed',
c='b')
self.vlineSpectro = self.axSpectro.axvline(self.wv,
ls='dashed',
c='lime')
self.hlineSpectro = self.axSpectro.axhline(self.y,
ls='dashed',
c='lime')
self.pointRaster = self.axRaster.scatter(self.x, self.y, 50,
marker='x',
color='r')
self.axSpectro.set_aspect(adjustable='box', aspect='auto')
self.fig.tight_layout()
self.fig.canvas.mpl_connect('key_press_event', self._on_key)
plt.show()
def _on_key(self, event):
### Interactive keyboard input
# Position
if event.key == 'ctrl+right':
if self.x < self._xMax:
self.x += self.xDelt
else:
self.x = self._xMin
self.xb = self.x0
self.yb = self.y0
self.wvb = self.wv0
elif event.key == 'ctrl+left':
if self.x > self._xMin:
self.x -= self.xDelt
else:
self.x = self._xMax
self.xb = self.x0
self.yb = self.y0
self.wvb = self.wv0
elif event.key == 'ctrl+up':
if self.y < self._yMax:
self.y += self.yDelt
else:
self.y = self._yMin
self.xb = self.x0
self.yb = self.y0
self.wvb = self.wv0
elif event.key == 'ctrl+down':
if self.y > self._yMin:
self.y -= self.yDelt
else:
self.y = self._yMax
self.xb = self.x0
self.yb = self.y0
self.wvb = self.wv0
elif event.key == 'right':
if self.wv < self._wvMax:
self.wv += abs(self.wvDelt)
else:
self.wv = self._wvMin
self.xb = self.x0
self.yb = self.y0
self.wvb = self.wv0
elif event.key == 'left':
if self.wv > self._wvMin:
self.wv -= abs(self.wvDelt)
else:
self.wv = self._wvMax
self.xb = self.x0
self.yb = self.y0
self.wvb = self.wv0
elif event.key == ' ' and event.inaxes == self.axRaster:
self.x = event.xdata
self.y = event.ydata
self.xb = self.x0
self.yb = self.y0
self.wvb = self.wv0
elif event.key == ' ' and event.inaxes == self.axProfile:
self.wv = event.xdata
self.wvb = self.wv0
self.xb = self.x0
self.yb = self.y0
elif event.key == ' ' and event.inaxes == self.axSpectro:
self.wv = event.xdata
self.y = event.ydata
self.wvb = self.wv0
self.xb = self.x0
self.yb = self.y0
elif event.key == 'ctrl+h':
self.wv = self.wvH
self.x = self.xH
self.y = self.yH
elif event.key == 'ctrl+b':
x = self.x
y = self.y
wv = self.wv
self.x = self.xb
self.y = self.yb
self.wv = self.wvb
self.xb = x
self.yb = y
self.wvb = wv
if self.x != self.x0 or self.y != self.y0:
self.xpix = int(round((self.x-self.xDelt/2)/self.xDelt))
self.ypix = int(round((self.y-self.yDelt/2)/self.yDelt))
self._chSpect()
if self.wv != self.wv0:
self._chRaster()
self.fig.canvas.draw_idle()
def _chRaster(self):
self.wv0 = self.wv
raster = _getRaster(self.data, self.wave, self.wv, self.wvDelt,
hw=self.hw)
if self.scale == 'log':
raster = np.log10(raster)
self.imRaster.set_data(raster)
self.vlineProfile.set_xdata(self.wv)
self.vlineSpectro.set_xdata(self.wv)
self.axProfile.set_title(r'%s Band (wv = %.2f $\AA$)'%(self.band, self.wv))
if self.scale == 'std':
self.imRaster.set_clim(np.median(raster)-raster.std()*self.sigFactor,
np.median(raster)+raster.std()*self.sigFactor)
else:
self.imRaster.set_clim(raster.min(), raster.max())
def _chSpect(self):
self.x0 = self.x
self.y0 = self.y
if self.cam == 'A':
spectro = self.data[:, self.xpix]
elif self.cam == 'B':
spectro = self.data[:, self.xpix,::-1]
if self.scale == 'log':
spectro = np.log10(spectro)
self.plotProfile.set_ydata(self.data[self.ypix, self.xpix])
self.imSpectro.set_data(spectro)
self.hlineSpectro.set_ydata(self.y)
self.vlineRaster.set_xdata(self.x)
self.pointRaster.set_offsets([self.x, self.y])
self.axProfile.set_ylim(self.data[self.ypix, self.xpix].min()-100,
self.data[self.ypix, self.xpix].max()+100)
self.axSpectro.set_title(r"X = %.2f'', Y = %.2f'' (X$_{pix}$ = %i, Y$_{pix}$ = %i)"%(self.x, self.y, self.xpix, self.ypix))
if self.scale == 'std':
self.imSpectro.set_clim(np.median(spectro)-spectro.std()*self.sigFactor,
np.median(spectro)+spectro.std()*self.sigFactor)
else:
self.imSpectro.set_clim(spectro.min(), spectro.max())
def chRasterClim(self, cmin, cmax):
self.imRaster.set_clim(cmin, cmax)
def chSpectroClim(self, cmin, cmax):
self.imSpectro.set_clim(cmin, cmax)
def chcmap(self, cmap):
self.imRaster.set_cmap(cmap)
self.imSpectro.set_cmap(cmap)
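# --- Editor's usage sketch (illustrative; names follow the fisspy examples
# above) --- singleBand is normally created through FISS.imshow rather than
# instantiated directly:
#     from fisspy.read import FISS
#     fiss = FISS(file)      # a '*1.fts' (proc) or '*c.fts' (comp) file
#     fiss.imshow()          # opens the interactive raster/spectrogram view
#     fiss.chRaster(0.5)     # move the raster to +0.5 Angstrom
#     fiss.chSpect(10, 20)   # move the profile point to (10'', 20'')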
class dualBand:
"""
Draw interactive FISS raster, spectrogram and profile for dual band.
Parameters
----------
fissA: `fisspy.read.FISS`
FISS class.
fissB: `fisspy.read.FISS`
FISS class.
x : `float`
        X position at which the spectral profile is drawn.
        Default is the image center.
    y : `float`
        Y position at which the spectral profile is drawn.
        Default is the image center.
    wvA : `float`
        Wavelength position at which the raster image of fissA is drawn.
        Default is the central wavelength.
    wvB : `float`
        Wavelength position at which the raster image of fissB is drawn.
        Default is the central wavelength.
    scale : `string`
        Scale method of the colorbar limit.
        Default is minMax.
        option: 'minMax', 'std', 'log'
    sigFactor : `float`
        Factor of the standard deviation.
        This is used when scale is set to 'std'.
    helpBox : `bool`
        Show the interactive keys and a simple explanation.
Default is True
Other Parameters
----------------
**kwargs : `~matplotlib.pyplot` properties
"""
def __init__(self, fissA, fissB, x=None, y=None, wvA=None, wvB=None,
scale='minMax', sigFactor=3, helpBox=True, **kwargs):
try:
plt.rcParams['keymap.back'].remove('left')
plt.rcParams['keymap.forward'].remove('right')
except:
pass
kwargs['interpolation'] = kwargs.pop('interpolation', 'bilinear')
self.fissA = fissA
self.fissB = fissB
self.nx = self.fissA.nx
self.xDelt = self.fissA.xDelt
self.yDelt = self.fissA.yDelt
if self.fissA.ny >= self.fissB.ny:
self.fissA.data = self.fissA.data[:self.fissB.ny]
self.ny = self.fissB.ny
self.extentRaster = self.fissB.extentRaster
elif fissA.ny < fissB.ny:
self.fissB.data = self.fissB.data[:self.fissA.ny]
self.ny = self.fissA.ny
self.extentRaster = self.fissA.extentRaster
self._xMin = self.extentRaster[0]
self._xMax = self.extentRaster[1]
self._yMin = self.extentRaster[2]
self._yMax = self.extentRaster[3]
        # Align camera B to camera A: estimate the (y, x) offset from two
        # quasi-monochromatic images, then shift the whole B cube accordingly.
        sh = alignoffset(self.fissB.data[:,:,50], self.fissA.data[:,:,-50])
        tmp = shift3d(fissB.data.transpose(2, 0, 1), -sh).transpose(1,2,0)
        tmp[tmp < 10] = 1  # clip near-zero values introduced by the shift
        self.fissB.data = tmp
        del tmp
        if x is None:
            x = self.nx//2*self.xDelt
        if y is None:
            y = self.ny//2*self.yDelt
        if wvA is None:
            wvA = self.fissA.centralWavelength
        if wvB is None:
            wvB = self.fissB.centralWavelength
xpix = round((x-self.xDelt/2)/self.xDelt)
ypix = round((y-self.yDelt/2)/self.yDelt)
self.x = xpix*self.xDelt+self.xDelt/2
self.y = ypix*self.yDelt+self.yDelt/2
self.scale = scale
self.sigFactor = sigFactor
self.hw = kwargs.pop('hw', 0.05)
self.wvA = wvA
self.wvB = wvB
        self.x0 = self.x
        self.y0 = self.y
        self.wvA0 = self.wvA
        self.wvB0 = self.wvB
        # Initialize the 'previous position' buffers used by the ctrl+b key.
        self.xb = self.x
        self.yb = self.y
        self.wvAb = self.wvA
        self.wvBb = self.wvB
        self.xH = self.x
        self.yH = self.y
        self.wvAH = self.wvA
        self.wvBH = self.wvB
#Keyboard helpBox
if helpBox:
helpFig = plt.figure('Keyboard Help Box', figsize=[3.5,3])
ax = helpFig.add_subplot(111)
ax.set_position([0,0,1,1])
ax.set_axis_off()
ax.text(0.05,0.92,'ctrl+h: Reset to original setting')
ax.text(0.05,0.82,'ctrl+right: Move to right')
ax.text(0.05,0.72,'ctrl+left: Move to left')
ax.text(0.05,0.62,'ctrl+up: Move to up')
ax.text(0.05,0.52,'ctrl+down: Move to down')
ax.text(0.05,0.42,'right: Increase the wavelength of the fissA')
ax.text(0.05,0.32,'left: Decrease the wavelength of the fissA')
ax.text(0.05,0.22,'up: Increase the wavelength of the fissB')
ax.text(0.05,0.12,'down: Decrease the wavelength of the fissB')
ax.text(0.05,0.02,'spacebar: Change to current mouse point')
#figure setting
figsize = kwargs.pop('figsize', [12, 6])
self.fig = plt.figure(figsize=figsize)
self.fig.canvas.set_window_title('Dual Band Image')
self.imInterp = kwargs.get('interpolation', 'bilinear')
gs = gridspec.GridSpec(2,4)
self.axRasterA = self.fig.add_subplot(gs[:,0])
self.axRasterB = self.fig.add_subplot(gs[:,1],
sharex=self.axRasterA,
sharey=self.axRasterA)
self.axProfileA = self.fig.add_subplot(gs[0,2:])
self.axProfileB = self.fig.add_subplot(gs[1,2:])
self.axRasterA.set_xlabel('X (arcsec)')
self.axRasterA.set_ylabel('Y (arcsec)')
self.axRasterB.set_xlabel('X (arcsec)')
self.axRasterB.set_ylabel('Y (arcsec)')
self.axProfileA.set_xlabel(r'Wavelength ($\AA$)')
self.axProfileA.set_ylabel('Intensity (Count)')
self.axProfileB.set_xlabel(r'Wavelength ($\AA$)')
self.axProfileB.set_ylabel('Intensity (Count)')
self.axRasterA.set_title(r'%s Band'%self.fissA.band)
self.axRasterB.set_title(r'%s Band'%self.fissB.band)
self.axProfileA.set_title(r'%s Band (wv = %.2f $\AA$)'%(self.fissA.band, self.wvA))
self.axProfileB.set_title(r'%s Band (wv = %.2f $\AA$)'%(self.fissB.band, self.wvB))
self.axRasterA.set_xlim(self.extentRaster[0], self.extentRaster[1])
self.axRasterB.set_xlim(self.extentRaster[0], self.extentRaster[1])
self.axRasterA.set_ylim(self.extentRaster[2], self.extentRaster[3])
self.axRasterB.set_ylim(self.extentRaster[2], self.extentRaster[3])
self.axProfileA.set_xlim(self.fissA.wave.min(), self.fissA.wave.max())
self.axProfileB.set_xlim(self.fissB.wave.min(), self.fissB.wave.max())
self.axProfileA.set_ylim(self.fissA.data[ypix, xpix].min()-100,
self.fissA.data[ypix, xpix].max()+100)
self.axProfileB.set_ylim(self.fissB.data[ypix, xpix].min()-100,
self.fissB.data[ypix, xpix].max()+100)
self.axProfileA.minorticks_on()
self.axProfileA.tick_params(which='both', direction='in')
self.axProfileB.minorticks_on()
self.axProfileB.tick_params(which='both', direction='in')
#Draw
rasterA = _getRaster(self.fissA.data, self.fissA.wave, self.wvA,
self.fissA.wvDelt, hw=self.hw)
rasterB = _getRaster(self.fissB.data, self.fissB.wave, self.wvB,
self.fissB.wvDelt, hw=self.hw)
whA = rasterA > 5
whB = rasterB > 5
if self.scale == 'log':
rasterA = np.log10(rasterA)
rasterB = np.log10(rasterB)
        cminA = rasterA[whA].min()
cminB = rasterB[whB].min()
self.imRasterA = self.axRasterA.imshow(rasterA,
self.fissA.cmap,
origin='lower',
extent=self.extentRaster,
**kwargs)
self.imRasterB = self.axRasterB.imshow(rasterB,
self.fissB.cmap,
origin='lower',
extent=self.extentRaster,
**kwargs)
self.plotProfileA = self.axProfileA.plot(self.fissA.wave,
self.fissA.data[ypix, xpix],
color='k')[0]
self.plotProfileB = self.axProfileB.plot(self.fissB.wave,
self.fissB.data[ypix, xpix],
color='k')[0]
if self.scale == 'std':
self.imRasterA.set_clim(np.median(rasterA)-rasterA.std()*self.sigFactor,
np.median(rasterA)+rasterA.std()*self.sigFactor)
self.imRasterB.set_clim(np.median(rasterB)-rasterB.std()*self.sigFactor,
np.median(rasterB)+rasterB.std()*self.sigFactor)
else:
self.imRasterA.set_clim(cminA, rasterA.max())
self.imRasterB.set_clim(cminB, rasterB.max())
#Reference
self.vlineRasterA = self.axRasterA.axvline(self.x,
linestyle='dashed',
color='lime')
self.vlineRasterB = self.axRasterB.axvline(self.x,
linestyle='dashed',
color='lime')
self.vlineProfileA = self.axProfileA.axvline(self.wvA,
ls='dashed',
c='b')
self.vlineProfileB = self.axProfileB.axvline(self.wvB,
ls='dashed',
c='b')
self.pointRasterA = self.axRasterA.scatter(self.x, self.y, 50,
marker='x',
color='r')
self.pointRasterB = self.axRasterB.scatter(self.x, self.y, 50,
marker='x',
color='r')
self.fig.tight_layout()
self.fig.canvas.mpl_connect('key_press_event', self._on_key)
plt.show()
def _on_key(self, event):
if event.key == 'ctrl+right':
if self.x < self._xMax:
self.x += self.xDelt
else:
self.x = self._xMin+self.xDelt//2
self.xb = self.x0
self.yb = self.y0
self.wvAb = self.wvA0
self.wvBb = self.wvB0
elif event.key == 'ctrl+left':
if self.x > self._xMin:
self.x -= self.xDelt
else:
self.x = self._xMax-self.xDelt//2
self.xb = self.x0
self.yb = self.y0
self.wvAb = self.wvA0
self.wvBb = self.wvB0
elif event.key == 'ctrl+up':
if self.y < self._yMax:
self.y += self.yDelt
else:
self.y = self._yMin+self.yDelt//2
self.xb = self.x0
self.yb = self.y0
self.wvAb = self.wvA0
self.wvBb = self.wvB0
elif event.key == 'ctrl+down':
if self.y > self._yMin:
self.y -= self.yDelt
else:
self.y = self._yMax-self.yDelt//2
self.xb = self.x0
self.yb = self.y0
self.wvAb = self.wvA0
self.wvBb = self.wvB0
elif event.key == 'right':
if self.wvA < self.fissA.wave.max():
self.wvA += abs(self.fissA.wvDelt)
else:
self.wvA = self.fissA.wave.min()
self.xb = self.x0
self.yb = self.y0
self.wvAb = self.wvA0
self.wvBb = self.wvB0
elif event.key == 'left':
if self.wvA > self.fissA.wave.min():
self.wvA -= abs(self.fissA.wvDelt)
else:
self.wvA = self.fissA.wave.max()
self.xb = self.x0
self.yb = self.y0
self.wvAb = self.wvA0
self.wvBb = self.wvB0
elif event.key == 'up':
if self.wvB < self.fissB.wave.max():
self.wvB += abs(self.fissB.wvDelt)
else:
self.wvB = self.fissB.wave.min()
self.xb = self.x0
self.yb = self.y0
self.wvAb = self.wvA0
self.wvBb = self.wvB0
elif event.key == 'down':
if self.wvB > self.fissB.wave.min():
self.wvB -= abs(self.fissB.wvDelt)
else:
self.wvB = self.fissB.wave.max()
self.xb = self.x0
self.yb = self.y0
self.wvAb = self.wvA0
self.wvBb = self.wvB0
elif event.key == ' ' and (event.inaxes == self.axRasterA or
event.inaxes == self.axRasterB) :
self.x = event.xdata
self.y = event.ydata
self.xb = self.x0
self.yb = self.y0
self.wvAb = self.wvA0
self.wvBb = self.wvB0
elif event.key == ' ' and event.inaxes == self.axProfileA:
self.wvA = event.xdata
self.xb = self.x0
self.yb = self.y0
self.wvAb = self.wvA0
self.wvBb = self.wvB0
elif event.key == ' ' and event.inaxes == self.axProfileB:
self.wvB = event.xdata
self.xb = self.x0
self.yb = self.y0
self.wvAb = self.wvA0
self.wvBb = self.wvB0
elif event.key == 'ctrl+h':
self.wvA = self.wvAH
self.wvB = self.wvBH
self.x = self.xH
self.y = self.yH
elif event.key == 'ctrl+b':
x = self.x
y = self.y
wvA = self.wvA
wvB = self.wvB
self.x = self.xb
self.y = self.yb
self.wvA = self.wvAb
self.wvB = self.wvBb
self.xb = x
self.yb = y
self.wvAb = wvA
self.wvBb = wvB
if self.x != self.x0 or self.y != self.y0:
self._chSpect()
if self.wvA != self.wvA0:
self._chRasterA()
if self.wvB != self.wvB0:
self._chRasterB()
self.fig.canvas.draw_idle()
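    # Bookkeeping: (x0, y0, wvA0, wvB0) hold the state currently drawn,
    # (xb, yb, wvAb, wvBb) hold the "back" state restored by ctrl+b, and
    # (xH, yH, wvAH, wvBH) hold the "home" state restored by ctrl+h. Most
    # navigation keys save the previously drawn state as the new back state
    # before the redraw helpers below update the *0 values.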
def _chSpect(self):
self.x0 = self.x
self.y0 = self.y
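        # Convert the cursor position (arcsec) into array indices; pixel i is
        # centred at (i + 0.5) * delt, hence the half-pixel shift before the
        # division.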
xpix = int(round((self.x-self.xDelt/2)/self.xDelt))
ypix = int(round((self.y-self.yDelt/2)/self.yDelt))
self.plotProfileA.set_ydata(self.fissA.data[ypix, xpix])
self.plotProfileB.set_ydata(self.fissB.data[ypix, xpix])
self.pointRasterA.set_offsets([self.x, self.y])
self.pointRasterB.set_offsets([self.x, self.y])
self.vlineRasterA.set_xdata(self.x)
self.vlineRasterB.set_xdata(self.x)
self.axProfileA.set_ylim(self.fissA.data[ypix, xpix].min()-100,
self.fissA.data[ypix, xpix].max()+100)
self.axProfileB.set_ylim(self.fissB.data[ypix, xpix].min()-100,
self.fissB.data[ypix, xpix].max()+100)
def _chRasterA(self):
self.wvA0 = self.wvA
rasterA = _getRaster(self.fissA.data, self.fissA.wave, self.wvA,
self.fissA.wvDelt,
hw=self.hw)
wh = rasterA > 5
if self.scale == 'log':
rasterA = np.log10(rasterA)
cmin = rasterA[wh].min()
self.imRasterA.set_data(rasterA)
self.vlineProfileA.set_xdata(self.wvA)
self.axProfileA.set_title(r'%s Band (wv = %.2f $\AA$)'%(self.fissA.band,
self.wvA))
if self.scale == 'std':
self.imRasterA.set_clim(np.median(rasterA)-rasterA.std()*self.sigFactor,
np.median(rasterA)+rasterA.std()*self.sigFactor)
else:
self.imRasterA.set_clim(cmin, rasterA.max())
def _chRasterB(self):
self.wvB0 = self.wvB
rasterB = _getRaster(self.fissB.data, self.fissB.wave, self.wvB,
self.fissB.wvDelt,
hw=self.hw)
wh = rasterB > 5
if self.scale == 'log':
rasterB = np.log10(rasterB)
cmin = rasterB[wh].min()
self.imRasterB.set_data(rasterB)
self.vlineProfileB.set_xdata(self.wvB)
self.axProfileB.set_title(r'%s Band (wv = %.2f $\AA$)'%(self.fissB.band,
self.wvB))
if self.scale == 'std':
self.imRasterB.set_clim(np.median(rasterB)-rasterB.std()*self.sigFactor,
np.median(rasterB)+rasterB.std()*self.sigFactor)
else:
self.imRasterB.set_clim(cmin, rasterB.max())
| {
"repo_name": "SNU-sunday/fisspy",
"path": "fisspy/image/interactive_image.py",
"copies": "1",
"size": "29801",
"license": "bsd-2-clause",
"hash": 5001937383226699000,
"line_mean": 39.7674418605,
"line_max": 131,
"alpha_frac": 0.502130801,
"autogenerated": false,
"ratio": 3.4003879507074397,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9386944354525435,
"avg_score": 0.00311487943640092,
"num_lines": 731
} |
from __future__ import absolute_import, division
import os
import errno
from math import sqrt, pi
import numpy as np
from scipy.ndimage import binary_erosion
try:
import pyopencl as cl
except ImportError:
pass
from . import volume
def mkdir_p(path):
try:
os.makedirs(path)
except OSError as exc:
if exc.errno == errno.EEXIST and os.path.isdir(path):
pass
else:
raise
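# Note: on Python >= 3.2 this helper is essentially equivalent to
# os.makedirs(path, exist_ok=True).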
def determine_core_indices(mask):
"""Calculate the core indices of a shape"""
core_indices = np.zeros(mask.shape)
eroded_mask = mask > 0
while eroded_mask.sum() > 0:
core_indices += eroded_mask
eroded_mask = binary_erosion(eroded_mask)
return core_indices
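# Illustrative example (not part of the original module): each element's core
# index counts how many successive binary erosions it survives, so the centre
# of a solid shape gets the highest value.
#
#     >>> determine_core_indices(np.ones((5, 5)))[2, 2]
#     3.0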
def get_queue(platformid=0, deviceid=0):
try:
platform = cl.get_platforms()[platformid]
devices = platform.get_devices(device_type=cl.device_type.GPU)
context = cl.Context(devices=devices)
queue = cl.CommandQueue(context, device=devices[deviceid])
    except Exception:
queue = None
return queue
def fisher_sigma(mv, fsc):
return 1 / sqrt(mv / fsc - 3)
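# A plausible reading of the formula above (an assumption, not stated in the
# source): with N = mv / fsc effective samples, 1 / sqrt(N - 3) is the
# standard error of a Fisher z-transformed correlation.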
def write_fits_to_pdb(structure, solutions, basename='fit'):
translated_structure = structure.duplicate()
center = translated_structure.coor.mean(axis=1)
translated_structure.translate(-center)
for n, sol in enumerate(solutions, start=1):
out = translated_structure.duplicate()
rot = np.asarray([float(x) for x in sol[6:]]).reshape(3, 3)
trans = sol[3:6]
out.rotate(rot)
out.translate(trans)
out.tofile(basename + '_{:d}.pdb'.format(n))
| {
"repo_name": "haddocking/powerfit",
"path": "powerfit/helpers.py",
"copies": "1",
"size": "1633",
"license": "apache-2.0",
"hash": -4982305960243595000,
"line_mean": 24.9206349206,
"line_max": 70,
"alpha_frac": 0.6448254746,
"autogenerated": false,
"ratio": 3.526997840172786,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4671823314772786,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division
import os
import functools as fun
import itertools as it
import collections as coll
import re
import numpy as np
from scipy import ndimage as nd
from skimage import io
from scipy.stats.mstats import mquantiles as quantiles
from skimage import morphology as skmorph, filters as imfilter, exposure
import skimage.filters.rank as rank
import skimage
import cytoolz as tlz
from cytoolz import curried
from six.moves import map, range, zip, filter
from ._util import normalise_random_state
from . import io as mio
def morphop(im, operation='open', radius=5):
"""Perform a morphological operation with spherical structuring element.
Parameters
----------
im : array, shape (M, N[, P])
2D or 3D grayscale image.
operation : string, optional
The operation to perform. Choices are 'opening', 'closing',
'erosion', and 'dilation'. Imperative verbs also work, e.g.
'dilate'.
radius : int, optional
The radius of the structuring element (disk or ball) used.
Returns
-------
imout : array, shape (M, N[, P])
The transformed image.
Raises
------
    ValueError : if the image is not 2D or 3D, or the operation is not
        recognised.
"""
if im.ndim == 2:
selem = skmorph.disk(radius)
elif im.ndim == 3:
selem = skmorph.ball(radius)
else:
raise ValueError("Image input to 'morphop' should be 2D or 3D"
", got %iD" % im.ndim)
if operation.startswith('open'):
imout = nd.grey_opening(im, footprint=selem)
elif operation.startswith('clos'):
imout = nd.grey_closing(im, footprint=selem)
elif operation.startswith('dila'):
imout = nd.grey_dilation(im, footprint=selem)
    elif operation.startswith('ero'):
        imout = nd.grey_erosion(im, footprint=selem)
    else:
        raise ValueError("Operation %r not recognised; use 'open', 'close', "
                         "'dilate', or 'erode'." % operation)
    return imout
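# Illustrative example (not part of the original module): a single bright
# pixel dilated with a radius-1 disk spreads to its 4-neighbourhood.
#
#     >>> im = np.zeros((5, 5), int)
#     >>> im[2, 2] = 1
#     >>> morphop(im, 'dilate', 1)[2, 1:4]
#     array([1, 1, 1])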
def basefn(fn):
"""Get the filename without the extension.
Parameters
----------
fn : string
A filename.
Returns
-------
outfn : string
`fn` with the extension stripped.
Examples
--------
>>> file_name = 'file_name.ext'
>>> basefn(file_name)
'file_name'
"""
return os.path.splitext(fn)[0]
def max_mask_iter(fns, offset=0, close_radius=0, erode_radius=0):
"""Find masks for a set of images having brightness artifacts.
Parameters
----------
fns : list of string
The images being examined.
offset : int, optional
Offset the threshold automatically found.
close_radius : int, optional
Perform a morphological closing of the mask of this radius.
erode_radius : int, optional
Perform a morphological erosion of the mask, after any closing,
of this radius.
Returns
-------
maxes : iterator of bool array
The max mask image corresponding to each input image.
"""
ms = maxes(fns)
t = imfilter.threshold_otsu(ms)
    ims = map(io.imread, fns)
masks = ((im < t + offset) for im in ims)
if close_radius > 0:
masks = (morphop(mask, 'close', close_radius) for mask in masks)
if erode_radius > 0:
masks = (morphop(mask, 'erode', erode_radius) for mask in masks)
return masks
def write_max_masks(fns, offset=0, close_radius=0, erode_radius=0,
suffix='.mask.tif', compress=1):
"""Find a mask for images having a brightness artifact.
This function iterates over a set of images and finds the maximum
value of each. Then, Otsu's threshold is applied to the set of
maxima, and any element brighter than this in *any* image is
masked out.
Parameters
----------
fns : list of string
The images being examined.
offset : int, optional
Offset the threshold automatically found.
close_radius : int, optional
Perform a morphological closing of the mask of this radius.
erode_radius : int, optional
Perform a morphological erosion of the mask, after any closing,
of this radius.
suffix : string, optional
Save an image next to the original, with this suffix.
compress : int in [0, 9], optional
Compression level for saved images. 0 = no compression,
1 = fast compression, 9 = maximum compression, slowest.
Returns
-------
n, m : int
The number of images for which a mask was created, and the
total number of images
"""
masks = max_mask_iter(fns, offset, close_radius, erode_radius)
n = 0
m = 0
for fn, mask in zip(fns, masks):
outfn = basefn(fn) + suffix
m += 1
if not mask.all():
# we multiply by 255 to make the image easy to look at
mio.imsave(outfn, mask.astype(np.uint8) * 255, compress=compress)
n += 1
return n, m
def maxes(fns):
"""Return an array of the maximum intensity of each image.
Parameters
----------
fns : list of string
The filenames of the images.
Returns
-------
maxes : 1D array
The maximum value of each image examined.
"""
ims = map(io.imread, fns)
maxes = np.array(list(map(np.max, ims)))
return maxes
def stretchlim(im, bottom=0.001, top=None, mask=None, in_place=False):
"""Stretch the image so new image range corresponds to given quantiles.
Parameters
----------
im : array, shape (M, N, [...,] P)
The input image.
bottom : float, optional
The lower quantile.
top : float, optional
The upper quantile. If not provided, it is set to 1 - `bottom`.
mask : array of bool, shape (M, N, [...,] P), optional
Only consider intensity values where `mask` is ``True``.
in_place : bool, optional
If True, modify the input image in-place (only possible if
it is a float image).
Returns
-------
out : np.ndarray of float
The stretched image.
"""
if in_place and np.issubdtype(im.dtype, np.float):
out = im
else:
out = np.empty(im.shape, np.float32)
out[:] = im
if mask is None:
mask = np.ones(im.shape, dtype=bool)
if top is None:
top = 1 - bottom
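    # Map the [q0, q1] intensity window onto [0, 1] and clip everything
    # outside it.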
q0, q1 = quantiles(im[mask], [bottom, top])
out -= q0
out /= q1 - q0
out = np.clip(out, 0, 1, out=out)
return out
def run_quadrant_stitch(fns, re_string='(.*)_(s[1-4])_(w[1-3]).*',
re_quadrant_group=1, compress=1):
"""Read images, stitched them, and write out to same directory.
Parameters
----------
fns : list of string
The filenames to be processed.
re_string : string, optional
The regular expression to match the filename.
re_quadrant_group : int, optional
The group from the re.match object that will contain quadrant info.
compress : int in [0, 9], optional
Compression level for saved images. 0 = no compression,
1 = fast compression, 9 = maximum compression, slowest.
Returns
-------
fns_out : list of string
The output filenames
"""
qd = group_by_quadrant(fns, re_string, re_quadrant_group)
fns_out = []
for fn_pattern, fns in qd.items():
new_filename = '_'.join(fn_pattern) + '_stitched.tif'
ims = list(map(io.imread, sorted(fns)))
im = quadrant_stitch(*ims)
mio.imsave(new_filename, im, compress=compress)
fns_out.append(new_filename)
return fns_out
def crop(im, slices=(slice(100, -100), slice(250, -300))):
"""Crop an image to contain only plate interior.
Parameters
----------
im : array
The image to be cropped.
slices : tuple of slice objects, optional
The slices defining the crop. The default values are for
stitched images from the Marcelle screen.
Returns
-------
imc : array
The cropped image.
Examples
--------
>>> im = np.zeros((5, 5), int)
>>> im[1:4, 1:4] = 1
>>> crop(im, slices=(slice(1, 4), slice(1, 4)))
array([[1, 1, 1],
[1, 1, 1],
[1, 1, 1]])
"""
return im[slices]
def group_by_channel(fns, re_string='(.*)_(w[1-3]).*',
re_channel_group=1):
"""Group filenames by channel to prepare for illumination estimation.
Intended to be run *after* quadrant stitching.
Parameters
----------
fns : list of string
The filenames to be processed.
re_string : string, optional
The regular expression to match the filename.
re_quadrant_group : int, optional
The group from the re.match object that will contain channel info.
Returns
-------
grouped : dict mapping tuple of string to list of string
The filenames, grouped into lists containing all images of the same
channel. The keys are the channel regular expression group, useful for
composing a filename for the illumination image.
Examples
--------
>>> fn_numbering = it.product(range(2), range(1, 4))
>>> fns = ['image_%i_w%i.tif' % (i, j) for i, j in fn_numbering]
>>> fns
['image_0_w1.tif', 'image_0_w2.tif', 'image_0_w3.tif', 'image_1_w1.tif', 'image_1_w2.tif', 'image_1_w3.tif']
>>> sorted(group_by_channel(fns).items())
[('w1', ['image_0_w1.tif', 'image_1_w1.tif']), ('w2', ['image_0_w2.tif', 'image_1_w2.tif']), ('w3', ['image_0_w3.tif', 'image_1_w3.tif'])]
"""
re_match = fun.partial(re.match, re_string)
match_objs = list(map(re_match, fns))
fns = [fn for fn, match in zip(fns, match_objs) if match is not None]
match_objs = [x for x in match_objs if x is not None]
matches = [x.groups() for x in match_objs]
keys = [m[re_channel_group] for m in matches]
grouped = {}
for k, fn in zip(keys, fns):
grouped.setdefault(k, []).append(fn)
return grouped
def group_by_quadrant(fns, re_string='(.*)_(s[1-4])_(w[1-3]).*',
re_quadrant_group=1):
"""Group filenames by quadrant to prepare for stitching.
Parameters
----------
fns : list of string
The filenames to be processed.
re_string : string, optional
The regular expression to match the filename.
re_quadrant_group : int, optional
The group from the re.match object that will contain quadrant info.
Returns
-------
grouped : dict mapping tuple of string to tuple of string
The filenames, grouped into tuples containing four quadrants of the
same image. The keys are all the regular expression match groups
*other* than the quadrant group, useful for composing a filename for
the stitched images.
Examples
--------
>>> fn_numbering = it.product(range(2), range(1, 5))
>>> fns = ['image_%i_s%i_w1.TIF' % (i, j) for i, j in fn_numbering]
>>> fns
['image_0_s1_w1.TIF', 'image_0_s2_w1.TIF', 'image_0_s3_w1.TIF', 'image_0_s4_w1.TIF', 'image_1_s1_w1.TIF', 'image_1_s2_w1.TIF', 'image_1_s3_w1.TIF', 'image_1_s4_w1.TIF']
>>> sorted(group_by_quadrant(fns).items())
[(('image_0', 'w1'), ['image_0_s1_w1.TIF', 'image_0_s2_w1.TIF', 'image_0_s3_w1.TIF', 'image_0_s4_w1.TIF']), (('image_1', 'w1'), ['image_1_s1_w1.TIF', 'image_1_s2_w1.TIF', 'image_1_s3_w1.TIF', 'image_1_s4_w1.TIF'])]
"""
re_match = fun.partial(re.match, re_string)
match_objs = list(map(re_match, fns))
fns = [fn for fn, match in zip(fns, match_objs) if match is not None]
match_objs = [x for x in match_objs if x is not None]
matches = [x.groups() for x in match_objs]
keys = list(map(tuple, [[m[i] for i in range(len(m))
if i != re_quadrant_group] for m in matches]))
grouped = {}
for k, fn in zip(keys, fns):
grouped.setdefault(k, []).append(fn)
return grouped
def quadrant_stitch(nw, ne, sw, se):
"""Stitch four seamless quadrant images into a single big image.
Parameters
----------
nw, ne, sw, se : np.ndarray, shape (Mi, Ni)
The four quadrant images, corresponding to the cardinal directions of
north-west, north-east, south-west, south-east.
Returns
-------
stitched : np.ndarray, shape (M0+M2, N0+N1)
The image resulting from stitching the four input images
Examples
--------
>>> imbase = np.ones((2, 3), int)
>>> nw, ne, sw, se = [i * imbase for i in range(4)]
>>> quadrant_stitch(nw, ne, sw, se)
array([[0, 0, 0, 1, 1, 1],
[0, 0, 0, 1, 1, 1],
[2, 2, 2, 3, 3, 3],
[2, 2, 2, 3, 3, 3]])
"""
x1 = nw.shape[0]
x2 = sw.shape[0]
y1 = nw.shape[1]
y2 = ne.shape[1]
stitched = np.zeros((x1 + x2, y1 + y2), nw.dtype)
stitched[:x1, :y1] = nw
stitched[:x1, y1:] = ne
stitched[x1:, :y1] = sw
stitched[x1:, y1:] = se
return stitched
def rescale_to_11bits(im_float):
"""Rescale a float image in [0, 1] to integers in [0, 2047].
This operation makes rank filtering much faster.
Parameters
----------
im_float : array of float in [0, 1]
The float image. The range and type are *not* checked prior to
conversion!
Returns
-------
im11 : array of uint16 in [0, 2047]
The converted image.
Examples
--------
>>> im = np.array([0., 0.5, 1.])
>>> rescale_to_11bits(im)
array([ 0, 1024, 2047], dtype=uint16)
"""
im11 = np.round(im_float * 2047.).astype(np.uint16)
return im11
def rescale_from_11bits(im11):
"""Rescale a uint16 image with range in [0, 2047] to float in [0., 1.]
Parameters
----------
im11 : array of uint16, range in [0, 2047]
The input image, encoded in uint16 but having 11-bit range.
Returns
-------
imfloat : array of float, same shape as `im11`
The output image.
Examples
--------
>>> im = np.array([0, 1024, 2047], dtype=np.uint16)
>>> rescale_from_11bits(im)
array([ 0. , 0.5002, 1. ])
Notes
-----
Designed to be a no-op with the above `rescale_to_11bits` function,
although this is subject to approximation errors.
"""
return np.round(im11 / 2047., decimals=4)
def unpad(im, pad_width):
"""Remove padding from a padded image.
Parameters
----------
im : array
The input array.
pad_width : int or sequence of int
The width of padding: a number for the same width along each
dimension, or a sequence for different widths.
Returns
-------
imc : array
The unpadded image.
Examples
--------
>>> im = np.zeros((5, 5), int)
>>> im[1:4, 1:4] = 1
>>> unpad(im, 1)
array([[1, 1, 1],
[1, 1, 1],
[1, 1, 1]])
"""
if not isinstance(pad_width, coll.Iterable):
pad_width = [pad_width] * im.ndim
slices = tuple([slice(p, -p) for p in pad_width])
return im[slices]
def _reduce_with_count(pairwise, iterator, accumulator=None):
"""Return both the result of the reduction and the number of elements.
Parameters
----------
pairwise : function (a -> b -> a)
The function with which to reduce the `iterator` sequence.
iterator : iterable
The sequence being reduced.
accumulator : type "a", optional
An initial value with which to perform the reduction.
Returns
-------
result : type "a"
The result of the reduce operation.
count : int
The number of elements that were accumulated.
Examples
--------
>>> x = [5, 6, 7]
>>> _reduce_with_count(np.add, x)
(18, 3)
"""
def new_pairwise(a, b):
(elem1, c1), (elem2, c2) = a, b
return pairwise(elem1, elem2), c2
new_iter = zip(iterator, it.count(1))
    # Seed the reduce with the supplied accumulator (or 0); the count slot is
    # overwritten on the first step, so its initial value does not matter.
    new_acc = (accumulator if accumulator is not None else 0, 0)
return tlz.reduce(new_pairwise, new_iter, new_acc)
def find_background_illumination(fns, radius=51, quantile=0.05,
stretch_quantile=0., method='mean'):
"""Use a set of related images to find uneven background illumination.
Parameters
----------
fns : list of string
A list of image file names
radius : int, optional
The radius of the structuring element used to find background.
default: 51
quantile : float in [0, 1], optional
The desired quantile to find background. default: 0.05
stretch_quantile : float in [0, 1], optional
Stretch image to full dtype limit, saturating above this quantile.
method : 'mean', 'average', 'median', or 'histogram', optional
How to use combine the smoothed intensities of the input images
to infer the illumination field:
- 'mean' or 'average': Use the mean value of the smoothed
images at each pixel as the illumination field.
- 'median': use the median value. Since all images need to be
in-memory to compute this, use only for small sets of images.
- 'histogram': use the median value approximated by a
histogram. This can be computed on-line for large sets of
images.
Returns
-------
illum : np.ndarray, float, shape (M, N)
The estimated illumination over the image field.
See Also
--------
`correct_image_illumination`, `correct_multiimage_illumination`.
"""
# This function follows the "PyToolz" streaming data model to
# obtain the illumination estimate. First, define each processing
# step:
read = io.imread
normalize = (tlz.partial(stretchlim, bottom=stretch_quantile)
if stretch_quantile > 0
else skimage.img_as_float)
rescale = rescale_to_11bits
pad = fun.partial(skimage.util.pad, pad_width=radius, mode='reflect')
rank_filter = fun.partial(rank.percentile, selem=skmorph.disk(radius),
p0=quantile)
_unpad = fun.partial(unpad, pad_width=radius)
unscale = rescale_from_11bits
# Next, compose all the steps, apply to all images (streaming)
bg = (tlz.pipe(fn, read, normalize, rescale, pad, rank_filter, _unpad,
unscale)
for fn in fns)
# Finally, reduce all the images and compute the estimate
if method == 'mean' or method == 'average':
illum, count = _reduce_with_count(np.add, bg)
illum = skimage.img_as_float(illum) / count
elif method == 'median':
illum = np.median(list(bg), axis=0)
elif method == 'histogram':
raise NotImplementedError('histogram background illumination method '
'not yet implemented.')
else:
raise ValueError('Method "%s" of background illumination finding '
'not recognised.' % method)
return illum
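# Typical use (illustrative sketch; the file pattern and ``fn`` are
# hypothetical):
#
#     from glob import glob
#     illum = find_background_illumination(glob('plate1/*_w1.tif'))
#     corrected = correct_image_illumination(io.imread(fn), illum)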
def correct_multiimage_illumination(im_fns, illum, stretch_quantile=0,
random_state=None):
"""Divide input images pointwise by the illumination field.
However, where `correct_image_illumination` rescales each individual
image to span the full dynamic range of the data type, this one
rescales each image such that *all images, collectively,* span the
dynamic range. This aims to fix stretching of image noise when there
is no signal in the data [1]_.
Parameters
----------
ims : iterable of image filenames, each of shape (M, N, ..., P)
The images to be corrected.
illum : array, shape (M, N, ..., P)
The background illumination field.
stretch_quantile : float, optional
Clip intensity above and below this quantile. Stretch remaining
values to fill dynamic range.
random_state : None, int, or numpy RandomState instance, optional
An optional random number generator or seed, passed directly to
`_reservoir_sampled_image`.
Returns
-------
ims_out : iterable of corrected uint8 images
The images corrected for background illumination.
References
----------
.. [1] https://github.com/microscopium/microscopium/issues/38
"""
p0 = 100 * stretch_quantile
p1 = 100 - p0
im_fns = list(im_fns)
# in first pass, make a composite image to get global intensity range
ims_pass1 = map(io.imread, im_fns)
sampled = _reservoir_sampled_image(ims_pass1, random_state)
corrected = sampled / illum # don't do in-place, dtype may clash
corr_range = tuple(np.percentile(corrected, [p0, p1]))
# In second pass, correct every image and adjust exposure
ims_pass2 = map(io.imread, im_fns)
for im in ims_pass2:
corrected = im / illum
rescaled = exposure.rescale_intensity(corrected, in_range=corr_range,
out_range=np.uint8)
out = np.round(rescaled).astype(np.uint8)
yield out
def _reservoir_sampled_image(ims_iter, random_state=None):
"""Return an image where each pixel is sampled from a list of images.
The idea is to get a sample of image intensity throughout a collection
of images, to know what the "standard range" is for this type of image.
The implementation uses a "reservoir" image to sample while remaining
space-efficient, and only needs to hold about four images at one time
(the reservoir, the current sample, a random image for sampling, and
a thresholded version of the random image).
Parameters
----------
ims_iter : iterable of arrays
An iterable over numpy arrays (representing images).
random_state : None, int, or numpy RandomState instance, optional
An optional random number generator or seed from which to draw
samples.
Returns
-------
sampled : array, same shape as input
The sampled "image".
Examples
--------
>>> ims = iter(np.arange(27).reshape((3, 3, 3)))
>>> _reservoir_sampled_image(ims, 0)
array([[ 0, 1, 2],
[ 3, 13, 23],
[24, 25, 8]])
"""
random = normalise_random_state(random_state)
ims_iter = iter(ims_iter) # ensure iterator and not e.g. list
sampled = next(ims_iter)
for k, im in enumerate(ims_iter, start=2):
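        # Per-pixel reservoir sampling: replacing with probability 1/k keeps
        # every one of the k images seen so far equally likely to supply any
        # given pixel.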
to_replace = random.rand(*im.shape) < (1 / k)
sampled[to_replace] = im[to_replace]
return sampled
def global_threshold(ims_iter, random_state=None):
"""Generate a global threshold for the collection of images given.
The threshold is determined by sampling the intensity of every
image and then computing the Otsu [1]_ threshold on this sample.
When the input images are multi-channel, the threshold is computed
separately for each channel.
Parameters
----------
ims_iter : iterable of arrays
An iterable over numpy arrays (representing images).
random_state : None, int, or numpy RandomState instance, optional
An optional random number generator or seed from which to draw
samples.
Returns
-------
thresholds : tuple of float, length equal to number of channels
The global threshold for the image collection.
References
----------
    .. [1] Nobuyuki Otsu (1979). "A threshold selection method from
gray-level histograms". IEEE Trans. Sys., Man., Cyber.
9 (1): 62-66. doi:10.1109/TSMC.1979.4310076
Examples
--------
>>> ims = iter(np.arange(27).reshape((3, 3, 3)))
>>> global_threshold(ims, 0)
(13,)
"""
sampled = _reservoir_sampled_image(ims_iter, random_state)
if sampled.ndim < 3:
sampled = sampled[..., np.newaxis] # add dummy channel dimension
thresholds = [imfilter.threshold_otsu(sampled[..., i])
for i in range(sampled.shape[-1])]
return tuple(thresholds)
def correct_image_illumination(im, illum, stretch_quantile=0, mask=None):
"""Divide input image pointwise by the illumination field.
Parameters
----------
im : np.ndarray of float
The input image.
illum : np.ndarray of float, same shape as `im`
The illumination field.
stretch_quantile : float, optional
Stretch the image intensity to saturate the top and bottom
quantiles given.
mask : array of bool, same shape as im, optional
Only stretch the image intensity where `mask` is ``True``.
Returns
-------
imc : np.ndarray of float, same shape as `im`
The corrected image.
See Also
--------
`correct_multiimage_illumination`
"""
if im.dtype != np.float:
imc = skimage.img_as_float(im)
else:
imc = im.copy()
imc /= illum
lim = stretch_quantile
imc = stretchlim(imc, lim, 1-lim, mask)
return imc
def montage(ims, order=None):
"""Stitch together a list of images according to a specified pattern.
The order pattern should be an array of integers where each element
corresponds to the index of the image in the fns list.
eg if order = [[20, 21, 22, 23, 24],
[19, 6, 7, 8, 9],
[18, 5, 0, 1, 10],
[17, 4, 3, 2, 11],
[16, 15, 14, 13, 12]]
This order will stitch together 25 images in a clockwise spiral pattern.
Parameters
----------
ims : iterable of array, shape (M, N[, 3])
The list of the image files to be stitched together. If None,
this parameter defaults to the order given above.
order : array-like of int, shape (P, Q)
The order of the stitching, with each entry referring
to the index of file in the fns array.
Returns
-------
montaged : array, shape (M * P, N * Q[, 3])
The stitched image.
Examples
--------
>>> ims = [np.zeros((2, 2), dtype=np.uint8),
... 2 * np.ones((2, 2), dtype=np.uint8)]
>>> order = [1, 0]
>>> montage(ims, order)
array([[2, 2, 0, 0],
[2, 2, 0, 0]], dtype=uint8)
"""
if order is None:
from .screens import cellomics
order = cellomics.SPIRAL_CLOCKWISE_RIGHT_25
order = np.atleast_2d(order)
# in case stream is passed, take one sip at a time ;)
ims = list(tlz.take(order.size, ims))
rows, cols = ims[0].shape[:2]
mrows, mcols = order.shape
montaged = np.zeros((rows * mrows, cols * mcols) + ims[0].shape[2:],
dtype=ims[0].dtype)
for i in range(mrows):
for j in range(mcols):
montaged[rows*i:rows*(i+1), cols*j:cols*(j+1)] = ims[order[i, j]]
return montaged
def find_missing_fields(fns, order=None,
re_string=r".*_[A-P]\d{2}f(\d{2})d0",
re_group=1):
"""Find which fields are missing from a list of files belonging to a well.
Given a list of image files, a stitch order, and a regex pattern
determining which part of the filename denotes the field, find out
which fields are missing.
Parameters
----------
fns : list of str
order : array-like of int, shape (M, N), optional
The order of the stitching, with each entry referring
to the index of file in the fns array.
re_string : str, optional
The regex pattern used to show where in the file the field is.
Default follows the Cellomics pattern eg.
MFGTMP_150406100001_A01f00d0.TIF where the field is the number
after "f".
re_group : int, optional
The regex group the field value belongs to. Default 1.
Returns
-------
missing : array of int
A possibly empty array containing the indices of missing fields.
"""
if order is None:
from .screens import cellomics
order = cellomics.SPIRAL_CLOCKWISE_RIGHT_25
# get fields present in list
pattern = re.compile(re_string)
fields = [int(re.match(pattern, fn).group(re_group)) for fn in fns]
# determine which fields are missing
missing = np.setdiff1d(order, fields)
return missing
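# Illustrative example (hypothetical filenames, following the default
# Cellomics pattern):
#
#     >>> fns = ['MFGTMP_1_A01f%02dd0.TIF' % i for i in range(25) if i != 3]
#     >>> find_missing_fields(fns, order=np.arange(25))
#     array([3])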
def create_missing_mask(missing, order, rows=512, cols=512):
"""Create a binary mask for stitched images where fields are missing.
Given a list of missing fields, a stitch order, and the size of
the input images, create a binary mask with False values where
fields are missing. This is used to prevent missing fields from
upsetting feature computation on images where a field is missing.
Parameters
----------
missing : list of int, or empty list
The fields that are missing.
order : array-like of int, shape (M, N), optional
The order of the stitching, with each entry referring
to the index of file in the fns array.
rows : int, optional
The number of rows in the input images. Default 512.
cols : int, optional
The number of cols in the input images. Default 512.
Returns
-------
mask : array of bool, shape (P, Q)
A binary mask where False denotes a missing field.
"""
if order is None:
from .screens import cellomics
order = cellomics.SPIRAL_CLOCKWISE_RIGHT_25
order = np.atleast_2d(order)
mrows, mcols = order.shape
mask = np.ones((rows * mrows, cols * mcols),
dtype=bool)
for i in range(mrows):
for j in range(mcols):
if order[i, j] in missing:
mask[rows*i:rows*(i+1), cols*j:cols*(j+1)] = False
return mask
def montage_with_missing(fns, order=None):
"""Montage a list of images, replacing missing fields with dummy values.
The methods `montage` and `montage_stream` assume that image filenames
and image iterators passed to it are complete, and include the full set
images belonging to the well. Some screens have missing fields,
so this function can be used to montage together images with missing
fields. Missing fields are replaced with 0 values.
Missing fields are determined from the information in the image
file name. See 'find_missing_fields'
Parameters
----------
fns : list of str
The list of filenames to montage.
order : array-like of int, shape (M, N), optional
The order of the stitching, with each entry referring
to the index of file in the fns array.
Default cellomics.SPIRAL_CLOCKWISE_RIGHT_25
Returns
-------
montaged : array-like, shape (P, Q)
The montaged image.
mask : array of bool, shape (P, Q)
A binary mask, where entries with taking the value of
False represent missing fields in the montaged image.
missing : int
The number of fields that were found to be missing in the
input list of filenames. This is useful for normalising
features that depend on the entirety of the montaged image
(e.g. count of objects).
"""
if order is None:
from .screens import cellomics
order = cellomics.SPIRAL_CLOCKWISE_RIGHT_25
order = np.atleast_2d(order)
mrows, mcols = order.shape
# get width & height of first image. the rest of the images
# are assumed to be of the same shape
im0 = io.imread(fns[0])
rows, cols = im0.shape[:2]
# find which fields are missing
missing = find_missing_fields(fns, order)
# insert None value to list of files when fields missing
_fns = fns[:] # create copy of list to avoid referencing problems
for i in missing:
_fns.insert(i, None)
# create binary mask for the missing fields
mask = create_missing_mask(missing, order, rows, cols)
# instantiate array for output montaged image
montaged = np.zeros((rows * mrows, cols * mcols) + im0.shape[2:],
dtype=im0.dtype)
for i, j in it.product(range(mrows), range(mcols)):
index = order[i, j]
if _fns[index] is not None:
im = io.imread(_fns[index])
montaged[rows*i:rows*(i+1), cols*j:cols*(j+1)] = im
return montaged, mask, len(missing)
@tlz.curry
def reorder(index_list, list_to_reorder):
"""Curried function to reorder a list according to input indices.
Parameters
----------
index_list : list of int
The list of indices indicating where to put each element in the
input list.
list_to_reorder : list
The list being reordered.
Returns
-------
reordered_list : list
The reordered list.
Examples
--------
>>> list1 = ['foo', 'bar', 'baz']
>>> reorder([2, 0, 1], list1)
['baz', 'foo', 'bar']
"""
return [list_to_reorder[j] for j in index_list]
@tlz.curry
def stack_channels(images, order=[0, 1, 2]):
"""Stack multiple image files to one single, multi-channel image.
Parameters
----------
images : list of array, shape (M, N)
The images to be concatenated. List should contain
three images. Entries 'None' are considered to be dummy
channels
channel_order : list of int, optional
The order the channels should be in in the final image.
Returns
-------
stack_image : array, shape (M, N, 3)
The concatenated, three channel image.
Examples
--------
>>> image1 = np.ones((2, 2), dtype=int) * 1
>>> image2 = np.ones((2, 2), dtype=int) * 2
>>> joined = stack_channels((None, image1, image2))
>>> joined.shape
(2, 2, 3)
>>> joined[0, 0]
array([0, 1, 2])
>>> joined = stack_channels((image1, image2), order=[None, 0, 1])
>>> joined.shape
(2, 2, 3)
>>> joined[0, 0]
array([0, 1, 2])
"""
# ensure we support iterators
images = list(tlz.take(len(order), images))
# ensure we grab an image and not `None`
def is_array(obj): return isinstance(obj, np.ndarray)
image_prototype = next(filter(is_array, images))
# A `None` in `order` implies no image at that position
ordered_ims = [images[i] if i is not None else None for i in order]
ordered_ims = [np.zeros_like(image_prototype) if image is None else image
for image in ordered_ims]
# stack images with np.dstack, but if only a single channel is passed,
# don't add an extra dimension
stack_image = np.squeeze(np.dstack(ordered_ims))
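    # Drop references to the (possibly large) per-channel arrays promptly so
    # they can be garbage collected once the stacked copy exists.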
while ordered_ims:
del ordered_ims[-1]
return stack_image
def montage_stream(ims, montage_order=None, channel_order=[0, 1, 2]):
"""From a sequence of single-channel field images, montage multichannels.
Suppose the input is a list:
```
ims = [green1a, blue1a, red1a, green1b, blue1b, red1b,
green2a, blue2a, red2a, green2b, blue2b, red2b]
```
with channel order ``[2, 0, 1]`` and montage order ``[1, 0]``, then
the output will be:
```
[rgb1_ba, rgb2_ba]
```
Parameters
----------
ims : iterator of array, shape (M, N)
A list of images in which consecutive images represent single
channels of the same image. (See example.)
montage_order : array-like of int, optional
The order of the montage images (in 1D or 2D).
channel_order : list of int, optional
The order in which the channels appear.
Returns
-------
montaged_stream : iterator of arrays
An iterator of the images composed into multi-channel montages.
Examples
--------
>>> images = (i * np.ones((4, 5), dtype=np.uint8) for i in range(24))
>>> montaged = list(montage_stream(images, [[0, 1], [2, 3]], [2, 0, 1]))
>>> len(montaged)
2
>>> montaged[0].shape
(8, 10, 3)
>>> montaged[0][0, 0, :]
array([2, 0, 1], dtype=uint8)
>>> montaged[0][4, 5, :]
array([11, 9, 10], dtype=uint8)
>>> montaged[1][4, 5, :]
array([23, 21, 22], dtype=uint8)
"""
if montage_order is None:
from .screens import cellomics
montage_order = cellomics.SPIRAL_CLOCKWISE_RIGHT_25
montage_order = np.array(montage_order)
ntiles = montage_order.size
nchannels = len(channel_order)
montage_ = fun.partial(montage, order=montage_order)
return tlz.pipe(ims, curried.partition(nchannels),
curried.map(stack_channels(order=channel_order)),
curried.partition(ntiles),
curried.map(montage_))
| {
"repo_name": "starcalibre/microscopium",
"path": "microscopium/preprocess.py",
"copies": "1",
"size": "36099",
"license": "bsd-3-clause",
"hash": 8583527495127033000,
"line_mean": 31.8172727273,
"line_max": 218,
"alpha_frac": 0.6067481094,
"autogenerated": false,
"ratio": 3.6233062330623307,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.47300543424623304,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division
import os
import tempfile
import numpy as np
from microscopium import preprocess as pre
from microscopium import io as mio
import pytest
import warnings
@pytest.fixture
def image_files(request):
# for clarity we define images as integer arrays in [0, 11) and
# divide by 10 later
i = np.array([[ 7, 4, 1, 1, 0],
[ 2, 5, 9, 6, 7],
[ 2, 3, 3, 8, 5],
[ 3, 0, 1, 7, 5],
[ 6, 0, 10, 1, 6]], np.uint8)
j = np.array([[ 1, 10, 0, 9, 0],
[ 3, 10, 4, 1, 1],
[ 4, 10, 0, 7, 4],
[ 9, 3, 2, 0, 7],
[ 1, 3, 3, 9, 3]], np.uint8)
k = np.array([[ 9, 1, 7, 7, 3],
[ 9, 1, 6, 2, 2],
[ 2, 8, 2, 0, 3],
[ 4, 3, 8, 9, 10],
[ 6, 0, 2, 3, 10]], np.uint8)
files = []
for im in [i, j, k]:
f, fn = tempfile.mkstemp(suffix='.png')
files.append(fn)
mio.imsave(fn, im)
def cleanup():
for fn in files:
os.remove(fn)
request.addfinalizer(cleanup)
return files
def test_illumination_mean(image_files):
illum = pre.find_background_illumination(image_files, radius=1,
quantile=0.5,
stretch_quantile=1e-7,
method='mean')
illum_true = np.array([[5.33, 5.33, 4.67, 1.67, 1.67],
[3.67, 6.67, 2.67, 4.33, 3.00],
[6.67, 3.00, 4.33, 3.00, 5.33],
[2.67, 2.67, 2.67, 6.67, 6.00],
[3.33, 2.00, 2.33, 6.33, 7.33]]) / 10
np.testing.assert_array_almost_equal(illum, illum_true, decimal=1)
def test_illumination_median(image_files):
illum = pre.find_background_illumination(image_files, radius=1,
quantile=0.5,
stretch_quantile=1e-7,
method='median')
illum_true = np.array([[ 4., 5., 4., 1., 1.],
[ 4., 6., 2., 4., 2.],
[ 8., 3., 4., 2., 7.],
[ 3., 3., 3., 7., 6.],
[ 3., 3., 3., 7., 7.]]) / 10
np.testing.assert_array_almost_equal(illum, illum_true, decimal=1)
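# Helper: scale a float image (expected in [0, 1]) to uint8 in [0, 255].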
def conv(im):
return np.round(np.clip(im, 0, np.inf) * 255).astype(np.uint8)
@pytest.fixture
def image_files_noise(request):
"""Three sham images; one has no signal, one has an intensity artifact."""
r = np.random.RandomState(0)
shape = (5, 5)
# no signal
i = conv(0.01 * np.ones(shape, dtype=float) + 0.005 * r.randn(*shape))
# normal image
j = conv(0.5 * r.rand(*shape))
# blown-out corner
k = 0.5 * r.rand(*shape)
k[3:, 3:] = 1.0
k = conv(k)
files = []
for im in [i, j, k]:
f, fn = tempfile.mkstemp(suffix='.png')
files.append(fn)
mio.imsave(fn, im)
def cleanup():
for fn in files:
os.remove(fn)
request.addfinalizer(cleanup)
illum = 0.01 * np.ones(shape, dtype=float)
return files, illum
def test_correct_multiimage_illum(image_files_noise):
files, illum = image_files_noise
with mio.temporary_file('.tif') as out_fn:
ims = pre.correct_multiimage_illumination(files, illum, (2 / 25), 0)
i, j, k = list(ims)
# 1. check noise is not blown out in i
assert not np.any(i > 10)
# 2. check blown out corner in k has not suppressed all other values
assert np.median(k) > 100
cellomics_pattern = "MFGTMP_150406100001_A01f{0:02d}d0.TIF"
missing_test_fns = [
([cellomics_pattern.format(i) for i in range(25)], []),
([cellomics_pattern.format(i) for i in range(25)], [1, 13])
]
# delete "images" with fields 1 and 13 from second set of
# image filesnames
missing_test_fns[1][0].remove(cellomics_pattern.format(1))
missing_test_fns[1][0].remove(cellomics_pattern.format(13))
@pytest.mark.parametrize("fns, expected", missing_test_fns)
def test_find_missing_fields(fns, expected):
actual = pre.find_missing_fields(fns)
np.testing.assert_array_equal(actual, expected)
# create a list of parameters for testing the create_missing_mask function.
# each entry in the tuple represents the fields: missing, order, rows, cols,
# and expected (the expected output from the function)
missing_mask_test = [
([], [[0, 1, 2]], 10, 5, np.ones((10, 15), dtype=np.bool)),
([0, 5], [[0, 1, 2], [4, 5, 6]], 5, 10, np.ones((10, 30), dtype=np.bool)),
([3, 4], [[0, 1], [2, 3], [4, 5]], 10, 5, np.ones((30, 10), dtype=np.bool))
]
# insert False to missing areas of expected output
missing_mask_test[1][4][0:5, 0:10] = False
missing_mask_test[1][4][5:10, 10:20] = False
missing_mask_test[2][4][10:20, 5:10] = False
missing_mask_test[2][4][20:30, 0:5] = False
# pass the set of parameter lists to the test_create_missing_mask
# function. the test will run against every set of parameters in the
# missing_mask_test list
@pytest.mark.parametrize("missing, order, rows, cols, expected",
missing_mask_test)
def test_create_missing_mask(missing, order, rows, cols, expected):
actual = pre.create_missing_mask(missing, order, rows, cols)
np.testing.assert_array_equal(actual, expected)
@pytest.fixture
def test_image_files_montage(request):
def make_test_montage_files(missing_fields):
shape = (2, 2)
fields = list(range(0, 25))
for missing_field in missing_fields:
fields.remove(missing_field)
ims = [np.ones(shape, np.uint8) * i for i in fields]
files = []
for field, im in zip(fields, ims):
prefix = "MFGTMP_140206180002_A01f{0:02d}d0".format(field)
f, fn = tempfile.mkstemp(prefix=prefix, suffix=".tif")
files.append(fn)
with warnings.catch_warnings():
warnings.simplefilter("ignore")
mio.imsave(fn, im)
def cleanup():
for file in files:
os.remove(file)
request.addfinalizer(cleanup)
return files
return make_test_montage_files
def test_montage_with_missing(test_image_files_montage):
files = test_image_files_montage(missing_fields=[20])
montage, mask, number_missing = pre.montage_with_missing(files)
expect_montage = np.array([[0, 0, 21, 21, 22, 22, 23, 23, 24, 24],
[0, 0, 21, 21, 22, 22, 23, 23, 24, 24],
[19, 19, 6, 6, 7, 7, 8, 8, 9, 9],
[19, 19, 6, 6, 7, 7, 8, 8, 9, 9],
[18, 18, 5, 5, 0, 0, 1, 1, 10, 10],
[18, 18, 5, 5, 0, 0, 1, 1, 10, 10],
[17, 17, 4, 4, 3, 3, 2, 2, 11, 11],
[17, 17, 4, 4, 3, 3, 2, 2, 11, 11],
[16, 16, 15, 15, 14, 14, 13, 13, 12, 12],
[16, 16, 15, 15, 14, 14, 13, 13, 12, 12]],
np.uint8)
np.testing.assert_array_equal(expect_montage, montage)
def test_montage_with_missing_mask(test_image_files_montage):
files = test_image_files_montage(missing_fields=[3, 8])
montage, mask, number_missing = pre.montage_with_missing(files)
expected_mask = np.ones((10, 10), np.bool)
expected_mask[6:8, 4:6] = False
expected_mask[2:4, 6:8] = False
np.testing.assert_array_equal(expected_mask, mask)
def test_montage_with_missing_number_missing(test_image_files_montage):
files = test_image_files_montage(missing_fields=[10, 11, 12])
montage, mask, number_missing = pre.montage_with_missing(files)
assert number_missing == 3
if __name__ == '__main__':
pytest.main()
| {
"repo_name": "starcalibre/microscopium",
"path": "tests/test_pre.py",
"copies": "1",
"size": "8026",
"license": "bsd-3-clause",
"hash": 9149242427884415000,
"line_mean": 34.6711111111,
"line_max": 79,
"alpha_frac": 0.5254173935,
"autogenerated": false,
"ratio": 3.097645696642223,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.41230630901422227,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division
import os
import traceback
import sys
import math
import urwid
from netlib import odict
from netlib.http import CONTENT_MISSING, Headers
from . import common, grideditor, signals, searchable, tabs
from . import flowdetailview
from .. import utils, controller, contentviews
from ..models import HTTPRequest, HTTPResponse, decoded
from ..exceptions import ContentViewException
class SearchError(Exception):
pass
def _mkhelp():
text = []
keys = [
("A", "accept all intercepted flows"),
("a", "accept this intercepted flow"),
("b", "save request/response body"),
("d", "delete flow"),
("D", "duplicate flow"),
("e", "edit request/response"),
("f", "load full body data"),
("m", "change body display mode for this entity"),
(None,
common.highlight_key("automatic", "a") +
[("text", ": automatic detection")]
),
(None,
common.highlight_key("hex", "e") +
[("text", ": Hex")]
),
(None,
common.highlight_key("html", "h") +
[("text", ": HTML")]
),
(None,
common.highlight_key("image", "i") +
[("text", ": Image")]
),
(None,
common.highlight_key("javascript", "j") +
[("text", ": JavaScript")]
),
(None,
common.highlight_key("json", "s") +
[("text", ": JSON")]
),
(None,
common.highlight_key("urlencoded", "u") +
[("text", ": URL-encoded data")]
),
(None,
common.highlight_key("raw", "r") +
[("text", ": raw data")]
),
(None,
common.highlight_key("xml", "x") +
[("text", ": XML")]
),
("M", "change default body display mode"),
("p", "previous flow"),
("P", "copy response(content/headers) to clipboard"),
("r", "replay request"),
("V", "revert changes to request"),
("v", "view body in external viewer"),
("w", "save all flows matching current limit"),
("W", "save this flow"),
("x", "delete body"),
("z", "encode/decode a request/response"),
("tab", "next tab"),
("h, l", "previous tab, next tab"),
("space", "next flow"),
("|", "run script on this flow"),
("/", "search (case sensitive)"),
("n", "repeat search forward"),
("N", "repeat search backwards"),
]
text.extend(common.format_keyvals(keys, key="key", val="text", indent=4))
return text
help_context = _mkhelp()
footer = [
('heading_key', "?"), ":help ",
('heading_key', "q"), ":back ",
]
class FlowViewHeader(urwid.WidgetWrap):
def __init__(self, master, f):
self.master, self.flow = master, f
self._w = common.format_flow(
f,
False,
extended=True,
padding=0,
hostheader=self.master.showhost
)
signals.flow_change.connect(self.sig_flow_change)
def sig_flow_change(self, sender, flow):
if flow == self.flow:
self._w = common.format_flow(
flow,
False,
extended=True,
padding=0,
hostheader=self.master.showhost
)
cache = utils.LRUCache(200)
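# Content views are expensive to compute, so they are memoised; the cache key
# includes the raw headers and body (see content_view below), so any edit to
# the message invalidates the cached rendering.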
TAB_REQ = 0
TAB_RESP = 1
class FlowView(tabs.Tabs):
highlight_color = "focusfield"
def __init__(self, master, state, flow, tab_offset):
self.master, self.state, self.flow = master, state, flow
tabs.Tabs.__init__(self,
[
(self.tab_request, self.view_request),
(self.tab_response, self.view_response),
(self.tab_details, self.view_details),
],
tab_offset
)
self.show()
self.last_displayed_body = None
signals.flow_change.connect(self.sig_flow_change)
def tab_request(self):
if self.flow.intercepted and not self.flow.reply.acked and not self.flow.response:
return "Request intercepted"
else:
return "Request"
def tab_response(self):
if self.flow.intercepted and not self.flow.reply.acked and self.flow.response:
return "Response intercepted"
else:
return "Response"
def tab_details(self):
return "Detail"
def view_request(self):
return self.conn_text(self.flow.request)
def view_response(self):
return self.conn_text(self.flow.response)
def view_details(self):
return flowdetailview.flowdetails(self.state, self.flow)
def sig_flow_change(self, sender, flow):
if flow == self.flow:
self.show()
def content_view(self, viewmode, message):
if message.content == CONTENT_MISSING:
msg, body = "", [urwid.Text([("error", "[content missing]")])]
return msg, body
else:
full = self.state.get_flow_setting(
self.flow,
(self.tab_offset, "fullcontents"),
False
)
if full:
limit = sys.maxsize
else:
limit = contentviews.VIEW_CUTOFF
return cache.get(
self._get_content_view,
viewmode,
message,
limit,
(bytes(message.headers), message.content) # Cache invalidation
)
def _get_content_view(self, viewmode, message, max_lines, _):
try:
description, lines = contentviews.get_content_view(
viewmode, message.content, headers=message.headers
)
except ContentViewException:
s = "Content viewer failed: \n" + traceback.format_exc()
signals.add_event(s, "error")
description, lines = contentviews.get_content_view(
contentviews.get("Raw"), message.content, headers=message.headers
)
description = description.replace("Raw", "Couldn't parse: falling back to Raw")
# Give hint that you have to tab for the response.
if description == "No content" and isinstance(message, HTTPRequest):
description = "No request content (press tab to view response)"
        # If the user has a wide terminal, they simply get fewer lines; this should not be an issue.
chars_per_line = 80
max_chars = max_lines * chars_per_line
total_chars = 0
text_objects = []
for line in lines:
txt = []
for (style, text) in line:
if total_chars + len(text) > max_chars:
text = text[:max_chars-total_chars]
txt.append((style, text))
total_chars += len(text)
if total_chars == max_chars:
break
# round up to the next line.
total_chars = int(math.ceil(total_chars / chars_per_line) * chars_per_line)
text_objects.append(urwid.Text(txt))
if total_chars == max_chars:
text_objects.append(urwid.Text([
("highlight", "Stopped displaying data after %d lines. Press " % max_lines),
("key", "f"),
("highlight", " to load all data.")
]))
break
return description, text_objects
def viewmode_get(self):
override = self.state.get_flow_setting(
self.flow,
(self.tab_offset, "prettyview")
)
return self.state.default_body_view if override is None else override
def conn_text(self, conn):
if conn:
txt = common.format_keyvals(
[(h + ":", v) for (h, v) in conn.headers.fields],
key = "header",
val = "text"
)
viewmode = self.viewmode_get()
msg, body = self.content_view(viewmode, conn)
cols = [
urwid.Text(
[
("heading", msg),
]
),
urwid.Text(
[
" ",
('heading', "["),
('heading_key', "m"),
('heading', (":%s]" % viewmode.name)),
],
align="right"
)
]
title = urwid.AttrWrap(urwid.Columns(cols), "heading")
txt.append(title)
txt.extend(body)
else:
txt = [
urwid.Text(""),
urwid.Text(
[
("highlight", "No response. Press "),
("key", "e"),
("highlight", " and edit any aspect to add one."),
]
)
]
return searchable.Searchable(self.state, txt)
def set_method_raw(self, m):
if m:
self.flow.request.method = m
signals.flow_change.send(self, flow = self.flow)
def edit_method(self, m):
if m == "e":
signals.status_prompt.send(
prompt = "Method",
text = self.flow.request.method,
callback = self.set_method_raw
)
else:
for i in common.METHOD_OPTIONS:
if i[1] == m:
self.flow.request.method = i[0].upper()
signals.flow_change.send(self, flow = self.flow)
def set_url(self, url):
request = self.flow.request
try:
request.url = str(url)
except ValueError:
return "Invalid URL."
signals.flow_change.send(self, flow = self.flow)
def set_resp_code(self, code):
response = self.flow.response
try:
response.status_code = int(code)
except ValueError:
return None
import BaseHTTPServer
if int(code) in BaseHTTPServer.BaseHTTPRequestHandler.responses:
response.msg = BaseHTTPServer.BaseHTTPRequestHandler.responses[
int(code)][0]
signals.flow_change.send(self, flow = self.flow)
def set_resp_msg(self, msg):
response = self.flow.response
response.msg = msg
signals.flow_change.send(self, flow = self.flow)
def set_headers(self, fields, conn):
conn.headers = Headers(fields)
signals.flow_change.send(self, flow = self.flow)
def set_query(self, lst, conn):
conn.set_query(odict.ODict(lst))
signals.flow_change.send(self, flow = self.flow)
def set_path_components(self, lst, conn):
conn.set_path_components(lst)
signals.flow_change.send(self, flow = self.flow)
def set_form(self, lst, conn):
conn.set_form_urlencoded(odict.ODict(lst))
signals.flow_change.send(self, flow = self.flow)
def edit_form(self, conn):
self.master.view_grideditor(
grideditor.URLEncodedFormEditor(
self.master,
conn.get_form_urlencoded().lst,
self.set_form,
conn
)
)
def edit_form_confirm(self, key, conn):
if key == "y":
self.edit_form(conn)
def set_cookies(self, lst, conn):
od = odict.ODict(lst)
conn.set_cookies(od)
signals.flow_change.send(self, flow = self.flow)
def set_setcookies(self, data, conn):
conn.set_cookies(data)
signals.flow_change.send(self, flow = self.flow)
def edit(self, part):
if self.tab_offset == TAB_REQ:
message = self.flow.request
else:
if not self.flow.response:
self.flow.response = HTTPResponse(
self.flow.request.http_version,
200, "OK", Headers(), ""
)
self.flow.response.reply = controller.DummyReply()
message = self.flow.response
self.flow.backup()
if message == self.flow.request and part == "c":
self.master.view_grideditor(
grideditor.CookieEditor(
self.master,
message.get_cookies().lst,
self.set_cookies,
message
)
)
if message == self.flow.response and part == "c":
self.master.view_grideditor(
grideditor.SetCookieEditor(
self.master,
message.get_cookies(),
self.set_setcookies,
message
)
)
if part == "r":
with decoded(message):
# Fix an issue caused by some editors when editing a
# request/response body. Many editors make it hard to save a
# file without a terminating newline on the last line. When
# editing message bodies, this can cause problems. For now, I
# just strip the newlines off the end of the body when we return
# from an editor.
c = self.master.spawn_editor(message.content or "")
message.content = c.rstrip("\n")
elif part == "f":
if not message.get_form_urlencoded() and message.content:
signals.status_prompt_onekey.send(
prompt = "Existing body is not a URL-encoded form. Clear and edit?",
keys = [
("yes", "y"),
("no", "n"),
],
callback = self.edit_form_confirm,
args = (message,)
)
else:
self.edit_form(message)
elif part == "h":
self.master.view_grideditor(
grideditor.HeaderEditor(
self.master,
message.headers.fields,
self.set_headers,
message
)
)
elif part == "p":
p = message.get_path_components()
self.master.view_grideditor(
grideditor.PathEditor(
self.master,
p,
self.set_path_components,
message
)
)
elif part == "q":
self.master.view_grideditor(
grideditor.QueryEditor(
self.master,
message.get_query().lst,
self.set_query, message
)
)
elif part == "u":
signals.status_prompt.send(
prompt = "URL",
text = message.url,
callback = self.set_url
)
elif part == "m":
signals.status_prompt_onekey.send(
prompt = "Method",
keys = common.METHOD_OPTIONS,
callback = self.edit_method
)
elif part == "o":
signals.status_prompt.send(
prompt = "Code",
text = str(message.status_code),
callback = self.set_resp_code
)
elif part == "m":
signals.status_prompt.send(
prompt = "Message",
text = message.msg,
callback = self.set_resp_msg
)
signals.flow_change.send(self, flow = self.flow)
def _view_nextprev_flow(self, np, flow):
try:
idx = self.state.view.index(flow)
except IndexError:
return
if np == "next":
new_flow, new_idx = self.state.get_next(idx)
else:
new_flow, new_idx = self.state.get_prev(idx)
if new_flow is None:
signals.status_message.send(message="No more flows!")
else:
signals.pop_view_state.send(self)
self.master.view_flow(new_flow, self.tab_offset)
def view_next_flow(self, flow):
return self._view_nextprev_flow("next", flow)
def view_prev_flow(self, flow):
return self._view_nextprev_flow("prev", flow)
def change_this_display_mode(self, t):
self.state.add_flow_setting(
self.flow,
(self.tab_offset, "prettyview"),
contentviews.get_by_shortcut(t)
)
signals.flow_change.send(self, flow = self.flow)
def delete_body(self, t):
if t == "m":
val = CONTENT_MISSING
else:
val = None
if self.tab_offset == TAB_REQ:
self.flow.request.content = val
else:
self.flow.response.content = val
signals.flow_change.send(self, flow = self.flow)
def keypress(self, size, key):
        key = super(FlowView, self).keypress(size, key)
if key == " ":
self.view_next_flow(self.flow)
return
key = common.shortcuts(key)
if self.tab_offset == TAB_REQ:
conn = self.flow.request
elif self.tab_offset == TAB_RESP:
conn = self.flow.response
else:
conn = None
if key in ("up", "down", "page up", "page down"):
# Why doesn't this just work??
self._w.keypress(size, key)
elif key == "a":
self.flow.accept_intercept(self.master)
signals.flow_change.send(self, flow = self.flow)
elif key == "A":
self.master.accept_all()
signals.flow_change.send(self, flow = self.flow)
elif key == "d":
if self.state.flow_count() == 1:
self.master.view_flowlist()
elif self.state.view.index(self.flow) == len(self.state.view) - 1:
self.view_prev_flow(self.flow)
else:
self.view_next_flow(self.flow)
f = self.flow
f.kill(self.master)
self.state.delete_flow(f)
elif key == "D":
f = self.master.duplicate_flow(self.flow)
self.master.view_flow(f)
signals.status_message.send(message="Duplicated.")
elif key == "p":
self.view_prev_flow(self.flow)
elif key == "r":
r = self.master.replay_request(self.flow)
if r:
signals.status_message.send(message=r)
signals.flow_change.send(self, flow = self.flow)
elif key == "V":
if not self.flow.modified():
signals.status_message.send(message="Flow not modified.")
return
self.state.revert(self.flow)
signals.flow_change.send(self, flow = self.flow)
signals.status_message.send(message="Reverted.")
elif key == "W":
signals.status_prompt_path.send(
prompt = "Save this flow",
callback = self.master.save_one_flow,
args = (self.flow,)
)
elif key == "|":
signals.status_prompt_path.send(
prompt = "Send flow to script",
callback = self.master.run_script_once,
args = (self.flow,)
)
if not conn and key in set(list("befgmxvz")):
signals.status_message.send(
message = "Tab to the request or response",
expire = 1
)
elif conn:
if key == "b":
if self.tab_offset == TAB_REQ:
common.ask_save_body(
"q", self.master, self.state, self.flow
)
else:
common.ask_save_body(
"s", self.master, self.state, self.flow
)
elif key == "e":
if self.tab_offset == TAB_REQ:
signals.status_prompt_onekey.send(
prompt = "Edit request",
keys = (
("cookies", "c"),
("query", "q"),
("path", "p"),
("url", "u"),
("header", "h"),
("form", "f"),
("raw body", "r"),
("method", "m"),
),
callback = self.edit
)
else:
signals.status_prompt_onekey.send(
prompt = "Edit response",
keys = (
("cookies", "c"),
("code", "o"),
("message", "m"),
("header", "h"),
("raw body", "r"),
),
callback = self.edit
)
key = None
elif key == "f":
signals.status_message.send(message="Loading all body data...")
self.state.add_flow_setting(
self.flow,
(self.tab_offset, "fullcontents"),
True
)
signals.flow_change.send(self, flow = self.flow)
signals.status_message.send(message="")
elif key == "P":
if self.tab_offset == TAB_REQ:
scope = "q"
else:
scope = "s"
common.ask_copy_part(scope, self.flow, self.master, self.state)
elif key == "m":
p = list(contentviews.view_prompts)
p.insert(0, ("Clear", "C"))
signals.status_prompt_onekey.send(
self,
prompt = "Display mode",
keys = p,
callback = self.change_this_display_mode
)
key = None
elif key == "x":
signals.status_prompt_onekey.send(
prompt = "Delete body",
keys = (
("completely", "c"),
("mark as missing", "m"),
),
callback = self.delete_body
)
key = None
elif key == "v":
if conn.content:
t = conn.headers.get("content-type")
if "EDITOR" in os.environ or "PAGER" in os.environ:
self.master.spawn_external_viewer(conn.content, t)
else:
signals.status_message.send(
message = "Error! Set $EDITOR or $PAGER."
)
elif key == "z":
self.flow.backup()
e = conn.headers.get("content-encoding", "identity")
if e != "identity":
if not conn.decode():
signals.status_message.send(
message = "Could not decode - invalid data?"
)
else:
signals.status_prompt_onekey.send(
prompt = "Select encoding: ",
keys = (
("gzip", "z"),
("deflate", "d"),
),
callback = self.encode_callback,
args = (conn,)
)
signals.flow_change.send(self, flow = self.flow)
return key
def encode_callback(self, key, conn):
encoding_map = {
"z": "gzip",
"d": "deflate",
}
conn.encode(encoding_map[key])
signals.flow_change.send(self, flow = self.flow)
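# Illustrative sketch (not part of this module): ``encode_callback`` pairs a
# one-key prompt with a small mapping dispatch. The same pattern standalone,
# with placeholder handler names:
#
# handlers = {
#     "z": lambda conn: conn.encode("gzip"),
#     "d": lambda conn: conn.encode("deflate"),
# }
#
# def on_key(key, conn):
#     handler = handlers.get(key)
#     if handler is not None:
#         handler(conn)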
| {
"repo_name": "bazzinotti/mitmproxy",
"path": "libmproxy/console/flowview.py",
"copies": "2",
"size": "24132",
"license": "mit",
"hash": 1729532021034116400,
"line_mean": 33.5236051502,
"line_max": 96,
"alpha_frac": 0.4661445384,
"autogenerated": false,
"ratio": 4.394827900200328,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.004482361422738261,
"num_lines": 699
} |
from __future__ import absolute_import, division
import os
import traceback
import sys
import math
import urwid
from netlib import odict
from netlib.http import Headers
from . import common, grideditor, signals, searchable, tabs
from . import flowdetailview
from .. import utils, controller, contentviews
from ..models import HTTPRequest, HTTPResponse, decoded
from ..exceptions import ContentViewException
class SearchError(Exception):
pass
def _mkhelp():
text = []
keys = [
("A", "accept all intercepted flows"),
("a", "accept this intercepted flow"),
("b", "save request/response body"),
("D", "duplicate flow"),
("d", "delete flow"),
("E", "export"),
("e", "edit request/response"),
("f", "load full body data"),
("m", "change body display mode for this entity"),
(None,
common.highlight_key("automatic", "a") +
[("text", ": automatic detection")]
),
(None,
common.highlight_key("hex", "e") +
[("text", ": Hex")]
),
(None,
common.highlight_key("html", "h") +
[("text", ": HTML")]
),
(None,
common.highlight_key("image", "i") +
[("text", ": Image")]
),
(None,
common.highlight_key("javascript", "j") +
[("text", ": JavaScript")]
),
(None,
common.highlight_key("json", "s") +
[("text", ": JSON")]
),
(None,
common.highlight_key("urlencoded", "u") +
[("text", ": URL-encoded data")]
),
(None,
common.highlight_key("raw", "r") +
[("text", ": raw data")]
),
(None,
common.highlight_key("xml", "x") +
[("text", ": XML")]
),
("M", "change default body display mode"),
("p", "previous flow"),
("P", "copy request/response (content/headers) to clipboard"),
("r", "replay request"),
("V", "revert changes to request"),
("v", "view body in external viewer"),
("w", "save all flows matching current limit"),
("W", "save this flow"),
("x", "delete body"),
("z", "encode/decode a request/response"),
("tab", "next tab"),
("h, l", "previous tab, next tab"),
("space", "next flow"),
("|", "run script on this flow"),
("/", "search (case sensitive)"),
("n", "repeat search forward"),
("N", "repeat search backwards"),
]
text.extend(common.format_keyvals(keys, key="key", val="text", indent=4))
return text
help_context = _mkhelp()
footer = [
('heading_key', "?"), ":help ",
('heading_key', "q"), ":back ",
]
class FlowViewHeader(urwid.WidgetWrap):
def __init__(self, master, f):
self.master, self.flow = master, f
self._w = common.format_flow(
f,
False,
extended=True,
hostheader=self.master.showhost
)
signals.flow_change.connect(self.sig_flow_change)
def sig_flow_change(self, sender, flow):
if flow == self.flow:
self._w = common.format_flow(
flow,
False,
extended=True,
hostheader=self.master.showhost
)
cache = utils.LRUCache(200)
TAB_REQ = 0
TAB_RESP = 1
class FlowView(tabs.Tabs):
highlight_color = "focusfield"
def __init__(self, master, state, flow, tab_offset):
self.master, self.state, self.flow = master, state, flow
tabs.Tabs.__init__(self,
[
(self.tab_request, self.view_request),
(self.tab_response, self.view_response),
(self.tab_details, self.view_details),
],
tab_offset
)
self.show()
self.last_displayed_body = None
signals.flow_change.connect(self.sig_flow_change)
def tab_request(self):
if self.flow.intercepted and not self.flow.reply.acked and not self.flow.response:
return "Request intercepted"
else:
return "Request"
def tab_response(self):
if self.flow.intercepted and not self.flow.reply.acked and self.flow.response:
return "Response intercepted"
else:
return "Response"
def tab_details(self):
return "Detail"
def view_request(self):
return self.conn_text(self.flow.request)
def view_response(self):
return self.conn_text(self.flow.response)
def view_details(self):
return flowdetailview.flowdetails(self.state, self.flow)
def sig_flow_change(self, sender, flow):
if flow == self.flow:
self.show()
def content_view(self, viewmode, message):
if message.content is None:
msg, body = "", [urwid.Text([("error", "[content missing]")])]
return msg, body
else:
full = self.state.get_flow_setting(
self.flow,
(self.tab_offset, "fullcontents"),
False
)
if full:
limit = sys.maxsize
else:
limit = contentviews.VIEW_CUTOFF
return cache.get(
self._get_content_view,
viewmode,
message,
limit,
(bytes(message.headers), message.content) # Cache invalidation
)
def _get_content_view(self, viewmode, message, max_lines, _):
try:
query = None
if isinstance(message, HTTPRequest):
query = message.query
description, lines = contentviews.get_content_view(
viewmode, message.content, headers=message.headers, query=query
)
except ContentViewException:
s = "Content viewer failed: \n" + traceback.format_exc()
signals.add_event(s, "error")
description, lines = contentviews.get_content_view(
contentviews.get("Raw"), message.content, headers=message.headers
)
description = description.replace("Raw", "Couldn't parse: falling back to Raw")
# Give hint that you have to tab for the response.
if description == "No content" and isinstance(message, HTTPRequest):
description = "No request content (press tab to view response)"
        # If the user has a wide terminal, they get fewer lines; this should not be an issue.
chars_per_line = 80
max_chars = max_lines * chars_per_line
total_chars = 0
text_objects = []
for line in lines:
txt = []
for (style, text) in line:
if total_chars + len(text) > max_chars:
text = text[:max_chars - total_chars]
txt.append((style, text))
total_chars += len(text)
if total_chars == max_chars:
break
# round up to the next line.
total_chars = int(math.ceil(total_chars / chars_per_line) * chars_per_line)
text_objects.append(urwid.Text(txt))
if total_chars == max_chars:
text_objects.append(urwid.Text([
("highlight", "Stopped displaying data after %d lines. Press " % max_lines),
("key", "f"),
("highlight", " to load all data.")
]))
break
return description, text_objects
def viewmode_get(self):
override = self.state.get_flow_setting(
self.flow,
(self.tab_offset, "prettyview")
)
return self.state.default_body_view if override is None else override
def conn_text(self, conn):
if conn:
txt = common.format_keyvals(
[(h + ":", v) for (h, v) in conn.headers.fields],
key = "header",
val = "text"
)
viewmode = self.viewmode_get()
msg, body = self.content_view(viewmode, conn)
cols = [
urwid.Text(
[
("heading", msg),
]
),
urwid.Text(
[
" ",
('heading', "["),
('heading_key', "m"),
('heading', (":%s]" % viewmode.name)),
],
align="right"
)
]
title = urwid.AttrWrap(urwid.Columns(cols), "heading")
txt.append(title)
txt.extend(body)
else:
txt = [
urwid.Text(""),
urwid.Text(
[
("highlight", "No response. Press "),
("key", "e"),
("highlight", " and edit any aspect to add one."),
]
)
]
return searchable.Searchable(self.state, txt)
def set_method_raw(self, m):
if m:
self.flow.request.method = m
signals.flow_change.send(self, flow = self.flow)
def edit_method(self, m):
if m == "e":
signals.status_prompt.send(
prompt = "Method",
text = self.flow.request.method,
callback = self.set_method_raw
)
else:
for i in common.METHOD_OPTIONS:
if i[1] == m:
self.flow.request.method = i[0].upper()
signals.flow_change.send(self, flow = self.flow)
def set_url(self, url):
request = self.flow.request
try:
request.url = str(url)
except ValueError:
return "Invalid URL."
signals.flow_change.send(self, flow = self.flow)
def set_resp_code(self, code):
response = self.flow.response
try:
response.status_code = int(code)
except ValueError:
return None
import BaseHTTPServer
if int(code) in BaseHTTPServer.BaseHTTPRequestHandler.responses:
response.msg = BaseHTTPServer.BaseHTTPRequestHandler.responses[
int(code)][0]
signals.flow_change.send(self, flow = self.flow)
def set_resp_msg(self, msg):
response = self.flow.response
response.msg = msg
signals.flow_change.send(self, flow = self.flow)
def set_headers(self, fields, conn):
conn.headers = Headers(fields)
signals.flow_change.send(self, flow = self.flow)
def set_query(self, lst, conn):
conn.set_query(odict.ODict(lst))
signals.flow_change.send(self, flow = self.flow)
def set_path_components(self, lst, conn):
conn.set_path_components(lst)
signals.flow_change.send(self, flow = self.flow)
def set_form(self, lst, conn):
conn.set_form_urlencoded(odict.ODict(lst))
signals.flow_change.send(self, flow = self.flow)
def edit_form(self, conn):
self.master.view_grideditor(
grideditor.URLEncodedFormEditor(
self.master,
conn.get_form_urlencoded().lst,
self.set_form,
conn
)
)
def edit_form_confirm(self, key, conn):
if key == "y":
self.edit_form(conn)
def set_cookies(self, lst, conn):
od = odict.ODict(lst)
conn.set_cookies(od)
signals.flow_change.send(self, flow = self.flow)
def set_setcookies(self, data, conn):
conn.set_cookies(data)
signals.flow_change.send(self, flow = self.flow)
def edit(self, part):
if self.tab_offset == TAB_REQ:
message = self.flow.request
else:
if not self.flow.response:
self.flow.response = HTTPResponse(
self.flow.request.http_version,
200, "OK", Headers(), ""
)
self.flow.response.reply = controller.DummyReply()
message = self.flow.response
self.flow.backup()
if message == self.flow.request and part == "c":
self.master.view_grideditor(
grideditor.CookieEditor(
self.master,
message.get_cookies().lst,
self.set_cookies,
message
)
)
if message == self.flow.response and part == "c":
self.master.view_grideditor(
grideditor.SetCookieEditor(
self.master,
message.get_cookies(),
self.set_setcookies,
message
)
)
if part == "r":
with decoded(message):
# Fix an issue caused by some editors when editing a
# request/response body. Many editors make it hard to save a
# file without a terminating newline on the last line. When
# editing message bodies, this can cause problems. For now, I
# just strip the newlines off the end of the body when we return
# from an editor.
c = self.master.spawn_editor(message.content or "")
message.content = c.rstrip("\n")
elif part == "f":
if not message.get_form_urlencoded() and message.content:
signals.status_prompt_onekey.send(
prompt = "Existing body is not a URL-encoded form. Clear and edit?",
keys = [
("yes", "y"),
("no", "n"),
],
callback = self.edit_form_confirm,
args = (message,)
)
else:
self.edit_form(message)
elif part == "h":
self.master.view_grideditor(
grideditor.HeaderEditor(
self.master,
message.headers.fields,
self.set_headers,
message
)
)
elif part == "p":
p = message.get_path_components()
self.master.view_grideditor(
grideditor.PathEditor(
self.master,
p,
self.set_path_components,
message
)
)
elif part == "q":
self.master.view_grideditor(
grideditor.QueryEditor(
self.master,
message.get_query().lst,
self.set_query, message
)
)
elif part == "u":
signals.status_prompt.send(
prompt = "URL",
text = message.url,
callback = self.set_url
)
elif part == "m":
signals.status_prompt_onekey.send(
prompt = "Method",
keys = common.METHOD_OPTIONS,
callback = self.edit_method
)
elif part == "o":
signals.status_prompt.send(
prompt = "Code",
text = str(message.status_code),
callback = self.set_resp_code
)
elif part == "m":
signals.status_prompt.send(
prompt = "Message",
text = message.msg,
callback = self.set_resp_msg
)
signals.flow_change.send(self, flow = self.flow)
def _view_nextprev_flow(self, np, flow):
try:
idx = self.state.view.index(flow)
        except ValueError:
            # list.index raises ValueError when the flow is not in the view.
return
if np == "next":
new_flow, new_idx = self.state.get_next(idx)
else:
new_flow, new_idx = self.state.get_prev(idx)
if new_flow is None:
signals.status_message.send(message="No more flows!")
else:
signals.pop_view_state.send(self)
self.master.view_flow(new_flow, self.tab_offset)
def view_next_flow(self, flow):
return self._view_nextprev_flow("next", flow)
def view_prev_flow(self, flow):
return self._view_nextprev_flow("prev", flow)
def change_this_display_mode(self, t):
self.state.add_flow_setting(
self.flow,
(self.tab_offset, "prettyview"),
contentviews.get_by_shortcut(t)
)
signals.flow_change.send(self, flow = self.flow)
    def delete_body(self, t):
        if t == "m":
            # Mark the body as missing.
            val = None
        else:
            # Remove the body completely.
            val = b""
if self.tab_offset == TAB_REQ:
self.flow.request.content = val
else:
self.flow.response.content = val
signals.flow_change.send(self, flow = self.flow)
def keypress(self, size, key):
        key = super(FlowView, self).keypress(size, key)
if key == " ":
self.view_next_flow(self.flow)
return
key = common.shortcuts(key)
if self.tab_offset == TAB_REQ:
conn = self.flow.request
elif self.tab_offset == TAB_RESP:
conn = self.flow.response
else:
conn = None
if key in ("up", "down", "page up", "page down"):
# Why doesn't this just work??
self._w.keypress(size, key)
elif key == "a":
self.flow.accept_intercept(self.master)
signals.flow_change.send(self, flow = self.flow)
elif key == "A":
self.master.accept_all()
signals.flow_change.send(self, flow = self.flow)
elif key == "d":
if self.state.flow_count() == 1:
self.master.view_flowlist()
elif self.state.view.index(self.flow) == len(self.state.view) - 1:
self.view_prev_flow(self.flow)
else:
self.view_next_flow(self.flow)
f = self.flow
f.kill(self.master)
self.state.delete_flow(f)
elif key == "D":
f = self.master.duplicate_flow(self.flow)
self.master.view_flow(f)
signals.status_message.send(message="Duplicated.")
elif key == "p":
self.view_prev_flow(self.flow)
elif key == "r":
r = self.master.replay_request(self.flow)
if r:
signals.status_message.send(message=r)
signals.flow_change.send(self, flow = self.flow)
elif key == "V":
if not self.flow.modified():
signals.status_message.send(message="Flow not modified.")
return
self.state.revert(self.flow)
signals.flow_change.send(self, flow = self.flow)
signals.status_message.send(message="Reverted.")
elif key == "W":
signals.status_prompt_path.send(
prompt = "Save this flow",
callback = self.master.save_one_flow,
args = (self.flow,)
)
elif key == "E":
signals.status_prompt_onekey.send(
self,
prompt = "Export",
keys = (
("as curl command", "c"),
("as python code", "p"),
("as raw request", "r"),
("as locust code", "l"),
("as locust task", "t"),
),
callback = common.export_prompt,
args = (self.flow,)
)
elif key == "|":
signals.status_prompt_path.send(
prompt = "Send flow to script",
callback = self.master.run_script_once,
args = (self.flow,)
)
if not conn and key in set(list("befgmxvz")):
signals.status_message.send(
message = "Tab to the request or response",
expire = 1
)
elif conn:
if key == "b":
if self.tab_offset == TAB_REQ:
common.ask_save_body(
"q", self.master, self.state, self.flow
)
else:
common.ask_save_body(
"s", self.master, self.state, self.flow
)
elif key == "e":
if self.tab_offset == TAB_REQ:
signals.status_prompt_onekey.send(
prompt = "Edit request",
keys = (
("cookies", "c"),
("query", "q"),
("path", "p"),
("url", "u"),
("header", "h"),
("form", "f"),
("raw body", "r"),
("method", "m"),
),
callback = self.edit
)
else:
signals.status_prompt_onekey.send(
prompt = "Edit response",
keys = (
("cookies", "c"),
("code", "o"),
("message", "m"),
("header", "h"),
("raw body", "r"),
),
callback = self.edit
)
key = None
elif key == "f":
signals.status_message.send(message="Loading all body data...")
self.state.add_flow_setting(
self.flow,
(self.tab_offset, "fullcontents"),
True
)
signals.flow_change.send(self, flow = self.flow)
signals.status_message.send(message="")
elif key == "P":
if self.tab_offset == TAB_REQ:
scope = "q"
else:
scope = "s"
common.ask_copy_part(scope, self.flow, self.master, self.state)
elif key == "m":
p = list(contentviews.view_prompts)
p.insert(0, ("Clear", "C"))
signals.status_prompt_onekey.send(
self,
prompt = "Display mode",
keys = p,
callback = self.change_this_display_mode
)
key = None
elif key == "x":
signals.status_prompt_onekey.send(
prompt = "Delete body",
keys = (
("completely", "c"),
("mark as missing", "m"),
),
callback = self.delete_body
)
key = None
elif key == "v":
if conn.content:
t = conn.headers.get("content-type")
if "EDITOR" in os.environ or "PAGER" in os.environ:
self.master.spawn_external_viewer(conn.content, t)
else:
signals.status_message.send(
message = "Error! Set $EDITOR or $PAGER."
)
elif key == "z":
self.flow.backup()
e = conn.headers.get("content-encoding", "identity")
if e != "identity":
if not conn.decode():
signals.status_message.send(
message = "Could not decode - invalid data?"
)
else:
signals.status_prompt_onekey.send(
prompt = "Select encoding: ",
keys = (
("gzip", "z"),
("deflate", "d"),
),
callback = self.encode_callback,
args = (conn,)
)
signals.flow_change.send(self, flow = self.flow)
return key
def encode_callback(self, key, conn):
encoding_map = {
"z": "gzip",
"d": "deflate",
}
conn.encode(encoding_map[key])
signals.flow_change.send(self, flow = self.flow)
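# Illustrative sketch (not part of this module): the ``signals.flow_change``
# publish/subscribe used throughout follows the blinker Signal API; receivers
# subscribe with ``connect`` and senders broadcast with ``send``.
#
# import blinker
#
# flow_change = blinker.Signal()
#
# def redraw(sender, flow):
#     pass  # e.g. refresh any view bound to ``flow``
#
# flow_change.connect(redraw)
# flow_change.send(None, flow=some_flow)  # ``some_flow`` is a placeholder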
| {
"repo_name": "fimad/mitmproxy",
"path": "mitmproxy/console/flowview.py",
"copies": "1",
"size": "24703",
"license": "mit",
"hash": -5024280633010985000,
"line_mean": 33.501396648,
"line_max": 96,
"alpha_frac": 0.4631826094,
"autogenerated": false,
"ratio": 4.407314897413024,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5370497506813025,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division
import os.path as op
from click.testing import CliRunner
from cooler.cli import cli
testdir = op.realpath(op.dirname(__file__))
datadir = op.join(testdir, "data")
def test_cp():
runner = CliRunner()
with runner.isolated_filesystem():
result = runner.invoke(
cli,
[
"cp",
op.join(datadir, "toy.symm.upper.2.cool"),
'test.cool',
]
)
assert result.exit_code == 0
result = runner.invoke(
cli,
[
"mv",
'test.cool',
'test2.cool::some/path',
]
)
assert result.exit_code == 0
result = runner.invoke(
cli,
[
"ln",
'test2.cool::some/path',
'test2.cool::hard/link',
]
)
assert result.exit_code == 0
result = runner.invoke(
cli,
[
"ln", "-s",
'test2.cool::some/path',
'test2.cool::soft/link',
]
)
assert result.exit_code == 0
result = runner.invoke(
cli,
[
"ln", "-s",
'test2.cool::some/path',
'test3.cool::ext/link',
]
)
assert result.exit_code == 0
def test_list_coolers():
runner = CliRunner()
result = runner.invoke(
cli,
[
"ls",
op.join(datadir, "toy.symm.upper.2.cool"),
]
)
assert result.exit_code == 0
result = runner.invoke(
cli,
[
"ls", "-l",
op.join(datadir, "toy.symm.upper.2.cool"),
]
)
assert result.exit_code == 0
def test_tree():
runner = CliRunner()
result = runner.invoke(
cli,
[
"tree",
op.join(datadir, "toy.symm.upper.2.cool"),
]
)
assert result.exit_code == 0
def test_attrs():
runner = CliRunner()
result = runner.invoke(
cli,
[
"attrs",
op.join(datadir, "toy.symm.upper.2.cool"),
]
)
assert result.exit_code == 0
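# Illustrative helper, not part of the test suite: the ``file.cool::grp/sub``
# URIs exercised above address HDF5 groups inside a single file, which h5py
# can confirm directly.
import h5py

def _list_hdf5_paths(path):
    """Return every object path stored in an HDF5 (.cool) file."""
    names = []
    with h5py.File(path, "r") as f:
        f.visit(names.append)
    return names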
| {
"repo_name": "mirnylab/cooler",
"path": "tests/test_cli_fileops.py",
"copies": "1",
"size": "2289",
"license": "bsd-3-clause",
"hash": -1496149620054023200,
"line_mean": 20.3925233645,
"line_max": 58,
"alpha_frac": 0.4311926606,
"autogenerated": false,
"ratio": 3.9195205479452055,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9850713208545205,
"avg_score": 0,
"num_lines": 107
} |
from __future__ import absolute_import, division
import os.path
from math import sqrt
from random import random
import numpy as np
def euler(angle, axis):
cos_a = np.cos(angle)
sin_a = np.sin(angle)
if axis == 'x':
matrix = [[1, 0, 0], [0, cos_a, -sin_a], [0, sin_a, cos_a]]
elif axis == 'y':
matrix = [[cos_a, 0, sin_a], [0, 1, 0], [-sin_a, 0, cos_a]]
elif axis == 'z':
matrix = [[cos_a, -sin_a, 0], [sin_a, cos_a, 0], [0, 0, 1]]
else:
raise ValueError('Axis is not recognized.')
return np.asmatrix(matrix, dtype=np.float64)
def euler_to_rotmat(angles, order='zyz'):
rotmat = np.asmatrix(np.identity(3, dtype=np.float64))
for angle, axis in zip(angles, order):
rotmat *= euler(angle, axis)
return rotmat
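# Example (illustrative, not part of the original module): a 90-degree first
# Euler rotation about z should carry the x-axis onto the y-axis.
# >>> R = euler_to_rotmat((np.pi / 2, 0.0, 0.0))
# >>> np.allclose(R * np.matrix([[1.0], [0.0], [0.0]]),
# ...             np.matrix([[0.0], [1.0], [0.0]]))
# True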
def quat_to_rotmat(quaternions):
w = quaternions[:, 0]
x = quaternions[:, 1]
y = quaternions[:, 2]
z = quaternions[:, 3]
Nq = w**2 + x**2 + y**2 + z**2
s = np.zeros(Nq.shape, dtype=np.float64)
s[Nq > 0.0] = 2.0/Nq[Nq > 0.0]
s[Nq <= 0.0] = 0
X = x*s
Y = y*s
Z = z*s
rotmat = np.zeros((quaternions.shape[0],3,3), dtype=np.float64)
rotmat[:,0,0] = 1.0 - (y*Y + z*Z)
rotmat[:,0,1] = x*Y - w*Z
rotmat[:,0,2] = x*Z + w*Y
rotmat[:,1,0] = x*Y + w*Z
rotmat[:,1,1] = 1.0 - (x*X + z*Z)
rotmat[:,1,2] = y*Z - w*X
rotmat[:,2,0] = x*Z - w*Y
rotmat[:,2,1] = y*Z + w*X
rotmat[:,2,2] = 1.0 - (x*X + y*Y)
np.around(rotmat, decimals=8, out=rotmat)
return rotmat
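# Example (illustrative): the identity quaternion (w, x, y, z) = (1, 0, 0, 0)
# maps to the identity rotation matrix.
# >>> np.allclose(quat_to_rotmat(np.array([[1.0, 0.0, 0.0, 0.0]]))[0], np.eye(3))
# True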
def random_rotmat():
"""Return a random rotation matrix"""
s1 = 1
while s1 >= 1.0:
e1 = random() * 2 - 1
e2 = random() * 2 - 1
s1 = e1**2 + e2**2
s2 = 1
while s2 >= 1.0:
e3 = random() * 2 - 1
e4 = random() * 2 - 1
s2 = e3**2 + e4**2
q0 = e1
q1 = e2
q2 = e3 * sqrt((1 - s1)/s2 )
q3 = e4 * sqrt((1 - s1)/s2 )
quat = [[q0, q1, q2, q3]]
return quat_to_rotmat(np.asarray(quat))[0]
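# Example (illustrative): a sampled matrix should be a proper rotation,
# i.e. orthonormal (R R^T = I) with determinant +1.
# >>> R = random_rotmat()
# >>> np.allclose(R.dot(R.T), np.eye(3))
# True
# >>> np.isclose(np.linalg.det(R), 1.0)
# True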
def proportional_orientations(angle):
# orientation sets available: name of file: (Norientations, degree)
rot_sets = {'E.npy': (1, 360.0),
'c48u1.npy': (24, 62.8),
'c600v.npy': (60, 44.48),
'c48n9.npy': (216, 36.47),
'c600vc.npy': (360, 27.78),
'c48u27.npy': (648, 20.83),
'c48u83.npy': (1992, 16.29),
'c48u181.npy': (4344, 12.29),
'c48n309.npy': (7416, 9.72),
'c48n527.npy': (12648, 8.17),
'c48u815.npy': (19560, 7.4),
'c48u1153.npy': (27672, 6.6),
'c48u1201.npy': (28824, 6.48),
'c48u1641.npy': (39384, 5.75),
'c48u2219.npy': (53256, 5.27),
'c48u2947.npy': (70728, 4.71),
'c48u3733.npy': (89592, 4.37),
'c48u4749.npy': (113976, 4.0),
'c48u5879.npy': (141096, 3.74),
'c48u7111.npy': (170664, 3.53),
'c48u8649.npy': (207576, 3.26),
}
    # determine the appropriate set to use
smallestdiff = None
for s, n in rot_sets.iteritems():
alpha = n[1]
diff = abs(angle - alpha)
        if smallestdiff is None or diff < smallestdiff:
smallestdiff = diff
fname = s
# read file
infile = os.path.join(os.path.dirname(__file__), 'data', fname)
quat_weights = np.load(infile)
quat = quat_weights[:, :4]
weights = quat_weights[:, -1]
alpha = rot_sets[fname][1]
return quat, weights, alpha
| {
"repo_name": "haddocking/powerfit",
"path": "powerfit/rotations.py",
"copies": "1",
"size": "3630",
"license": "apache-2.0",
"hash": -1741803934811992000,
"line_mean": 26.7099236641,
"line_max": 71,
"alpha_frac": 0.4859504132,
"autogenerated": false,
"ratio": 2.6632428466617757,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.8605200381355864,
"avg_score": 0.008798575701182466,
"num_lines": 131
} |
from __future__ import absolute_import, division
import re
import traceback
from bs4 import BeautifulSoup
import dateutil.parser
from django.utils import timezone
import stripe
from actstream import action
from users import udemy_scraping
from users.coursera_scraping import CourseraDownloader
from users.edx_scraping import scrape_for_user
from users.models import Course, Quiz, Progress
from users.udemy_scraping import Session
class CourseraScraper:
    def __init__(self):
from selenium import webdriver
from pyvirtualdisplay import Display
self.display = Display(visible=0, size=(1024, 768))
self.display.start()
self.driver = webdriver.Firefox()
self.courses = []
def login(self, EMAIL, PASSWORD):
try:
self.driver.get('https://www.coursera.org/account/signin')
email_field = self.driver.find_element_by_id("signin-email")
password_field = self.driver.find_element_by_id("signin-password")
email_field.send_keys(EMAIL)
password_field.send_keys(PASSWORD)
password_field.submit()
except:
pass
def get_courses(self):
try:
if self.driver.current_url != 'https://www.coursera.org/':
return [], [], [], [], [], [], [], "Incorrect Login"
soup = BeautifulSoup(self.driver.page_source)
users_courses = soup.select(
'#coursera-feed-tabs-current .coursera-dashboard-course-listing-box .coursera-dashboard-course-listing-box-name')
info_links = soup.select(
'#coursera-feed-tabs-current .coursera-dashboard-course-listing-box .coursera-dashboard-course-listing-box-links .internal-home')
dates = soup.select(
'#coursera-feed-tabs-current .coursera-dashboard-course-listing-box .coursera-dashboard-course-listing-box-progress .progress-label')
start_dates = dates[::2]
end_dates = dates[1::2]
course_ids = soup.select('#coursera-feed-tabs-current .coursera-dashboard-course-listing-box')
course_ids = map(lambda x: int(x.attrs['data-course-id']), course_ids)
image_links = soup.select(
'#coursera-feed-tabs-current .coursera-dashboard-course-listing-box .coursera-dashboard-course-listing-box-icon')
image_links = map(lambda x: x.attrs['src'], image_links)
return map(lambda x: x.contents[0].contents[0], users_courses), map(lambda x: x.contents[0].attrs['href'],
users_courses), map(
lambda x: x.attrs['href'],
info_links), map(lambda x: str(x.contents[0] + ' 2014'), start_dates), map(
lambda x: str(x.contents[0] + ' 2014'), end_dates), course_ids, image_links, None
except:
return [], [], [], [], [], [], [], None
def get_upcoming_courses(self):
try:
soup = BeautifulSoup(self.driver.page_source)
users_courses = soup.select(
'#coursera-feed-tabs-future .coursera-dashboard-course-listing-box .coursera-dashboard-course-listing-box-name')
info_links = soup.select(
'#coursera-feed-tabs-future .coursera-dashboard-course-listing-box .coursera-dashboard-course-listing-box-links .internal-home')
course_ids = soup.select('#coursera-feed-tabs-current .coursera-dashboard-course-listing-box')
course_ids = map(lambda x: int(x.attrs['data-course-id']), course_ids)
image_links = soup.select(
'#coursera-feed-tabs-future .coursera-dashboard-course-listing-box .coursera-dashboard-course-listing-box-icon')
image_links = map(lambda x: x.attrs['src'], image_links)
return map(lambda x: x.contents[0].contents[0], users_courses), map(lambda x: x.contents[0].attrs['href'],
users_courses), map(
lambda x: x.attrs['href'],
info_links), course_ids, image_links
except:
return [], [], [], [], []
def get_quiz_link(self, course, link):
if course.info_link:
self.driver.get('https://www.coursera.org/' + course.info_link)
soup = BeautifulSoup(self.driver.page_source)
p_description = soup.select('.coursera-course-content p:first')
if p_description:
course.description = p_description[0].contents[0]
self.driver.get(link)
soup = BeautifulSoup(self.driver.page_source)
link = soup.find('a', {'data-ab-user-convert': 'navclick_Quizzes'}, href=True)
if not link:
link = soup.find('a', {'data-ab-user-convert': 'navclick_Homework_Quizzes'}, href=True)
if not link:
link = soup.find('a', {'data-ab-user-convert': 'navclick_Data_Sets_/_Quizzes'}, href=True)
if not link:
link = soup.find('a', {'data-ab-user-convert': 'navclick_Review_Questions'}, href=True)
if not link:
link = soup.find('a', {'data-ab-user-convert': 'navclick_Homework'}, href=True)
if link and link['href'] and link['href'] != '':
if link['href'].startswith('/'):
link['href'] = 'https://class.coursera.org' + link['href']
course.quiz_link = link['href']
self.driver.get(link['href'])
soup = BeautifulSoup(self.driver.page_source)
quiz_list = soup.select('div.course-item-list .course-item-list-header')
quiz_details = soup.select('ul.course-item-list-section-list')
for i, quiz_coursera in enumerate(quiz_list):
heading = quiz_coursera.select('h3')[0].find(text=True, recursive=False)
deadline = None
try:
deadline = dateutil.parser.parse(str(
quiz_details[i].select('.course-quiz-item-softdeadline .course-assignment-deadline')[
0].contents[
0].replace('\n', '')))
except IndexError:
pass
hard_deadline = None
try:
hard_deadline = dateutil.parser.parse(quiz_details[i].select(
'.course-quiz-item-harddeadline .course-assignment-deadline')[0].contents[
0].replace('\n', ''))
except IndexError:
pass
if hard_deadline is None:
hard_deadline = timezone.now()
if deadline is None:
deadline = hard_deadline
try:
Quiz.objects.get(heading=heading, course=course)
except Quiz.DoesNotExist:
Quiz.objects.create(heading=heading,
deadline=deadline,
hard_deadline=hard_deadline,
course=course)
course.save()
def get_course_progress(self, user, course):
if course.quiz_link and course.quiz_link != '':
self.driver.get(course.quiz_link)
soup = BeautifulSoup(self.driver.page_source)
quiz_list = soup.select('div.course-item-list .course-item-list-header')
quiz_details = soup.select('ul.course-item-list-section-list')
for i, quiz_coursera in enumerate(quiz_list):
try:
quiz = Quiz.objects.get(heading=quiz_coursera.select('h3')[0].find(text=True, recursive=False),
course=course)
try:
progress = Progress.objects.get(quiz=quiz, user=user.userprofile)
except Progress.DoesNotExist:
progress = Progress.objects.create(quiz=quiz, user=user.userprofile)
progress.score = quiz_details[i].select(
'.course-quiz-item-score td span')[0].contents[0]
progress.save()
except Quiz.DoesNotExist:
print "Not found"
def get_course_completion(self, courseraprofile, pledges):
self.driver.get('https://www.coursera.org/account/records')
btn = self.driver.find_element_by_xpath(
'//*[@id="origami"]/div/div/div[1]/div[3]/div[1]/div/div[2]/div[1]/div/a')
if btn.is_displayed():
btn.click()
soup = BeautifulSoup(self.driver.page_source)
course_ids = soup.select('.coursera-records-course-listing-box')
course_ids = map(lambda x: int(x.attrs['data-course-id']), course_ids)
grades = soup.select(
'.coursera-course-records-listings-without .coursera-course-listing-grade-section div[class~=hide] span')[
::2]
for i, grade in enumerate(grades):
if str(course_ids[i]) not in courseraprofile.counted_as_completed.split(','):
your_pledges = pledges.filter(course__course_id=course_ids[i])
for pledge in your_pledges:
                    mark = re.findall(r"\d+\.\d+", grade.contents[0])
if mark:
pledge.actual_mark = float(mark[0]) / 100
pledge.is_complete = True
pledge.complete_date = timezone.now()
if pledge.is_active and pledge.charge != "":
if pledge.actual_mark > pledge.aim:
                            charge = stripe.Charge.retrieve(pledge.charge)
charge.refund()
pledge.save()
if str(courseraprofile.counted_as_completed) != '':
courseraprofile.counted_as_completed += ',' + str(course_ids[i])
else:
courseraprofile.counted_as_completed += str(course_ids[i])
courseraprofile.save()
def end(self):
self.driver.close()
self.display.stop()
# def get_coursera_courses(profile):
# scraper = CourseraScraper()
# try:
# if str(profile.username) != '':
# print profile.username
# scraper.driver.implicitly_wait(5)
# scraper.login(str(profile.username), str(profile.password))
# scraper.driver.set_page_load_timeout(3)
# scraper.driver.set_script_timeout(5)
# time.sleep(3)
# courses, course_links, internal_links, start_dates, end_dates, course_ids, image_links, error = scraper.get_courses()
# if error is not None:
# profile.incorrect_login = True
# profile.last_updated = timezone.now()
# profile.save()
# scraper.end()
# return
# else:
# profile.incorrect_login = False
# print courses, image_links
# django_courses = []
# try:
# for i, course in enumerate(courses):
# try:
# get_course = Course.objects.get(title=course)
# if get_course.start_date is None:
# get_course.course_link = course_links[i]
# get_course.info_link = internal_links[i]
# get_course.course_id = course_ids[i]
# get_course.image_link = image_links[i]
# get_course.start_date = datetime.strptime(
# start_dates[i].replace('th', '').replace('st', '').replace('nd', '').replace('rd', ''),
# "%b %d %Y").date()
# get_course.end_date = datetime.strptime(
# end_dates[i].replace('th', '').replace('st', '').replace('nd', '').replace('rd', ''),
# "%b %d %Y").date()
# get_course.save()
# except Course.DoesNotExist:
# get_course = Course.objects.create(title=course, course_link=course_links[i],
# course_id=course_ids[i],
# info_link=internal_links[i], start_date=
# datetime.strptime(
# str(start_dates[i].replace('th', '').replace('st', '').replace('nd', '').replace('rd',
# '')),
# '%b %d %Y').date(),
# end_date=datetime.strptime(str(
# end_dates[i].replace('th', '').replace('st', '').replace(
# 'nd',
# '').replace(
# 'rd', '')), '%b %d %Y').date(),
# image_link=image_links[i])
# profile.courses.add(get_course)
# django_courses.append(get_course)
# except IndexError:
# pass
# except Exception as e:
# print e, "Inside"
# f_courses, f_course_links, f_internal_links, f_course_ids, f_image_links = scraper.get_upcoming_courses()
# print f_courses
# try:
# for i, course in enumerate(f_courses):
# try:
# get_course = Course.objects.get(title=course)
# except Course.DoesNotExist:
# get_course = Course.objects.create(title=course, course_link=f_course_links[i],
# course_id=f_course_ids[i],
# image_link=f_image_links[i])
# profile.courses.add(get_course)
# except IndexError:
# pass
# except Exception as e:
# print e, "Inside"
# profile.last_updated = timezone.now()
# profile.save()
# for i, course in enumerate(django_courses):
# get_course = course
# if get_course.end_date >= timezone.now().date():
# scraper.get_quiz_link(get_course, course_links[i])
# scraper.get_course_progress(profile.user, get_course)
# scraper.get_course_completion(profile,
# Pledge.objects.filter(user=profile.user.userprofile, is_complete=False))
#
# except Exception as e:
# print e
# finally:
# print "Coursera Done"
# scraper.end()
def get_coursera_courses(profile):
try:
if profile.username != "":
coursera = CourseraDownloader(profile.username, profile.password)
coursera.login('gamification-003', profile)
coursera.get_enrollments(profile)
print "Coursera Done"
except Exception as e:
profile.incorrect_login = True
profile.save()
print traceback.format_exc()
def get_edx_courses(edxprofile):
if edxprofile.email != '':
scrape_for_user(edxprofile)
else:
return
def get_udemy_courses(profile):
try:
print profile.user.email
session = Session(profile.email, profile.password)
print "Trying Udemy"
r = session.login()
if r:
print "logged into udemy"
courses = session.get_list_courses()
for course_id in courses:
course_dict = udemy_scraping.get_course(course_id)
try:
course = Course.objects.get(course_id=course_id)
if course not in profile.courses.all():
profile.courses.add(course)
#todo: added feed check
#action.send(actor=profile.user.userprofile, verb='enrolled in', target=course, sender=None)
except Course.DoesNotExist:
image_url = course_dict['images']['img_75x75']
title = course_dict['title']
try:
try:
description = re.sub('<[^>]*>', '', course_dict['promoAsset']['description'])
except:
description = ""
course_url = course_dict['url']
course = Course.objects.create(course_id=course_id, title=title,
course_link=course_url, description=description,
quiz_link='https://www.udemy.com/api-1.1/courses/' + course_id +
'/curriculum',
image_link=image_url)
profile.courses.add(course)
profile.last_updated = timezone.now()
profile.save()
except:
print traceback.format_exc()
continue
#todo: added feed check
#action.send(actor=profile.user.userprofile, verb='enrolled in', target=course, sender=None)
#action.send(actor=profile.user_profile, verb='enrolled in', target=course)
#todo: create course
ci = session.get_curriculum(course_id)
progress = session.get_course_progress(course_id)
#overall_completion = progress['completion_ratio']
progress = dict(progress['quiz_progress'].items() + progress['lectures_progress'].items())
quiz_ids = progress.keys()
quiz_marks = progress.values()
for c in ci:
try:
quiz = Quiz.objects.get(quizid=c['id'])
except Quiz.MultipleObjectsReturned:
quiz = Quiz.objects.filter(quizid=c['id'])[0]
except Quiz.DoesNotExist:
quiz = Quiz.objects.create(quizid=c['id'], course=course, heading=c['title'])
if c['id'] in quiz_ids:
index = quiz_ids.index(c['id'])
try:
mark = str(float(quiz_marks[index]['completionRatio']) / 100)
except:
mark = str(0)
else:
mark = str(0)
try:
progress = Progress.objects.get(user=profile.user.userprofile, quiz=quiz)
progress.score = mark
progress.save()
except Progress.DoesNotExist:
Progress.objects.create(user=profile.user.userprofile, quiz=quiz,
score=mark)
print "done udemy"
else:
profile.incorrect_login = True
profile.save()
except:
print traceback.format_exc()
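# Illustrative helper (not part of the original module): the tag-stripping
# regex used for Udemy course descriptions in get_udemy_courses, standalone.
def strip_tags(html):
    """Crude HTML tag removal, e.g. '<p>Hi <b>there</b></p>' -> 'Hi there'."""
    return re.sub('<[^>]*>', '', html)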
| {
"repo_name": "TejasM/wisely",
"path": "wisely_project/users/tasks.py",
"copies": "1",
"size": "19813",
"license": "mit",
"hash": -1663280390979860500,
"line_mean": 50.5963541667,
"line_max": 149,
"alpha_frac": 0.4948771009,
"autogenerated": false,
"ratio": 4.2931744312026,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.52880515321026,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division
import json
import requests
class BearerAuth(requests.auth.AuthBase):
def __init__(self, token):
self.token = token
def __call__(self, r):
r.headers['Authorization'] = "Bearer %s" % self.token
return r
def req(method, url, params=None, data=None):
c = getattr(requests, method.lower())
kwargs = {}
if params is not None:
kwargs['params'] = params
if data is not None:
kwargs['headers'] = {'content-type': 'application/json'}
kwargs['data'] = json.dumps(data)
return c(url, auth=BearerAuth(AUTH_TOKEN), **kwargs)
class HttpClientError(requests.exceptions.HTTPError):
"""The 4xx class of status code is intended for cases in which the client seems to have erred.
Except when responding to a HEAD request, the server SHOULD include an entity containing an
explanation of the error situation, and whether it is a temporary or permanent condition. These
status codes are applicable to any request method. User agents SHOULD display any included
entity to the user.
"""
_http_errors = {}
class HttpBadRequest(HttpClientError):
"""400 Bad Request
The request could not be understood by the server due to malformed syntax. The client SHOULD
NOT repeat the request without modifications.
"""
_http_errors[400] = HttpBadRequest
class HttpUnauthorized(HttpClientError):
"""401 Unauthorized
The request requires user authentication. The response MUST include a WWW-Authenticate header
field (section 14.47) containing a challenge applicable to the requested resource. The client
MAY repeat the request with a suitable Authorization header field (section 14.8). If the
request already included Authorization credentials, then the 401 response indicates that
authorization has been refused for those credentials. If the 401 response contains the same
challenge as the prior response, and the user agent has already attempted authentication at
least once, then the user SHOULD be presented the entity that was given in the response, since
that entity might include relevant diagnostic information. HTTP access authentication is
explained in "HTTP Authentication: Basic and Digest Access Authentication".
"""
_http_errors[401] = HttpUnauthorized
class HttpPaymentRequired(HttpClientError):
"""402 Payment Required
This code is reserved for future use.
"""
_http_errors[402] = HttpPaymentRequired
class HttpForbidden(HttpClientError):
"""403 Forbidden
The server understood the request, but is refusing to fulfill it. Authorization will not help
and the request SHOULD NOT be repeated. If the request method was not HEAD and the server
wishes to make public why the request has not been fulfilled, it SHOULD describe the reason for
the refusal in the entity. If the server does not wish to make this information available to
the client, the status code 404 (Not Found) can be used instead.
"""
_http_errors[403] = HttpForbidden
class HttpNotFound(HttpClientError):
"""404 Not Found
The server has not found anything matching the Request-URI. No indication is given of whether
the condition is temporary or permanent. The 410 (Gone) status code SHOULD be used if the
server knows, through some internally configurable mechanism, that an old resource is
permanently unavailable and has no forwarding address. This status code is commonly used when
the server does not wish to reveal exactly why the request has been refused, or when no other
response is applicable.
"""
_http_errors[404] = HttpNotFound
class HttpMethodNotAllowed(HttpClientError):
"""405 Method Not Allowed
The method specified in the Request-Line is not allowed for the resource identified by the
Request-URI. The response MUST include an Allow header containing a list of valid methods for
the requested resource.
"""
_http_errors[405] = HttpMethodNotAllowed
class HttpNotAcceptable(HttpClientError):
"""The resource identified by the request is only capable of generating response entities which
have content characteristics not acceptable according to the accept headers sent in the request.
Unless it was a HEAD request, the response SHOULD include an entity containing a list of
available entity characteristics and location(s) from which the user or user agent can choose
the one most appropriate. The entity format is specified by the media type given in the
Content-Type header field. Depending upon the format and the capabilities of the user agent,
selection of the most appropriate choice MAY be performed automatically. However, this
specification does not define any standard for such automatic selection.
Note: HTTP/1.1 servers are allowed to return responses which are not acceptable according
to the accept headers sent in the request. In some cases, this may even be preferable to
sending a 406 response. User agents are encouraged to inspect the headers of an incoming
response to determine if it is acceptable.
If the response could be unacceptable, a user agent SHOULD temporarily stop receipt of more
data and query the user for a decision on further actions.
"""
_http_errors[406] = HttpNotAcceptable
class HttpProxyAuthenticationRequired(HttpClientError):
"""407 Proxy Authentication Required
This code is similar to 401 (Unauthorized), but indicates that the client must first
authenticate itself with the proxy. The proxy MUST return a Proxy-Authenticate header field
(section 14.33) containing a challenge applicable to the proxy for the requested resource. The
client MAY repeat the request with a suitable Proxy-Authorization header field (section 14.34).
HTTP access authentication is explained in "HTTP Authentication: Basic and Digest Access
Authentication".
"""
_http_errors[407] = HttpProxyAuthenticationRequired
class HttpRequestTimeout(HttpClientError):
"""408 Request Timeout
The client did not produce a request within the time that the server was prepared to wait. The
client MAY repeat the request without modifications at any later time.
"""
_http_errors[408] = HttpRequestTimeout
class HttpConflict(HttpClientError):
"""409 Conflict
The request could not be completed due to a conflict with the current state of the resource.
This code is only allowed in situations where it is expected that the user might be able to
resolve the conflict and resubmit the request. The response body SHOULD include enough
information for the user to recognize the source of the conflict. Ideally, the response entity
would include enough information for the user or user agent to fix the problem; however, that
might not be possible and is not required.
Conflicts are most likely to occur in response to a PUT request. For example, if versioning
were being used and the entity being PUT included changes to a resource which conflict with
those made by an earlier (third-party) request, the server might use the 409 response to
indicate that it can't complete the request. In this case, the response entity would likely
contain a list of the differences between the two versions in a format defined by the response
Content-Type.
"""
_http_errors[409] = HttpConflict
class HttpGone(HttpClientError):
"""410 Gone
The requested resource is no longer available at the server and no forwarding address is known.
This condition is expected to be considered permanent. Clients with link editing capabilities
SHOULD delete references to the Request-URI after user approval. If the server does not know,
or has no facility to determine, whether or not the condition is permanent, the status code 404
(Not Found) SHOULD be used instead. This response is cacheable unless indicated otherwise.
The 410 response is primarily intended to assist the task of web maintenance by notifying the
recipient that the resource is intentionally unavailable and that the server owners desire that
remote links to that resource be removed. Such an event is common for limited-time, promotional
services and for resources belonging to individuals no longer working at the server's site. It
is not necessary to mark all permanently unavailable resources as "gone" or to keep the mark
for any length of time -- that is left to the discretion of the server owner.
"""
_http_errors[410] = HttpGone
class HttpLengthRequired(HttpClientError):
"""411 Length Required
The server refuses to accept the request without a defined Content-Length. The client MAY
repeat the request if it adds a valid Content-Length header field containing the length of the
message-body in the request message.
"""
_http_errors[411] = HttpLengthRequired
class HttpPreconditionFailed(HttpClientError):
"""412 Precondition Failed
The precondition given in one or more of the request-header fields evaluated to false when it
was tested on the server. This response code allows the client to place preconditions on the
current resource metainformation (header field data) and thus prevent the requested method from
being applied to a resource other than the one intended.
"""
_http_errors[412] = HttpPreconditionFailed
class HttpRequestEntityTooLarge(HttpClientError):
"""413 Request Entity Too Large
The server is refusing to process a request because the request entity is larger than the
server is willing or able to process. The server MAY close the connection to prevent the client
from continuing the request.
If the condition is temporary, the server SHOULD include a Retry-After header field to indicate
that it is temporary and after what time the client MAY try again.
"""
_http_errors[413] = HttpRequestEntityTooLarge
class HttpRequestUriTooLong(HttpClientError):
"""414 Request-URI Too Long
The server is refusing to service the request because the Request-URI is longer than the server
is willing to interpret. This rare condition is only likely to occur when a client has
improperly converted a POST request to a GET request with long query information, when the
client has descended into a URI "black hole" of redirection (e.g., a redirected URI prefix that
points to a suffix of itself), or when the server is under attack by a client attempting to
exploit security holes present in some servers using fixed-length buffers for reading or
manipulating the Request-URI.
"""
_http_errors[414] = HttpRequestUriTooLong
class HttpUnsupportedMediaType(HttpClientError):
"""415 Unsupported Media Type
The server is refusing to service the request because the entity of the request is in a format
not supported by the requested resource for the requested method.
"""
_http_errors[415] = HttpUnsupportedMediaType
class HttpRequestedRangeNotSatisfiable(HttpClientError):
"""416 Requested Range Not Satisfiable
A server SHOULD return a response with this status code if a request included a Range
request-header field (section 14.35), and none of the range-specifier values in this field
overlap the current extent of the selected resource, and the request did not include an
If-Range request-header field. (For byte-ranges, this means that the first-byte-pos of all of
the byte-range-spec values were greater than the current length of the selected resource.)
When this status code is returned for a byte-range request, the response SHOULD include a
Content-Range entity-header field specifying the current length of the selected resource (see
section 14.16). This response MUST NOT use the multipart/byteranges content-type.
"""
_http_errors[416] = HttpRequestedRangeNotSatisfiable
class HttpExpectationFailed(HttpClientError):
"""417 Expectation Failed
The expectation given in an Expect request-header field (see section 14.20) could not be met by
this server, or, if the server is a proxy, the server has unambiguous evidence that the request
could not be met by the next-hop server.
"""
_http_errors[417] = HttpExpectationFailed
class HttpTooManyRequests(HttpClientError):
"""429 Too Many Requests
The 429 status code indicates that the user has sent too many
requests in a given amount of time ("rate limiting")
https://www.hipchat.com/docs/apiv2/rate_limiting
"""
_http_errors[429] = HttpTooManyRequests
class HttpServerError(requests.exceptions.HTTPError):
"""Server Error 5xx
Response status codes beginning with the digit "5" indicate cases in which the server is aware
that it has erred or is incapable of performing the request. Except when responding to a HEAD
request, the server SHOULD include an entity containing an explanation of the error situation,
and whether it is a temporary or permanent condition. User agents SHOULD display any included
entity to the user. These response codes are applicable to any request method.
"""
class HttpInternalServerError(HttpServerError):
"""500 Internal Server Error
The server encountered an unexpected condition which prevented it from fulfilling the request.
"""
_http_errors[500] = HttpInternalServerError
class HttpNotImplemented(HttpServerError, NotImplementedError):
"""501 Not Implemented
The server does not support the functionality required to fulfill the request. This is the
appropriate response when the server does not recognize the request method and is not capable
of supporting it for any resource.
"""
_http_errors[501] = HttpNotImplemented
class HttpBadGateway(HttpServerError):
"""502 Bad Gateway
The server, while acting as a gateway or proxy, received an invalid response from the upstream
server it accessed in attempting to fulfill the request.
"""
_http_errors[502] = HttpBadGateway
class HttpServiceUnavailable(HttpServerError):
"""503 Service Unavailable
The server is currently unable to handle the request due to a temporary overloading or
maintenance of the server. The implication is that this is a temporary condition which will be
alleviated after some delay. If known, the length of the delay MAY be indicated in a
Retry-After header. If no Retry-After is given, the client SHOULD handle the response as it
would for a 500 response.
Note: The existence of the 503 status code does not imply that a server must use it when
becoming overloaded. Some servers may wish to simply refuse the connection.
"""
_http_errors[503] = HttpServiceUnavailable
class HttpGatewayTimeout(HttpServerError):
"""504 Gateway Timeout
The server, while acting as a gateway or proxy, did not receive a timely response from the
upstream server specified by the URI (e.g. HTTP, FTP, LDAP) or some other auxiliary server
(e.g. DNS) it needed to access in attempting to complete the request.
Note: Note to implementors: some deployed proxies are known to return 400 or 500 when DNS
lookups time out.
"""
_http_errors[504] = HttpGatewayTimeout
class HttpVersionNotSupported(HttpServerError):
"""505 HTTP Version Not Supported
The server does not support, or refuses to support, the HTTP protocol version that was used in
the request message. The server is indicating that it is unable or unwilling to complete the
request using the same major version as the client, as described in section 3.1, other than
with this error message. The response SHOULD contain an entity describing why that version is
not supported and what other protocols are supported by that server.
"""
_http_errors[505] = HttpVersionNotSupported
class Requests(requests.sessions.Session):
"""
Class that extends the requests module in two ways:
* Supports default arguments, for things like repeated Auth
* Raise errors when requests go poorly
"""
def __init__(self, **kwargs):
self._template = kwargs.copy()
super(Requests, self).__init__()
def _kw(self, kwargs):
kw = self._template.copy()
kw.update(kwargs)
return kw
def request(self, method, url, **kwargs):
rv = super(Requests, self).request(method, url, **self._kw(kwargs))
# Raise one of our specific errors
if rv.status_code in _http_errors:
raise _http_errors[rv.status_code](rv.text, response=rv)
# Try to raise for errors we didn't code for
rv.raise_for_status()
return rv
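# Illustrative usage (the token and URL are placeholders): defaults passed to
# the constructor are merged into every request, and error statuses surface
# as the typed exceptions registered in ``_http_errors`` above.
#
# api = Requests(auth=BearerAuth("my-token"))
# try:
#     rooms = api.get("https://api.hipchat.com/v2/room").json()
# except HttpUnauthorized:
#     pass  # bad or expired token
# except HttpTooManyRequests:
#     pass  # rate limited; back off and retry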
| {
"repo_name": "dougkeen/HypChat",
"path": "hypchat/requests.py",
"copies": "2",
"size": "16053",
"license": "mit",
"hash": 4481182384008147500,
"line_mean": 44.3474576271,
"line_max": 97,
"alpha_frac": 0.7812869869,
"autogenerated": false,
"ratio": 4.188103313331594,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.05273656995110503,
"num_lines": 354
} |
from __future__ import absolute_import, division
import json
import requests
class BearerAuth(requests.auth.AuthBase):
def __init__(self, token):
self.token = token
def __call__(self, r):
r.headers['Authorization'] = "Bearer %s" % self.token
return r
def req(method, url, params=None, data=None):
c = getattr(requests, method.lower())
kwargs = {}
if params is not None:
kwargs['params'] = params
if data is not None:
kwargs['headers'] = {'content-type': 'application/json'}
kwargs['data'] = json.dumps(data)
return c(url, auth=BearerAuth(AUTH_TOKEN), **kwargs)
class HttpClientError(requests.exceptions.HTTPError):
"""The 4xx class of status code is intended for cases in which the client seems to have erred.
Except when responding to a HEAD request, the server SHOULD include an entity containing an
explanation of the error situation, and whether it is a temporary or permanent condition. These
status codes are applicable to any request method. User agents SHOULD display any included
entity to the user.
"""
_http_errors = {}
class HttpBadRequest(HttpClientError):
"""400 Bad Request
The request could not be understood by the server due to malformed syntax. The client SHOULD
NOT repeat the request without modifications.
"""
_http_errors[400] = HttpBadRequest
class HttpUnauthorized(HttpClientError):
"""401 Unauthorized
The request requires user authentication. The response MUST include a WWW-Authenticate header
field (section 14.47) containing a challenge applicable to the requested resource. The client
MAY repeat the request with a suitable Authorization header field (section 14.8). If the
request already included Authorization credentials, then the 401 response indicates that
authorization has been refused for those credentials. If the 401 response contains the same
challenge as the prior response, and the user agent has already attempted authentication at
least once, then the user SHOULD be presented the entity that was given in the response, since
that entity might include relevant diagnostic information. HTTP access authentication is
explained in "HTTP Authentication: Basic and Digest Access Authentication".
"""
_http_errors[401] = HttpUnauthorized
class HttpPaymentRequired(HttpClientError):
"""402 Payment Required
This code is reserved for future use.
"""
_http_errors[402] = HttpPaymentRequired
class HttpForbidden(HttpClientError):
"""403 Forbidden
The server understood the request, but is refusing to fulfill it. Authorization will not help
and the request SHOULD NOT be repeated. If the request method was not HEAD and the server
wishes to make public why the request has not been fulfilled, it SHOULD describe the reason for
the refusal in the entity. If the server does not wish to make this information available to
the client, the status code 404 (Not Found) can be used instead.
"""
_http_errors[403] = HttpForbidden
class HttpNotFound(HttpClientError):
"""404 Not Found
The server has not found anything matching the Request-URI. No indication is given of whether
the condition is temporary or permanent. The 410 (Gone) status code SHOULD be used if the
server knows, through some internally configurable mechanism, that an old resource is
permanently unavailable and has no forwarding address. This status code is commonly used when
the server does not wish to reveal exactly why the request has been refused, or when no other
response is applicable.
"""
_http_errors[404] = HttpNotFound
class HttpMethodNotAllowed(HttpClientError):
"""405 Method Not Allowed
The method specified in the Request-Line is not allowed for the resource identified by the
Request-URI. The response MUST include an Allow header containing a list of valid methods for
the requested resource.
"""
_http_errors[405] = HttpMethodNotAllowed
class HttpNotAcceptable(HttpClientError):
"""The resource identified by the request is only capable of generating response entities which
have content characteristics not acceptable according to the accept headers sent in the request.
Unless it was a HEAD request, the response SHOULD include an entity containing a list of
available entity characteristics and location(s) from which the user or user agent can choose
the one most appropriate. The entity format is specified by the media type given in the
Content-Type header field. Depending upon the format and the capabilities of the user agent,
selection of the most appropriate choice MAY be performed automatically. However, this
specification does not define any standard for such automatic selection.
Note: HTTP/1.1 servers are allowed to return responses which are not acceptable according
to the accept headers sent in the request. In some cases, this may even be preferable to
sending a 406 response. User agents are encouraged to inspect the headers of an incoming
response to determine if it is acceptable.
If the response could be unacceptable, a user agent SHOULD temporarily stop receipt of more
data and query the user for a decision on further actions.
"""
_http_errors[406] = HttpNotAcceptable
class HttpProxyAuthenticationRequired(HttpClientError):
"""407 Proxy Authentication Required
This code is similar to 401 (Unauthorized), but indicates that the client must first
authenticate itself with the proxy. The proxy MUST return a Proxy-Authenticate header field
(section 14.33) containing a challenge applicable to the proxy for the requested resource. The
client MAY repeat the request with a suitable Proxy-Authorization header field (section 14.34).
HTTP access authentication is explained in "HTTP Authentication: Basic and Digest Access
Authentication".
"""
_http_errors[407] = HttpProxyAuthenticationRequired
class HttpRequestTimeout(HttpClientError):
"""408 Request Timeout
The client did not produce a request within the time that the server was prepared to wait. The
client MAY repeat the request without modifications at any later time.
"""
_http_errors[408] = HttpRequestTimeout
class HttpConflict(HttpClientError):
"""409 Conflict
The request could not be completed due to a conflict with the current state of the resource.
This code is only allowed in situations where it is expected that the user might be able to
resolve the conflict and resubmit the request. The response body SHOULD include enough
information for the user to recognize the source of the conflict. Ideally, the response entity
would include enough information for the user or user agent to fix the problem; however, that
might not be possible and is not required.
Conflicts are most likely to occur in response to a PUT request. For example, if versioning
were being used and the entity being PUT included changes to a resource which conflict with
those made by an earlier (third-party) request, the server might use the 409 response to
indicate that it can't complete the request. In this case, the response entity would likely
contain a list of the differences between the two versions in a format defined by the response
Content-Type.
"""
_http_errors[409] = HttpConflict
class HttpGone(HttpClientError):
"""410 Gone
The requested resource is no longer available at the server and no forwarding address is known.
This condition is expected to be considered permanent. Clients with link editing capabilities
SHOULD delete references to the Request-URI after user approval. If the server does not know,
or has no facility to determine, whether or not the condition is permanent, the status code 404
(Not Found) SHOULD be used instead. This response is cacheable unless indicated otherwise.
The 410 response is primarily intended to assist the task of web maintenance by notifying the
recipient that the resource is intentionally unavailable and that the server owners desire that
remote links to that resource be removed. Such an event is common for limited-time, promotional
services and for resources belonging to individuals no longer working at the server's site. It
is not necessary to mark all permanently unavailable resources as "gone" or to keep the mark
for any length of time -- that is left to the discretion of the server owner.
"""
_http_errors[410] = HttpGone
class HttpLengthRequired(HttpClientError):
"""411 Length Required
The server refuses to accept the request without a defined Content-Length. The client MAY
repeat the request if it adds a valid Content-Length header field containing the length of the
message-body in the request message.
"""
_http_errors[411] = HttpLengthRequired
class HttpPreconditionFailed(HttpClientError):
"""412 Precondition Failed
The precondition given in one or more of the request-header fields evaluated to false when it
was tested on the server. This response code allows the client to place preconditions on the
current resource metainformation (header field data) and thus prevent the requested method from
being applied to a resource other than the one intended.
"""
_http_errors[412] = HttpPreconditionFailed
class HttpRequestEntityTooLarge(HttpClientError):
"""413 Request Entity Too Large
The server is refusing to process a request because the request entity is larger than the
server is willing or able to process. The server MAY close the connection to prevent the client
from continuing the request.
If the condition is temporary, the server SHOULD include a Retry-After header field to indicate
that it is temporary and after what time the client MAY try again.
"""
_http_errors[413] = HttpRequestEntityTooLarge
class HttpRequestUriTooLong(HttpClientError):
"""414 Request-URI Too Long
The server is refusing to service the request because the Request-URI is longer than the server
is willing to interpret. This rare condition is only likely to occur when a client has
improperly converted a POST request to a GET request with long query information, when the
client has descended into a URI "black hole" of redirection (e.g., a redirected URI prefix that
points to a suffix of itself), or when the server is under attack by a client attempting to
exploit security holes present in some servers using fixed-length buffers for reading or
manipulating the Request-URI.
"""
_http_errors[414] = HttpRequestUriTooLong
class HttpUnsupportedMediaType(HttpClientError):
"""415 Unsupported Media Type
The server is refusing to service the request because the entity of the request is in a format
not supported by the requested resource for the requested method.
"""
_http_errors[415] = HttpUnsupportedMediaType
class HttpRequestedRangeNotSatisfiable(HttpClientError):
"""416 Requested Range Not Satisfiable
A server SHOULD return a response with this status code if a request included a Range
request-header field (section 14.35), and none of the range-specifier values in this field
overlap the current extent of the selected resource, and the request did not include an
If-Range request-header field. (For byte-ranges, this means that the first-byte-pos of all of
the byte-range-spec values were greater than the current length of the selected resource.)
When this status code is returned for a byte-range request, the response SHOULD include a
Content-Range entity-header field specifying the current length of the selected resource (see
section 14.16). This response MUST NOT use the multipart/byteranges content-type.
"""
_http_errors[416] = HttpRequestedRangeNotSatisfiable
class HttpExpectationFailed(HttpClientError):
"""417 Expectation Failed
The expectation given in an Expect request-header field (see section 14.20) could not be met by
this server, or, if the server is a proxy, the server has unambiguous evidence that the request
could not be met by the next-hop server.
"""
_http_errors[417] = HttpExpectationFailed
class HttpTooManyRequests(HttpClientError):
"""429 Too Many Requests
The 429 status code indicates that the user has sent too many
requests in a given amount of time ("rate limiting")
https://www.hipchat.com/docs/apiv2/rate_limiting
"""
_http_errors[429] = HttpTooManyRequests
class HttpServerError(requests.exceptions.HTTPError):
"""Server Error 5xx
Response status codes beginning with the digit "5" indicate cases in which the server is aware
that it has erred or is incapable of performing the request. Except when responding to a HEAD
request, the server SHOULD include an entity containing an explanation of the error situation,
and whether it is a temporary or permanent condition. User agents SHOULD display any included
entity to the user. These response codes are applicable to any request method.
"""
class HttpInternalServerError(HttpServerError):
"""500 Internal Server Error
The server encountered an unexpected condition which prevented it from fulfilling the request.
"""
_http_errors[500] = HttpInternalServerError
class HttpNotImplemented(HttpServerError, NotImplementedError):
"""501 Not Implemented
The server does not support the functionality required to fulfill the request. This is the
appropriate response when the server does not recognize the request method and is not capable
of supporting it for any resource.
"""
_http_errors[501] = HttpNotImplemented
class HttpBadGateway(HttpServerError):
"""502 Bad Gateway
The server, while acting as a gateway or proxy, received an invalid response from the upstream
server it accessed in attempting to fulfill the request.
"""
_http_errors[502] = HttpBadGateway
class HttpServiceUnavailable(HttpServerError):
"""503 Service Unavailable
The server is currently unable to handle the request due to a temporary overloading or
maintenance of the server. The implication is that this is a temporary condition which will be
alleviated after some delay. If known, the length of the delay MAY be indicated in a
Retry-After header. If no Retry-After is given, the client SHOULD handle the response as it
would for a 500 response.
Note: The existence of the 503 status code does not imply that a server must use it when
becoming overloaded. Some servers may wish to simply refuse the connection.
"""
_http_errors[503] = HttpServiceUnavailable
class HttpGatewayTimeout(HttpServerError):
"""504 Gateway Timeout
The server, while acting as a gateway or proxy, did not receive a timely response from the
upstream server specified by the URI (e.g. HTTP, FTP, LDAP) or some other auxiliary server
(e.g. DNS) it needed to access in attempting to complete the request.
Note to implementors: some deployed proxies are known to return 400 or 500 when DNS
lookups time out.
"""
_http_errors[504] = HttpGatewayTimeout
class HttpVersionNotSupported(HttpServerError):
"""505 HTTP Version Not Supported
The server does not support, or refuses to support, the HTTP protocol version that was used in
the request message. The server is indicating that it is unable or unwilling to complete the
request using the same major version as the client, as described in section 3.1, other than
with this error message. The response SHOULD contain an entity describing why that version is
not supported and what other protocols are supported by that server.
"""
_http_errors[505] = HttpVersionNotSupported
class Requests(requests.sessions.Session):
"""
Class that extends the requests module in two ways:
* Supports default arguments, for things like repeated Auth
* Raise errors when requests go poorly
"""
def __init__(self, **kwargs):
self._template = kwargs.copy()
super(Requests, self).__init__()
def _kw(self, kwargs):
kw = self._template.copy()
kw.update(kwargs)
return kw
def request(self, method, url, **kwargs):
rv = super(Requests, self).request(method, url, **self._kw(kwargs))
# Raise one of our specific errors
if rv.status_code in _http_errors:
raise _http_errors[rv.status_code](rv.text, response=rv)
# Try to raise for errors we didn't code for
rv.raise_for_status()
return rv
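# Hedged usage sketch (URL and token are placeholders; nothing here is
# called automatically): kwargs given to the constructor become defaults
# for every call, and error responses are raised as the specific
# subclasses registered in _http_errors.
def _requests_demo():
    rq = Requests(auth=BearerAuth('example-token'), timeout=10)
    try:
        rq.request('GET', 'https://api.example.com/v2/room')
    except HttpNotFound as err:
        print(err.response.status_code)  # 404, mapped via _http_errors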
| {
"repo_name": "RidersDiscountCom/HypChat",
"path": "hypchat/requests.py",
"copies": "1",
"size": "16773",
"license": "mit",
"hash": 3086913363919868000,
"line_mean": 37.7367205543,
"line_max": 100,
"alpha_frac": 0.7477493591,
"autogenerated": false,
"ratio": 4.62193441719482,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.586968377629482,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division
import sys
try:
from django.urls import reverse
except ImportError: # Django < 1.10
from django.core.urlresolvers import reverse
from django.conf import settings
from mock import patch, Mock
from graphite.tags.localdatabase import LocalDatabaseTagDB
from graphite.tags.redis import RedisTagDB
from graphite.tags.http import HttpTagDB
from graphite.tags.utils import TaggedSeries
from graphite.util import json
from tests.base import TestCase
def json_bytes(obj, *args, **kwargs):
s = json.dumps(obj, *args, **kwargs)
if sys.version_info[0] >= 3:
return s.encode('utf-8')
return s
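# Hedged illustration (this helper is not graphite code): the canonical
# form exercised by the tests below is the metric name followed by
# ';key=value' pairs sorted by key, e.g.
# _canonical_path('test.a', {'hello': 'tiger', 'blah': 'blah'})
# == 'test.a;blah=blah;hello=tiger'
def _canonical_path(metric, tags):
    pairs = sorted((k, v) for k, v in tags.items() if k != 'name')
    return ';'.join([metric] + ['%s=%s' % pair for pair in pairs])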
class TagsTest(TestCase):
def test_taggedseries(self):
# test path with tags
parsed = TaggedSeries.parse('test.a;hello=tiger;blah=blah')
self.assertIsInstance(parsed, TaggedSeries)
self.assertEqual(parsed.metric, 'test.a')
self.assertEqual(parsed.tags, {'blah': 'blah', 'hello': 'tiger', 'name': 'test.a'})
# test formatting
self.assertEqual(parsed.path, 'test.a;blah=blah;hello=tiger')
# test encoding
self.assertEqual(TaggedSeries.encode(parsed.path), '_tagged.2b0.2af.test_DOT_a;blah=blah;hello=tiger')
# test decoding
self.assertEqual(TaggedSeries.decode('_tagged.2b0.2af.test_DOT_a;blah=blah;hello=tiger'), parsed.path)
# test path without tags
parsed = TaggedSeries.parse('test.a')
self.assertIsInstance(parsed, TaggedSeries)
self.assertEqual(parsed.metric, 'test.a')
self.assertEqual(parsed.tags, {'name': 'test.a'})
# test formatting
self.assertEqual(parsed.path, 'test.a')
# test encoding
self.assertEqual(TaggedSeries.encode('test.a', sep='/'), 'test/a')
# test encoding
self.assertEqual(TaggedSeries.decode('test/a', sep='/'), 'test.a')
# test parsing openmetrics
parsed = TaggedSeries.parse(r'test.a{hello="tiger",blah="bla\"h"}')
self.assertIsInstance(parsed, TaggedSeries)
self.assertEqual(parsed.metric, 'test.a')
self.assertEqual(parsed.tags, {'blah': 'bla"h', 'hello': 'tiger', 'name': 'test.a'})
def _test_tagdb(self, db):
# query that shouldn't match anything
db.del_series('test.a;blah=blah;hello=tiger')
result = db.get_series('test.a;blah=blah;hello=tiger')
self.assertEqual(result, None)
# tag a series
result = db.tag_series('test.a;hello=tiger;blah=blah')
self.assertEqual(result, 'test.a;blah=blah;hello=tiger')
# get series details
result = db.get_series('test.a;blah=blah;hello=tiger')
self.assertIsInstance(result, TaggedSeries)
self.assertEqual(result.metric, 'test.a')
self.assertEqual(result.tags, {'blah': 'blah', 'hello': 'tiger', 'name': 'test.a'})
# tag the same series again
result = db.tag_series('test.a;blah=blah;hello=tiger')
self.assertEqual(result, 'test.a;blah=blah;hello=tiger')
# tag another series
result = db.tag_series('test.a;blah=blah;hello=lion')
self.assertEqual(result, 'test.a;blah=blah;hello=lion')
# get list of tags
result = db.list_tags()
tagList = [tag for tag in result if tag['tag'] in ['name', 'hello', 'blah']]
self.assertEqual(len(tagList), 3)
self.assertEqual(tagList[0]['tag'], 'blah')
self.assertEqual(tagList[1]['tag'], 'hello')
self.assertEqual(tagList[2]['tag'], 'name')
# get filtered list of tags
result = db.list_tags(tagFilter='hello|bla')
tagList = [tag for tag in result if tag['tag'] in ['name', 'hello', 'blah']]
self.assertEqual(len(tagList), 2)
self.assertEqual(tagList[0]['tag'], 'blah')
self.assertEqual(tagList[1]['tag'], 'hello')
# get filtered & limited list of tags
result = db.list_tags(tagFilter='hello|bla', limit=1)
tagList = [tag for tag in result if tag['tag'] in ['name', 'hello', 'blah']]
self.assertEqual(len(tagList), 1)
self.assertEqual(tagList[0]['tag'], 'blah')
# get tag & list of values
result = db.get_tag('hello')
self.assertEqual(result['tag'], 'hello')
valueList = [value for value in result['values'] if value['value'] in ['tiger', 'lion']]
self.assertEqual(len(valueList), 2)
self.assertEqual(valueList[0]['value'], 'lion')
self.assertEqual(valueList[1]['value'], 'tiger')
# get tag & limited list of values
result = db.get_tag('hello', limit=1)
self.assertEqual(result['tag'], 'hello')
valueList = [value for value in result['values'] if value['value'] in ['tiger', 'lion']]
self.assertEqual(len(valueList), 1)
self.assertEqual(valueList[0]['value'], 'lion')
# get tag & filtered list of values (match)
result = db.get_tag('hello', valueFilter='tig')
self.assertEqual(result['tag'], 'hello')
valueList = [value for value in result['values'] if value['value'] in ['tiger', 'lion']]
self.assertEqual(len(valueList), 1)
self.assertEqual(valueList[0]['value'], 'tiger')
# get tag & filtered list of values (no match)
result = db.get_tag('hello', valueFilter='^tigr')
self.assertEqual(result['tag'], 'hello')
valueList = [value for value in result['values'] if value['value'] in ['tiger', 'lion']]
self.assertEqual(len(valueList), 0)
# get nonexistent tag
result = db.get_tag('notarealtag')
self.assertIsNone(result)
# basic find
result = db.find_series(['hello=tiger'])
self.assertEqual(result, ['test.a;blah=blah;hello=tiger'])
# find with regex
result = db.find_series(['blah=~b.*', 'hello=~^tiger', 'test=~.*'])
self.assertEqual(result, ['test.a;blah=blah;hello=tiger'])
# find with not regex
result = db.find_series(['blah!=~$', 'hello=~tiger', 'test!=~.+'])
self.assertEqual(result, ['test.a;blah=blah;hello=tiger'])
result = db.find_series(['hello=~lion', 'blah!=~$'])
self.assertEqual(result, ['test.a;blah=blah;hello=lion'])
# find with not equal
result = db.find_series(['hello=tiger', 'blah!=blah'])
self.assertEqual(result, [])
result = db.find_series(['hello=tiger', 'blah!=foo'])
self.assertEqual(result, ['test.a;blah=blah;hello=tiger'])
result = db.find_series(['hello=tiger', 'blah!='])
self.assertEqual(result, ['test.a;blah=blah;hello=tiger'])
result = db.find_series(['blah!=', 'hello!='])
self.assertEqual(result, ['test.a;blah=blah;hello=lion', 'test.a;blah=blah;hello=tiger'])
# complex find
result = db.find_series(['hello=~lion|tiger', 'blah!=foo'])
self.assertEqual(result, ['test.a;blah=blah;hello=lion', 'test.a;blah=blah;hello=tiger'])
# add series without 'hello' tag
result = db.tag_series('test.b;blah=blah')
self.assertEqual(result, 'test.b;blah=blah')
# find series without tag
result = db.find_series(['name=test.b', 'hello='])
self.assertEqual(result, ['test.b;blah=blah'])
# find that results in no matched values
result = db.find_series(['blah=~foo'])
self.assertEqual(result, [])
# find with invalid tagspec
with self.assertRaises(ValueError):
db.find_series('test')
# find with no specs that require non-empty match
with self.assertRaises(ValueError):
db.find_series('test=')
# tag multiple series
result = db.tag_multi_series([
'test.a;blah=blah;hello=lion',
'test.b;hello=lion;blah=blah',
'test.c;blah=blah;hello=lion',
])
self.assertEqual(result, [
'test.a;blah=blah;hello=lion',
'test.b;blah=blah;hello=lion',
'test.c;blah=blah;hello=lion',
])
# delete series we added
self.assertTrue(db.del_series('test.a;blah=blah;hello=tiger'))
self.assertTrue(db.del_series('test.a;blah=blah;hello=lion'))
self.assertTrue(db.del_multi_series([
'test.b;blah=blah;hello=lion',
'test.c;blah=blah;hello=lion',
]))
def test_local_tagdb(self):
return self._test_tagdb(LocalDatabaseTagDB(settings))
def test_redis_tagdb(self):
return self._test_tagdb(RedisTagDB(settings))
def test_tagdb_autocomplete(self):
self.maxDiff = None
db = LocalDatabaseTagDB(settings)
self._test_autocomplete(db, 'graphite.tags.localdatabase.LocalDatabaseTagDB.find_series')
def _test_autocomplete(self, db, patch_target):
search_exprs = ['name=test.a']
find_result = [('test.a;tag1=value1.%3d;tag2=value2.%3d' % (i, 201 - i)) for i in range(1,201)]
def mock_find_series(self, exprs, requestContext=None):
if search_exprs[0] not in exprs:
raise Exception('Unexpected exprs %s' % str(exprs))
return find_result
with patch(patch_target, mock_find_series):
result = db.auto_complete_tags(search_exprs)
self.assertEqual(result, [
'tag1',
'tag2',
])
result = db.auto_complete_tags(search_exprs, limit=1)
self.assertEqual(result, [
'tag1',
])
result = db.auto_complete_values(search_exprs, 'tag2')
self.assertEqual(result, [('value2.%3d' % i) for i in range(1,101)])
result = db.auto_complete_values(search_exprs, 'tag2', limit=50)
self.assertEqual(result, [('value2.%3d' % i) for i in range(1,51)])
result = db.auto_complete_values(search_exprs, 'tag1', 'value1.1')
self.assertEqual(result, [('value1.%3d' % i) for i in range(100,200)])
result = db.auto_complete_values(search_exprs, 'tag1', 'value1.1', limit=50)
self.assertEqual(result, [('value1.%3d' % i) for i in range(100,150)])
result = db.auto_complete_values(search_exprs, 'nonexistenttag1', 'value1.1')
self.assertEqual(result, [])
find_result = [('test.a;tag1=value1.%3d;tag2=value2.%3d' % (i // 2, (201 - i) // 2)) for i in range(2,202)]
result = db.auto_complete_values(search_exprs, 'tag1', 'value1.', limit=50)
self.assertEqual(result, [('value1.%3d' % i) for i in range(1,51)])
def test_find_series_cached(self):
mockCache = Mock()
mockCache.get.return_value = ['test.a;blah=blah;hello=tiger']
result = LocalDatabaseTagDB(settings, cache=mockCache).find_series(['name=test.a','hello=tiger'])
self.assertEqual(mockCache.get.call_count, 1)
self.assertEqual(mockCache.get.call_args[0][0], 'TagDB.find_series:hello=tiger:name=test.a')
self.assertEqual(result, ['test.a;blah=blah;hello=tiger'])
def test_tagdb_cached(self):
mockCache = Mock()
mockCache.get.return_value = []
mockLog = Mock()
result = LocalDatabaseTagDB(settings, cache=mockCache, log=mockLog).find_series(['tag2=value2', 'tag1=value1'])
self.assertEqual(mockCache.get.call_count, 1)
self.assertEqual(mockCache.get.call_args[0][0], 'TagDB.find_series:tag1=value1:tag2=value2')
self.assertEqual(result, [])
self.assertEqual(mockLog.info.call_count, 1)
self.assertRegexpMatches(
mockLog.info.call_args[0][0],
        r'graphite\.tags\.localdatabase\.LocalDatabaseTagDB\.find_series :: completed \(cached\) in [-.e0-9]+s'
)
def test_http_tagdb(self):
# test http tagdb using django client
db = HttpTagDB(settings)
db.base_url = reverse('tagList').replace('/tags', '')
db.username = ''
db.password = ''
# helper class to mock urllib3 response object
class mockResponse(object):
def __init__(self, status, data):
self.status = status
self.data = data
# mock http request that forwards requests using django client
def mockRequest(method, url, fields=None, headers=None, timeout=None):
if db.username and db.password:
self.assertEqual(headers, {'Authorization': 'Basic dGVzdDp0ZXN0\n'})
else:
self.assertEqual(headers, {})
req_fields = {}
for (field, value) in fields:
if field in req_fields:
req_fields[field].append(value)
else:
req_fields[field] = [value]
if method == 'POST':
result = self.client.post(url, req_fields)
elif method == 'GET':
result = self.client.get(url, req_fields)
else:
raise Exception('Invalid HTTP method %s' % method)
return mockResponse(result.status_code, result.content)
# use mockRequest to send http requests to live django running configured tagdb
with patch('graphite.http_pool.http.request', mockRequest):
self._test_tagdb(db)
with self.assertRaisesRegexp(Exception, 'HTTP Error from remote tagdb: 405'):
db.get_tag('delSeries')
db.username = 'test'
db.password = 'test'
result = db.tag_series('test.a;hello=tiger;blah=blah')
self.assertEqual(result, 'test.a;blah=blah;hello=tiger')
result = db.list_values('hello')
valueList = [value for value in result if value['value'] in ['tiger', 'lion']]
self.assertEqual(len(valueList), 1)
self.assertEqual(valueList[0]['value'], 'tiger')
result = db.list_values('notarealtag')
self.assertEqual(result, [])
self.assertTrue(db.del_series('test.a;blah=blah;hello=tiger'))
# test auto complete forwarding to remote host
with self.settings(TAGDB_HTTP_AUTOCOMPLETE=True):
self.maxDiff = None
self._test_autocomplete(db, settings.TAGDB + '.find_series')
# test auto complete using find_series
with self.settings(TAGDB_HTTP_AUTOCOMPLETE=False):
self._test_autocomplete(db, settings.TAGDB + '.find_series')
def test_tag_views(self):
url = reverse('tagList')
## tagSeries
# get should fail
response = self.client.get(url + '/tagSeries', {'path': 'test.a;hello=tiger;blah=blah'})
self.assertEqual(response.status_code, 405)
# post without path should fail
response = self.client.post(url + '/tagSeries', {})
self.assertEqual(response.status_code, 400)
self.assertEqual(response['Content-Type'], 'application/json')
# tagging a series should succeed
expected = 'test.a;blah=blah;hello=tiger'
response = self.client.post(url + '/tagSeries', {'path': 'test.a;hello=tiger;blah=blah'})
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
self.assertEqual(response.content, json_bytes(expected, sort_keys=True))
## list tags
# put should fail
response = self.client.put(url, {})
self.assertEqual(response.status_code, 405)
# filtered list
expected = [{"tag": "hello"}]
response = self.client.get(url, {'filter': 'hello$'})
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
result = json.loads(response.content)
self.assertEqual(len(result), len(expected))
self.assertEqual(result[0]['tag'], expected[0]['tag'])
# pretty output
response = self.client.get(url, {'filter': 'hello$', 'pretty': 1})
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
result = json.loads(response.content)
self.assertEqual(len(result), len(expected))
self.assertEqual(result[0]['tag'], expected[0]['tag'])
## tag details
# put should fail
response = self.client.put(url + '/hello', {})
self.assertEqual(response.status_code, 405)
expected = {"tag": "hello", "values": [{"count": 1, "value": "tiger"}]}
response = self.client.get(url + '/hello', {'filter': 'tiger$'})
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
result = json.loads(response.content)
self.assertEqual(result['tag'], expected['tag'])
self.assertEqual(len(result['values']), len(expected['values']))
self.assertEqual(result['values'][0]['count'], expected['values'][0]['count'])
self.assertEqual(result['values'][0]['value'], expected['values'][0]['value'])
# pretty output
response = self.client.get(url + '/hello', {'filter': 'tiger$', 'pretty': 1})
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
result = json.loads(response.content)
self.assertEqual(result['tag'], expected['tag'])
self.assertEqual(len(result['values']), len(expected['values']))
self.assertEqual(result['values'][0]['count'], expected['values'][0]['count'])
self.assertEqual(result['values'][0]['value'], expected['values'][0]['value'])
## findSeries
# put should fail
response = self.client.put(url + '/findSeries', {})
self.assertEqual(response.status_code, 405)
# expr is required
response = self.client.post(url + '/findSeries', {})
self.assertEqual(response.status_code, 400)
self.assertEqual(response['Content-Type'], 'application/json')
# basic find
expected = ['test.a;blah=blah;hello=tiger']
response = self.client.get(url + '/findSeries?expr[]=name=test.a&expr[]=hello=tiger&expr[]=blah=blah&pretty=1')
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
self.assertEqual(response.content, json_bytes(expected, indent=2, sort_keys=True))
# tag another series
expected = 'test.a;blah=blah;hello=lion'
response = self.client.post(url + '/tagSeries', {'path': 'test.a;hello=lion;blah=blah'})
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
self.assertEqual(response.content, json_bytes(expected, sort_keys=True))
## autocomplete tags
response = self.client.put(url + '/autoComplete/tags', {})
self.assertEqual(response.status_code, 405)
expected = [
'hello',
]
response = self.client.get(url + '/autoComplete/tags?tagPrefix=hello&pretty=1')
self.assertEqual(response['Content-Type'], 'application/json')
self.assertEqual(response.content, json_bytes(expected, indent=2, sort_keys=True))
expected = [
'blah',
'hello',
]
response = self.client.get(url + '/autoComplete/tags?expr[]=name=test.a&pretty=1')
self.assertEqual(response['Content-Type'], 'application/json')
self.assertEqual(response.content, json_bytes(expected, indent=2, sort_keys=True))
expected = [
'hello',
]
response = self.client.get(url + '/autoComplete/tags?expr=name=test.a&tagPrefix=hell&pretty=1')
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
self.assertEqual(response.content, json_bytes(expected, indent=2, sort_keys=True))
## autocomplete values
response = self.client.put(url + '/autoComplete/values', {})
self.assertEqual(response.status_code, 405)
expected = {'error': 'no tag specified'}
response = self.client.get(url + '/autoComplete/values', {})
self.assertEqual(response.status_code, 400)
self.assertEqual(response['Content-Type'], 'application/json')
self.assertEqual(response.content, json_bytes(expected))
expected = ['lion','tiger']
response = self.client.get(url + '/autoComplete/values?tag=hello&pretty=1')
self.assertEqual(response['Content-Type'], 'application/json')
self.assertEqual(response.content, json_bytes(expected, indent=2, sort_keys=True))
expected = ['lion','tiger']
response = self.client.get(url + '/autoComplete/values?expr[]=name=test.a&tag=hello&pretty=1')
self.assertEqual(response['Content-Type'], 'application/json')
self.assertEqual(response.content, json_bytes(expected, indent=2, sort_keys=True))
expected = ['lion']
response = self.client.get(url + '/autoComplete/values?expr=name=test.a&tag=hello&valuePrefix=li&pretty=1')
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
self.assertEqual(response.content, json_bytes(expected, indent=2, sort_keys=True))
## delSeries
# PUT should fail
response = self.client.put(url + '/delSeries', {})
self.assertEqual(response.status_code, 405)
# path is required
response = self.client.post(url + '/delSeries', {})
self.assertEqual(response.status_code, 400)
self.assertEqual(response['Content-Type'], 'application/json')
# delete first series we added
expected = True
response = self.client.post(url + '/delSeries', {'path': 'test.a;blah=blah;hello=tiger'})
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
self.assertEqual(response.content, json_bytes(expected, sort_keys=True))
# delete second series
expected = True
response = self.client.post(url + '/delSeries', {'path': 'test.a;blah=blah;hello=lion'})
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
self.assertEqual(response.content, json_bytes(expected, sort_keys=True))
# delete nonexistent series
expected = True
response = self.client.post(url + '/delSeries', {'path': 'test.a;blah=blah;hello=lion'})
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
self.assertEqual(response.content, json_bytes(expected, sort_keys=True))
# find nonexistent series
expected = []
response = self.client.get(url + '/findSeries?expr=name=test.a&expr=hello=tiger&expr=blah=blah')
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
self.assertEqual(response.content, json_bytes(expected, sort_keys=True))
# tag multiple series
# get should fail
response = self.client.get(url + '/tagMultiSeries', {'path': 'test.a;hello=tiger;blah=blah'})
self.assertEqual(response.status_code, 405)
# post without path should fail
response = self.client.post(url + '/tagMultiSeries', {})
self.assertEqual(response.status_code, 400)
self.assertEqual(response['Content-Type'], 'application/json')
# multiple path should succeed
expected = [
'test.a;blah=blah;hello=tiger',
'test.b;blah=blah;hello=tiger',
]
response = self.client.post(url + '/tagMultiSeries', {
'path': [
'test.a;hello=tiger;blah=blah',
'test.b;hello=tiger;blah=blah',
],
'pretty': '1',
})
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
self.assertEqual(response.content, json_bytes(expected, indent=2, sort_keys=True))
# multiple path[] should succeed
expected = [
'test.a;blah=blah;hello=tiger',
'test.b;blah=blah;hello=tiger',
]
response = self.client.post(url + '/tagMultiSeries', {
'path[]': [
'test.a;hello=tiger;blah=blah',
'test.b;hello=tiger;blah=blah',
],
'pretty': '1',
})
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
self.assertEqual(response.content, json_bytes(expected, indent=2, sort_keys=True))
# remove multiple series
expected = True
response = self.client.post(url + '/delSeries', {
'path': [
'test.a;hello=tiger;blah=blah',
'test.b;hello=tiger;blah=blah',
],
'pretty': '1',
})
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
self.assertEqual(response.content, json_bytes(expected, indent=2, sort_keys=True))
expected = True
response = self.client.post(url + '/delSeries', {
'path[]': [
'test.a;hello=tiger;blah=blah',
'test.b;hello=tiger;blah=blah',
],
'pretty': '1',
})
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
self.assertEqual(response.content, json_bytes(expected, indent=2, sort_keys=True))
| {
"repo_name": "DanCech/graphite-web",
"path": "webapp/tests/test_tags.py",
"copies": "1",
"size": "23686",
"license": "apache-2.0",
"hash": 9024469980847303000,
"line_mean": 35.8940809969,
"line_max": 115,
"alpha_frac": 0.6615300177,
"autogenerated": false,
"ratio": 3.44123202092111,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9579358966320244,
"avg_score": 0.00468061446017333,
"num_lines": 642
} |
# from __future__ import(absolute_import, division)
# Import the backtrader platform
import backtrader as bt
from org.tradesafe.db import sqlite_db as db
import pandas as pd
from org.tradesafe.data.TsDataFeed import PandasDirectDataX
import talib
import numpy as np
# Create a Stratey
class TestStrategy(bt.Strategy):
params = (
('maperiod', 60),
('stake', 10),
)
def log(self, txt, dt=None):
        ''' Logging function for this strategy'''
dt = dt or self.datas[0].datetime.date(0)
print('%s, %s' % (dt.isoformat(), txt))
def __init__(self):
# print self.data.close.array
# Keep a reference to the "close" line in the data[0] dataseries
self.dataclose = self.datas[0].close
# Set the sizer stake from the params
# self.sizer.setsizing(self.params.stake)
# To keep track of pending orders and buy price/commission
self.order = None
self.buyprice = None
self.buycomm = None
# Add a MovingAverageSimple indicator
self.sma = bt.indicators.SimpleMovingAverage(
self.datas[0], period=self.params.maperiod)
self.ma5 = bt.talib.SMA(self.datas[0].high, timeperiod=5)
        print(type(self.ma5))
        # self.ma5 = talib.SMA(np.array(self.data.high.array), timeperiod=5)
        # print(type(self.ma5))
# Indicators for the plotting show
bt.indicators.ExponentialMovingAverage(self.datas[0], period=25)
bt.indicators.WeightedMovingAverage(self.datas[0], period=25,
subplot=True)
bt.indicators.StochasticSlow(self.datas[0])
bt.indicators.MACDHisto(self.datas[0])
rsi = bt.indicators.RSI(self.datas[0])
bt.indicators.SmoothedMovingAverage(rsi, period=10)
bt.indicators.ATR(self.datas[0], plot=False)
def notify_order(self, order):
if order.status in [order.Submitted, order.Accepted]:
# Buy/Sell order submitted/accepted to/by broker - Nothing to do
return
# Check if an order has been completed
# Attention: broker could reject order if not enougth cash
if order.status in [order.Completed, order.Canceled, order.Margin]:
if order.isbuy():
self.log(
'BUY EXECUTED, Price: %.2f, Cost: %.2f, Comm %.2f' %
(order.executed.price,
order.executed.value,
order.executed.comm))
self.buyprice = order.executed.price
self.buycomm = order.executed.comm
else: # Sell
self.log('SELL EXECUTED, Price: %.2f, Cost: %.2f, Comm %.2f' %
(order.executed.price,
order.executed.value,
order.executed.comm))
self.bar_executed = len(self)
# Write down: no pending order
self.order = None
def notify_trade(self, trade):
if not trade.isclosed:
return
self.log('OPERATION PROFIT, GROSS %.2f, NET %.2f' %
(trade.pnl, trade.pnlcomm))
def next(self):
# Simply log the closing price of the series from the reference
# self.log('Close, %.2f' % self.dataclose[0])
# Check if an order is pending ... if yes, we cannot send a 2nd one
if self.order:
return
# Check if we are in the market
if not self.position:
# Not yet ... we MIGHT BUY if ...
if self.dataclose[0] > self.sma[0]:
# BUY, BUY, BUY!!! (with all possible default parameters)
self.log('BUY CREATE, %.2f' % self.dataclose[0])
# Keep track of the created order to avoid a 2nd order
self.order = self.buy()
else:
if self.dataclose[0] < self.sma[0]:
# SELL, SELL, SELL!!! (with all possible default parameters)
self.log('SELL CREATE, %.2f' % self.dataclose[0])
# Keep track of the created order to avoid a 2nd order
self.order = self.sell()
# if (self.dataclose[0]-self.sma[0])/self.sma[0] > 0.2:
# self.log('SELL CREATE, %.2f' % self.dataclose[0])
# self.order = self.sell()
if __name__ == '__main__':
# Create a cerebro entity
cerebro = bt.Cerebro()
cerebro.addsizer(bt.sizers.SizerFix, stake=2000)
# Datas are in a subfolder of the samples. Need to find where the script is
# because it could have been called from anywhere
# modpath = os.path.dirname(os.path.abspath(sys.argv[0]))
# datapath = os.path.join(modpath, '../../datas/orcl-1995-2014.txt')
conn = db.get_history_data_db('D')
df = pd.read_sql_query(
"select * from history_data where code='%s' order by date([date]) asc" % '600526', conn)
# print df.head()
# Add the Data Feed to Cerebro
data = PandasDirectDataX(dataname=df)
cerebro.adddata(data)
# Set our desired cash start
cerebro.broker.setcash(100000.0)
cerebro.broker.setcommission(commission=0.008)
# Add a strategy
cerebro.addstrategy(TestStrategy)
# Print out the starting conditions
print('Starting Portfolio Value: %.2f' % cerebro.broker.getvalue())
# Run over everything
cerebro.run()
# Print out the final result
    print(dir(cerebro.strats))
print('Final Portfolio Value: %.2f' % cerebro.broker.getvalue())
    cerebro.plot()
| {
"repo_name": "shenbai/tradesafe",
"path": "org/tradesafe/backtrade.py",
"copies": "1",
"size": "5530",
"license": "mit",
"hash": 6943727822876604000,
"line_mean": 35.8733333333,
"line_max": 96,
"alpha_frac": 0.5895117541,
"autogenerated": false,
"ratio": 3.5862516212710767,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.46757633753710764,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division
import unittest
import itertools
import random
import time
import chainlet.driver
from chainlet_unittests.utility import Adder, Buffer, MultiprocessBuffer, produce
class DriverMixin(object):
driver_class = chainlet.driver.ChainDriver
buffer_class = Buffer
def test_drive_single(self):
"""Drive a single chain"""
driver = self.driver_class()
elements = [Adder(val) for val in (2, -2, 1E6, random.randint(-256, 256))]
for run_async in (False, True):
results = []
self.assertFalse(driver.mounts)
with self.subTest(run_async=run_async):
                for element_triple in itertools.product(elements, repeat=3):
                    initials = [0, 2, 1E6, -1232527]
                    expected = [initial + sum(element.value for element in element_triple) for initial in initials]
                    a, b, c = element_triple
buffer = self.buffer_class()
chain = produce(initials) >> a >> b >> c >> buffer
driver.mount(chain)
results.append([expected, buffer])
if run_async:
driver.start()
driver.start() # starting multiple times is allowed
time.sleep(0.05) # let the driver startup
while driver.running:
time.sleep(0.05)
else:
driver.run()
for expected, buffer in results:
self.assertEqual(expected, buffer.buffer)
class TestChainDriver(DriverMixin, unittest.TestCase):
driver_class = chainlet.driver.ChainDriver
class TestThreadedChainDriver(DriverMixin, unittest.TestCase):
driver_class = chainlet.driver.ThreadedChainDriver
class TestMultiprocessChainDriver(DriverMixin, unittest.TestCase):
driver_class = chainlet.driver.MultiprocessChainDriver
buffer_class = MultiprocessBuffer
| {
"repo_name": "maxfischer2781/chainlet",
"path": "chainlet_unittests/test_chainlet/test_driver.py",
"copies": "1",
"size": "1995",
"license": "mit",
"hash": 8530994657327950000,
"line_mean": 35.9444444444,
"line_max": 109,
"alpha_frac": 0.6025062657,
"autogenerated": false,
"ratio": 4.473094170403588,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0006173004409854248,
"num_lines": 54
} |
from __future__ import absolute_import, division
import unittest
import chainlet
from chainlet.primitives import link, neutral, bundle
from chainlet_unittests.utility import Adder
class ClosableLink(link.ChainLink):
def __init__(self):
self.closed = False
def close(self):
self.closed = True
@chainlet.genlet
def pingpong():
value = yield
while True:
value = yield value
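# Hedged sketch (not one of the test cases below): pingpong echoes every
# value it receives, so this chain is equivalent to adding 2 overall.
def _pingpong_demo():
    chain = Adder(1) >> pingpong() >> Adder(1)
    assert chain.send(3) == 5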
class TestClose(unittest.TestCase):
"""Closing chainlets to release resources"""
def test_link(self):
"""close basic links without side effects"""
for link_class in link.ChainLink, neutral.NeutralLink:
with self.subTest(link=link_class):
link_instance = link_class()
link_instance.close()
link_instance.close()
def test_chain(self):
"""close chain with children"""
pure_chain = ClosableLink() >> ClosableLink() >> ClosableLink() >> ClosableLink()
for linklet in pure_chain.elements:
self.assertFalse(linklet.closed)
pure_chain.close()
for linklet in pure_chain.elements:
            self.assertTrue(linklet.closed)
pure_chain.close()
for linklet in pure_chain.elements:
            self.assertTrue(linklet.closed)
def test_bundle(self):
"""close bundle with children"""
pure_bundle = bundle.Bundle((ClosableLink(), ClosableLink(), ClosableLink(), ClosableLink()))
for linklet in pure_bundle.elements:
self.assertFalse(linklet.closed)
pure_bundle.close()
for linklet in pure_bundle.elements:
self.assertTrue(linklet.close)
pure_bundle.close()
for linklet in pure_bundle.elements:
self.assertTrue(linklet.close)
def test_bundle_chain(self):
"""close bested chain and bundle with children"""
chain_bundle = ClosableLink() >> (ClosableLink(), ClosableLink() >> ClosableLink()) >> ClosableLink()
def get_elements(test_chain):
yield test_chain[0]
yield test_chain[1][0]
yield test_chain[1][1][0]
yield test_chain[1][1][1]
yield test_chain[2]
for linklet in get_elements(chain_bundle):
self.assertFalse(linklet.closed)
chain_bundle.close()
for linklet in get_elements(chain_bundle):
            self.assertTrue(linklet.closed)
chain_bundle.close()
for linklet in get_elements(chain_bundle):
            self.assertTrue(linklet.closed)
def test_generator(self):
generator_chain = Adder(1) >> pingpong() >> Adder(1)
for val in (-10, 0, 1, 3):
self.assertEqual(generator_chain.send(val), val + 2)
generator_chain.close()
with self.assertRaises(StopIteration):
generator_chain.send(1)
class TestContext(unittest.TestCase):
"""Context chainlets to manage resources"""
def test_link(self):
"""with basic links without side effects"""
for link_class in link.ChainLink, neutral.NeutralLink:
with self.subTest(link=link_class):
with link_class() as link_instance:
link_instance.close()
link_instance.close()
def test_chain(self):
"""with chain with children"""
with ClosableLink() >> ClosableLink() >> ClosableLink() >> ClosableLink() as pure_chain:
for linklet in pure_chain.elements:
self.assertFalse(linklet.closed)
for linklet in pure_chain.elements:
            self.assertTrue(linklet.closed)
pure_chain.close()
for linklet in pure_chain.elements:
            self.assertTrue(linklet.closed)
def test_bundle(self):
"""with bundle with children"""
with bundle.Bundle((ClosableLink(), ClosableLink(), ClosableLink(), ClosableLink())) as pure_bundle:
for linklet in pure_bundle.elements:
self.assertFalse(linklet.closed)
for linklet in pure_bundle.elements:
            self.assertTrue(linklet.closed)
pure_bundle.close()
for linklet in pure_bundle.elements:
            self.assertTrue(linklet.closed)
def test_bundle_chain(self):
"""with bested chain and bundle with children"""
def get_elements(test_chain):
yield test_chain[0]
yield test_chain[1][0]
yield test_chain[1][1][0]
yield test_chain[1][1][1]
yield test_chain[2]
with ClosableLink() >> (ClosableLink(), ClosableLink() >> ClosableLink()) >> ClosableLink() as chain_bundle:
for linklet in get_elements(chain_bundle):
self.assertFalse(linklet.closed)
for linklet in get_elements(chain_bundle):
            self.assertTrue(linklet.closed)
chain_bundle.close()
for linklet in get_elements(chain_bundle):
            self.assertTrue(linklet.closed)
def test_generator(self):
with Adder(1) >> pingpong() >> Adder(1) as generator_chain:
for val in (-10, 0, 1, 3):
self.assertEqual(generator_chain.send(val), val + 2)
with self.assertRaises(StopIteration):
generator_chain.send(1)
| {
"repo_name": "maxfischer2781/chainlet",
"path": "chainlet_unittests/test_interface/test_resources.py",
"copies": "1",
"size": "5227",
"license": "mit",
"hash": -6164474348300269000,
"line_mean": 36.0709219858,
"line_max": 116,
"alpha_frac": 0.6091448249,
"autogenerated": false,
"ratio": 3.980959634424981,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5090104459324981,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division
from array import array
import logging
from collections import defaultdict
from collections import namedtuple
from pyleus.storm import SimpleBolt
from bandwith_monitoring.access_log_generator import Request
log = logging.getLogger('traffic_aggregator')
Traffic = namedtuple("Traffic", "ip_address traffic")
class SlotsCounter(object):
def __init__(self, size):
self.slots = array('I', [0] * size)
self.counter = 0
def __repr__(self):
return "{0} : {1}".format(self.counter, self.slots)
class TrafficAggregatorBolt(SimpleBolt):
OUTPUT_FIELDS = Traffic
OPTIONS = ["time_window", "threshold"]
def initialize(self):
self.time_window = self.options["time_window"]
self.threshold = self.options["threshold"]
if self.time_window % self.conf.tick_tuple_freq != 0:
raise ValueError("Time window must be a multiple of"
" tick_tuple_freq_secs")
self.N = int(self.time_window / self.conf.tick_tuple_freq)
self.slot_counters = defaultdict(lambda: SlotsCounter(self.N))
self.curr = 0
def process_tick(self):
for ip_address, slcnt in self.slot_counters.iteritems():
if slcnt.counter > self.threshold:
log.debug(Traffic(ip_address, slcnt.counter))
self.emit(Traffic(ip_address, slcnt.counter))
self.advance_window()
def process_tuple(self, tup):
request = Request(*tup.values)
slcnt = self.slot_counters[request.ip_address]
slcnt.counter += request.size
slcnt.slots[self.curr] += request.size
def advance_window(self):
log.debug("----------------------------")
log.debug("BEFORE (curr={0}): {1}"
.format(self.curr, str(self.slot_counters)))
self.curr = (self.curr + 1) % self.N
for ip_address, slcnt in self.slot_counters.iteritems():
slcnt.counter -= slcnt.slots[self.curr]
slcnt.slots[self.curr] = 0
for ip_address in self.slot_counters.keys():
if self.slot_counters[ip_address].counter == 0:
del self.slot_counters[ip_address]
log.debug("AFTER (curr={0}): {1}"
.format(self.curr, self.slot_counters))
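# Hedged standalone sketch (sizes are made up; this mirrors what
# process_tuple and advance_window do, without the Storm machinery):
# the circular buffer keeps a rolling total over the last N ticks.
def _sliding_window_demo():
    cnt, curr, n = SlotsCounter(3), 0, 3
    for size in (10, 20, 30):           # one tick's worth of traffic each
        cnt.counter += size             # process_tuple: count the request
        cnt.slots[curr] += size
        curr = (curr + 1) % n           # advance_window: move to next slot
        cnt.counter -= cnt.slots[curr]  # expire the slot about to be reused
        cnt.slots[curr] = 0
    print(cnt)                          # 50 : array('I', [0, 20, 30])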
if __name__ == '__main__':
logging.basicConfig(
level=logging.DEBUG,
filename='/tmp/bandwith_monitoring_traffic_aggregator.log',
filemode='a',
)
TrafficAggregatorBolt().run()
| {
"repo_name": "ecanzonieri/pyleus",
"path": "examples/bandwith_monitoring/bandwith_monitoring/traffic_aggregator.py",
"copies": "9",
"size": "2541",
"license": "apache-2.0",
"hash": -932914074023619000,
"line_mean": 31.5769230769,
"line_max": 70,
"alpha_frac": 0.6119637938,
"autogenerated": false,
"ratio": 3.832579185520362,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 78
} |
from __future__ import absolute_import, division
from astropy.io import fits
import numpy as np
#-------------------------------------------------------------------------------
class InputError(Exception):
pass
#-------------------------------------------------------------------------------
class SuperDark(object):
def __init__(self, obs_list=[]):
self._check_input(obs_list)
hdu = fits.open(obs_list[0])
self.detector = hdu[0].header['DETECTOR']
self.segment = hdu[0].header['SEGMENT']
self.source_files = obs_list[:]
if self.detector == 'FUV':
self.shape = (1024, 16384)
elif self.detector == 'NUV':
self.shape = (1024, 1024)
else:
            raise ValueError('Detector {} not understood'.format(self.detector))
def _check_input(self, obs_list):
"""Verify that input datasets are all the same detector, segment"""
if len(obs_list) == 0:
raise InputError('Please supply a list of inputs')
for keyword in ['DETECTOR']:
firstval = fits.getval(obs_list[0], keyword, 0)
for obs in obs_list[1:]:
if fits.getval(obs, keyword, 0) != firstval:
raise InputError("Multiple values of {} found"
.format(keyword))
#-------------------------------------------------------------------------------
class NUVDark(SuperDark):
def __init__(self, obs_list):
SuperDark.__init__(self, obs_list)
self.xlim = (0, 1024)
self.ylim = (0, 1024)
self.dark = np.zeros(self.shape)
#-------------------------------------------------------------------------------
class FUVDark(SuperDark):
def __init__(self, obs_list):
SuperDark.__init__(self, obs_list)
xlim = {'FUVA':(1200, 15099),
'FUVB':(950, 15049)}
ylim = {'FUVA':(380, 680),
'FUVB':(440, 720)}
pha_lim = {'FUVA':(2, 23),
'FUVB':(2, 23)}
self.pha = pha_lim[self.segment]
self.xlim = xlim[self.segment]
self.ylim = ylim[self.segment]
self.dark_a = np.zeros(self.shape)
self.dark_b = np.zeros(self.shape)
#-------------------------------------------------------------------------------
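# Hedged usage sketch (file names are placeholders and real FITS exposures
# are required, so the calls are left commented out):
# dark = FUVDark(['exp1_corrtag_a.fits', 'exp2_corrtag_a.fits'])
# dark.shape -> (1024, 16384); dark.pha -> (2, 23) for segment FUVA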
| {
"repo_name": "mfixstsci/peewee4cosmo",
"path": "cosmo_peewee/dark/superdark.py",
"copies": "2",
"size": "2327",
"license": "bsd-3-clause",
"hash": -3536864252280277000,
"line_mean": 29.2207792208,
"line_max": 80,
"alpha_frac": 0.4443489471,
"autogenerated": false,
"ratio": 4.054006968641115,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5498355915741115,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division
from ._base import LearningRateFuncBase
import numpy as np
class DynamicStepSizeLR(LearningRateFuncBase):
def __init__(self, eta_0, gamma, k, threshold_0):
self.eta_0 = eta_0
self.threshold_0 = threshold_0
self.gamma = gamma
self.k = k
self.drop = 0
self.last_update = None
def apply(self, t, net):
self._calc_drop(t, net)
return self.eta_0 \
* pow(self.gamma, self.drop)
def _calc_drop(self, t, net):
latest_update = len(net.losses)
if self.last_update == latest_update:
# Already checked for this update.
pass
elif latest_update < self.k:
# Not enough data.
pass
else:
self.last_update = latest_update
mean_loss = np.mean(
net.losses[latest_update-self.k:latest_update, 3]
)
# Increase drop if the last mean loss of
# last k update is smaller than current threshold.
if mean_loss < self.threshold_0 * pow(self.gamma, self.drop):
self.drop += 1
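# Hedged usage sketch (_FakeNet is a stand-in, not part of nnet): the rate
# stays at eta_0 until the mean of the last k losses falls below
# threshold_0 * gamma**drop, after which it is multiplied by gamma again.
if __name__ == '__main__':
    class _FakeNet(object):
        losses = np.array([[0, 0, 0, loss] for loss in [1.0] * 5 + [0.05] * 5])
    lr = DynamicStepSizeLR(eta_0=0.1, gamma=0.5, k=5, threshold_0=0.1)
    print(lr.apply(t=10, net=_FakeNet()))  # 0.05 -- one drop has occurred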
| {
"repo_name": "zhaoyan1117/NeuralNet",
"path": "nnet/learning_rate_func/_dynamic_step_size_lr.py",
"copies": "1",
"size": "1178",
"license": "bsd-2-clause",
"hash": -7112429029324063000,
"line_mean": 28.45,
"line_max": 73,
"alpha_frac": 0.5568760611,
"autogenerated": false,
"ratio": 3.9006622516556293,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.49575383127556294,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division
from collections import Callable, Iterable
import warnings
import param
import numpy as np
import pandas as pd
import xarray as xr
import datashader as ds
import datashader.reductions as rd
import datashader.transfer_functions as tf
import dask.dataframe as dd
from param.parameterized import bothmethod
try:
from datashader.bundling import (directly_connect_edges as connect_edges,
hammer_bundle)
except:
hammer_bundle, connect_edges = object, object
from ..core import (Operation, Element, Dimension, NdOverlay,
CompositeOverlay, Dataset, Overlay)
from ..core.data import PandasInterface, XArrayInterface
from ..core.sheetcoords import BoundingBox
from ..core.util import (
LooseVersion, basestring, cftime_types, cftime_to_timestamp,
datetime_types, dt_to_int, get_param_values)
from ..element import (Image, Path, Curve, RGB, Graph, TriMesh, QuadMesh, Contours)
from ..streams import RangeXY, PlotSize
ds_version = LooseVersion(ds.__version__)
class LinkableOperation(Operation):
"""
Abstract baseclass for operations supporting linked inputs.
"""
link_inputs = param.Boolean(default=True, doc="""
By default, the link_inputs parameter is set to True so that
when applying an operation, backends that support linked
streams update RangeXY streams on the inputs of the operation.
Disable when you do not want the resulting plot to be
interactive, e.g. when trying to display an interactive plot a
second time.""")
class ResamplingOperation(LinkableOperation):
"""
Abstract baseclass for resampling operations
"""
dynamic = param.Boolean(default=True, doc="""
Enables dynamic processing by default.""")
expand = param.Boolean(default=True, doc="""
Whether the x_range and y_range should be allowed to expand
beyond the extent of the data. Setting this value to True is
useful for the case where you want to ensure a certain size of
output grid, e.g. if you are doing masking or other arithmetic
on the grids. A value of False ensures that the grid is only
just as large as it needs to be to contain the data, which will
be faster and use less memory if the resulting aggregate is
being overlaid on a much larger background.""")
height = param.Integer(default=400, doc="""
The height of the output image in pixels.""")
width = param.Integer(default=400, doc="""
The width of the output image in pixels.""")
x_range = param.NumericTuple(default=None, length=2, doc="""
The x_range as a tuple of min and max x-value. Auto-ranges
if set to None.""")
y_range = param.NumericTuple(default=None, length=2, doc="""
        The y_range as a tuple of min and max y-value. Auto-ranges
if set to None.""")
x_sampling = param.Number(default=None, doc="""
        Specifies the smallest allowed sampling interval along the x-axis.""")
y_sampling = param.Number(default=None, doc="""
Specifies the smallest allowed sampling interval along the y-axis.""")
target = param.ClassSelector(class_=Image, doc="""
A target Image which defines the desired x_range, y_range,
width and height.
""")
streams = param.List(default=[PlotSize, RangeXY], doc="""
List of streams that are applied if dynamic=True, allowing
for dynamic interaction with the plot.""")
element_type = param.ClassSelector(class_=(Dataset,), instantiate=False,
is_instance=False, default=Image,
doc="""
The type of the returned Elements, must be a 2D Dataset type.""")
precompute = param.Boolean(default=False, doc="""
Whether to apply precomputing operations. Precomputing can
speed up resampling operations by avoiding unnecessary
recomputation if the supplied element does not change between
calls. The cost of enabling this option is that the memory
used to represent this internal state is not freed between
calls.""")
@bothmethod
def instance(self_or_cls,**params):
inst = super(ResamplingOperation, self_or_cls).instance(**params)
inst._precomputed = {}
return inst
def _get_sampling(self, element, x, y):
target = self.p.target
if target:
x_range, y_range = target.range(x), target.range(y)
height, width = target.dimension_values(2, flat=False).shape
else:
if x is None or y is None:
x_range = self.p.x_range or (-0.5, 0.5)
y_range = self.p.y_range or (-0.5, 0.5)
else:
if self.p.expand or not self.p.x_range:
x_range = self.p.x_range or element.range(x)
else:
x0, x1 = self.p.x_range
ex0, ex1 = element.range(x)
x_range = (np.min([np.max([x0, ex0]), ex1]),
np.max([np.min([x1, ex1]), ex0]))
if self.p.expand or not self.p.y_range:
y_range = self.p.y_range or element.range(y)
else:
y0, y1 = self.p.y_range
ey0, ey1 = element.range(y)
y_range = (np.min([np.max([y0, ey0]), ey1]),
np.max([np.min([y1, ey1]), ey0]))
width, height = self.p.width, self.p.height
(xstart, xend), (ystart, yend) = x_range, y_range
xtype = 'numeric'
if isinstance(xstart, datetime_types) or isinstance(xend, datetime_types):
xstart, xend = dt_to_int(xstart, 'ns'), dt_to_int(xend, 'ns')
xtype = 'datetime'
elif not np.isfinite(xstart) and not np.isfinite(xend):
xstart, xend = 0, 0
if element.get_dimension_type(x) in datetime_types:
xtype = 'datetime'
x_range = (xstart, xend)
ytype = 'numeric'
if isinstance(ystart, datetime_types) or isinstance(yend, datetime_types):
ystart, yend = dt_to_int(ystart, 'ns'), dt_to_int(yend, 'ns')
ytype = 'datetime'
elif not np.isfinite(ystart) and not np.isfinite(yend):
ystart, yend = 0, 0
if element.get_dimension_type(y) in datetime_types:
ytype = 'datetime'
y_range = (ystart, yend)
# Compute highest allowed sampling density
xspan = xend - xstart
yspan = yend - ystart
if self.p.x_sampling:
width = int(min([(xspan/self.p.x_sampling), width]))
if self.p.y_sampling:
height = int(min([(yspan/self.p.y_sampling), height]))
if xstart == xend or width == 0:
xunit, width = 0, 0
else:
xunit = float(xspan)/width
if ystart == yend or height == 0:
yunit, height = 0, 0
else:
yunit = float(yspan)/height
xs, ys = (np.linspace(xstart+xunit/2., xend-xunit/2., width),
np.linspace(ystart+yunit/2., yend-yunit/2., height))
return (x_range, y_range), (xs, ys), (width, height), (xtype, ytype)
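# --- Hedged sketch of the bin-centre computation above (illustrative) ---
# For a range (start, end) divided into n bins of width unit = span/n,
# _get_sampling places the sample coordinates at the centre of each bin,
# i.e. offset by unit/2 from either edge.
def _demo_bin_centres(start=0.0, end=10.0, n=5):
    import numpy as np
    unit = float(end - start) / n
    # array([1., 3., 5., 7., 9.]) for the default arguments
    return np.linspace(start + unit / 2., end - unit / 2., n)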
class AggregationOperation(ResamplingOperation):
"""
AggregationOperation extends the ResamplingOperation defining an
aggregator parameter used to define a datashader Reduction.
"""
aggregator = param.ClassSelector(class_=(ds.reductions.Reduction, basestring),
default=ds.count(), doc="""
Datashader reduction function used for aggregating the data.
The aggregator may also define a column to aggregate; if
no column is defined the first value dimension of the element
will be used. May also be defined as a string.""")
_agg_methods = {
'any': rd.any,
'count': rd.count,
'first': rd.first,
'last': rd.last,
'mode': rd.mode,
'mean': rd.mean,
'sum': rd.sum,
'var': rd.var,
'std': rd.std,
'min': rd.min,
'max': rd.max
}
def _get_aggregator(self, element, add_field=True):
agg = self.p.aggregator
if isinstance(agg, basestring):
if agg not in self._agg_methods:
agg_methods = sorted(self._agg_methods)
raise ValueError('Aggregation method %r is not known; '
'aggregator must be one of: %r' %
(agg, agg_methods))
agg = self._agg_methods[agg]()
elements = element.traverse(lambda x: x, [Element])
if add_field and agg.column is None and not isinstance(agg, (rd.count, rd.any)):
if not elements:
raise ValueError('Could not find any elements to apply '
'%s operation to.' % type(self).__name__)
inner_element = elements[0]
if isinstance(inner_element, TriMesh) and inner_element.nodes.vdims:
field = inner_element.nodes.vdims[0].name
elif inner_element.vdims:
field = inner_element.vdims[0].name
elif isinstance(element, NdOverlay):
field = element.kdims[0].name
else:
raise ValueError('Could not determine dimension to apply '
'%s operation to. Declare the dimension '
'to aggregate as part of the datashader '
'aggregator.' % type(self).__name__)
agg = type(agg)(field)
return agg
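# --- Hedged sketch: string vs. instance aggregators (illustrative) ---
# _get_aggregator accepts either a datashader Reduction instance or one of
# the string keys in _agg_methods; unknown strings raise a ValueError. The
# column name 'z' below is an assumption for illustration.
def _demo_string_aggregator():
    from_string = AggregationOperation._agg_methods['mean']('z')
    explicit = ds.mean('z')
    assert type(from_string) is type(explicit)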
class aggregate(AggregationOperation):
"""
aggregate implements 2D binning for any valid HoloViews Element
type using datashader. I.e., this operation turns a HoloViews
Element or overlay of Elements into an Image or an overlay of
Images by rasterizing it. This allows quickly aggregating large
datasets computing a fixed-sized representation independent
of the original dataset size.
By default it will simply count the number of values in each bin
but other aggregators can be supplied implementing mean, max, min
and other reduction operations.
The bins of the aggregate are defined by the width and height and
the x_range and y_range. If x_sampling or y_sampling are supplied
the operation will ensure that a bin is no smaller than the minimum
sampling distance by reducing the width and height when zoomed in
beyond the minimum sampling distance.
By default, the PlotSize stream is applied when this operation
is used dynamically, which means that the height and width
will automatically be set to match the inner dimensions of
the linked plot.
"""
@classmethod
def get_agg_data(cls, obj, category=None):
"""
Reduces any Overlay or NdOverlay of Elements into a single
xarray Dataset that can be aggregated.
"""
paths = []
if isinstance(obj, Graph):
obj = obj.edgepaths
kdims = list(obj.kdims)
vdims = list(obj.vdims)
dims = obj.dimensions()[:2]
if isinstance(obj, Path):
glyph = 'line'
for p in obj.split(datatype='dataframe'):
paths.append(p)
elif isinstance(obj, CompositeOverlay):
element = None
for key, el in obj.data.items():
x, y, element, glyph = cls.get_agg_data(el)
dims = (x, y)
df = PandasInterface.as_dframe(element)
if isinstance(obj, NdOverlay):
df = df.assign(**dict(zip(obj.dimensions('key', True), key)))
paths.append(df)
if element is None:
dims = None
else:
kdims += element.kdims
vdims = element.vdims
elif isinstance(obj, Element):
glyph = 'line' if isinstance(obj, Curve) else 'points'
paths.append(PandasInterface.as_dframe(obj))
if dims is None or len(dims) != 2:
return None, None, None, None
else:
x, y = dims
if len(paths) > 1:
if glyph == 'line':
path = paths[0][:1]
if isinstance(path, dd.DataFrame):
path = path.compute()
empty = path.copy()
empty.iloc[0, :] = (np.NaN,) * empty.shape[1]
paths = [elem for p in paths for elem in (p, empty)][:-1]
if all(isinstance(path, dd.DataFrame) for path in paths):
df = dd.concat(paths)
else:
paths = [p.compute() if isinstance(p, dd.DataFrame) else p for p in paths]
df = pd.concat(paths)
else:
df = paths[0] if paths else pd.DataFrame([], columns=[x.name, y.name])
if category and df[category].dtype.name != 'category':
df[category] = df[category].astype('category')
is_dask = isinstance(df, dd.DataFrame)
if any((not is_dask and len(df[d.name]) and isinstance(df[d.name].values[0], cftime_types)) or
df[d.name].dtype.kind == 'M' for d in (x, y)):
df = df.copy()
for d in (x, y):
vals = df[d.name]
if not is_dask and len(vals) and isinstance(vals.values[0], cftime_types):
vals = cftime_to_timestamp(vals, 'ns')
elif df[d.name].dtype.kind == 'M':
vals = vals.astype('datetime64[ns]')
else:
continue
df[d.name] = vals.astype('int64')
return x, y, Dataset(df, kdims=kdims, vdims=vdims), glyph
def _aggregate_ndoverlay(self, element, agg_fn):
"""
Optimized aggregation for NdOverlay objects by aggregating each
Element in an NdOverlay individually avoiding having to concatenate
    items in the NdOverlay. Works by summing the sum and count aggregates and
applying appropriate masking for NaN values. Mean aggregation
is also supported by dividing sum and count aggregates. count_cat
aggregates are grouped by the categorical dimension and a separate
aggregate for each category is generated.
"""
# Compute overall bounds
x, y = element.last.dimensions()[0:2]
info = self._get_sampling(element, x, y)
(x_range, y_range), (xs, ys), (width, height), (xtype, ytype) = info
if xtype == 'datetime':
x_range = tuple((np.array(x_range)/1e3).astype('datetime64[us]'))
if ytype == 'datetime':
y_range = tuple((np.array(y_range)/1e3).astype('datetime64[us]'))
agg_params = dict({k: v for k, v in dict(self.get_param_values(), **self.p).items()
if k in aggregate.params()},
x_range=x_range, y_range=y_range)
bbox = BoundingBox(points=[(x_range[0], y_range[0]), (x_range[1], y_range[1])])
# Optimize categorical counts by aggregating them individually
if isinstance(agg_fn, ds.count_cat):
agg_params.update(dict(dynamic=False, aggregator=ds.count()))
agg_fn1 = aggregate.instance(**agg_params)
if element.ndims == 1:
grouped = element
else:
grouped = element.groupby([agg_fn.column], container_type=NdOverlay,
group_type=NdOverlay)
groups = []
for k, v in grouped.items():
agg = agg_fn1(v)
groups.append((k, agg.clone(agg.data, bounds=bbox)))
return grouped.clone(groups)
# Create aggregate instance for sum, count operations, breaking mean
# into two aggregates
column = agg_fn.column or 'Count'
if isinstance(agg_fn, ds.mean):
agg_fn1 = aggregate.instance(**dict(agg_params, aggregator=ds.sum(column)))
agg_fn2 = aggregate.instance(**dict(agg_params, aggregator=ds.count()))
else:
agg_fn1 = aggregate.instance(**agg_params)
agg_fn2 = None
is_sum = isinstance(agg_fn1.aggregator, ds.sum)
# Accumulate into two aggregates and mask
agg, agg2, mask = None, None, None
mask = None
for v in element:
# Compute aggregates and mask
new_agg = agg_fn1.process_element(v, None)
if is_sum:
new_mask = np.isnan(new_agg.data[column].values)
new_agg.data = new_agg.data.fillna(0)
if agg_fn2:
new_agg2 = agg_fn2.process_element(v, None)
if agg is None:
agg = new_agg
if is_sum: mask = new_mask
if agg_fn2: agg2 = new_agg2
else:
agg.data += new_agg.data
if is_sum: mask &= new_mask
if agg_fn2: agg2.data += new_agg2.data
# Divide sum by count to compute mean
if agg2 is not None:
agg2.data.rename({'Count': agg_fn.column}, inplace=True)
with np.errstate(divide='ignore', invalid='ignore'):
agg.data /= agg2.data
        # Fill masked values with NaNs
if is_sum:
agg.data[column].values[mask] = np.NaN
return agg.clone(bounds=bbox)
def _process(self, element, key=None):
agg_fn = self._get_aggregator(element)
category = agg_fn.column if isinstance(agg_fn, ds.count_cat) else None
if (isinstance(element, NdOverlay) and
((isinstance(agg_fn, (ds.count, ds.sum, ds.mean)) and
(agg_fn.column is None or agg_fn.column not in element.kdims)) or
(isinstance(agg_fn, ds.count_cat) and agg_fn.column in element.kdims))):
return self._aggregate_ndoverlay(element, agg_fn)
if element._plot_id in self._precomputed:
x, y, data, glyph = self._precomputed[element._plot_id]
else:
x, y, data, glyph = self.get_agg_data(element, category)
if self.p.precompute:
self._precomputed[element._plot_id] = x, y, data, glyph
(x_range, y_range), (xs, ys), (width, height), (xtype, ytype) = self._get_sampling(element, x, y)
(x0, x1), (y0, y1) = x_range, y_range
if xtype == 'datetime':
x0, x1 = (np.array([x0, x1])/1e3).astype('datetime64[us]')
xs = (xs/1e3).astype('datetime64[us]')
if ytype == 'datetime':
y0, y1 = (np.array([y0, y1])/1e3).astype('datetime64[us]')
ys = (ys/1e3).astype('datetime64[us]')
bounds = (x0, y0, x1, y1)
params = dict(get_param_values(element), kdims=[x, y],
datatype=['xarray'], bounds=bounds)
column = agg_fn.column if agg_fn else None
if column:
dims = [d for d in element.dimensions('ranges') if d == column]
if not dims:
raise ValueError("Aggregation column %s not found on %s element. "
"Ensure the aggregator references an existing "
"dimension." % (column,element))
name = '%s Count' % column if isinstance(agg_fn, ds.count_cat) else column
vdims = [dims[0](name)]
else:
vdims = Dimension('Count')
params['vdims'] = vdims
if x is None or y is None or width == 0 or height == 0:
x = x.name if x else 'x'
            y = y.name if y else 'y'
xarray = xr.DataArray(np.full((height, width), np.NaN),
dims=[y, x], coords={x: xs, y: ys})
if width == 0:
params['xdensity'] = 1
if height == 0:
params['ydensity'] = 1
el = self.p.element_type(xarray, **params)
if isinstance(agg_fn, ds.count_cat):
vals = element.dimension_values(agg_fn.column, expanded=False)
dim = element.get_dimension(agg_fn.column)
return NdOverlay({v: el for v in vals}, dim)
return el
elif not len(data):
xarray = xr.DataArray(np.full((height, width), np.NaN),
dims=[y.name, x.name], coords={x.name: xs, y.name: ys})
return self.p.element_type(xarray, **params)
cvs = ds.Canvas(plot_width=width, plot_height=height,
x_range=x_range, y_range=y_range)
dfdata = PandasInterface.as_dframe(data)
agg = getattr(cvs, glyph)(dfdata, x.name, y.name, agg_fn)
if 'x_axis' in agg.coords and 'y_axis' in agg.coords:
agg = agg.rename({'x_axis': x, 'y_axis': y})
if xtype == 'datetime':
agg[x.name] = (agg[x.name]/1e3).astype('datetime64[us]')
if ytype == 'datetime':
agg[y.name] = (agg[y.name]/1e3).astype('datetime64[us]')
if agg.ndim == 2:
# Replacing x and y coordinates to avoid numerical precision issues
eldata = agg if ds_version > '0.5.0' else (xs, ys, agg.data)
return self.p.element_type(eldata, **params)
else:
layers = {}
for c in agg.coords[column].data:
cagg = agg.sel(**{column: c})
eldata = cagg if ds_version > '0.5.0' else (xs, ys, cagg.data)
layers[c] = self.p.element_type(eldata, **dict(params, vdims=vdims))
return NdOverlay(layers, kdims=[data.get_dimension(column)])
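# --- Hedged usage sketch for the aggregate operation ---
# The random point data below is an assumption for illustration; the call
# pattern mirrors the public HoloViews API, rasterizing the points into a
# fixed 200x200 Image whose 'Count' value dimension holds per-bin counts.
def _demo_aggregate():
    import numpy as np
    import holoviews as hv
    pts = hv.Points(np.random.randn(10000, 2))
    return aggregate(pts, width=200, height=200, dynamic=False)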
class regrid(AggregationOperation):
"""
regrid allows resampling a HoloViews Image type using specified
up- and downsampling functions defined using the aggregator and
interpolation parameters respectively. By default upsampling is
disabled to avoid unnecessarily upscaling an image that has to be
sent to the browser. Also disables expanding the image beyond its
original bounds avoiding unnecessarily padding the output array
with NaN values.
"""
aggregator = param.ClassSelector(default=ds.mean(),
class_=(ds.reductions.Reduction, basestring))
expand = param.Boolean(default=False, doc="""
Whether the x_range and y_range should be allowed to expand
beyond the extent of the data. Setting this value to True is
useful for the case where you want to ensure a certain size of
output grid, e.g. if you are doing masking or other arithmetic
on the grids. A value of False ensures that the grid is only
just as large as it needs to be to contain the data, which will
be faster and use less memory if the resulting aggregate is
being overlaid on a much larger background.""")
interpolation = param.ObjectSelector(default='nearest',
objects=['linear', 'nearest', 'bilinear', None, False], doc="""
Interpolation method""")
upsample = param.Boolean(default=False, doc="""
Whether to allow upsampling if the source array is smaller
than the requested array. Setting this value to True will
enable upsampling using the interpolation method, when the
requested width and height are larger than what is available
on the source grid. If upsampling is disabled (the default)
the width and height are clipped to what is available on the
source array.""")
def _get_xarrays(self, element, coords, xtype, ytype):
x, y = element.kdims
dims = [y.name, x.name]
irregular = any(element.interface.irregular(element, d)
for d in dims)
if irregular:
coord_dict = {x.name: (('y', 'x'), coords[0]),
y.name: (('y', 'x'), coords[1])}
else:
coord_dict = {x.name: coords[0], y.name: coords[1]}
arrays = {}
for vd in element.vdims:
if element.interface is XArrayInterface:
xarr = element.data[vd.name]
if 'datetime' in (xtype, ytype):
xarr = xarr.copy()
if dims != xarr.dims and not irregular:
xarr = xarr.transpose(*dims)
elif irregular:
arr = element.dimension_values(vd, flat=False)
xarr = xr.DataArray(arr, coords=coord_dict, dims=['y', 'x'])
else:
arr = element.dimension_values(vd, flat=False)
xarr = xr.DataArray(arr, coords=coord_dict, dims=dims)
if xtype == "datetime":
xarr[x.name] = [dt_to_int(v, 'ns') for v in xarr[x.name].values]
if ytype == "datetime":
xarr[y.name] = [dt_to_int(v, 'ns') for v in xarr[y.name].values]
arrays[vd.name] = xarr
return arrays
def _process(self, element, key=None):
if ds_version <= '0.5.0':
raise RuntimeError('regrid operation requires datashader>=0.6.0')
        # Compute coords, ranges and size
x, y = element.kdims
coords = tuple(element.dimension_values(d, expanded=False) for d in [x, y])
info = self._get_sampling(element, x, y)
(x_range, y_range), (xs, ys), (width, height), (xtype, ytype) = info
# Disable upsampling by clipping size and ranges
(xstart, xend), (ystart, yend) = (x_range, y_range)
xspan, yspan = (xend-xstart), (yend-ystart)
interp = self.p.interpolation or None
if interp == 'bilinear': interp = 'linear'
if not (self.p.upsample or interp is None) and self.p.target is None:
(x0, x1), (y0, y1) = element.range(0), element.range(1)
if isinstance(x0, datetime_types):
x0, x1 = dt_to_int(x0, 'ns'), dt_to_int(x1, 'ns')
if isinstance(y0, datetime_types):
y0, y1 = dt_to_int(y0, 'ns'), dt_to_int(y1, 'ns')
exspan, eyspan = (x1-x0), (y1-y0)
if np.isfinite(exspan) and exspan > 0:
width = min([int((xspan/exspan) * len(coords[0])), width])
else:
width = 0
if np.isfinite(eyspan) and eyspan > 0:
height = min([int((yspan/eyspan) * len(coords[1])), height])
else:
height = 0
xunit = float(xspan)/width if width else 0
yunit = float(yspan)/height if height else 0
xs, ys = (np.linspace(xstart+xunit/2., xend-xunit/2., width),
np.linspace(ystart+yunit/2., yend-yunit/2., height))
# Compute bounds (converting datetimes)
if xtype == 'datetime':
xstart, xend = (np.array([xstart, xend])/1e3).astype('datetime64[us]')
xs = (xs/1e3).astype('datetime64[us]')
if ytype == 'datetime':
ystart, yend = (np.array([ystart, yend])/1e3).astype('datetime64[us]')
ys = (ys/1e3).astype('datetime64[us]')
bbox = BoundingBox(points=[(xstart, ystart), (xend, yend)])
params = dict(bounds=bbox)
if width == 0 or height == 0:
if width == 0: params['xdensity'] = 1
if height == 0: params['ydensity'] = 1
return element.clone((xs, ys, np.zeros((height, width))), **params)
cvs = ds.Canvas(plot_width=width, plot_height=height,
x_range=x_range, y_range=y_range)
# Apply regridding to each value dimension
regridded = {}
arrays = self._get_xarrays(element, coords, xtype, ytype)
agg_fn = self._get_aggregator(element, add_field=False)
for vd, xarr in arrays.items():
rarray = cvs.raster(xarr, upsample_method=interp,
downsample_method=agg_fn)
# Convert datetime coordinates
if xtype == "datetime":
rarray[x.name] = (rarray[x.name]/1e3).astype('datetime64[us]')
if ytype == "datetime":
rarray[y.name] = (rarray[y.name]/1e3).astype('datetime64[us]')
regridded[vd] = rarray
regridded = xr.Dataset(regridded)
return element.clone(regridded, bounds=bbox, datatype=['xarray']+element.datatype)
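# --- Hedged usage sketch for regrid (data is illustrative) ---
# Downsamples an existing Image to 50x50 bins using the mean of the
# contributing pixels; upsampling stays disabled by default, so the output
# never exceeds the source resolution.
def _demo_regrid():
    import numpy as np
    import holoviews as hv
    img = hv.Image(np.random.rand(400, 400))
    return regrid(img, width=50, height=50, dynamic=False)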
class contours_rasterize(aggregate):
"""
Rasterizes the Contours element by weighting the aggregation by
    the iso-contour levels if a value dimension is defined, otherwise
    defaults to the 'any' aggregator.
"""
aggregator = param.ClassSelector(default=ds.mean(),
class_=(ds.reductions.Reduction, basestring))
def _get_aggregator(self, element, add_field=True):
agg = self.p.aggregator
if not element.vdims and agg.column is None and not isinstance(agg, (rd.count, rd.any)):
return ds.any()
return super(contours_rasterize, self)._get_aggregator(element, add_field)
class trimesh_rasterize(aggregate):
"""
Rasterize the TriMesh element using the supplied aggregator. If
    the TriMesh nodes or edges define a value dimension, it will plot
    filled and shaded polygons; otherwise it returns a wireframe of the
data.
"""
aggregator = param.ClassSelector(default=ds.mean(),
class_=(ds.reductions.Reduction, basestring))
interpolation = param.ObjectSelector(default='bilinear',
objects=['bilinear', 'linear', None, False], doc="""
The interpolation method to apply during rasterization.""")
def _precompute(self, element, agg):
from datashader.utils import mesh
if element.vdims and getattr(agg, 'column', None) not in element.nodes.vdims:
simplices = element.dframe([0, 1, 2, 3])
verts = element.nodes.dframe([0, 1])
elif element.nodes.vdims:
simplices = element.dframe([0, 1, 2])
verts = element.nodes.dframe([0, 1, 3])
for c, dtype in zip(simplices.columns[:3], simplices.dtypes):
if dtype.kind != 'i':
simplices[c] = simplices[c].astype('int')
return {'mesh': mesh(verts, simplices), 'simplices': simplices,
'vertices': verts}
def _precompute_wireframe(self, element, agg):
if hasattr(element, '_wireframe'):
segments = element._wireframe.data
else:
simplexes = element.array([0, 1, 2, 0]).astype('int')
verts = element.nodes.array([0, 1])
segments = pd.DataFrame(verts[simplexes].reshape(len(simplexes), -1),
columns=['x0', 'y0', 'x1', 'y1', 'x2', 'y2', 'x3', 'y3'])
element._wireframe = Dataset(segments, datatype=['dataframe', 'dask'])
return {'segments': segments}
def _process(self, element, key=None):
if isinstance(element, TriMesh):
x, y = element.nodes.kdims[:2]
else:
x, y = element.kdims
info = self._get_sampling(element, x, y)
(x_range, y_range), (xs, ys), (width, height), (xtype, ytype) = info
agg = self.p.aggregator
interp = self.p.interpolation or None
precompute = self.p.precompute
if interp == 'linear': interp = 'bilinear'
wireframe = False
if (not (element.vdims or (isinstance(element, TriMesh) and element.nodes.vdims))) and ds_version <= '0.6.9':
self.p.aggregator = ds.any() if isinstance(agg, ds.any) or agg == 'any' else ds.count()
return aggregate._process(self, element, key)
elif ((not interp and (isinstance(agg, (ds.any, ds.count)) or
agg in ['any', 'count']))
or not (element.vdims or element.nodes.vdims)):
wireframe = True
precompute = False # TriMesh itself caches wireframe
agg = self._get_aggregator(element) if isinstance(agg, (ds.any, ds.count)) else ds.any()
vdim = 'Count' if isinstance(agg, ds.count) else 'Any'
elif getattr(agg, 'column', None):
if agg.column in element.vdims:
vdim = element.get_dimension(agg.column)
elif isinstance(element, TriMesh) and agg.column in element.nodes.vdims:
vdim = element.nodes.get_dimension(agg.column)
else:
raise ValueError("Aggregation column %s not found on TriMesh element."
% agg.column)
else:
if isinstance(element, TriMesh) and element.nodes.vdims:
vdim = element.nodes.vdims[0]
else:
vdim = element.vdims[0]
agg = self._get_aggregator(element)
if element._plot_id in self._precomputed:
precomputed = self._precomputed[element._plot_id]
elif wireframe:
precomputed = self._precompute_wireframe(element, agg)
else:
precomputed = self._precompute(element, agg)
params = dict(get_param_values(element), kdims=[x, y],
datatype=['xarray'], vdims=[vdim])
if width == 0 or height == 0:
if width == 0: params['xdensity'] = 1
if height == 0: params['ydensity'] = 1
bounds = (x_range[0], y_range[0], x_range[1], y_range[1])
return Image((xs, ys, np.zeros((height, width))), bounds=bounds, **params)
if wireframe:
segments = precomputed['segments']
else:
simplices = precomputed['simplices']
pts = precomputed['vertices']
mesh = precomputed['mesh']
if precompute:
self._precomputed = {element._plot_id: precomputed}
cvs = ds.Canvas(plot_width=width, plot_height=height,
x_range=x_range, y_range=y_range)
if wireframe:
agg = cvs.line(segments, x=['x0', 'x1', 'x2', 'x3'],
y=['y0', 'y1', 'y2', 'y3'], axis=1,
agg=agg)
else:
interpolate = bool(self.p.interpolation)
agg = cvs.trimesh(pts, simplices, agg=agg,
interp=interpolate, mesh=mesh)
return Image(agg, **params)
class quadmesh_rasterize(trimesh_rasterize):
"""
Rasterize the QuadMesh element using the supplied aggregator.
Simply converts to a TriMesh and lets trimesh_rasterize
handle the actual rasterization.
"""
def _precompute(self, element, agg):
return super(quadmesh_rasterize, self)._precompute(element.trimesh(), agg)
class rasterize(AggregationOperation):
"""
Rasterize is a high-level operation that will rasterize any
Element or combination of Elements, aggregating them with the supplied
aggregator and interpolation method.
The default aggregation method depends on the type of Element but
usually defaults to the count of samples in each bin. Other
aggregators can be supplied implementing mean, max, min and other
reduction operations.
The bins of the aggregate are defined by the width and height and
the x_range and y_range. If x_sampling or y_sampling are supplied
the operation will ensure that a bin is no smaller than the minimum
sampling distance by reducing the width and height when zoomed in
beyond the minimum sampling distance.
By default, the PlotSize and RangeXY streams are applied when this
operation is used dynamically, which means that the width, height,
x_range and y_range will automatically be set to match the inner
dimensions of the linked plot and the ranges of the axes.
"""
aggregator = param.ClassSelector(class_=(ds.reductions.Reduction, basestring),
default=None)
interpolation = param.ObjectSelector(
default='bilinear', objects=['linear', 'nearest', 'bilinear', None, False], doc="""
The interpolation method to apply during rasterization.
        Defaults to bilinear interpolation; None and False are aliases
        of each other, disabling interpolation.""")
_transforms = [(Image, regrid),
(TriMesh, trimesh_rasterize),
(QuadMesh, quadmesh_rasterize),
(lambda x: (isinstance(x, NdOverlay) and
issubclass(x.type, Dataset)
and not issubclass(x.type, Image)),
aggregate),
(Contours, contours_rasterize),
(lambda x: (isinstance(x, Dataset) and
(not isinstance(x, Image))),
aggregate)]
def _process(self, element, key=None):
for predicate, transform in self._transforms:
op_params = dict({k: v for k, v in self.p.items()
if k in transform.params()
and not (v is None and k == 'aggregator')},
dynamic=False)
op = transform.instance(**op_params)
op._precomputed = self._precomputed
element = element.map(op, predicate)
self._precomputed = op._precomputed
return element
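# --- Hedged sketch: rasterize dispatch (data is illustrative) ---
# rasterize walks the (predicate, transform) pairs in _transforms and
# applies the first matching operation to each element, so one call can
# handle mixed overlays: the Image below goes through regrid while the raw
# Points go through aggregate.
def _demo_rasterize_dispatch():
    import numpy as np
    import holoviews as hv
    overlay = hv.Image(np.random.rand(10, 10)) * hv.Points(np.random.randn(100, 2))
    return rasterize(overlay, width=100, height=100, dynamic=False)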
class shade(LinkableOperation):
"""
shade applies a normalization function followed by colormapping to
an Image or NdOverlay of Images, returning an RGB Element.
The data must be in the form of a 2D or 3D DataArray, but NdOverlays
of 2D Images will be automatically converted to a 3D array.
In the 2D case data is normalized and colormapped, while a 3D
array representing categorical aggregates will be supplied a color
key for each category. The colormap (cmap) for the 2D case may be
supplied as an Iterable or a Callable.
"""
cmap = param.ClassSelector(class_=(Iterable, Callable, dict), doc="""
Iterable or callable which returns colors as hex colors
or web color names (as defined by datashader), to be used
for the colormap of single-layer datashader output.
Callable type must allow mapping colors between 0 and 1.
The default value of None reverts to Datashader's default
colormap.""")
color_key = param.ClassSelector(class_=(Iterable, Callable, dict), doc="""
Iterable or callable that returns colors as hex colors, to
be used for the color key of categorical datashader output.
Callable type must allow mapping colors for supplied values
between 0 and 1.""")
normalization = param.ClassSelector(default='eq_hist',
class_=(basestring, Callable),
doc="""
The normalization operation applied before colormapping.
Valid options include 'linear', 'log', 'eq_hist', 'cbrt',
and any valid transfer function that accepts data, mask, nbins
arguments.""")
clims = param.NumericTuple(default=None, length=2, doc="""
Min and max data values to use for colormap interpolation, when
wishing to override autoranging.
""")
min_alpha = param.Number(default=40, doc="""
The minimum alpha value to use for non-empty pixels when doing
colormapping, in [0, 255]. Use a higher value to avoid
undersaturation, i.e. poorly visible low-value datapoints, at
        the expense of the overall dynamic range.""")
@classmethod
def concatenate(cls, overlay):
"""
Concatenates an NdOverlay of Image types into a single 3D
xarray Dataset.
"""
if not isinstance(overlay, NdOverlay):
raise ValueError('Only NdOverlays can be concatenated')
xarr = xr.concat([v.data.transpose() for v in overlay.values()],
pd.Index(overlay.keys(), name=overlay.kdims[0].name))
params = dict(get_param_values(overlay.last),
vdims=overlay.last.vdims,
kdims=overlay.kdims+overlay.last.kdims)
return Dataset(xarr.transpose(), datatype=['xarray'], **params)
@classmethod
def uint32_to_uint8(cls, img):
"""
Cast uint32 RGB image to 4 uint8 channels.
"""
return np.flipud(img.view(dtype=np.uint8).reshape(img.shape + (4,)))
@classmethod
def rgb2hex(cls, rgb):
"""
Convert RGB(A) tuple to hex.
"""
if len(rgb) > 3:
rgb = rgb[:-1]
return "#{0:02x}{1:02x}{2:02x}".format(*(int(v*255) for v in rgb))
@classmethod
def to_xarray(cls, element):
if issubclass(element.interface, XArrayInterface):
return element
data = tuple(element.dimension_values(kd, expanded=False)
for kd in element.kdims)
data += tuple(element.dimension_values(vd, flat=False)
for vd in element.vdims)
dtypes = [dt for dt in element.datatype if dt != 'xarray']
return element.clone(data, datatype=['xarray']+dtypes,
bounds=element.bounds,
xdensity=element.xdensity,
ydensity=element.ydensity)
def _process(self, element, key=None):
element = element.map(self.to_xarray, Image)
if isinstance(element, NdOverlay):
bounds = element.last.bounds
xdensity = element.last.xdensity
ydensity = element.last.ydensity
element = self.concatenate(element)
elif isinstance(element, Overlay):
return element.map(self._process, [Element])
else:
xdensity = element.xdensity
ydensity = element.ydensity
bounds = element.bounds
vdim = element.vdims[0].name
array = element.data[vdim]
kdims = element.kdims
# Compute shading options depending on whether
# it is a categorical or regular aggregate
shade_opts = dict(how=self.p.normalization, min_alpha=self.p.min_alpha)
if element.ndims > 2:
kdims = element.kdims[1:]
categories = array.shape[-1]
if not self.p.color_key:
pass
elif isinstance(self.p.color_key, dict):
shade_opts['color_key'] = self.p.color_key
elif isinstance(self.p.color_key, Iterable):
shade_opts['color_key'] = [c for i, c in
zip(range(categories), self.p.color_key)]
else:
colors = [self.p.color_key(s) for s in np.linspace(0, 1, categories)]
shade_opts['color_key'] = map(self.rgb2hex, colors)
elif not self.p.cmap:
pass
elif isinstance(self.p.cmap, Callable):
colors = [self.p.cmap(s) for s in np.linspace(0, 1, 256)]
shade_opts['cmap'] = map(self.rgb2hex, colors)
else:
shade_opts['cmap'] = self.p.cmap
if self.p.clims:
shade_opts['span'] = self.p.clims
elif ds_version > '0.5.0' and self.p.normalization != 'eq_hist':
shade_opts['span'] = element.range(vdim)
for d in kdims:
if array[d.name].dtype.kind == 'M':
array[d.name] = array[d.name].astype('datetime64[us]').astype('int64')
with warnings.catch_warnings():
warnings.filterwarnings('ignore', r'invalid value encountered in true_divide')
if np.isnan(array.data).all():
arr = np.zeros(array.data.shape, dtype=np.uint32)
img = array.copy()
img.data = arr
else:
img = tf.shade(array, **shade_opts)
params = dict(get_param_values(element), kdims=kdims,
bounds=bounds, vdims=RGB.vdims[:],
xdensity=xdensity, ydensity=ydensity)
return RGB(self.uint32_to_uint8(img.data), **params)
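# --- Hedged usage sketch for shade (cmap values are illustrative) ---
# shade colormaps an aggregated Image into an RGB element; chaining it
# after aggregate is exactly what the datashade operation below automates.
def _demo_shade():
    import numpy as np
    import holoviews as hv
    img = aggregate(hv.Points(np.random.randn(5000, 2)), dynamic=False)
    return shade(img, cmap=['lightblue', 'darkblue'], normalization='eq_hist')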
class datashade(rasterize, shade):
"""
Applies the aggregate and shade operations, aggregating all
elements in the supplied object and then applying normalization
and colormapping the aggregated data returning RGB elements.
See aggregate and shade operations for more details.
"""
def _process(self, element, key=None):
agg = rasterize._process(self, element, key)
shaded = shade._process(self, agg, key)
return shaded
class stack(Operation):
"""
The stack operation allows compositing multiple RGB Elements using
the defined compositing operator.
"""
compositor = param.ObjectSelector(objects=['add', 'over', 'saturate', 'source'],
default='over', doc="""
        Defines how the compositing operation combines the images.""")
def uint8_to_uint32(self, element):
img = np.dstack([element.dimension_values(d, flat=False)
for d in element.vdims])
if img.shape[2] == 3: # alpha channel not included
alpha = np.ones(img.shape[:2])
if img.dtype.name == 'uint8':
alpha = (alpha*255).astype('uint8')
img = np.dstack([img, alpha])
if img.dtype.name != 'uint8':
img = (img*255).astype(np.uint8)
N, M, _ = img.shape
return img.view(dtype=np.uint32).reshape((N, M))
def _process(self, overlay, key=None):
if not isinstance(overlay, CompositeOverlay):
return overlay
elif len(overlay) == 1:
return overlay.last if isinstance(overlay, NdOverlay) else overlay.get(0)
imgs = []
for rgb in overlay:
if not isinstance(rgb, RGB):
                raise TypeError('stack operation expects RGB type elements, '
                                'not %s.' % type(rgb).__name__)
rgb = rgb.rgb
dims = [kd.name for kd in rgb.kdims][::-1]
coords = {kd.name: rgb.dimension_values(kd, False)
for kd in rgb.kdims}
imgs.append(tf.Image(self.uint8_to_uint32(rgb), coords=coords, dims=dims))
try:
imgs = xr.align(*imgs, join='exact')
except ValueError:
raise ValueError('RGB inputs to stack operation could not be aligned, '
'ensure they share the same grid sampling.')
stacked = tf.stack(*imgs, how=self.p.compositor)
arr = shade.uint32_to_uint8(stacked.data)[::-1]
data = (coords[dims[1]], coords[dims[0]], arr[:, :, 0],
arr[:, :, 1], arr[:, :, 2])
if arr.shape[-1] == 4:
data = data + (arr[:, :, 3],)
return rgb.clone(data, datatype=[rgb.interface.datatype]+rgb.datatype)
class SpreadingOperation(LinkableOperation):
"""
    Spreading expands each pixel in an Image-based Element a certain
number of pixels on all sides according to a given shape, merging
pixels using a specified compositing operator. This can be useful
to make sparse plots more visible.
"""
how = param.ObjectSelector(default='source',
objects=['source', 'over', 'saturate', 'add'], doc="""
The name of the compositing operator to use when combining
pixels.""")
shape = param.ObjectSelector(default='circle', objects=['circle', 'square'],
doc="""
The shape to spread by. Options are 'circle' [default] or 'square'.""")
@classmethod
def uint8_to_uint32(cls, img):
shape = img.shape
flat_shape = np.multiply.reduce(shape[:2])
rgb = img.reshape((flat_shape, 4)).view('uint32').reshape(shape[:2])
return rgb
def _apply_spreading(self, array):
"""Apply the spread function using the indicated parameters."""
raise NotImplementedError
def _process(self, element, key=None):
if not isinstance(element, RGB):
raise ValueError('spreading can only be applied to RGB Elements.')
rgb = element.rgb
new_data = {kd.name: rgb.dimension_values(kd, expanded=False)
for kd in rgb.kdims}
rgbarray = np.dstack([element.dimension_values(vd, flat=False)
for vd in element.vdims])
data = self.uint8_to_uint32(rgbarray)
array = self._apply_spreading(data)
img = datashade.uint32_to_uint8(array)
for i, vd in enumerate(element.vdims):
if i < img.shape[-1]:
new_data[vd.name] = np.flipud(img[..., i])
return element.clone(new_data)
class spread(SpreadingOperation):
"""
    Spreading expands each pixel in an Image-based Element a certain
number of pixels on all sides according to a given shape, merging
pixels using a specified compositing operator. This can be useful
to make sparse plots more visible.
See the datashader documentation for more detail:
http://datashader.org/api.html#datashader.transfer_functions.spread
"""
px = param.Integer(default=1, doc="""
Number of pixels to spread on all sides.""")
def _apply_spreading(self, array):
img = tf.Image(array)
return tf.spread(img, px=self.p.px,
how=self.p.how, shape=self.p.shape).data
class dynspread(SpreadingOperation):
"""
    Spreading expands each pixel in an Image-based Element a certain
number of pixels on all sides according to a given shape, merging
pixels using a specified compositing operator. This can be useful
to make sparse plots more visible. Dynamic spreading determines
how many pixels to spread based on a density heuristic.
See the datashader documentation for more detail:
http://datashader.org/api.html#datashader.transfer_functions.dynspread
"""
max_px = param.Integer(default=3, doc="""
Maximum number of pixels to spread on all sides.""")
threshold = param.Number(default=0.5, bounds=(0,1), doc="""
When spreading, determines how far to spread.
Spreading starts at 1 pixel, and stops when the fraction
of adjacent non-empty pixels reaches this threshold.
Higher values give more spreading, up to the max_px
allowed.""")
def _apply_spreading(self, array):
img = tf.Image(array)
return tf.dynspread(img, max_px=self.p.max_px,
threshold=self.p.threshold,
how=self.p.how, shape=self.p.shape).data
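# --- Hedged usage sketch for spread/dynspread (parameters illustrative) ---
# Spreading is typically chained after datashade to make isolated pixels
# easier to see; dynspread picks the spread radius automatically, up to
# max_px, based on the local pixel density.
def _demo_spreading():
    import numpy as np
    import holoviews as hv
    rgb = datashade(hv.Points(np.random.randn(1000, 2)), dynamic=False)
    return dynspread(rgb, max_px=4, threshold=0.5)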
def split_dataframe(path_df):
"""
Splits a dataframe of paths separated by NaNs into individual
dataframes.
"""
splits = np.where(path_df.iloc[:, 0].isnull())[0]+1
return [df for df in np.split(path_df, splits) if len(df) > 1]
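# --- Hedged sketch of split_dataframe (data is illustrative) ---
# A NaN in the first column marks a path break, so two paths separated by
# one NaN row come back as two dataframes; note the NaN row itself stays
# attached to the end of the preceding chunk.
def _demo_split_dataframe():
    df = pd.DataFrame({'x': [0, 1, np.nan, 2, 3],
                       'y': [0, 1, np.nan, 2, 3]})
    assert len(split_dataframe(df)) == 2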
class _connect_edges(Operation):
split = param.Boolean(default=False, doc="""
Determines whether bundled edges will be split into individual edges
or concatenated with NaN separators.""")
def _bundle(self, position_df, edges_df):
raise NotImplementedError('_connect_edges is an abstract baseclass '
'and does not implement any actual bundling.')
def _process(self, element, key=None):
index = element.nodes.kdims[2].name
rename_edges = {d.name: v for d, v in zip(element.kdims[:2], ['source', 'target'])}
rename_nodes = {d.name: v for d, v in zip(element.nodes.kdims[:2], ['x', 'y'])}
position_df = element.nodes.redim(**rename_nodes).dframe([0, 1, 2]).set_index(index)
edges_df = element.redim(**rename_edges).dframe([0, 1])
paths = self._bundle(position_df, edges_df)
paths = paths.rename(columns={v: k for k, v in rename_nodes.items()})
paths = split_dataframe(paths) if self.p.split else [paths]
return element.clone((element.data, element.nodes, paths))
class bundle_graph(_connect_edges, hammer_bundle):
"""
Iteratively group edges and return as paths suitable for datashading.
Breaks each edge into a path with multiple line segments, and
iteratively curves this path to bundle edges into groups.
"""
def _bundle(self, position_df, edges_df):
from datashader.bundling import hammer_bundle
return hammer_bundle.__call__(self, position_df, edges_df, **self.p)
class directly_connect_edges(_connect_edges, connect_edges):
"""
Given a Graph object will directly connect all nodes.
"""
def _bundle(self, position_df, edges_df):
return connect_edges.__call__(self, position_df, edges_df)
| {
"repo_name": "basnijholt/holoviews",
"path": "holoviews/operation/datashader.py",
"copies": "1",
"size": "52541",
"license": "bsd-3-clause",
"hash": -4628391605720371000,
"line_mean": 41.2695092518,
"line_max": 117,
"alpha_frac": 0.5826497402,
"autogenerated": false,
"ratio": 3.952828769184472,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.001493649090724522,
"num_lines": 1243
} |
from __future__ import absolute_import, division
from collections import defaultdict
from datetime import datetime, timedelta
from hashlib import sha1
from sqlalchemy.sql import func
from changes.config import db
from changes.constants import Status, Result
from changes.models import Build, FailureReason, TestCase, Source
from changes.utils.http import build_uri
SLOW_TEST_THRESHOLD = 3000 # ms
ONE_DAY = 60 * 60 * 24
def percent(value, total):
if not (value and total):
return 0
return int(value / total * 100)
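# --- Hedged sketch of percent() ---
# True division from the __future__ import means percent(3, 4) == 75,
# while a zero value or zero total short-circuits to 0, avoiding a
# ZeroDivisionError.
def _demo_percent():
    assert percent(3, 4) == 75
    assert percent(5, 0) == 0 and percent(0, 10) == 0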
class BuildReport(object):
def __init__(self, projects):
self.projects = set(projects)
def generate(self, end_period=None, days=7):
if end_period is None:
end_period = datetime.utcnow()
days_delta = timedelta(days=days)
start_period = end_period - days_delta
# if we're pulling data for a select number of days let's use the
# previous week as the previous period
if days < 7:
previous_end_period = end_period - timedelta(days=7)
else:
previous_end_period = start_period
previous_start_period = previous_end_period - days_delta
current_results = self.get_project_stats(
start_period, end_period)
previous_results = self.get_project_stats(
previous_start_period, previous_end_period)
for project, stats in current_results.items():
# exclude projects that had no builds in this period
if not stats['total_builds']:
del current_results[project]
continue
previous_stats = previous_results.get(project)
if not previous_stats:
green_change = None
duration_change = None
elif stats['green_percent'] is None:
green_change = None
duration_change = None
elif previous_stats['green_percent'] is None:
green_change = None
duration_change = None
else:
green_change = stats['green_percent'] - previous_stats['green_percent']
duration_change = stats['avg_duration'] - previous_stats['avg_duration']
if not previous_stats:
total_change = None
elif previous_stats['total_builds'] is None:
total_change = None
else:
total_change = stats['total_builds'] - previous_stats['total_builds']
stats['avg_duration'] = stats['avg_duration']
stats['total_change'] = total_change
stats['percent_change'] = green_change
stats['duration_change'] = duration_change
project_stats = sorted(
current_results.items(), key=lambda x: (
-(x[1]['total_builds'] or 0), abs(x[1]['green_percent'] or 0),
x[0].name,
))
current_failure_stats = self.get_failure_stats(
start_period, end_period)
previous_failure_stats = self.get_failure_stats(
previous_start_period, previous_end_period)
failure_stats = []
for stat_name, current_stat_value in current_failure_stats['reasons'].iteritems():
previous_stat_value = previous_failure_stats['reasons'].get(stat_name, 0)
failure_stats.append({
'name': stat_name,
'current': {
'value': current_stat_value,
'percent': percent(current_stat_value, current_failure_stats['total'])
},
'previous': {
'value': previous_stat_value,
'percent': percent(previous_stat_value, previous_failure_stats['total'])
},
})
slow_tests = self.get_slow_tests(start_period, end_period)
title = 'Build Report ({0} through {1})'.format(
start_period.strftime('%b %d, %Y'),
end_period.strftime('%b %d, %Y'),
)
if len(self.projects) == 1:
title = '[%s] %s' % (iter(self.projects).next().name, title)
return {
'title': title,
'period': [start_period, end_period],
'failure_stats': failure_stats,
'project_stats': project_stats,
'tests': {
'slow_list': slow_tests,
},
}
def get_project_stats(self, start_period, end_period):
projects_by_id = dict((p.id, p) for p in self.projects)
project_ids = projects_by_id.keys()
# fetch overall build statistics per project
query = db.session.query(
Build.project_id, Build.result,
func.count(Build.id).label('num'),
func.avg(Build.duration).label('duration'),
).join(
Source, Source.id == Build.source_id,
).filter(
Source.patch_id == None, # NOQA
Build.project_id.in_(project_ids),
Build.status == Status.finished,
Build.result.in_([Result.failed, Result.passed]),
Build.date_created >= start_period,
Build.date_created < end_period,
).group_by(Build.project_id, Build.result)
project_results = {}
for project in self.projects:
project_results[project] = {
'total_builds': 0,
'green_builds': 0,
'green_percent': None,
'avg_duration': 0,
'link': build_uri('/projects/{0}/'.format(project.slug)),
}
for project_id, result, num_builds, duration in query:
if duration is None:
duration = 0
project = projects_by_id[project_id]
if result == Result.passed:
project_results[project]['avg_duration'] = duration
project_results[project]['total_builds'] += num_builds
if result == Result.passed:
project_results[project]['green_builds'] += num_builds
for project, stats in project_results.iteritems():
if stats['total_builds']:
stats['green_percent'] = percent(stats['green_builds'], stats['total_builds'])
else:
stats['green_percent'] = None
return project_results
def get_failure_stats(self, start_period, end_period):
failure_stats = {
'total': 0,
'reasons': defaultdict(int),
}
for project in self.projects:
for stat, value in self.get_failure_stats_for_project(
project, start_period, end_period).iteritems():
failure_stats['reasons'][stat] += value
failure_stats['total'] = Build.query.join(
Source, Source.id == Build.source_id,
).filter(
Source.patch_id == None, # NOQA
Build.project_id.in_(p.id for p in self.projects),
Build.status == Status.finished,
Build.result == Result.failed,
Build.date_created >= start_period,
Build.date_created < end_period,
).count()
return failure_stats
def get_failure_stats_for_project(self, project, start_period, end_period):
base_query = db.session.query(
FailureReason.reason, FailureReason.build_id
).join(
Build, Build.id == FailureReason.build_id,
).join(
Source, Source.id == Build.source_id,
).filter(
Source.patch_id == None, # NOQA
Build.project_id == project.id,
Build.date_created >= start_period,
Build.date_created < end_period,
).group_by(
FailureReason.reason, FailureReason.build_id
).subquery()
return dict(
db.session.query(
base_query.c.reason,
func.count(),
).group_by(
base_query.c.reason,
)
)
def get_slow_tests(self, start_period, end_period):
slow_tests = []
for project in self.projects:
slow_tests.extend(self.get_slow_tests_for_project(
project, start_period, end_period))
slow_tests.sort(key=lambda x: x['duration_raw'], reverse=True)
return slow_tests[:10]
def get_slow_tests_for_project(self, project, start_period, end_period):
latest_build = Build.query.filter(
Build.project == project,
Build.status == Status.finished,
Build.result == Result.passed,
Build.date_created >= start_period,
Build.date_created < end_period,
).order_by(
Build.date_created.desc(),
).limit(1).first()
if not latest_build:
return []
job_list = list(latest_build.jobs)
if not job_list:
return []
queryset = db.session.query(
TestCase.name, TestCase.duration,
).filter(
TestCase.job_id.in_(j.id for j in job_list),
TestCase.result == Result.passed,
TestCase.date_created > start_period,
TestCase.date_created <= end_period,
).group_by(
TestCase.name, TestCase.duration,
).order_by(TestCase.duration.desc())
slow_list = []
for name, duration in queryset[:10]:
slow_list.append({
'project': project,
'name': name,
'package': '', # TODO
'duration': '%.2f s' % (duration / 1000.0,),
'duration_raw': duration,
'link': build_uri('/projects/{0}/tests/{1}/'.format(
project.slug, sha1(name).hexdigest())),
})
return slow_list
def _date_to_key(self, dt):
return int(dt.replace(
minute=0, hour=0, second=0, microsecond=0
).strftime('%s'))
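# --- Hedged usage sketch (names below are assumptions for illustration) ---
# Given an iterable of project ORM objects, generate() returns a dict with
# a title, the reporting period, per-project build stats, failure reasons
# and the slowest passing tests, each compared against the preceding
# period. A typical caller might do:
#
#     report = BuildReport(projects)
#     context = report.generate(days=7)
#     # feed ``context`` into an email template or a JSON endpoint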
| {
"repo_name": "alex/changes",
"path": "changes/reports/build.py",
"copies": "1",
"size": "9947",
"license": "apache-2.0",
"hash": 6852758058527804000,
"line_mean": 34.6523297491,
"line_max": 94,
"alpha_frac": 0.5437820448,
"autogenerated": false,
"ratio": 4.238176395398381,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0003193993953865347,
"num_lines": 279
} |