| code (stringlengths 2-1.05M) | repo_name (stringlengths 5-104) | path (stringlengths 4-251) | language (stringclasses 1 value) | license (stringclasses 15 values) | size (int32 2-1.05M) |
---|---|---|---|---|---|
import math
from service.fake_api_results import ALL_TITLES, OFFICIAL_COPY_RESULT, SELECTED_FULL_RESULTS
SEARCH_RESULTS_PER_PAGE = 20
def get_title(title_number):
return SELECTED_FULL_RESULTS.get(title_number)
def _get_titles(page_number):
nof_results = len(ALL_TITLES)
number_pages = math.ceil(nof_results / SEARCH_RESULTS_PER_PAGE)
start_index = page_number * SEARCH_RESULTS_PER_PAGE
end_index = start_index + SEARCH_RESULTS_PER_PAGE
return {
'number_pages': number_pages,
'number_results': nof_results,
'page_number': page_number,
'titles': ALL_TITLES[start_index:end_index],
}
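# Illustrative example (hypothetical numbers, not from the fake data set):
# with 45 titles and SEARCH_RESULTS_PER_PAGE = 20, number_pages is
# math.ceil(45 / 20) == 3, _get_titles(0) returns titles[0:20] and
# _get_titles(2) returns the final 5 titles.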
def get_titles_by_postcode(postcode, page_number):
return _get_titles(page_number)
def get_titles_by_address(address, page_number):
return _get_titles(page_number)
def get_official_copy_data(title_number):
return OFFICIAL_COPY_RESULT
| LandRegistry/drv-flask-based-prototype | service/api_client.py | Python | mit | 898 |
#!/usr/bin/env python3
#
# grmpy documentation build configuration file, created by
# sphinx-quickstart on Fri Aug 18 13:05:32 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import os
import sys
# Set variable so that todos are shown in local build
on_rtd = os.environ.get("READTHEDOCS") == "True"
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath(".."))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.doctest",
"sphinx.ext.todo",
"sphinx.ext.coverage",
"sphinx.ext.mathjax",
"sphinx.ext.ifconfig",
"sphinx.ext.viewcode",
"sphinxcontrib.bibtex",
"sphinx.ext.imgconverter",
]
bibtex_bibfiles = ["source/refs.bib"]
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = ".rst"
# The master toctree document.
master_doc = "index"
# General information about the project.
project = "grmpy"
copyright_ = "2018, grmpy-dev team"
author = "grmpy-dev team"
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = "1.0"
# The full version, including alpha/beta/rc tags.
release = "1.0"
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# These patterns also affect html_static_path and html_extra_path.
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "sphinx"
# If true, `todo` and `todoList` produce output, else they produce nothing. We
# want to suppress the output on readthedocs.
if on_rtd:
todo_include_todos = False
else:
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = "sphinx_rtd_theme"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = []
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = "grmpydoc"
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
"pointsize": "12pt",
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
"figure_align": "htbp",
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, "grmpy.tex", "grmpy Documentation", "Development Team", "manual")
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [(master_doc, "grmpy", "grmpy Documentation", [author], 1)]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(
master_doc,
"grmpy",
"grmpy Documentation",
author,
"grmpy",
"One line description of project.",
"Miscellaneous",
)
]
# -- Options for Epub output ----------------------------------------------
# Bibliographic Dublin Core info.
epub_title = project
epub_author = author
epub_publisher = author
epub_copyright = copyright_
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#
# epub_identifier = ''
# A unique identification for the text.
#
# epub_uid = ''
# A list of files that should not be packed into the epub file.
epub_exclude_files = ["search.html"]
| grmToolbox/grmpy | docs/conf.py | Python | mit | 5,902 |
import datetime
import re
import sys
from collections import deque
from decimal import Decimal
from enum import Enum
from ipaddress import IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network
from pathlib import Path
from types import GeneratorType
from typing import Any, Callable, Dict, Type, Union
from uuid import UUID
if sys.version_info >= (3, 7):
Pattern = re.Pattern
else:
# python 3.6
Pattern = re.compile('a').__class__
from .color import Color
from .types import SecretBytes, SecretStr
__all__ = 'pydantic_encoder', 'custom_pydantic_encoder', 'timedelta_isoformat'
def isoformat(o: Union[datetime.date, datetime.time]) -> str:
return o.isoformat()
def decimal_encoder(dec_value: Decimal) -> Union[int, float]:
"""
Encodes a Decimal as int if there's no exponent, otherwise float
This is useful when we use ConstrainedDecimal to represent Numeric(x,0)
where an integer (but not int typed) is used. Encoding this as a float
results in failed round-tripping between encode and parse.
Our Id type is a prime example of this.
>>> decimal_encoder(Decimal("1.0"))
1.0
>>> decimal_encoder(Decimal("1"))
1
"""
if dec_value.as_tuple().exponent >= 0:
return int(dec_value)
else:
return float(dec_value)
ENCODERS_BY_TYPE: Dict[Type[Any], Callable[[Any], Any]] = {
bytes: lambda o: o.decode(),
Color: str,
datetime.date: isoformat,
datetime.datetime: isoformat,
datetime.time: isoformat,
datetime.timedelta: lambda td: td.total_seconds(),
Decimal: decimal_encoder,
Enum: lambda o: o.value,
frozenset: list,
deque: list,
GeneratorType: list,
IPv4Address: str,
IPv4Interface: str,
IPv4Network: str,
IPv6Address: str,
IPv6Interface: str,
IPv6Network: str,
Path: str,
Pattern: lambda o: o.pattern,
SecretBytes: str,
SecretStr: str,
set: list,
UUID: str,
}
def pydantic_encoder(obj: Any) -> Any:
from dataclasses import asdict, is_dataclass
from .main import BaseModel
if isinstance(obj, BaseModel):
return obj.dict()
elif is_dataclass(obj):
return asdict(obj)
# Check the class type and its superclasses for a matching encoder
for base in obj.__class__.__mro__[:-1]:
try:
encoder = ENCODERS_BY_TYPE[base]
except KeyError:
continue
return encoder(obj)
else: # We have exited the for loop without finding a suitable encoder
raise TypeError(f"Object of type '{obj.__class__.__name__}' is not JSON serializable")
def custom_pydantic_encoder(type_encoders: Dict[Any, Callable[[Type[Any]], Any]], obj: Any) -> Any:
# Check the class type and its superclasses for a matching encoder
for base in obj.__class__.__mro__[:-1]:
try:
encoder = type_encoders[base]
except KeyError:
continue
return encoder(obj)
else: # We have exited the for loop without finding a suitable encoder
return pydantic_encoder(obj)
def timedelta_isoformat(td: datetime.timedelta) -> str:
"""
ISO 8601 encoding for timedeltas.
"""
minutes, seconds = divmod(td.seconds, 60)
hours, minutes = divmod(minutes, 60)
return f'P{td.days}DT{hours:d}H{minutes:d}M{seconds:d}.{td.microseconds:06d}S'
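# Worked example (illustrative): timedelta_isoformat(datetime.timedelta(days=1, hours=2))
# gives 'P1DT2H0M0.000000S' -- 1 day, 2 hours, 0 minutes, 0.000000 seconds.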
| glenngillen/dotfiles | .vscode/extensions/ms-python.python-2021.5.842923320/pythonFiles/lib/jedilsp/pydantic/json.py | Python | mit | 3,365 |
import ROOT
from math import pi, sqrt, pow, exp
import scipy.integrate
import numpy
from array import array
alpha = 7.2973e-3
m_e = 0.51099892
Z_Xe = 54
Q = 2.4578
def F(Z, KE):
E = KE + m_e
W = E/m_e
Z0 = Z + 2
if W <= 1:
W = 1 + 1e-4
if W > 2.2:
a = -8.46e-2 + 2.48e-2*Z0 + 2.37e-4*Z0**2
b = 1.15e-2 + 3.58e-4*Z0 - 6.17e-5*Z0**2
else:
a = -0.811 + 4.46e-2*Z0 + 1.08e-4*Z0**2
b = 0.673 - 1.82e-2*Z0 + 6.38e-5*Z0**2
x = sqrt(W-1)
p = sqrt(W**2 - 1)
if (p <= 0):
result = 1
else:
result = W/p*exp(a + b*x)
return result
def D(D, K, i):
Z = Z_Xe
T0 = Q/m_e
# split the summed kinetic energy K symmetrically; D is the (dimensionless) energy difference
E1 = 0.5*(K+D) + 1
E2 = 0.5*(K-D) + 1
p1 = sqrt(E1**2 - 1)
p2 = sqrt(E2**2 - 1)
T1 = E1 - 1
T2 = E2 - 1
return p1*E1*F(Z, T1*m_e)*p2*E2*F(Z, T2*m_e)*pow(T0 - K, i)
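# SumSpectrum(K, i) below integrates this two-electron phase-space factor over
# the energy difference D at fixed summed kinetic energy K. The exponent i acts
# as the spectral index n quoted in the Majoron-mode legend further down
# (an interpretation based on the plot labels, e.g. i=5 for the 2nubb spectrum).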
def SumSpectrum(K, i):
if K < 0:
return 0
elif K > Q:
return 0
a = -K/m_e
b = K/m_e
x = scipy.integrate.quad(D, a, b, (K/m_e, i))[0]
if x < 0:
return 0
else:
return x
def gauss_conv(x, y, res):
N = len(x)
mu = numpy.mean(x)
s = res*mu
gauss = [1.0/(s*sqrt(2*pi))*exp(-0.5*((a-mu)/s)**2) for a in x]
convolution = numpy.convolve(y, gauss,'same')
return convolution
def normalize(y, eps, f):
return [a*f for a in y]
N = 1000
min_E = 0.0
max_E = 1.2
E_scaled = array('d', numpy.linspace(min_E, max_E, N, False))
Es = array('d', (E*Q for E in E_scaled))
eps = (max_E - min_E)/N
bb0n = [0.5/eps if abs(E-Q)<eps else 0 for E in Es]
bb2n = [SumSpectrum(E, 5) for E in Es]
bb0n_smeared = gauss_conv(Es, bb0n, 0.02)
bb2n_smeared = gauss_conv(Es, bb2n, 0.02)
bb0n_int = scipy.integrate.simps(bb0n_smeared, None, eps)
bb0n_norm = array('d', normalize(bb0n_smeared, eps, 1e-2/bb0n_int))
bb2n_int = scipy.integrate.simps(bb2n_smeared, None, eps)
bb2n_norm = array('d', normalize(bb2n_smeared, eps, 1/bb2n_int))
g_bb0n = ROOT.TGraph(N, E_scaled, bb0n_norm)
g_bb0n.SetTitle("")
g_bb0n.SetLineStyle(ROOT.kDashed)
g_bb2n = ROOT.TGraph(N, E_scaled, bb2n_norm)
g_bb2n.SetTitle("")
bb0nX = []
bb0nX.append([0.5/eps if abs(E-Q)<eps else 0 for E in Es])
for i in [1, 2, 3, 5, 7]:
bb0nX.append([SumSpectrum(E, i) for E in Es])
bb0nX_graphs = []
for bb0nXn in bb0nX:
bb0nX_int = scipy.integrate.simps(bb0nXn, None, eps)
bb0nX_norm = array('d', normalize(bb0nXn, eps, 1/bb0nX_int))
g_bb0nX = ROOT.TGraph(N, E_scaled, bb0nX_norm)
bb0nX_graphs.append(g_bb0nX)
min_E = 0.9
max_E = 1.1
E_scaled_z = array('d', numpy.linspace(min_E, max_E, N, False))
Es_z = array('d', (E*Q for E in E_scaled_z))
eps_z = (max_E - min_E)/N
bb0n_z = [0.5/eps_z if abs(E-Q)<eps_z else 0 for E in Es_z]
bb2n_z = [SumSpectrum(E, 5) for E in Es_z]
bb0n_smeared_z = gauss_conv(Es_z, bb0n_z, 0.02)
bb2n_smeared_z = gauss_conv(Es_z, bb2n_z, 0.02)
bb0n_norm_z = array('d', normalize(bb0n_smeared_z, eps, 1e-6/bb0n_int))
bb2n_norm_z = array('d', normalize(bb2n_smeared_z, eps, 1.0/bb2n_int))
g_bb0n_z = ROOT.TGraph(N, E_scaled_z, bb0n_norm_z)
g_bb0n_z.SetTitle("")
g_bb0n_z.SetLineStyle(ROOT.kDashed)
g_bb2n_z = ROOT.TGraph(N, E_scaled_z, bb2n_norm_z)
g_bb2n_z.SetTitle("")
#print("bb0n %f"%(sum((y*eps for y in bb0n_norm))))
#print("bb2n %f"%(sum((y*eps for y in bb2n_norm))))
c_both = ROOT.TCanvas("c_both","c_both")
p = ROOT.TPad("p", "p", 0, 0, 1, 1)
p.SetRightMargin(0.02)
p.SetTopMargin(0.02)
p.Draw()
p.cd()
g_bb2n.Draw("AL")
g_bb0n.Draw("L")
g_bb2n.GetYaxis().SetTitle("dN/dE")
g_bb2n.GetXaxis().SetTitle("Sum e^{-} Energy (E/Q)")
c_both.cd()
p_inset = ROOT.TPad("p_inset","p_inset",0.5, 0.5, 0.995, 0.995)
p_inset.SetRightMargin(0.05)
p_inset.SetTopMargin(0.05)
p_inset.Draw()
p_inset.cd()
g_bb2n_z.Draw("AL")
g_bb0n_z.Draw("L")
g_bb2n_z.GetYaxis().SetTitle("dN/dE")
g_bb2n_z.GetXaxis().SetTitle("Sum e^{-} Energy (E/Q)")
g_bb2n_z.GetYaxis().SetNoExponent(False)
# Zoom in so we can't see edge effects of the convolution
g_bb2n_z.GetXaxis().SetRangeUser(1-0.25*(1-min_E), 1+0.25*(max_E-1))
g_bb2n_z.GetYaxis().SetRangeUser(0, 0.0004)
c_z = ROOT.TCanvas("c_z","c_z")
c_z.SetRightMargin(0.05)
c_z.SetTopMargin(0.05)
g_bb2n_z.Draw("AL")
g_bb0n_z.Draw("L")
c = ROOT.TCanvas("c","c")
c.SetRightMargin(0.05)
c.SetTopMargin(0.05)
g_bb2n.Draw("AL")
g_bb0n.Draw("L")
c_majoron = ROOT.TCanvas("c_majoron")
c_majoron.SetRightMargin(0.05)
c_majoron.SetTopMargin(0.05)
colors = [ROOT.kBlack, ROOT.kRed, ROOT.kGreen, ROOT.kBlue,
ROOT.kMagenta, ROOT.kCyan]
draw_opt = "AL"
for i in xrange(len(bb0nX_graphs)):
bb0nX_graphs[-(i+1)].SetLineColor(colors[-(i+1)])
bb0nX_graphs[-(i+1)].Draw(draw_opt)
draw_opt = "L"
# Draw bb0n last so it doesn't scale others to 0
bb0nX_graphs[-1].SetTitle("")
bb0nX_graphs[-1].GetXaxis().SetRangeUser(0, 1.1)
bb0nX_graphs[-1].GetXaxis().SetTitle("Sum e^{-} Energy (E/Q)")
bb0nX_graphs[-1].GetYaxis().SetTitle("dN/dE")
l_majoron = ROOT.TLegend(0.45, 0.77, 0.85, 0.94)
l_majoron.SetFillColor(ROOT.kWhite)
l_majoron.SetNColumns(2)
l_majoron.AddEntry(bb0nX_graphs[0], "0#nu#beta#beta", "l")
l_majoron.AddEntry(bb0nX_graphs[1], "0#nu#beta#beta#chi^{0} (n=1)", "l")
l_majoron.AddEntry(bb0nX_graphs[4], "2#nu#beta#beta (n=5)", "l")
l_majoron.AddEntry(bb0nX_graphs[2], "0#nu#beta#beta#chi^{0} (n=2)", "l")
l_majoron.AddEntry(None, "", "")
l_majoron.AddEntry(bb0nX_graphs[3], "0#nu#beta#beta#chi^{0}(#chi^{0}) (n=3)", "l")
l_majoron.AddEntry(None, "", "")
l_majoron.AddEntry(bb0nX_graphs[5], "0#nu#beta#beta#chi^{0}#chi^{0} (n=7)", "l")
l_majoron.Draw()
dummy = raw_input("Press Enter...")
| steveherrin/PhDThesis | Thesis/scripts/make_bb_spectrum_plot.py | Python | mit | 5,583 |
class SubscriptionTracking(object):
def __init__(self, enable=None, text=None, html=None, substitution_tag=None):
self._enable = None
self._text = None
self._html = None
self._substitution_tag = None
if enable is not None:
self.enable = enable
if text is not None:
self.text = text
if html is not None:
self.html = html
if substitution_tag is not None:
self.substitution_tag = substitution_tag
@property
def enable(self):
return self._enable
@enable.setter
def enable(self, value):
self._enable = value
@property
def text(self):
return self._text
@text.setter
def text(self, value):
self._text = value
@property
def html(self):
return self._html
@html.setter
def html(self, value):
self._html = value
@property
def substitution_tag(self):
return self._substitution_tag
@substitution_tag.setter
def substitution_tag(self, value):
self._substitution_tag = value
def get(self):
subscription_tracking = {}
if self.enable is not None:
subscription_tracking["enable"] = self.enable
if self.text is not None:
subscription_tracking["text"] = self.text
if self.html is not None:
subscription_tracking["html"] = self.html
if self.substitution_tag is not None:
subscription_tracking["substitution_tag"] = self.substitution_tag
return subscription_tracking
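# Illustrative usage (not part of the original module):
# tracking = SubscriptionTracking(enable=True, text="Unsubscribe",
#                                 substitution_tag="[unsubscribe]")
# tracking.get() == {"enable": True, "text": "Unsubscribe",
#                    "substitution_tag": "[unsubscribe]"}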
| galihmelon/sendgrid-python | sendgrid/helpers/mail/subscription_tracking.py | Python | mit | 1,603 |
import novaclient
from novaclient.exceptions import NotFound
import novaclient.client
from keystoneauth1 import loading
from keystoneauth1 import session
import neutronclient.v2_0.client
import cinderclient.v2.client
from osc_lib.utils import wait_for_delete
import taskflow.engines
from taskflow.patterns import linear_flow as lf
from taskflow.patterns import graph_flow as gf
from taskflow import task
import logging
import os
import json
import time
NOVACLIENT_VERSION = "2.37"
def get_openstack_nova_client(config):
return get_openstack_clients(config)[0]
def get_openstack_neutron_client(config):
return get_openstack_clients(config)[1]
def get_openstack_cinder_client(config):
return get_openstack_clients(config)[2]
def get_openstack_clients(config):
""" gets a tuple of various openstack clients.
(novaclient, neutronclient, cinderclient).
Caller can pick up one or all of the returned clients.
"""
if config:
if config.get('M2M_CREDENTIAL_STORE'):
logging.debug("loading credentials from %s" % config.get('M2M_CREDENTIAL_STORE'))
m2m_config = json.load(open(config.get('M2M_CREDENTIAL_STORE')))
source_config = m2m_config
else:
logging.debug("using config as provided")
source_config = config
else:
logging.debug("no config, trying environment vars")
source_config = os.environ
os_username = source_config['OS_USERNAME']
os_password = source_config['OS_PASSWORD']
os_tenant_name = source_config['OS_TENANT_NAME']
os_auth_url = source_config['OS_AUTH_URL']
loader = loading.get_plugin_loader('password')
auth = loader.load_from_options(auth_url=os_auth_url,
username=os_username,
password=os_password,
project_name=os_tenant_name
)
sess = session.Session(auth=auth, verify=False)
return (novaclient.client.Client(NOVACLIENT_VERSION,
session=sess),
neutronclient.v2_0.client.Client(session=sess),
cinderclient.v2.client.Client(NOVACLIENT_VERSION, session=sess)
)
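# Illustrative usage: callers typically unpack only the client they need, e.g.
#     nova, neutron, cinder = get_openstack_clients(config)
# or rely on the get_openstack_*_client() wrappers defined above.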
def _format_nics(nics):
""" Create a networks data structure for python-novaclient.
**Note** "auto" is the safest default to pass to novaclient
:param nics: either None, one of the strings "auto" or "none", or a string with a
comma-separated list of nic IDs from OpenStack.
:return: A data structure that can be passed as Nics
"""
if not nics:
return "auto"
if nics == "none":
return "none"
if nics.lower() == "auto":
return "auto"
return [{"net-id": item, "v4-fixed-ip": ""}
for item in nics.strip().split(",")]
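# For example (illustrative IDs): _format_nics("net-a,net-b") returns
# [{"net-id": "net-a", "v4-fixed-ip": ""}, {"net-id": "net-b", "v4-fixed-ip": ""}],
# while _format_nics(None) and _format_nics("AUTO") both return "auto".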
class GetServer(task.Task):
def execute(self, server_id, config):
logging.debug("getting server %s" % server_id)
nc = get_openstack_nova_client(config)
return nc.servers.get(server_id)
class GetImage(task.Task):
def execute(self, image_name, config):
logging.debug("getting image %s" % image_name)
nc = get_openstack_nova_client(config)
return nc.glance.find_image(image_name)
def revert(self, *args, **kwargs):
pass
class ListImages(task.Task):
def execute(self, image_name, config):
logging.debug("getting images")
nc = get_openstack_nova_client(config)
return nc.glance.list()
def revert(self, *args, **kwargs):
pass
class GetFlavor(task.Task):
def execute(self, flavor_name, config):
logging.debug("getting flavor %s" % flavor_name)
nc = get_openstack_nova_client(config)
return nc.flavors.find(name=flavor_name)
def revert(self, *args, **kwargs):
pass
class ListFlavors(task.Task):
def execute(self, flavor_name, config):
logging.debug("getting flavors")
nc = get_openstack_nova_client(config)
return nc.flavors.list()
def revert(self, *args, **kwargs):
pass
class CreateSecurityGroup(task.Task):
# note this uses neutron client
secgroup_id = ""
def execute(self, display_name, master_sg_name, config):
logging.debug("create security group %s" % display_name)
security_group_name = display_name
nc = get_openstack_neutron_client(config)
self.secgroup = nc.create_security_group({"security_group": {
"name": security_group_name,
"description": "Security group generated by Pebbles"
}})
self.secgroup_id = self.secgroup["security_group"]["id"]
self.secgroup_name = self.secgroup["security_group"]["name"]
if master_sg_name:
master_sg = nc.find_resource("security_group", master_sg_name)
nc.create_security_group_rule({"security_group_rule": dict(
security_group_id=self.secgroup_id,
protocol='tcp',
ethertype='ipv4',
port_range_min=1,
direction='ingress',
port_range_max=65535,
remote_group_id=master_sg["id"]
)})
nc.create_security_group_rule({"security_group_rule": dict(
security_group_id=self.secgroup_id,
protocol='udp',
ethertype='ipv4',
port_range_min=1,
direction='ingress',
port_range_max=65535,
remote_group_id=master_sg["id"]
)})
nc.create_security_group_rule({"security_group_rule": dict(
security_group_id=self.secgroup_id,
protocol='icmp',
ethertype='ipv4',
port_range_min=1,
direction='ingress',
port_range_max=255,
remote_group_id=master_sg["id"]
)})
logging.info("Created security group %s" % self.secgroup_id)
return self.secgroup_id
def revert(self, config, **kwargs):
logging.debug("revert: delete security group")
nc = get_openstack_neutron_client(config)
nc.delete_security_group(self.secgroup_id)
class CreateRootVolume(task.Task):
def execute(self, display_name, image, root_volume_size, config):
if root_volume_size:
logging.debug("creating a root volume for instance %s from image %s" % (display_name, image))
nc = get_openstack_cinder_client(config)
volume_name = '%s-root' % display_name
volume = nc.volumes.create(
size=root_volume_size,
imageRef=image.id,
name=volume_name
)
self.volume_id = volume.id
retries = 0
while nc.volumes.get(volume.id).status not in ('available',):
logging.debug("...waiting for volume to be ready")
time.sleep(5)
retries += 1
if retries > 30:
raise RuntimeError('Volume creation %s is stuck' % volume.id)
return volume.id
else:
logging.debug("no root volume defined")
return ""
def revert(self, config, **kwargs):
logging.debug("revert: delete root volume")
try:
if getattr(self, 'volume_id', None):
nc = get_openstack_cinder_client(config)
nc.volumes.delete(
nc.volumes.get(self.volume_id))
else:
logging.debug("revert: no volume_id stored, unable to revert")
except Exception as e:
logging.error('revert: deleting volume failed: %s' % e)
class CreateDataVolume(task.Task):
def execute(self, display_name, data_volume_size, data_volume_type, config):
if data_volume_size:
logging.debug("creating a data volume for instance %s, %d" % (display_name, data_volume_size))
nc = get_openstack_cinder_client(config)
volume_name = '%s-data' % display_name
volume = nc.volumes.create(
size=data_volume_size,
name=volume_name,
volume_type=data_volume_type,
)
self.volume_id = volume.id
retries = 0
while nc.volumes.get(volume.id).status not in ('available',):
logging.debug("...waiting for volume to be ready")
time.sleep(5)
retries += 1
if retries > 30:
raise RuntimeError('Volume creation %s is stuck' % volume.id)
return volume.id
else:
logging.debug("no root volume defined")
return None
def revert(self, config, **kwargs):
logging.debug("revert: delete root volume")
try:
if getattr(self, 'volume_id', None):
nc = get_openstack_cinder_client(config)
nc.volumes.delete(
nc.volumes.get(self.volume_id))
else:
logging.debug("revert: no volume_id stored, unable to revert")
except Exception as e:
logging.error('revert: deleting volume failed: %s' % e)
class ProvisionInstance(task.Task):
def execute(self, display_name, image, flavor, security_group, extra_sec_groups,
root_volume_id, nics, userdata, config):
logging.debug("provisioning instance %s" % display_name)
nc = get_openstack_nova_client(config)
sgs = [security_group]
if extra_sec_groups:
sgs.extend(extra_sec_groups)
try:
if len(root_volume_id):
bdm = {'vda': '%s:::1' % (root_volume_id)}
else:
bdm = None
instance = nc.servers.create(
display_name,
image.id,
flavor.id,
key_name=display_name,
security_groups=sgs,
block_device_mapping=bdm,
nics=_format_nics(nics),
userdata=userdata,)
except Exception as e:
logging.error("error provisioning instance: %s" % e)
raise e
self.instance_id = instance.id
logging.debug("instance provisioning successful")
return instance.id
def revert(self, config, **kwargs):
logging.debug("revert: deleting instance %s", kwargs)
try:
if getattr(self, 'instance_id', None):
nc = get_openstack_nova_client(config)
nc.servers.delete(self.instance_id)
else:
logging.debug("revert: no instance_id stored, unable to revert")
except Exception as e:
logging.error('revert: deleting instance failed: %s' % e)
class DeprovisionInstance(task.Task):
def execute(self, server_id, config):
logging.debug("deprovisioning instance %s" % server_id)
nc = get_openstack_nova_client(config)
try:
server = nc.servers.get(server_id)
except NotFound:
logging.warn("Server %s not found" % server_id)
return
if hasattr(server, "security_groups"):
for sg in server.security_groups:
try:
server.remove_security_group(sg['name'])
except:
logging.warn("Unable to remove security group from server (%s)" % sg)
else:
logging.warn("no security groups on server!")
try:
nc.servers.delete(server_id)
wait_for_delete(nc.servers, server_id)
except Exception as e:
logging.warn("Unable to deprovision server %s" % e)
return server.name
def revert(self, *args, **kwargs):
logging.debug("revert: deprovisioning instance failed")
class AllocateIPForInstance(task.Task):
# user beware, i have not done comprehensive testing on this
# but the parts of the refactoring should be correct
# suvileht -2017-08-24
def execute(self, server_id, allocate_public_ip, config):
logging.info("Allocate IP for server %s" % server_id)
novaclient = get_openstack_nova_client(config)
neutronclient = get_openstack_neutron_client(config)
retries = 0
while novaclient.servers.get(server_id).status == "BUILDING" or not novaclient.servers.get(server_id).networks:
logging.debug("...waiting for server to be ready")
time.sleep(5)
retries += 1
if retries > 30:
raise RuntimeError('Server %s is stuck in building' % server_id)
server = novaclient.servers.get(server_id)
if allocate_public_ip:
ips = neutronclient.list_floatingips()
allocated_from_pool = False
free_ips = [ip for ip in ips["floatingips"] if ip["status"] != "ACTIVE"]
if not free_ips:
logging.debug("No allocated free IPs left, trying to allocate one")
try:
# for backwards compatibility reasons we assume the
# network is called "public"
network_id = neutronclient.find_resource("network",
"public")
ip = neutronclient.create_floatingip({
"floating_network_id": network_id})
allocated_from_pool = True
except neutronclient.exceptions.ClientException as e:
logging.warning("Cannot allocate IP, quota exceeded?")
raise e
else:
ip = free_ips[0]["floating_ip_address"]
logging.info("IP assigned IS %s" % ip)
try:
server.add_floating_ip(ip)
except Exception as e:
logging.error(e)
address_data = {
'public_ip': ip,
'allocated_from_pool': allocated_from_pool,
'private_ip': list(server.networks.values())[0][0],
}
else:
address_data = {
'public_ip': None,
'allocated_from_pool': False,
'private_ip': list(server.networks.values())[0][0],
}
return address_data
def revert(self, *args, **kwargs):
pass
class ListInstanceVolumes(task.Task):
def execute(self, server_id, config):
nc = get_openstack_nova_client(config)
return nc.volumes.get_server_volumes(server_id)
def revert(self):
pass
class AttachDataVolume(task.Task):
def execute(self, server_id, data_volume_id, config):
logging.debug("Attach data volume for server %s" % server_id)
if data_volume_id:
nc = get_openstack_nova_client(config)
retries = 0
while nc.servers.get(server_id).status == "BUILDING" or not nc.servers.get(server_id).networks:
logging.debug("...waiting for server to be ready")
time.sleep(5)
retries += 1
if retries > 30:
raise RuntimeError('Server %s is stuck in building' % server_id)
nc.volumes.create_server_volume(server_id, data_volume_id, '/dev/vdc')
def revert(self, *args, **kwargs):
pass
class AddUserPublicKey(task.Task):
def execute(self, display_name, public_key, config):
logging.debug("adding user public key")
nc = get_openstack_nova_client(config)
self.keypair_added = False
nc.keypairs.create(display_name, public_key)
self.keypair_added = True
def revert(self, display_name, public_key, config, **kwargs):
logging.debug("revert: remove user public key")
if getattr(self, 'keypair_added', None):
nc = get_openstack_nova_client(config)
nc.keypairs.find(name=display_name).delete()
class RemoveUserPublicKey(task.Task):
def execute(self, display_name, config):
logging.debug("removing user public key")
nc = get_openstack_nova_client(config)
try:
nc.keypairs.find(name=display_name).delete()
except:
pass
def revert(self, *args, **kwargs):
pass
class DeleteSecurityGroup(task.Task):
def execute(self, server, config):
logging.debug("delete security group")
nc = get_openstack_neutron_client(config)
security_group = nc.find_resource("security_group",
server.name)
try:
if security_group:
nc.delete_security_group(security_group["id"])
except Exception as e:
logging.warn("Could not delete security group: %s" % e)
def revert(self, *args, **kwargs):
pass
class DeleteVolumes(task.Task):
def execute(self, server, config):
nova = get_openstack_nova_client(config)
cinder = get_openstack_cinder_client(config)
for volume in nova.volumes.get_server_volumes(server.id):
retries = 0
while cinder.volumes.get(volume.id).status not in \
('available', 'error'):
logging.debug("...waiting for volume to be ready")
time.sleep(5)
retries += 1
if retries > 30:
raise RuntimeError('Volume %s is stuck' % volume.id)
try:
cinder.volumes.delete(volume.id)
except NotFound:
pass
def revert(self, *args, **kwargs):
pass
def get_provision_flow():
"""
Provisioning flow consisting of three graph flows, each consisting of set of
tasks that can execute in parallel.
Returns tuple consisting of the whole flow and a dictionary including
references to three graph flows for pre-execution customisations.
"""
pre_flow = gf.Flow('PreBootInstance').add(
AddUserPublicKey('add_user_public_key'),
GetImage('get_image', provides='image'),
GetFlavor('get_flavor', provides='flavor'),
CreateRootVolume('create_root_volume', provides='root_volume_id')
)
main_flow = gf.Flow('BootInstance').add(
CreateSecurityGroup('create_security_group', provides='security_group'),
CreateDataVolume('create_data_volume', provides='data_volume_id'),
ProvisionInstance('provision_instance', provides='server_id')
)
post_flow = gf.Flow('PostBootInstance').add(
AllocateIPForInstance('allocate_ip_for_instance', provides='address_data'),
AttachDataVolume('attach_data_volume'),
RemoveUserPublicKey('remove_user_public_key')
)
return (lf.Flow('ProvisionInstance').add(pre_flow, main_flow, post_flow),
{'pre': pre_flow, 'main': main_flow, 'post': post_flow})
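# Illustrative pre-execution customisation (mirrors how deprovision_instance()
# extends its 'main' subflow below); ExtraTask is a hypothetical taskflow task:
#     flow, subflows = get_provision_flow()
#     subflows['post'].add(ExtraTask('extra_task'))
#     taskflow.engines.run(flow, engine='parallel', store={...})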
def get_deprovision_flow():
pre_flow = gf.Flow('PreDestroyInstance').add(
GetServer('get_server', provides="server")
)
main_flow = gf.Flow('DestroyInstance').add(
DeprovisionInstance('deprovision_instance')
)
post_flow = gf.Flow('PostDestroyInstance').add(
DeleteSecurityGroup('delete_security_group')
)
return (lf.Flow('DeprovisionInstance').add(pre_flow, main_flow, post_flow),
{'pre': pre_flow, 'main': main_flow, 'post': post_flow})
def get_upload_key_flow():
return lf.Flow('UploadKey').add(
AddUserPublicKey('upload_key')
)
class OpenStackService(object):
def __init__(self, config=None):
self._config = config
def provision_instance(self, display_name, image_name, flavor_name, public_key, extra_sec_groups=None,
master_sg_name=None, allocate_public_ip=True, root_volume_size=0,
data_volume_size=0, data_volume_type=None,
nics=None,
userdata=None):
try:
flow, _ = get_provision_flow()
return taskflow.engines.run(flow, engine='parallel', store=dict(
image_name=image_name,
flavor_name=flavor_name,
display_name=display_name,
master_sg_name=master_sg_name,
public_key=public_key,
extra_sec_groups=extra_sec_groups,
allocate_public_ip=allocate_public_ip,
root_volume_size=root_volume_size,
data_volume_size=data_volume_size,
data_volume_type=data_volume_type,
nics=nics,
userdata=userdata,
config=self._config))
except Exception as e:
logging.error(e)
return {'error': 'flow failed due to: %s' % e}
def deprovision_instance(self, server_id, display_name=None, delete_attached_volumes=False):
flow, subflows = get_deprovision_flow()
if delete_attached_volumes:
subflows['main'].add(DeleteVolumes())
try:
return taskflow.engines.run(flow, engine='parallel', store=dict(
server_id=server_id,
config=self._config))
except Exception as e:
logging.error(e)
return {'error': 'flow failed due to: %s' % (e)}
def get_instance_state(self, instance_id):
nc = get_openstack_nova_client(self._config)
return nc.servers.get(instance_id).status
def get_instance_networks(self, instance_id):
nc = get_openstack_nova_client(self._config)
return nc.servers.get(instance_id).networks
def list_images(self):
nc = get_openstack_nova_client(self._config)
return nc.glance.list()
def list_flavors(self):
nc = get_openstack_nova_client(self._config)
return nc.flavors.list()
def upload_key(self, key_name, public_key):
try:
return taskflow.engines.run(
get_upload_key_flow(),
engine='parallel',
store=dict(
config=self._config,
display_name=key_name,
public_key=public_key
)
)
except Exception as e:
logging.error(e)
return {'error': 'flow failed'}
def delete_key(self, key_name):
logging.debug('Deleting key: %s' % key_name)
nc = get_openstack_nova_client(self._config)
try:
key = nc.keypairs.find(name=key_name)
key.delete()
except:
logging.warning('Key not found: %s' % key_name)
def clear_security_group_rules(self, group_id):
nc = get_openstack_neutron_client(self._config)
sg = nc.show_security_group(group_id)
sec_group_rules = sg['security_group']['security_group_rules']
for rule in sec_group_rules:
if rule['direction'] == 'ingress':
nc.delete_security_group_rule(rule['id'])
def create_security_group(self, security_group_name, security_group_description):
nc = get_openstack_neutron_client(self._config)
nc.create_security_group({"security_group": dict(
name=security_group_name,
description=security_group_description or "Security group generated by Pebbles"
)})
def create_security_group_rule(self, security_group_id, from_port, to_port, cidr, ip_protocol='tcp',
group_id=None):
nc = get_openstack_neutron_client(self._config)
nc.create_security_group_rule({"security_group_rule": dict(
security_group_id=security_group_id,
protocol=ip_protocol,
ethertype='ipv4',
port_range_min=from_port,
direction='ingress',
port_range_max=to_port,
remote_ip_prefix=cidr,
remote_group_id=group_id
)})
| CSC-IT-Center-for-Science/pouta-blueprints | pebbles/services/openstack_service.py | Python | mit | 23,846 |
#!/usr/bin/env python3
import unittest
import greatest_common_divisor as gcd
class TestGreatestCommonDivisor(unittest.TestCase):
def setUp(self):
# use tuple of tuples instead of list of tuples because data won't change
# https://en.wikipedia.org/wiki/Algorithm
# a, b, expected
self.test_data = ((12, 8, 4),
(9, 12, 3),
(54, 24, 6),
(3009, 884, 17),
(40902, 24140, 34),
(14157, 5950, 1)
)
def test_greatest_common_divisor_zero(self):
actual = gcd.GreatestCommonDivisor.greatest_common_divisor(12, 0)
self.assertEqual(0, actual)
actual = gcd.GreatestCommonDivisor.greatest_common_divisor(0, 13)
self.assertEqual(0, actual)
actual = gcd.GreatestCommonDivisor.greatest_common_divisor(-5, 13)
self.assertEqual(0, actual)
def test_greatest_common_divisor(self):
for test_case in self.test_data:
expected = test_case[2]
actual = gcd.GreatestCommonDivisor.greatest_common_divisor(test_case[0], test_case[1])
fail_message = str.format("expected {0} but got {1}", expected, actual)
self.assertEqual(expected, actual, fail_message)
def test_next_smaller_divisor(self):
actual = gcd.GreatestCommonDivisor.next_smaller_divisor(8, 8)
self.assertEqual(4, actual)
actual = gcd.GreatestCommonDivisor.next_smaller_divisor(12, 12)
self.assertEqual(6, actual)
actual = gcd.GreatestCommonDivisor.next_smaller_divisor(12, 6)
self.assertEqual(4, actual)
actual = gcd.GreatestCommonDivisor.next_smaller_divisor(12, 4)
self.assertEqual(3, actual)
actual = gcd.GreatestCommonDivisor.next_smaller_divisor(12, 3)
self.assertEqual(2, actual)
actual = gcd.GreatestCommonDivisor.next_smaller_divisor(12, 2)
self.assertEqual(1, actual)
actual = gcd.GreatestCommonDivisor.next_smaller_divisor(12, 1)
self.assertEqual(1, actual)
actual = gcd.GreatestCommonDivisor.next_smaller_divisor(54, 18)
self.assertEqual(9, actual)
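# Taken together, the cases above suggest next_smaller_divisor(n, d) returns the
# largest divisor of n that is strictly smaller than d (with (12, 1) mapping to 1);
# this is an inference from the expected values, not documented behaviour.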
if __name__ == "__main__":
unittest.main()
| beepscore/greatest_common_divisor | test/test_greatest_common_divisor.py | Python | mit | 2,281 |
# coding: utf-8
"""
Module with preconfigured tree panels
"""
from __future__ import absolute_import
from m3.actions.urls import get_url
from m3_ext.ui import containers
from m3_ext.ui import controls
from m3_ext.ui import menus
from m3_ext.ui import render_component
from m3_ext.ui.fields import ExtSearchField
class ExtObjectTree(containers.ExtTree):
"""
Tree panel for managing a list of objects.
"""
#==========================================================================
# Internal classes for ExtObjectTree
#==========================================================================
class TreeContextMenu(menus.ExtContextMenu):
"""
Internal helper class for working with the tree context menu
"""
def __init__(self, *args, **kwargs):
super(
ExtObjectTree.TreeContextMenu, self
).__init__(
*args, **kwargs
)
self.menuitem_new = menus.ExtContextMenuItem(
text=u'Новый в корне',
icon_cls='add_item',
handler='contextMenuNewRoot'
)
self.menuitem_new_child = menus.ExtContextMenuItem(
text=u'Новый дочерний',
icon_cls='add_item',
handler='contextMenuNewChild'
)
self.menuitem_edit = menus.ExtContextMenuItem(
text=u'Изменить',
icon_cls='edit_item',
handler='contextMenuEdit'
)
self.menuitem_delete = menus.ExtContextMenuItem(
text=u'Удалить',
icon_cls='delete_item',
handler='contextMenuDelete'
)
self.menuitem_separator = menus.ExtContextMenuSeparator()
self.init_component()
class TreeTopBar(containers.ExtToolBar):
"""
Internal helper class for working with the grid top bar
"""
def __init__(self, *args, **kwargs):
super(ExtObjectTree.TreeTopBar, self).__init__(*args, **kwargs)
self.button_new = menus.ExtContextMenuItem(
text=u'Новый в корне',
icon_cls='add_item',
handler='topBarNewRoot'
)
self.button_new_child = menus.ExtContextMenuItem(
text=u'Новый дочерний',
icon_cls='add_item',
handler='topBarNewChild'
)
self.button_edit = controls.ExtButton(
text=u'Изменить',
icon_cls='edit_item',
handler='topBarEdit'
)
self.button_delete = controls.ExtButton(
text=u'Удалить',
icon_cls='delete_item',
handler='topBarDelete'
)
self.button_refresh = controls.ExtButton(
text=u'Обновить',
icon_cls='refresh-icon-16',
handler='topBarRefresh'
)
menu = menus.ExtContextMenu()
menu.items.append(self.button_new)
menu.items.append(self.button_new_child)
self.add_menu = containers.ExtToolbarMenu(
icon_cls="add_item",
menu=menu,
text=u'Добавить'
)
self.init_component()
#==========================================================================
# The actual definition of the ExtObjectTree class
#==========================================================================
def __init__(self, *args, **kwargs):
super(ExtObjectTree, self).__init__(*args, **kwargs)
self.template = 'ext-trees/ext-object-tree.js'
#======================================================================
# Actions invoked from inside the grid
#======================================================================
self.action_new = None
self.action_edit = None
self.action_delete = None
self.action_data = None
#======================================================================
# Data source for the grid
#======================================================================
self.load_mask = True
self.row_id_name = 'id'
self.parent_id_name = 'parent_id'
self.allow_paging = False
#======================================================================
# Context menus and toolbars of the tree
#======================================================================
self.context_menu_row = ExtObjectTree.TreeContextMenu()
self.context_menu_tree = ExtObjectTree.TreeContextMenu()
self.top_bar = ExtObjectTree.TreeTopBar()
self.top_bar.items.append(self.top_bar.add_menu)
self.top_bar.items.append(self.top_bar.button_edit)
self.top_bar.items.append(self.top_bar.button_delete)
self.top_bar.items.append(self.top_bar.button_refresh)
self.dblclick_handler = 'onEditRecord'
# "Folder sort" flag:
# if true, folders are always shown above plain items,
# otherwise they are sorted like ordinary items
self.folder_sort = True
# Whether sorting is enabled in the tree
self.enable_tree_sort = True
# After editing or adding, only the tree node in which
# the changes occurred is refreshed
self.incremental_update = False
# Exclusion list for make_read_only
self._mro_exclude_list = []
self.init_component()
def add_search_field(self):
u"""Добавляет строку поиска в гриде."""
self.top_bar.search_field = ExtSearchField(
empty_text=u'Поиск', width=200, component_for_search=self)
self.top_bar.add_fill()
self.top_bar.items.append(self.top_bar.search_field)
self._mro_exclude_list.append(self.top_bar.search_field)
def render(self):
"""
Override the tree rendering in order to modify
the contents of its panels and context menus
"""
if self.action_new:
self.context_menu_row.items.append(
self.context_menu_row.menuitem_new)
self.context_menu_row.items.append(
self.context_menu_row.menuitem_new_child)
self.context_menu_tree.items.append(
self.context_menu_tree.menuitem_new)
if self.action_edit:
self.context_menu_row.items.append(
self.context_menu_row.menuitem_edit)
self.handler_dblclick = self.dblclick_handler
if self.action_delete:
self.context_menu_row.items.append(
self.context_menu_row.menuitem_delete)
# the context menu is attached to the grid only if
# it contains at least one item
if self.context_menu_tree.items:
self.handler_contextmenu = self.context_menu_tree
if self.context_menu_row.items:
self.handler_rowcontextmenu = self.context_menu_row
#======================================================================
# Top bar configuration
#======================================================================
for action, btn in (
(self.action_new, self.top_bar.add_menu),
(self.action_edit, self.top_bar.button_edit),
(self.action_delete, self.top_bar.button_delete),
(self.action_data, self.top_bar.button_refresh),
):
if not action and btn in self.top_bar.items:
self.top_bar.items.remove(btn)
# fine-tuning of self.store
if not self.url and self.action_data:
self.url = get_url(self.action_data)
self.render_base_config()
self.render_params()
return render_component(self)
def render_params(self):
super(ExtObjectTree, self).render_params()
get_url_or_none = lambda x: get_url(x) if x else None
new_url = get_url_or_none(self.action_new)
edit_url = get_url_or_none(self.action_edit)
delete_url = get_url_or_none(self.action_delete)
data_url = get_url_or_none(self.action_data)
context_json = (
self.action_context.json if self.action_context else None
)
self._put_params_value(
'actions', {
'newUrl': new_url,
'editUrl': edit_url,
'deleteUrl': delete_url,
'dataUrl': data_url,
'contextJson': context_json
}
)
self._put_params_value('rowIdName', self.row_id_name)
self._put_params_value('parentIdName', self.parent_id_name)
self._put_params_value('folderSort', self.folder_sort)
self._put_params_value('enableTreeSort', self.enable_tree_sort)
self._put_params_value('incrementalUpdate', self.incremental_update)
def t_render_base_config(self):
return self._get_config_str()
def t_render_params(self):
return self._get_params_str()
| barsgroup/m3-ext | src/m3_ext/ui/panels/trees.py | Python | mit | 9,988 |
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
if MYPY_CHECK_RUNNING:
from typing import List, Optional
__version__ = "20.0.dev0"
def main(args=None):
# type: (Optional[List[str]]) -> int
"""This is an internal API only meant for use by pip's own console scripts.
For additional details, see https://github.com/pypa/pip/issues/7498.
"""
from pip._internal.utils.entrypoints import _wrapper
return _wrapper(args)
| xavfernandez/pip | src/pip/__init__.py | Python | mit | 458 |
from keras.models import Sequential, model_from_json
from keras.layers import Dense, Dropout, Activation, Flatten, Convolution2D, MaxPooling2D, Lambda, ELU
from keras.layers.normalization import BatchNormalization
from keras.optimizers import Adam
import cv2
import csv
import numpy as np
import os
from random import random
from sklearn.model_selection import train_test_split
DATA_PATH = './data/t1/'
def trans_image(image,steer,trans_range):
#
# Translate image
# Ref: https://chatbotslife.com/using-augmentation-to-mimic-human-driving-496b569760a9#.s1pwczi3q
#
rows, cols, _ = image.shape
tr_x = trans_range*np.random.uniform()-trans_range/2
steer_ang = steer + tr_x/trans_range*2*.2
tr_y = 40*np.random.uniform()-40/2
Trans_M = np.float32([[1,0,tr_x],[0,1,tr_y]])
image_tr = cv2.warpAffine(image,Trans_M,(cols,rows))
return image_tr, steer_ang
def gen_data(X, y, batch_size=128, validation=False):
#
# Generate data for fit_generator
#
gen_start = 0
while True:
features = []
labels = []
if gen_start >= len(y):
gen_start = 0
ending = min(gen_start+batch_size, len(y))
for idx, row in enumerate(y[gen_start:ending]):
center_img = cv2.imread(DATA_PATH + X[gen_start+idx][0].strip())
center_img = cv2.cvtColor(center_img, cv2.COLOR_BGR2HSV)
center_label = float(row[0])
# Augmentation 1: Jitter image
center_img, center_label = trans_image(center_img, center_label, 100)
# Augmentation 2: Occasionally flip straight
if random() > 0.5 and abs(center_label) > 0.1:
center_img = cv2.flip(center_img, 1)
labels.append(-center_label)
else:
labels.append(center_label)
# Augmentation 3: Random brightness
random_bright = .25 + np.random.uniform()
center_img[:,:,2] = center_img[:,:,2]*random_bright
features.append(center_img)
if not validation:
# Augmentation 4: +0.15 to Left Image
left_img = cv2.imread(DATA_PATH + X[gen_start+idx][1].strip())
features.append(left_img)
labels.append(float(row[0]) + 0.15)
# Augmentation 5: -0.15 to Right Image
right_img = cv2.imread(DATA_PATH + X[gen_start+idx][2].strip())
features.append(right_img)
labels.append(float(row[0]) - 0.15)
gen_start += batch_size
features = np.array(features)
labels = np.array(labels)
yield features, labels
def nvidia_model(row=66, col=200, ch=3, dropout=0.3, lr=0.0001):
#
# NVIDIA CNN model
# Ref: https://arxiv.org/abs/1604.07316
#
input_shape = (row, col, ch)
model = Sequential()
model.add(BatchNormalization(axis=1, input_shape=input_shape))
model.add(Convolution2D(24, 5, 5, border_mode='valid',
subsample=(2, 2), activation='elu'))
model.add(Dropout(dropout))
model.add(Convolution2D(36, 5, 5, border_mode='valid',
subsample=(2, 2), activation='elu'))
model.add(Dropout(dropout))
model.add(Convolution2D(48, 5, 5, border_mode='valid',
subsample=(2, 2), activation='elu'))
model.add(Dropout(dropout))
model.add(Convolution2D(64, 3, 3, border_mode='valid',
subsample=(1, 1), activation='elu'))
model.add(Dropout(dropout))
model.add(Convolution2D(64, 3, 3, border_mode='valid',
subsample=(1, 1), activation='elu'))
model.add(Dropout(dropout))
model.add(Flatten())
model.add(Dense(100))
model.add(Activation('elu'))
model.add(Dropout(dropout))
model.add(Dense(50))
model.add(Activation('elu'))
model.add(Dropout(dropout))
model.add(Dense(10))
model.add(Activation('elu'))
model.add(Dropout(dropout))
model.add(Dense(1))
model.add(Activation('elu'))
model.compile(optimizer=Adam(lr=lr), loss='mse', metrics=['accuracy'])
print(model.summary())
return model
def nvidialite_model(row=33, col=100, ch=3, dropout=0.3, lr=0.0001):
#
# Modified version of the NVIDIA CNN model (dysfunctional)
#
input_shape = (row, col, ch)
model = Sequential()
model.add(BatchNormalization(axis=1, input_shape=input_shape))
model.add(Convolution2D(24, 5, 5, border_mode='valid',
subsample=(2, 2), activation='elu'))
model.add(Convolution2D(36, 5, 5, border_mode='valid',
subsample=(2, 2), activation='elu'))
model.add(Convolution2D(48, 3, 3, border_mode='valid',
subsample=(1, 1), activation='elu'))
model.add(Flatten())
model.add(Dense(100))
model.add(Activation('elu'))
model.add(Dropout(dropout))
model.add(Dense(50))
model.add(Activation('elu'))
model.add(Dropout(dropout))
model.add(Dense(10))
model.add(Activation('elu'))
model.add(Dropout(dropout))
model.add(Dense(1))
model.add(Activation('elu'))
model.compile(optimizer=Adam(lr=lr), loss='mse', metrics=['accuracy'])
print(model.summary())
return model
def load_data(filter=True):
#
# Load and split data
# CSV: center,left,right,steering,throttle,brake,speed
#
with open(DATA_PATH + 'driving_log.csv', 'r') as f:
reader = csv.reader(f)
data = [row for row in reader]
data = np.array(data)
X = data[:,[0,1,2]]
y = data[:,[3]]
print('Total samples:', len(data))
print('Total samples (after filter):', len(X))
return train_test_split(X, y, test_size=0.2, random_state=42)
def load_model(lr=0.001):
#
# Load the existing model and weight
#
with open('model.json', 'r') as jfile:
model = model_from_json(jfile.read())
model.compile(optimizer=Adam(lr=lr), loss='mse', metrics=['accuracy'])
model.load_weights('model.h5')
return model
def main():
# Load data
X_train, X_val, y_train, y_val = load_data()
print('X_train shape:', X_train.shape)
print('X_val shape:', X_val.shape)
# Build model
if 'model.json' in os.listdir():
model = load_model()
else:
model = nvidia_model()
model.fit_generator(gen_data(X_train, y_train),
samples_per_epoch=len(X_train)*3, nb_epoch=8,
validation_data=gen_data(X_val, y_val, validation=True),
nb_val_samples=len(X_val))
# Save model
json = model.to_json()
model.save_weights('model.h5')
with open('model.json', 'w') as f:
f.write(json)
if __name__ == "__main__": main()
| shernshiou/CarND | Term1/04-CarND-Behavioral-Cloning/model.py | Python | mit | 6,595 |
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
# called from wnf.py
# lib/wnf.py --install [rootpassword] [dbname] [source]
from __future__ import unicode_literals
import os, sys, json
import webnotes
import webnotes.db
import getpass
from webnotes.model.db_schema import DbManager
from webnotes.model.sync import sync_for
from webnotes.utils import cstr
class Installer:
def __init__(self, root_login, root_password=None, db_name=None, site=None, site_config=None):
make_conf(db_name, site=site, site_config=site_config)
self.site = site
if isinstance(root_password, list):
root_password = root_password[0]
self.make_connection(root_login, root_password)
webnotes.local.conn = self.conn
webnotes.local.session = webnotes._dict({'user':'Administrator'})
self.dbman = DbManager(self.conn)
def make_connection(self, root_login, root_password):
if root_login:
if not root_password:
root_password = webnotes.conf.get("root_password") or None
if not root_password:
root_password = getpass.getpass("MySQL root password: ")
self.root_password = root_password
self.conn = webnotes.db.Database(user=root_login, password=root_password)
def install(self, db_name, source_sql=None, admin_password = 'admin', verbose=0,
force=0):
if force or (db_name not in self.dbman.get_database_list()):
# delete user (if exists)
self.dbman.delete_user(db_name)
else:
raise Exception("Database %s already exists" % (db_name,))
# create user and db
self.dbman.create_user(db_name, webnotes.conf.db_password)
if verbose: print "Created user %s" % db_name
# create a database
self.dbman.create_database(db_name)
if verbose: print "Created database %s" % db_name
# grant privileges to user
self.dbman.grant_all_privileges(db_name, db_name)
if verbose: print "Granted privileges to user %s and database %s" % (db_name, db_name)
# flush user privileges
self.dbman.flush_privileges()
# close root connection
self.conn.close()
webnotes.connect(db_name=db_name, site=self.site)
self.dbman = DbManager(webnotes.conn)
# import in db_name
if verbose: print "Starting database import..."
# get the path of the sql file to import
if not source_sql:
source_sql = os.path.join(os.path.dirname(webnotes.__file__), "..",
'conf', 'Framework.sql')
self.dbman.restore_database(db_name, source_sql, db_name, webnotes.conf.db_password)
if verbose: print "Imported from database %s" % source_sql
self.create_auth_table()
# fresh app
if 'Framework.sql' in source_sql:
if verbose: print "Installing app..."
self.install_app(verbose=verbose)
# update admin password
self.update_admin_password(admin_password)
# create public folder
from webnotes.install_lib import setup_public_folder
setup_public_folder.make(site=self.site)
if not self.site:
from webnotes.build import bundle
bundle(False)
return db_name
def install_app(self, verbose=False):
sync_for("lib", force=True, sync_everything=True, verbose=verbose)
self.import_core_docs()
try:
from startup import install
except ImportError, e:
install = None
if os.path.exists("app"):
sync_for("app", force=True, sync_everything=True, verbose=verbose)
if os.path.exists(os.path.join("app", "startup", "install_fixtures")):
install_fixtures()
# build website sitemap
from website.doctype.website_sitemap_config.website_sitemap_config import build_website_sitemap_config
build_website_sitemap_config()
if verbose: print "Completing App Import..."
install and install.post_import()
if verbose: print "Updating patches..."
self.set_all_patches_as_completed()
self.assign_all_role_to_administrator()
def update_admin_password(self, password):
from webnotes.auth import _update_password
webnotes.conn.begin()
_update_password("Administrator", webnotes.conf.get("admin_password") or password)
webnotes.conn.commit()
def import_core_docs(self):
install_docs = [
# profiles
{'doctype':'Profile', 'name':'Administrator', 'first_name':'Administrator',
'email':'admin@localhost', 'enabled':1},
{'doctype':'Profile', 'name':'Guest', 'first_name':'Guest',
'email':'guest@localhost', 'enabled':1},
# userroles
{'doctype':'UserRole', 'parent': 'Administrator', 'role': 'Administrator',
'parenttype':'Profile', 'parentfield':'user_roles'},
{'doctype':'UserRole', 'parent': 'Guest', 'role': 'Guest',
'parenttype':'Profile', 'parentfield':'user_roles'},
{'doctype': "Role", "role_name": "Report Manager"}
]
webnotes.conn.begin()
for d in install_docs:
bean = webnotes.bean(d)
bean.insert()
webnotes.conn.commit()
def set_all_patches_as_completed(self):
try:
from patches.patch_list import patch_list
except ImportError, e:
print "No patches to update."
return
for patch in patch_list:
webnotes.doc({
"doctype": "Patch Log",
"patch": patch
}).insert()
webnotes.conn.commit()
def assign_all_role_to_administrator(self):
webnotes.bean("Profile", "Administrator").get_controller().add_roles(*webnotes.conn.sql_list("""
select name from tabRole"""))
webnotes.conn.commit()
def create_auth_table(self):
webnotes.conn.sql_ddl("""create table if not exists __Auth (
`user` VARCHAR(180) NOT NULL PRIMARY KEY,
`password` VARCHAR(180) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8""")
def make_conf(db_name=None, db_password=None, site=None, site_config=None):
try:
from werkzeug.exceptions import NotFound
import conf
try:
webnotes.init(site=site)
except NotFound:
pass
if not site and webnotes.conf.site:
site = webnotes.conf.site
if site:
# conf exists and site is specified, create site_config.json
make_site_config(site, db_name, db_password, site_config)
elif os.path.exists("conf.py"):
print "conf.py exists"
else:
# pyc file exists but py doesn't
raise ImportError
except ImportError:
if site:
raise Exception("conf.py does not exist")
else:
# create conf.py
with open(os.path.join("lib", "conf", "conf.py"), "r") as confsrc:
with open("conf.py", "w") as conftar:
conftar.write(confsrc.read() % get_conf_params(db_name, db_password))
webnotes.destroy()
webnotes.init(site=site)
def make_site_config(site, db_name=None, db_password=None, site_config=None):
import conf
if not getattr(conf, "sites_dir", None):
raise Exception("sites_dir missing in conf.py")
site_path = os.path.join(conf.sites_dir, site)
if not os.path.exists(site_path):
os.mkdir(site_path)
site_file = os.path.join(site_path, "site_config.json")
if not os.path.exists(site_file):
if not (site_config and isinstance(site_config, dict)):
site_config = get_conf_params(db_name, db_password)
with open(site_file, "w") as f:
f.write(json.dumps(site_config, indent=1, sort_keys=True))
def get_conf_params(db_name=None, db_password=None):
if not db_name:
db_name = raw_input("Database Name: ")
if not db_name:
raise Exception("Database Name Required")
if not db_password:
from webnotes.utils import random_string
db_password = random_string(16)
return {"db_name": db_name, "db_password": db_password}
def install_fixtures():
print "Importing install fixtures..."
for basepath, folders, files in os.walk(os.path.join("app", "startup", "install_fixtures")):
for f in files:
f = cstr(f)
if f.endswith(".json"):
print "Importing " + f
with open(os.path.join(basepath, f), "r") as infile:
webnotes.bean(json.loads(infile.read())).insert_or_update()
webnotes.conn.commit()
if f.endswith(".csv"):
from core.page.data_import_tool.data_import_tool import import_file_by_path
import_file_by_path(os.path.join(basepath, f), ignore_links = True, overwrite=True)
webnotes.conn.commit()
if os.path.exists(os.path.join("app", "startup", "install_fixtures", "files")):
if not os.path.exists(os.path.join("public", "files")):
os.makedirs(os.path.join("public", "files"))
os.system("cp -r %s %s" % (os.path.join("app", "startup", "install_fixtures", "files", "*"),
os.path.join("public", "files")))
| saurabh6790/omnisys-lib | webnotes/install_lib/install.py | Python | mit | 8,275 |
import random
class ai:
def __init__(self, actions, responses):
self.IN = actions
self.OUT = responses
def get_act(self, action, valres):
if action in self.IN:
mList = {}
for response in self.OUT:
if self.IN[self.OUT.index(response)] == action and not response in mList:
mList[response] = 1
elif response in mList:
mList[response] += 1
print mList
keys = []
vals = []
for v in sorted(mList.values(), reverse = True):
for k in mList.keys():
if mList[k] == v:
keys.append(k)
vals.append(v)
print keys
print vals
try:
resp = keys[valres]
except:
resp = random.choice(self.OUT)
else:
resp = random.choice(self.OUT)
return resp
    def update(self, ins, outs):
self.IN = ins
self.OUT = outs
def test():
stix = ai(['attack', 'retreat', 'eat', 'attack', 'attack'], ['run', 'cheer', 'share lunch', 'fall', 'run'])
print stix.get_act('attack', 0)
#test()
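# Hedged sketch (added; assumed data mirroring test() above). get_act() tallies how
# often each response in OUT was paired with the requested action in IN and returns
# the valres-th most common pairing, falling back to a random response when the
# action is unknown or the index is out of range.
#
#     bot = ai(['attack', 'attack', 'eat'], ['run', 'run', 'share lunch'])
#     bot.get_act('attack', 0)   # -> 'run' (paired with 'attack' twice)
#     bot.get_act('sleep', 0)    # -> random.choice of the response list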
| iTecAI/Stixai | Module/Stixai.py | Python | mit | 1,343 |
# pylint: disable-msg=too-many-lines
"""OPP Hardware interface.
Contains the hardware interface and drivers for the Open Pinball Project
platform hardware, including the solenoid, input, incandescent, and neopixel
boards.
"""
import asyncio
from collections import defaultdict
from typing import Dict, List, Set, Union, Tuple, Optional # pylint: disable-msg=cyclic-import,unused-import
from mpf.core.platform_batch_light_system import PlatformBatchLightSystem
from mpf.core.utility_functions import Util
from mpf.platforms.base_serial_communicator import HEX_FORMAT
from mpf.platforms.interfaces.driver_platform_interface import PulseSettings, HoldSettings
from mpf.platforms.opp.opp_coil import OPPSolenoidCard
from mpf.platforms.opp.opp_incand import OPPIncandCard
from mpf.platforms.opp.opp_modern_lights import OPPModernLightChannel, OPPNeopixelCard, OPPModernMatrixLightsCard
from mpf.platforms.opp.opp_serial_communicator import OPPSerialCommunicator, BAD_FW_VERSION
from mpf.platforms.opp.opp_switch import OPPInputCard
from mpf.platforms.opp.opp_switch import OPPMatrixCard
from mpf.platforms.opp.opp_rs232_intf import OppRs232Intf
from mpf.core.platform import SwitchPlatform, DriverPlatform, LightsPlatform, SwitchSettings, DriverSettings, \
DriverConfig, SwitchConfig, RepulseSettings
MYPY = False
if MYPY: # pragma: no cover
from mpf.platforms.opp.opp_coil import OPPSolenoid # pylint: disable-msg=cyclic-import,unused-import
from mpf.platforms.opp.opp_incand import OPPIncand # pylint: disable-msg=cyclic-import,unused-import
from mpf.platforms.opp.opp_switch import OPPSwitch # pylint: disable-msg=cyclic-import,unused-import
# pylint: disable-msg=too-many-instance-attributes
class OppHardwarePlatform(LightsPlatform, SwitchPlatform, DriverPlatform):
"""Platform class for the OPP hardware.
Args:
----
machine: The main ``MachineController`` instance.
"""
__slots__ = ["opp_connection", "serial_connections", "opp_incands", "opp_solenoid", "sol_dict",
"opp_inputs", "inp_dict", "inp_addr_dict", "matrix_inp_addr_dict", "read_input_msg",
"neo_card_dict", "num_gen2_brd", "gen2_addr_arr", "bad_crc", "min_version", "_poll_task",
"config", "_poll_response_received", "machine_type", "opp_commands", "_incand_task", "_light_system",
"matrix_light_cards"]
def __init__(self, machine) -> None:
"""Initialise OPP platform."""
super().__init__(machine)
self.opp_connection = {} # type: Dict[str, OPPSerialCommunicator]
self.serial_connections = set() # type: Set[OPPSerialCommunicator]
self.opp_incands = dict() # type: Dict[str, OPPIncandCard]
self.opp_solenoid = [] # type: List[OPPSolenoidCard]
self.sol_dict = dict() # type: Dict[str, OPPSolenoid]
self.opp_inputs = [] # type: List[Union[OPPInputCard, OPPMatrixCard]]
self.inp_dict = dict() # type: Dict[str, OPPSwitch]
self.inp_addr_dict = dict() # type: Dict[str, OPPInputCard]
self.matrix_inp_addr_dict = dict() # type: Dict[str, OPPMatrixCard]
self.read_input_msg = {} # type: Dict[str, bytes]
self.neo_card_dict = dict() # type: Dict[str, OPPNeopixelCard]
self.matrix_light_cards = dict() # type: Dict[str, OPPModernMatrixLightsCard]
self.num_gen2_brd = 0
self.gen2_addr_arr = {} # type: Dict[str, Dict[int, Optional[int]]]
self.bad_crc = defaultdict(lambda: 0)
self.min_version = defaultdict(lambda: 0xffffffff) # type: Dict[str, int]
self._poll_task = {} # type: Dict[str, asyncio.Task]
self._incand_task = None # type: Optional[asyncio.Task]
self._light_system = None # type: Optional[PlatformBatchLightSystem]
self.features['tickless'] = True
self.config = self.machine.config_validator.validate_config("opp", self.machine.config.get('opp', {}))
self._configure_device_logging_and_debug("OPP", self.config)
self._poll_response_received = {} # type: Dict[str, asyncio.Event]
assert self.log is not None
if self.config['driverboards']:
self.machine_type = self.config['driverboards']
else:
self.machine_type = self.machine.config['hardware']['driverboards'].lower()
if self.machine_type == 'gen1':
raise AssertionError("Original OPP boards not currently supported.")
if self.machine_type == 'gen2':
self.debug_log("Configuring the OPP Gen2 boards")
else:
self.raise_config_error('Invalid driverboards type: {}'.format(self.machine_type), 15)
# Only including responses that should be received
self.opp_commands = {
ord(OppRs232Intf.INV_CMD): self.inv_resp,
ord(OppRs232Intf.EOM_CMD): self.eom_resp,
ord(OppRs232Intf.GET_GEN2_CFG): self.get_gen2_cfg_resp,
ord(OppRs232Intf.READ_GEN2_INP_CMD): self.read_gen2_inp_resp_initial,
ord(OppRs232Intf.GET_VERS_CMD): self.vers_resp,
ord(OppRs232Intf.READ_MATRIX_INP): self.read_matrix_inp_resp_initial,
}
async def initialize(self):
"""Initialise connections to OPP hardware."""
await self._connect_to_hardware()
self.opp_commands[ord(OppRs232Intf.READ_GEN2_INP_CMD)] = self.read_gen2_inp_resp
self.opp_commands[ord(OppRs232Intf.READ_MATRIX_INP)] = self.read_matrix_inp_resp
self._light_system = PlatformBatchLightSystem(self.machine.clock, self._send_multiple_light_update,
self.machine.config['mpf']['default_light_hw_update_hz'],
128)
async def _send_multiple_light_update(self, sequential_brightness_list: List[Tuple[OPPModernLightChannel,
float, int]]):
first_light, _, common_fade_ms = sequential_brightness_list[0]
number_leds = len(sequential_brightness_list)
msg = bytearray()
msg.append(int(ord(OppRs232Intf.CARD_ID_GEN2_CARD) + first_light.addr))
msg.append(OppRs232Intf.SERIAL_LED_CMD_FADE)
msg.append(int(first_light.pixel_num / 256))
msg.append(int(first_light.pixel_num % 256))
msg.append(int(number_leds / 256))
msg.append(int(number_leds % 256))
msg.append(int(common_fade_ms / 256))
msg.append(int(common_fade_ms % 256))
for _, brightness, _ in sequential_brightness_list:
msg.append(int(brightness * 255))
msg.extend(OppRs232Intf.calc_crc8_whole_msg(msg))
cmd = bytes(msg)
if self.debug:
self.debug_log("Set color on %s: %s", first_light.chain_serial, "".join(HEX_FORMAT % b for b in cmd))
self.send_to_processor(first_light.chain_serial, cmd)
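    # Note on the message assembled above (added for clarity): the payload is
    # <card address> <SERIAL_LED_CMD_FADE> <pixel_num hi/lo> <led count hi/lo>
    # <fade ms hi/lo>, followed by one brightness byte (0-255) per channel and a
    # trailing CRC8 over the whole message.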
async def start(self):
"""Start polling and listening for commands."""
# start polling
for chain_serial in self.read_input_msg:
self._poll_task[chain_serial] = self.machine.clock.loop.create_task(self._poll_sender(chain_serial))
self._poll_task[chain_serial].add_done_callback(Util.raise_exceptions)
# start listening for commands
for connection in self.serial_connections:
await connection.start_read_loop()
if [version for version in self.min_version.values() if version < 0x02010000]:
# if we run any CPUs with firmware prior to 2.1.0 start incands updater
self._incand_task = self.machine.clock.schedule_interval(self.update_incand,
1 / self.config['incand_update_hz'])
self._light_system.start()
def stop(self):
"""Stop hardware and close connections."""
if self._light_system:
self._light_system.stop()
for task in self._poll_task.values():
task.cancel()
self._poll_task = {}
if self._incand_task:
self._incand_task.cancel()
self._incand_task = None
for connections in self.serial_connections:
connections.stop()
self.serial_connections = []
def __repr__(self):
"""Return string representation."""
return '<Platform.OPP>'
def process_received_message(self, chain_serial, msg):
"""Send an incoming message from the OPP hardware to the proper method for servicing.
Args:
----
chain_serial: Serial of the chain which received the message.
msg: Message to parse.
"""
if len(msg) >= 1:
# Verify valid Gen2 address
if (msg[0] & 0xe0) == 0x20:
if len(msg) >= 2:
cmd = msg[1]
else:
cmd = OppRs232Intf.ILLEGAL_CMD
# Look for EOM or INV commands
elif msg[0] == ord(OppRs232Intf.INV_CMD) or msg[0] == ord(OppRs232Intf.EOM_CMD):
cmd = msg[0]
else:
cmd = OppRs232Intf.ILLEGAL_CMD
else:
# No messages received, fake an EOM
cmd = OppRs232Intf.EOM_CMD
# Can't use try since it swallows too many errors for now
if cmd in self.opp_commands:
self.opp_commands[cmd](chain_serial, msg)
else:
self.log.warning("Received unknown serial command?%s. (This is "
"very worrisome.)", "".join(HEX_FORMAT % b for b in msg))
# TODO: This means synchronization is lost. Send EOM characters
# until they come back
self.opp_connection[chain_serial].lost_synch()
@staticmethod
def _get_numbers(mask):
number = 0
ref = 1
result = []
        while mask >= ref:
if mask & ref:
result.append(number)
number += 1
ref = ref << 1
return result
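    # Worked example for _get_numbers() above (added note): a mask of 0b1010
    # yields [1, 3], i.e. the bit positions that are set.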
def get_info_string(self):
"""Dump infos about boards."""
if not self.serial_connections:
return "No connection to any CPU board."
infos = "Connected CPUs:\n"
for connection in sorted(self.serial_connections, key=lambda x: x.chain_serial):
infos += " - Port: {} at {} baud. Chain Serial: {}\n".format(connection.port, connection.baud,
connection.chain_serial)
for board_id, board_firmware in self.gen2_addr_arr[connection.chain_serial].items():
if board_firmware is None:
infos += " -> Board: 0x{:02x} Firmware: broken\n".format(board_id)
else:
infos += " -> Board: 0x{:02x} Firmware: 0x{:02x}\n".format(board_id, board_firmware)
infos += "\nIncand cards:\n" if self.opp_incands else ""
card_format_string = " - Chain: {} Board: 0x{:02x} Card: {} Numbers: {}\n"
for incand in self.opp_incands.values():
infos += card_format_string.format(incand.chain_serial, incand.addr,
incand.card_num,
self._get_numbers(incand.mask))
infos += "\nInput cards:\n"
for inputs in self.opp_inputs:
infos += card_format_string.format(inputs.chain_serial, inputs.addr,
inputs.card_num,
self._get_numbers(inputs.mask))
infos += "\nSolenoid cards:\n"
for outputs in self.opp_solenoid:
infos += card_format_string.format(outputs.chain_serial, outputs.addr,
outputs.card_num,
self._get_numbers(outputs.mask))
infos += "\nLEDs:\n" if self.neo_card_dict else ""
for leds in self.neo_card_dict.values():
infos += " - Chain: {} Board: 0x{:02x} Card: {}\n".format(leds.chain_serial, leds.addr, leds.card_num)
infos += "\nMatrix lights:\n" if self.matrix_light_cards else ''
for matrix_light in self.matrix_light_cards.values():
infos += " - Chain: {} Board: 0x{:02x} Card: {} Numbers: 0 - 63\n".format(
matrix_light.chain_serial, matrix_light.addr, matrix_light.card_num)
return infos
async def _connect_to_hardware(self):
"""Connect to each port from the config.
This process will cause the OPPSerialCommunicator to figure out which chains they've connected to
and to register themselves.
"""
port_chain_serial_map = {v: k for k, v in self.config['chains'].items()}
for port in self.config['ports']:
# overwrite serial if defined for port
overwrite_chain_serial = port_chain_serial_map.get(port, None)
if overwrite_chain_serial is None and len(self.config['ports']) == 1:
overwrite_chain_serial = port
comm = OPPSerialCommunicator(platform=self, port=port, baud=self.config['baud'],
overwrite_serial=overwrite_chain_serial)
await comm.connect()
self.serial_connections.add(comm)
for chain_serial, versions in self.gen2_addr_arr.items():
for chain_id, version in versions.items():
if not version:
self.raise_config_error("Could not read version for board {}-{}.".format(chain_serial, chain_id),
16)
if self.min_version[chain_serial] != version:
self.raise_config_error("Version mismatch. Board {}-{} has version {:d}.{:d}.{:d}.{:d} which is not"
" the minimal version "
"{:d}.{:d}.{:d}.{:d}".format(chain_serial, chain_id, (version >> 24) & 0xFF,
(version >> 16) & 0xFF, (version >> 8) & 0xFF,
version & 0xFF,
(self.min_version[chain_serial] >> 24) & 0xFF,
(self.min_version[chain_serial] >> 16) & 0xFF,
(self.min_version[chain_serial] >> 8) & 0xFF,
self.min_version[chain_serial] & 0xFF), 1)
def register_processor_connection(self, serial_number, communicator):
"""Register the processors to the platform.
Args:
----
serial_number: Serial number of chain.
communicator: Instance of OPPSerialCommunicator
"""
self.opp_connection[serial_number] = communicator
def send_to_processor(self, chain_serial, msg):
"""Send message to processor with specific serial number.
Args:
----
chain_serial: Serial of the processor.
msg: Message to send.
"""
self.opp_connection[chain_serial].send(msg)
def update_incand(self):
"""Update all the incandescents connected to OPP hardware.
This is done once per game loop if changes have been made.
It is currently assumed that the UART oversampling will guarantee proper
communication with the boards. If this does not end up being the case,
this will be changed to update all the incandescents each loop.
This is used for board with firmware < 2.1.0
"""
for incand in self.opp_incands.values():
if self.min_version[incand.chain_serial] >= 0x02010000:
continue
whole_msg = bytearray()
# Check if any changes have been made
if incand.old_state is None or (incand.old_state ^ incand.new_state) != 0:
# Update card
incand.old_state = incand.new_state
msg = bytearray()
msg.append(incand.addr)
msg.extend(OppRs232Intf.INCAND_CMD)
msg.extend(OppRs232Intf.INCAND_SET_ON_OFF)
msg.append((incand.new_state >> 24) & 0xff)
msg.append((incand.new_state >> 16) & 0xff)
msg.append((incand.new_state >> 8) & 0xff)
msg.append(incand.new_state & 0xff)
msg.extend(OppRs232Intf.calc_crc8_whole_msg(msg))
whole_msg.extend(msg)
if whole_msg:
# Note: No need to send EOM at end of cmds
send_cmd = bytes(whole_msg)
if self.debug:
self.debug_log("Update incand on %s cmd:%s", incand.chain_serial,
"".join(HEX_FORMAT % b for b in send_cmd))
self.send_to_processor(incand.chain_serial, send_cmd)
@classmethod
def get_coil_config_section(cls):
"""Return coil config section."""
return "opp_coils"
async def get_hw_switch_states(self):
"""Get initial hardware switch states.
This changes switches from active low to active high
"""
hw_states = dict()
for opp_inp in self.opp_inputs:
if not opp_inp.is_matrix:
curr_bit = 1
for index in range(0, 32):
if (curr_bit & opp_inp.mask) != 0:
if (curr_bit & opp_inp.old_state) == 0:
hw_states[opp_inp.chain_serial + '-' + opp_inp.card_num + '-' + str(index)] = 1
else:
hw_states[opp_inp.chain_serial + '-' + opp_inp.card_num + '-' + str(index)] = 0
curr_bit <<= 1
else:
for index in range(0, 64):
if ((1 << index) & opp_inp.old_state) == 0:
hw_states[opp_inp.chain_serial + '-' + opp_inp.card_num + '-' + str(index + 32)] = 1
else:
hw_states[opp_inp.chain_serial + '-' + opp_inp.card_num + '-' + str(index + 32)] = 0
return hw_states
def inv_resp(self, chain_serial, msg):
"""Parse inventory response.
Args:
----
chain_serial: Serial of the chain which received the message.
msg: Message to parse.
"""
self.debug_log("Received Inventory Response: %s for %s", "".join(HEX_FORMAT % b for b in msg), chain_serial)
index = 1
self.gen2_addr_arr[chain_serial] = {}
while msg[index] != ord(OppRs232Intf.EOM_CMD):
if (msg[index] & ord(OppRs232Intf.CARD_ID_TYPE_MASK)) == ord(OppRs232Intf.CARD_ID_GEN2_CARD):
self.num_gen2_brd += 1
self.gen2_addr_arr[chain_serial][msg[index]] = None
else:
self.log.warning("Invalid inventory response %s for %s.", msg[index], chain_serial)
index += 1
self.debug_log("Found %d Gen2 OPP boards on %s.", self.num_gen2_brd, chain_serial)
# pylint: disable-msg=too-many-statements
@staticmethod
def eom_resp(chain_serial, msg):
"""Process an EOM.
Args:
----
chain_serial: Serial of the chain which received the message.
msg: Message to parse.
"""
# An EOM command can be used to resynchronize communications if message synch is lost
def _parse_gen2_board(self, chain_serial, msg, read_input_msg):
has_neo = False
has_sw_matrix = False
has_lamp_matrix = False
wing_index = 0
sol_mask = 0
inp_mask = 0
incand_mask = 0
while wing_index < OppRs232Intf.NUM_G2_WING_PER_BRD:
if msg[2 + wing_index] == ord(OppRs232Intf.WING_SOL):
sol_mask |= (0x0f << (4 * wing_index))
inp_mask |= (0x0f << (8 * wing_index))
elif msg[2 + wing_index] == ord(OppRs232Intf.WING_INP):
inp_mask |= (0xff << (8 * wing_index))
elif msg[2 + wing_index] == ord(OppRs232Intf.WING_INCAND):
incand_mask |= (0xff << (8 * wing_index))
elif msg[2 + wing_index] in (ord(OppRs232Intf.WING_SW_MATRIX_OUT),
ord(OppRs232Intf.WING_SW_MATRIX_OUT_LOW_WING)):
has_sw_matrix = True
elif msg[2 + wing_index] == ord(OppRs232Intf.WING_NEO):
has_neo = True
inp_mask |= (0xef << (8 * wing_index))
elif msg[2 + wing_index] == ord(OppRs232Intf.WING_HI_SIDE_INCAND):
incand_mask |= (0xff << (8 * wing_index))
elif msg[2 + wing_index] == ord(OppRs232Intf.WING_NEO_SOL):
inp_mask |= (0x0e << (8 * wing_index))
sol_mask |= (0x0f << (4 * wing_index))
has_neo = True
elif msg[2 + wing_index] in (ord(OppRs232Intf.WING_LAMP_MATRIX_COL_WING),
ord(OppRs232Intf.WING_LAMP_MATRIX_ROW_WING)):
has_lamp_matrix = True
wing_index += 1
if incand_mask != 0:
card = OPPIncandCard(chain_serial, msg[0], incand_mask, self.machine)
self.opp_incands["{}-{}".format(chain_serial, card.card_num)] = card
if sol_mask != 0:
self.opp_solenoid.append(
OPPSolenoidCard(chain_serial, msg[0], sol_mask, self.sol_dict, self))
if inp_mask != 0:
# Create the input object, and add to the command to read all inputs
self.opp_inputs.append(OPPInputCard(chain_serial, msg[0], inp_mask, self.inp_dict,
self.inp_addr_dict, self))
# Add command to read all inputs to read input message
inp_msg = bytearray()
inp_msg.append(msg[0])
inp_msg.extend(OppRs232Intf.READ_GEN2_INP_CMD)
inp_msg.append(0)
inp_msg.append(0)
inp_msg.append(0)
inp_msg.append(0)
inp_msg.extend(OppRs232Intf.calc_crc8_whole_msg(inp_msg))
read_input_msg.extend(inp_msg)
if has_sw_matrix:
# Create the matrix object, and add to the command to read all matrix inputs
self.opp_inputs.append(OPPMatrixCard(chain_serial, msg[0], self.inp_dict,
self.matrix_inp_addr_dict, self))
# Add command to read all matrix inputs to read input message
inp_msg = bytearray()
inp_msg.append(msg[0])
inp_msg.extend(OppRs232Intf.READ_MATRIX_INP)
inp_msg.append(0)
inp_msg.append(0)
inp_msg.append(0)
inp_msg.append(0)
inp_msg.append(0)
inp_msg.append(0)
inp_msg.append(0)
inp_msg.append(0)
inp_msg.extend(OppRs232Intf.calc_crc8_whole_msg(inp_msg))
read_input_msg.extend(inp_msg)
if has_neo:
card = OPPNeopixelCard(chain_serial, msg[0], self)
self.neo_card_dict[chain_serial + '-' + card.card_num] = card
if has_lamp_matrix:
card = OPPModernMatrixLightsCard(chain_serial, msg[0], self)
self.matrix_light_cards[chain_serial + '-' + card.card_num] = card
def _bad_crc(self, chain_serial, msg):
"""Show warning and increase counter."""
self.bad_crc[chain_serial] += 1
self.log.warning("Chain: %sMsg contains bad CRC: %s.", chain_serial, "".join(HEX_FORMAT % b for b in msg))
def get_gen2_cfg_resp(self, chain_serial, msg):
"""Process cfg response.
Args:
----
chain_serial: Serial of the chain which received the message.
msg: Message to parse.
"""
# Multiple get gen2 cfg responses can be received at once
self.debug_log("Received Gen2 Cfg Response:%s", "".join(HEX_FORMAT % b for b in msg))
curr_index = 0
read_input_msg = bytearray()
while True:
# check that message is long enough, must include crc8
if len(msg) < curr_index + 7:
self.log.warning("Msg is too short: %s.", "".join(HEX_FORMAT % b for b in msg))
self.opp_connection[chain_serial].lost_synch()
break
# Verify the CRC8 is correct
crc8 = OppRs232Intf.calc_crc8_part_msg(msg, curr_index, 6)
if msg[curr_index + 6] != ord(crc8):
self._bad_crc(chain_serial, msg)
break
self._parse_gen2_board(chain_serial, msg[curr_index:curr_index + 6], read_input_msg)
if (len(msg) > curr_index + 7) and (msg[curr_index + 7] == ord(OppRs232Intf.EOM_CMD)):
break
if (len(msg) > curr_index + 8) and (msg[curr_index + 8] == ord(OppRs232Intf.GET_GEN2_CFG)):
curr_index += 7
else:
self.log.warning("Malformed GET_GEN2_CFG response:%s.",
"".join(HEX_FORMAT % b for b in msg))
self.opp_connection[chain_serial].lost_synch()
break
read_input_msg.extend(OppRs232Intf.EOM_CMD)
self.read_input_msg[chain_serial] = bytes(read_input_msg)
self._poll_response_received[chain_serial] = asyncio.Event()
self._poll_response_received[chain_serial].set()
def vers_resp(self, chain_serial, msg):
"""Process version response.
Args:
----
chain_serial: Serial of the chain which received the message.
msg: Message to parse.
"""
# Multiple get version responses can be received at once
self.debug_log("Received Version Response (Chain: %s): %s", chain_serial, "".join(HEX_FORMAT % b for b in msg))
curr_index = 0
while True:
# check that message is long enough, must include crc8
if len(msg) < curr_index + 7:
self.log.warning("Msg is too short (Chain: %s): %s.", chain_serial,
"".join(HEX_FORMAT % b for b in msg))
self.opp_connection[chain_serial].lost_synch()
break
# Verify the CRC8 is correct
crc8 = OppRs232Intf.calc_crc8_part_msg(msg, curr_index, 6)
if msg[curr_index + 6] != ord(crc8):
self._bad_crc(chain_serial, msg)
break
version = (msg[curr_index + 2] << 24) | \
(msg[curr_index + 3] << 16) | \
(msg[curr_index + 4] << 8) | \
msg[curr_index + 5]
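            # Added note: the four bytes form a packed big-endian version, e.g.
            # firmware 2.1.0.0 becomes 0x02010000, the same form used by the
            # min_version comparisons elsewhere in this module.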
self.debug_log("Firmware version of board 0x%02x (Chain: %s): %d.%d.%d.%d", msg[curr_index], chain_serial,
msg[curr_index + 2], msg[curr_index + 3], msg[curr_index + 4], msg[curr_index + 5])
if msg[curr_index] not in self.gen2_addr_arr[chain_serial]:
self.log.warning("Got firmware response for %s but not in inventory at %s", msg[curr_index],
chain_serial)
else:
self.gen2_addr_arr[chain_serial][msg[curr_index]] = version
if version < self.min_version[chain_serial]:
self.min_version[chain_serial] = version
if version == BAD_FW_VERSION:
raise AssertionError("Original firmware sent only to Brian before adding "
"real version numbers. The firmware must be updated before "
"MPF will work.")
if (len(msg) > curr_index + 7) and (msg[curr_index + 7] == ord(OppRs232Intf.EOM_CMD)):
break
if (len(msg) > curr_index + 8) and (msg[curr_index + 8] == ord(OppRs232Intf.GET_VERS_CMD)):
curr_index += 7
else:
self.log.warning("Malformed GET_VERS_CMD response (Chain %s): %s.", chain_serial,
"".join(HEX_FORMAT % b for b in msg))
self.opp_connection[chain_serial].lost_synch()
break
def read_gen2_inp_resp_initial(self, chain_serial, msg):
"""Read initial switch states.
Args:
----
chain_serial: Serial of the chain which received the message.
msg: Message to parse.
"""
# Verify the CRC8 is correct
if len(msg) < 7:
raise AssertionError("Received too short initial input response: " + "".join(HEX_FORMAT % b for b in msg))
crc8 = OppRs232Intf.calc_crc8_part_msg(msg, 0, 6)
if msg[6] != ord(crc8):
self._bad_crc(chain_serial, msg)
else:
if chain_serial + '-' + str(msg[0]) not in self.inp_addr_dict:
self.log.warning("Got input response for invalid card at initial request: %s. Msg: %s.", msg[0],
"".join(HEX_FORMAT % b for b in msg))
return
opp_inp = self.inp_addr_dict[chain_serial + '-' + str(msg[0])]
new_state = (msg[2] << 24) | \
(msg[3] << 16) | \
(msg[4] << 8) | \
msg[5]
opp_inp.old_state = new_state
def read_gen2_inp_resp(self, chain_serial, msg):
"""Read switch changes.
Args:
----
chain_serial: Serial of the chain which received the message.
msg: Message to parse.
"""
# Single read gen2 input response. Receive function breaks them down
# Verify the CRC8 is correct
if len(msg) < 7:
self.log.warning("Msg too short: %s.", "".join(HEX_FORMAT % b for b in msg))
self.opp_connection[chain_serial].lost_synch()
return
crc8 = OppRs232Intf.calc_crc8_part_msg(msg, 0, 6)
if msg[6] != ord(crc8):
self._bad_crc(chain_serial, msg)
else:
if chain_serial + '-' + str(msg[0]) not in self.inp_addr_dict:
self.log.warning("Got input response for invalid card: %s. Msg: %s.", msg[0],
"".join(HEX_FORMAT % b for b in msg))
return
opp_inp = self.inp_addr_dict[chain_serial + '-' + str(msg[0])]
new_state = (msg[2] << 24) | \
(msg[3] << 16) | \
(msg[4] << 8) | \
msg[5]
# Update the state which holds inputs that are active
changes = opp_inp.old_state ^ new_state
if changes != 0:
curr_bit = 1
for index in range(0, 32):
if (curr_bit & changes) != 0:
if (curr_bit & new_state) == 0:
self.machine.switch_controller.process_switch_by_num(
state=1,
num=opp_inp.chain_serial + '-' + opp_inp.card_num + '-' + str(index),
platform=self)
else:
self.machine.switch_controller.process_switch_by_num(
state=0,
num=opp_inp.chain_serial + '-' + opp_inp.card_num + '-' + str(index),
platform=self)
curr_bit <<= 1
opp_inp.old_state = new_state
# we can continue to poll
self._poll_response_received[chain_serial].set()
def read_matrix_inp_resp_initial(self, chain_serial, msg):
"""Read initial matrix switch states.
Args:
----
chain_serial: Serial of the chain which received the message.
msg: Message to parse.
"""
# Verify the CRC8 is correct
if len(msg) < 11:
raise AssertionError("Received too short initial input response: " + "".join(HEX_FORMAT % b for b in msg))
crc8 = OppRs232Intf.calc_crc8_part_msg(msg, 0, 10)
if msg[10] != ord(crc8):
self._bad_crc(chain_serial, msg)
else:
if chain_serial + '-' + str(msg[0]) not in self.matrix_inp_addr_dict:
self.log.warning("Got input response for invalid matrix card at initial request: %s. Msg: %s.", msg[0],
"".join(HEX_FORMAT % b for b in msg))
return
opp_inp = self.matrix_inp_addr_dict[chain_serial + '-' + str(msg[0])]
opp_inp.old_state = ((msg[2] << 56) | (msg[3] << 48) | (msg[4] << 40) | (msg[5] << 32) |
(msg[6] << 24) | (msg[7] << 16) | (msg[8] << 8) | msg[9])
# pylint: disable-msg=too-many-nested-blocks
def read_matrix_inp_resp(self, chain_serial, msg):
"""Read matrix switch changes.
Args:
----
chain_serial: Serial of the chain which received the message.
msg: Message to parse.
"""
# Single read gen2 input response. Receive function breaks them down
# Verify the CRC8 is correct
if len(msg) < 11:
self.log.warning("Msg too short: %s.", "".join(HEX_FORMAT % b for b in msg))
self.opp_connection[chain_serial].lost_synch()
return
crc8 = OppRs232Intf.calc_crc8_part_msg(msg, 0, 10)
if msg[10] != ord(crc8):
self._bad_crc(chain_serial, msg)
else:
if chain_serial + '-' + str(msg[0]) not in self.matrix_inp_addr_dict:
self.log.warning("Got input response for invalid matrix card: %s. Msg: %s.", msg[0],
"".join(HEX_FORMAT % b for b in msg))
return
opp_inp = self.matrix_inp_addr_dict[chain_serial + '-' + str(msg[0])]
new_state = ((msg[2] << 56) | (msg[3] << 48) | (msg[4] << 40) | (msg[5] << 32) |
(msg[6] << 24) | (msg[7] << 16) | (msg[8] << 8) | msg[9])
changes = opp_inp.old_state ^ new_state
if changes != 0:
curr_bit = 1
for index in range(32, 96):
if (curr_bit & changes) != 0:
if (curr_bit & new_state) == 0:
self.machine.switch_controller.process_switch_by_num(
state=1,
num=opp_inp.chain_serial + '-' + opp_inp.card_num + '-' + str(index),
platform=self)
else:
self.machine.switch_controller.process_switch_by_num(
state=0,
num=opp_inp.chain_serial + '-' + opp_inp.card_num + '-' + str(index),
platform=self)
curr_bit <<= 1
opp_inp.old_state = new_state
# we can continue to poll
self._poll_response_received[chain_serial].set()
def _get_dict_index(self, input_str):
if not isinstance(input_str, str):
self.raise_config_error("Invalid number format for OPP. Number should be card-number or chain-card-number "
"(e.g. 0-1)", 2)
try:
chain_str, card_str, number_str = input_str.split("-")
except ValueError:
if len(self.serial_connections) > 1:
self.raise_config_error("You need to specify a chain as chain-card-number in: {}".format(input_str), 17)
else:
chain_str = list(self.serial_connections)[0].chain_serial
try:
card_str, number_str = input_str.split("-")
except ValueError:
card_str = '0'
number_str = input_str
if chain_str not in self.opp_connection:
self.raise_config_error("Chain {} does not exist. Existing chains: {}".format(
chain_str, list(self.opp_connection.keys())), 3)
return chain_str + "-" + card_str + "-" + number_str
def configure_driver(self, config: DriverConfig, number: str, platform_settings: dict):
"""Configure a driver.
Args:
----
config: Config dict.
number: Number of this driver.
platform_settings: Platform specific settings.
"""
if not self.opp_connection:
self.raise_config_error("A request was made to configure an OPP solenoid, "
"but no OPP connection is available", 4)
number = self._get_dict_index(number)
if number not in self.sol_dict:
self.raise_config_error("A request was made to configure an OPP solenoid "
"with number {} which doesn't exist".format(number), 5)
# Use new update individual solenoid command
opp_sol = self.sol_dict[number]
opp_sol.config = config
opp_sol.platform_settings = platform_settings
if self.debug:
self.debug_log("Configure driver %s", number)
default_pulse = PulseSettings(config.default_pulse_power, config.default_pulse_ms)
default_hold = HoldSettings(config.default_hold_power)
opp_sol.reconfigure_driver(default_pulse, default_hold)
# Removing the default input is not necessary since the
# CFG_SOL_USE_SWITCH is not being set
return opp_sol
def configure_switch(self, number: str, config: SwitchConfig, platform_config: dict):
"""Configure a switch.
Args:
----
number: Number of this switch.
config: Config dict.
platform_config: Platform specific settings.
"""
del platform_config
del config
# A switch is termed as an input to OPP
if not self.opp_connection:
self.raise_config_error("A request was made to configure an OPP switch, "
"but no OPP connection is available", 6)
number = self._get_dict_index(number)
if number not in self.inp_dict:
self.raise_config_error("A request was made to configure an OPP switch "
"with number {} which doesn't exist".format(number), 7)
return self.inp_dict[number]
def parse_light_number_to_channels(self, number: str, subtype: str):
"""Parse number and subtype to channel."""
if subtype in ("matrix", "incand"):
return [
{
"number": self._get_dict_index(number)
}
]
if not subtype or subtype == "led":
full_index = self._get_dict_index(number)
chain_serial, card, index = full_index.split('-')
number_format = "{}-{}-{}"
return [
{
"number": number_format.format(chain_serial, card, int(index) * 3)
},
{
"number": number_format.format(chain_serial, card, int(index) * 3 + 1)
},
{
"number": number_format.format(chain_serial, card, int(index) * 3 + 2)
},
]
self.raise_config_error("Unknown subtype {}".format(subtype), 8)
return []
def configure_light(self, number, subtype, config, platform_settings):
"""Configure a led or matrix light."""
del config
if not self.opp_connection:
self.raise_config_error("A request was made to configure an OPP light, "
"but no OPP connection is available", 9)
chain_serial, card, light_num = number.split('-')
index = chain_serial + '-' + card
if not subtype or subtype == "led":
if index not in self.neo_card_dict:
self.raise_config_error("A request was made to configure an OPP neopixel "
"with card number {} which doesn't exist".format(card), 10)
if not self.neo_card_dict[index].is_valid_light_number(light_num):
self.raise_config_error("A request was made to configure an OPP neopixel "
"with card number {} but number '{}' is "
"invalid".format(card, light_num), 22)
light = OPPModernLightChannel(chain_serial, int(card), int(light_num), self._light_system)
self._light_system.mark_dirty(light)
return light
if subtype == "matrix" and self.min_version[chain_serial] >= 0x02010000:
# modern matrix lights
if index not in self.matrix_light_cards:
self.raise_config_error("A request was made to configure an OPP matrix light "
"with card number {} which doesn't exist".format(card), 18)
if not self.matrix_light_cards[index].is_valid_light_number(light_num):
self.raise_config_error("A request was made to configure an OPP matrix light "
"with card number {} but number '{}' is "
"invalid".format(card, light_num), 19)
light = OPPModernLightChannel(chain_serial, int(card), int(light_num) + 0x2000, self._light_system)
self._light_system.mark_dirty(light)
return light
if subtype in ("incand", "matrix"):
if index not in self.opp_incands:
self.raise_config_error("A request was made to configure an OPP incand light "
"with card number {} which doesn't exist".format(card), 20)
if not self.opp_incands[index].is_valid_light_number(light_num):
self.raise_config_error("A request was made to configure an OPP incand light "
"with card number {} but number '{}' is "
"invalid".format(card, light_num), 21)
if self.min_version[chain_serial] >= 0x02010000:
light = self.opp_incands[index].configure_modern_fade_incand(light_num, self._light_system)
self._light_system.mark_dirty(light)
return light
# legacy incands with new or old subtype
return self.opp_incands[index].configure_software_fade_incand(light_num)
self.raise_config_error("Unknown subtype {}".format(subtype), 12)
return None
async def _poll_sender(self, chain_serial):
"""Poll switches."""
if len(self.read_input_msg[chain_serial]) <= 1:
# there is no point in polling without switches
return
while True:
# wait for previous poll response
timeout = 1 / self.config['poll_hz'] * 25
try:
await asyncio.wait_for(self._poll_response_received[chain_serial].wait(), timeout)
except asyncio.TimeoutError:
self.log.warning("Poll took more than %sms for %s", timeout * 1000, chain_serial)
else:
self._poll_response_received[chain_serial].clear()
# send poll
self.send_to_processor(chain_serial, self.read_input_msg[chain_serial])
await self.opp_connection[chain_serial].writer.drain()
# the line above saturates the link and seems to overwhelm the hardware. limit it to 100Hz
await asyncio.sleep(1 / self.config['poll_hz'])
def _verify_coil_and_switch_fit(self, switch, coil):
chain_serial, card, solenoid = coil.hw_driver.number.split('-')
sw_chain_serial, sw_card, sw_num = switch.hw_switch.number.split('-')
if self.min_version[chain_serial] >= 0x20000:
if chain_serial != sw_chain_serial or card != sw_card:
self.raise_config_error('Invalid switch being configured for driver. Driver = {} '
'Switch = {}. Driver and switch have to be on the same '
'board.'.format(coil.hw_driver.number, switch.hw_switch.number), 13)
else:
matching_sw = ((int(solenoid) & 0x0c) << 1) | (int(solenoid) & 0x03)
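            # Added note: for solenoid numbers 0-15 this is
            # switch = (solenoid // 4) * 8 + solenoid % 4,
            # e.g. solenoid 5 pairs with switch 9 on pre-0.2.0 firmware.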
if chain_serial != sw_chain_serial or card != sw_card or matching_sw != int(sw_num):
self.raise_config_error('Invalid switch being configured for driver. Driver = {} '
'Switch = {}. For Firmware < 0.2.0 they have to be on the same board and '
'have the same number'.format(coil.hw_driver.number, switch.hw_switch.number),
14)
def set_pulse_on_hit_rule(self, enable_switch: SwitchSettings, coil: DriverSettings):
"""Set pulse on hit rule on driver.
Pulses a driver when a switch is hit. When the switch is released the pulse continues. Typically used for
autofire coils such as pop bumpers.
"""
self._write_hw_rule(enable_switch, coil, use_hold=False, can_cancel=False)
def set_delayed_pulse_on_hit_rule(self, enable_switch: SwitchSettings, coil: DriverSettings, delay_ms: int):
"""Set pulse on hit and release rule to driver.
When a switch is hit and a certain delay passed it pulses a driver.
When the switch is released the pulse continues.
Typically used for kickbacks.
"""
if delay_ms <= 0:
raise AssertionError("set_delayed_pulse_on_hit_rule should be used with a positive delay "
"not {}".format(delay_ms))
if delay_ms > 255:
raise AssertionError("set_delayed_pulse_on_hit_rule is limited to max 255ms "
"(was {})".format(delay_ms))
self._write_hw_rule(enable_switch, coil, use_hold=False, can_cancel=False, delay_ms=int(delay_ms))
def set_pulse_on_hit_and_release_rule(self, enable_switch: SwitchSettings, coil: DriverSettings):
"""Set pulse on hit and release rule to driver.
Pulses a driver when a switch is hit. When the switch is released the pulse is canceled. Typically used on
the main coil for dual coil flippers without eos switch.
"""
self._write_hw_rule(enable_switch, coil, use_hold=False, can_cancel=True)
def set_pulse_on_hit_and_enable_and_release_rule(self, enable_switch: SwitchSettings, coil: DriverSettings):
"""Set pulse on hit and enable and relase rule on driver.
Pulses a driver when a switch is hit. Then enables the driver (may be with pwm). When the switch is released
the pulse is canceled and the driver gets disabled. Typically used for single coil flippers.
"""
self._write_hw_rule(enable_switch, coil, use_hold=True, can_cancel=True)
def set_pulse_on_hit_and_release_and_disable_rule(self, enable_switch: SwitchSettings,
eos_switch: SwitchSettings, coil: DriverSettings,
repulse_settings: Optional[RepulseSettings]):
"""Set pulse on hit and release and disable rule on driver.
Pulses a driver when a switch is hit. Then enables the driver (may be with pwm). When the switch is released
        the pulse is canceled and the driver gets disabled. When the eos_switch is hit the pulse is canceled
        and the driver gets disabled. Typically used on the main coil for dual coil flippers with an eos switch.
"""
raise AssertionError("Not implemented in OPP currently")
def set_pulse_on_hit_and_enable_and_release_and_disable_rule(self, enable_switch: SwitchSettings,
eos_switch: SwitchSettings, coil: DriverSettings,
repulse_settings: Optional[RepulseSettings]):
"""Set pulse on hit and enable and release and disable rule on driver.
Pulses a driver when a switch is hit. Then enables the driver (may be with pwm). When the switch is released
the pulse is canceled and the driver becomes disabled. When the eos_switch is hit the pulse is canceled
and the driver becomes enabled (likely with PWM).
Typically used on the coil for single-wound coil flippers with eos switch.
"""
raise AssertionError("Not implemented in OPP currently")
# pylint: disable-msg=too-many-arguments
def _write_hw_rule(self, switch_obj: SwitchSettings, driver_obj: DriverSettings, use_hold, can_cancel,
delay_ms=None):
if switch_obj.invert:
raise AssertionError("Cannot handle inverted switches")
if driver_obj.hold_settings and not use_hold:
raise AssertionError("Invalid call")
self._verify_coil_and_switch_fit(switch_obj, driver_obj)
self.debug_log("Setting HW Rule. Driver: %s", driver_obj.hw_driver.number)
driver_obj.hw_driver.switches.append(switch_obj.hw_switch.number)
driver_obj.hw_driver.set_switch_rule(driver_obj.pulse_settings, driver_obj.hold_settings, driver_obj.recycle,
can_cancel, delay_ms)
_, _, switch_num = switch_obj.hw_switch.number.split("-")
switch_num = int(switch_num)
self._add_switch_coil_mapping(switch_num, driver_obj.hw_driver)
def _remove_switch_coil_mapping(self, switch_num, driver: "OPPSolenoid"):
"""Remove mapping between switch and coil."""
if self.min_version[driver.sol_card.chain_serial] < 0x20000:
return
_, _, coil_num = driver.number.split('-')
# mirror switch matrix columns to handle the fact that OPP matrix is in reverse column order
if switch_num >= 32:
switch_num = 8 * (15 - (switch_num // 8)) + switch_num % 8
msg = bytearray()
msg.append(driver.sol_card.addr)
msg.extend(OppRs232Intf.SET_SOL_INP_CMD)
msg.append(int(switch_num))
msg.append(int(coil_num) + ord(OppRs232Intf.CFG_SOL_INP_REMOVE))
msg.extend(OppRs232Intf.calc_crc8_whole_msg(msg))
msg.extend(OppRs232Intf.EOM_CMD)
final_cmd = bytes(msg)
if self.debug:
self.debug_log("Unmapping input %s and coil %s on %s", switch_num, coil_num, driver.sol_card.chain_serial)
self.send_to_processor(driver.sol_card.chain_serial, final_cmd)
def _add_switch_coil_mapping(self, switch_num, driver: "OPPSolenoid"):
"""Add mapping between switch and coil."""
if self.min_version[driver.sol_card.chain_serial] < 0x20000:
return
_, _, coil_num = driver.number.split('-')
# mirror switch matrix columns to handle the fact that OPP matrix is in reverse column order
if switch_num >= 32:
switch_num = 8 * (15 - (switch_num // 8)) + switch_num % 8
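            # Added note: the matrix columns are mirrored, e.g. 32 <-> 88 and
            # 39 <-> 95; applying the formula twice returns the original number.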
msg = bytearray()
msg.append(driver.sol_card.addr)
msg.extend(OppRs232Intf.SET_SOL_INP_CMD)
msg.append(int(switch_num))
msg.append(int(coil_num))
msg.extend(OppRs232Intf.calc_crc8_whole_msg(msg))
msg.extend(OppRs232Intf.EOM_CMD)
final_cmd = bytes(msg)
if self.debug:
self.debug_log("Mapping input %s and coil %s on %s", switch_num, coil_num, driver.sol_card.chain_serial)
self.send_to_processor(driver.sol_card.chain_serial, final_cmd)
def clear_hw_rule(self, switch: SwitchSettings, coil: DriverSettings):
"""Clear a hardware rule.
This is used if you want to remove the linkage between a switch and
some driver activity. For example, if you wanted to disable your
flippers (so that a player pushing the flipper buttons wouldn't cause
the flippers to flip), you'd call this method with your flipper button
as the *sw_num*.
"""
if switch.hw_switch.number in coil.hw_driver.switches:
if self.debug:
self.debug_log("Clearing HW Rule for switch: %s, coils: %s", switch.hw_switch.number,
coil.hw_driver.number)
coil.hw_driver.switches.remove(switch.hw_switch.number)
_, _, switch_num = switch.hw_switch.number.split("-")
switch_num = int(switch_num)
self._remove_switch_coil_mapping(switch_num, coil.hw_driver)
# disable rule if there are no more switches
# Technically not necessary unless the solenoid parameters are
# changing. MPF may not know when initial kick and hold values
# are changed, so this might need to be called each time.
if not coil.hw_driver.switches:
coil.hw_driver.remove_switch_rule()
| missionpinball/mpf | mpf/platforms/opp/opp.py | Python | mit | 53,276 |
from django.db import models
from constituencies.models import Constituency
from uk_political_parties.models import Party
from elections.models import Election
class Person(models.Model):
name = models.CharField(blank=False, max_length=255)
remote_id = models.CharField(blank=True, max_length=255, null=True)
source_url = models.URLField(blank=True, null=True)
source_name = models.CharField(blank=True, max_length=100)
image_url = models.URLField(blank=True, null=True)
elections = models.ManyToManyField(Election)
parties = models.ManyToManyField(Party, through='PartyMemberships')
constituencies = models.ManyToManyField(Constituency, through='PersonConstituencies')
@property
def current_party(self):
parties = self.partymemberships_set.filter(membership_end=None)
if parties:
return parties[0]
@property
def current_election(self):
return self.elections.filter(active=True)[0]
@property
def current_constituency(self):
return self.constituencies.filter(
personconstituencies__election=self.current_election)[0]
def __unicode__(self):
return "%s (%s)" % (self.name, self.remote_id)
class PartyMemberships(models.Model):
person = models.ForeignKey(Person)
party = models.ForeignKey(Party)
membership_start = models.DateField()
membership_end = models.DateField(null=True)
class PersonConstituencies(models.Model):
person = models.ForeignKey(Person)
constituency = models.ForeignKey(Constituency)
election = models.ForeignKey(Election)
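# Hedged usage sketch (added; assumed objects, not part of the original module):
#
#     person = Person.objects.create(name="Jane Doe")
#     PartyMemberships.objects.create(person=person, party=some_party,
#                                     membership_start=datetime.date.today())
#     person.current_party         # -> the open PartyMemberships row (membership_end=None)
#     person.current_constituency  # relies on an Election with active=True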
| JustinWingChungHui/electionleaflets | electionleaflets/apps/people/models.py | Python | mit | 1,603 |
import json
from app import models
from django.test import Client, TestCase
from django.contrib.auth.hashers import make_password
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
# Create your tests here.
class TestLecturerWeb(TestCase):
def _init_test_lecturer(self):
if hasattr(self, '_lecturer'):
return
self.lecturer = "lecturer_oUP1zwTO9"
self.lecturer_pswd = "123"
user_data = {
'password': make_password(self.lecturer_pswd),
'is_staff': False,
'is_superuser': False,
}
user, _ = User.objects.get_or_create(username=self.lecturer,
defaults=user_data)
_lecturer, _ = models.Lecturer.objects.get_or_create(
user=user,
defaults={
"subject": models.Subject.get_english(),
"name": "kaoru"
})
self._lecturer = _lecturer
def setUp(self):
self.client = Client()
self._init_test_lecturer()
self.client.login(username=self.lecturer, password=self.lecturer_pswd)
def tearDown(self):
pass
def test_home(self):
response = self.client.get(reverse('lecturer:home'))
self.assertEqual(302, response.status_code)
def test_index(self):
response = self.client.get(reverse('lecturer:index'))
self.assertEqual(200, response.status_code)
def test_login(self):
client = Client()
response = client.get(reverse('lecturer:login'))
self.assertEqual(200, response.status_code)
def test_login_auth(self):
client = Client()
data = {'username': self.lecturer, 'password': self.lecturer_pswd}
response = client.post(reverse('lecturer:login'), data=data)
self.assertEqual(302, response.status_code)
def test_logout(self):
client = Client()
client.login(username=self.lecturer, password=self.lecturer_pswd)
response = client.get(reverse('lecturer:logout'))
self.assertEqual(302, response.status_code)
def test_timeslots(self):
response = self.client.get(reverse('lecturer:timeslots'))
self.assertEqual(200, response.status_code)
def test_living(self):
response = self.client.get(reverse('lecturer:living'))
self.assertEqual(200, response.status_code)
def test_timeslot_questions(self):
response = self.client.get(
reverse('lecturer:timeslot-questions', kwargs={'tsid': 1}))
self.assertEqual(200, response.status_code)
# update test
response = self.client.post(
reverse('lecturer:timeslot-questions', kwargs={'tsid': 0}),
data={'gids': ''}
)
self.assertEqual(404, response.status_code)
# TODO: create test LiveCourse
def test_exercise_store(self):
response = self.client.get(reverse('lecturer:exercise-store'))
self.assertEqual(200, response.status_code)
data = {
"group": '{"exercises":[{"analyse":"题目解析","solution":"选项1","id":"","title":"题目","options":[{"text":"选项1","id":""},{"text":"选项2","id":""},{"text":"选项3","id":""},{"text":"选项4","id":""}]}],"desc":"题组描述","id":"","title":"题组名称"}'}
response = self.client.post(reverse('lecturer:exercise-store'), data)
self.assertEqual(200, response.status_code)
def test_api_exercise_store(self):
url = reverse('lecturer:api-exercise-store')
response = self.client.get(url)
self.assertEqual(200, response.status_code)
url = reverse('lecturer:api-exercise-store') + '?action=group_list'
response = self.client.get(url)
self.assertEqual(200, response.status_code)
url = reverse('lecturer:api-exercise-store') + '?action=group&gid=1'
response = self.client.get(url)
self.assertEqual(200, response.status_code)
| malaonline/Server | server/lecturer/tests.py | Python | mit | 4,001 |
from settings.common import Common
class Dev(Common):
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'ffrpg.sql', # Or path to database file if using sqlite3.
# The following settings are not used with sqlite3:
'USER': '',
'PASSWORD': '',
'HOST': '', # Empty for localhost through domain sockets or '127.0.0.1' for localhost through TCP.
'PORT': '', # Set to empty string for default.
}
} | Critical-Impact/ffrpg-gen | django/settings/dev.py | Python | mit | 650 |
# Download the Python helper library from twilio.com/docs/python/install
from twilio.rest import Client
# Your Account Sid and Auth Token from twilio.com/user/account
account_sid = "ACCOUNT_SID"
auth_token = "your_auth_token"
client = Client(account_sid, auth_token)
number = client.lookups.phone_numbers("+16502530000").fetch(
type="caller-name",
)
# A caller-name lookup populates `caller_name` on the result, not `carrier`
print(number.caller_name['caller_type'])
print(number.caller_name['caller_name'])
| teoreteetik/api-snippets | lookups/lookup-get-cname-example-1/lookup-get-cname-example-1.6.x.py | Python | mit | 417 |
import json
from dateutil import parser as datetime_parser
from occam.app import get_redis
from occam.runtime import OCCAM_SERVER_CONFIG_KEY
def get_servers():
redis = get_redis()
servers = json.loads(redis.get(OCCAM_SERVER_CONFIG_KEY))
return servers.items()
def iterate_servers():
redis = get_redis()
servers = json.loads(redis.get(OCCAM_SERVER_CONFIG_KEY))
for server_name, server_location in servers.iteritems():
yield server_name, server_location
def sorted_by_time_element(l, element_getter=None):
if not element_getter:
element_getter = lambda x: x
key_getter = lambda x: datetime_parser.parse(element_getter(x))
return sorted(l, key=key_getter)
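# Hedged example for sorted_by_time_element (added; assumed data):
#
#     rows = [{"ts": "2015-06-02T10:00:00"}, {"ts": "2015-06-01T09:00:00"}]
#     sorted_by_time_element(rows, element_getter=lambda r: r["ts"])
#     # -> oldest first, comparing dateutil-parsed "ts" values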
| Yelp/occam | occam/util.py | Python | mit | 715 |
import json
f = open('text-stripped-3.json')
out = open('text-lines.json', 'w')
start_obj = json.load(f)
end_obj = {'data': []}
characters_on_stage = []
currently_speaking = None
last_scene = '1.1'
for i in range(len(start_obj['data'])):
obj = start_obj['data'][i]
if obj['type'] == 'entrance':
        if any(char in characters_on_stage for char in obj['characters']):
raise Exception('Character tried to enter stage when already on stage at object ' + str(i))
characters_on_stage = characters_on_stage + obj['characters']
elif obj['type'] == 'exeunt':
characters_on_stage = []
elif obj['type'] == 'exit':
characters_on_stage = [char for char in characters_on_stage if char not in obj['characters']]
elif obj['type'] == 'speaker tag':
if obj['speaker'] not in characters_on_stage:
raise Exception('Character tried to speak when not on stage at object ' + str(i), start_obj['data'][i + 1])
currently_speaking = obj['speaker']
elif obj['type'] == 'line':
if currently_speaking == None:
raise Exception('A line did not have an associated speaker at object ' + str(i))
identifier_info = obj['identifier'].split('.')
scene = identifier_info[0] + '.' + identifier_info[1]
#if scene != last_scene:
# if len(characters_on_stage) != 0:
# print('Warning: scene ' + scene + ' just started with ' + str(characters_on_stage) + ' still on stage')
last_scene = scene
end_obj['data'].append({
'type': 'line',
'identifier': obj['identifier'],
'text': obj['text'].strip(),
'speaker': currently_speaking,
'characters': characters_on_stage
})
if len(characters_on_stage) == 0:
currently_speaking = None
json.dump(end_obj, out) | SyntaxBlitz/syntaxblitz.github.io | mining-lear/process/step6.py | Python | mit | 1,654 |
from django.conf.urls import url, include
from django.contrib import admin
from django.contrib.auth.decorators import login_required
from .views import UploadBlackListView, DemoView, UdateBlackListView
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^upload-blacklist$', login_required(UploadBlackListView.as_view()), name='upload-blacklist'),
url(r'^update-blacklist$', UdateBlackListView.as_view(), name='update-blacklist'),
url(r'^profile/', include('n_profile.urls')),
url(r'^demo$', DemoView.as_view(), name='demo'),
]
| nirvaris/nirvaris-djangofence | djangofence/urls.py | Python | mit | 566 |
import requests
from flask import session, Blueprint, redirect
from flask import request
from grano import authz
from grano.lib.exc import BadRequest
from grano.lib.serialisation import jsonify
from grano.views.cache import validate_cache
from grano.core import db, url_for, app
from grano.providers import github, twitter, facebook
from grano.model import Account
from grano.logic import accounts
blueprint = Blueprint('sessions_api', __name__)
@blueprint.route('/api/1/sessions', methods=['GET'])
def status():
permissions = {}
if authz.logged_in():
for permission in request.account.permissions:
permissions[permission.project.slug] = {
'reader': permission.reader,
'editor': permission.editor,
'admin': permission.admin
}
keys = {
'p': repr(permissions),
'i': request.account.id if authz.logged_in() else None
}
validate_cache(keys=keys)
return jsonify({
'logged_in': authz.logged_in(),
'api_key': request.account.api_key if authz.logged_in() else None,
'account': request.account if request.account else None,
'permissions': permissions
})
def provider_not_enabled(name):
return jsonify({
'status': 501,
'name': 'Provider not configured: %s' % name,
'message': 'There are no OAuth credentials given for %s' % name,
}, status=501)
@blueprint.route('/api/1/sessions/logout', methods=['GET'])
def logout():
#authz.require(authz.logged_in())
session.clear()
return redirect(request.args.get('next_url', '/'))
@blueprint.route('/api/1/sessions/login/github', methods=['GET'])
def github_login():
if not app.config.get('GITHUB_CLIENT_ID'):
return provider_not_enabled('github')
callback=url_for('sessions_api.github_authorized')
session.clear()
if not request.args.get('next_url'):
raise BadRequest("No 'next_url' is specified.")
session['next_url'] = request.args.get('next_url')
return github.authorize(callback=callback)
@blueprint.route('/api/1/sessions/callback/github', methods=['GET'])
@github.authorized_handler
def github_authorized(resp):
next_url = session.get('next_url', '/')
if resp is None or not 'access_token' in resp:
return redirect(next_url)
access_token = resp['access_token']
session['access_token'] = access_token, ''
res = requests.get('https://api.github.com/user?access_token=%s' % access_token,
verify=False)
data = res.json()
account = Account.by_github_id(data.get('id'))
data_ = {
'full_name': data.get('name'),
'login': data.get('login'),
'email': data.get('email'),
'github_id': data.get('id')
}
account = accounts.save(data_, account=account)
db.session.commit()
session['id'] = account.id
return redirect(next_url)
@blueprint.route('/api/1/sessions/login/twitter', methods=['GET'])
def twitter_login():
if not app.config.get('TWITTER_API_KEY'):
return provider_not_enabled('twitter')
callback=url_for('sessions_api.twitter_authorized')
session.clear()
if not request.args.get('next_url'):
raise BadRequest("No 'next_url' is specified.")
session['next_url'] = request.args.get('next_url')
return twitter.authorize(callback=callback)
@blueprint.route('/api/1/sessions/callback/twitter', methods=['GET'])
@twitter.authorized_handler
def twitter_authorized(resp):
next_url = session.get('next_url', '/')
if resp is None or not 'oauth_token' in resp:
return redirect(next_url)
session['twitter_token'] = (resp['oauth_token'],
resp['oauth_token_secret'])
res = twitter.get('users/show.json?user_id=%s' % resp.get('user_id'))
account = Account.by_twitter_id(res.data.get('id'))
data_ = {
'full_name': res.data.get('name'),
'login': res.data.get('screen_name'),
'twitter_id': res.data.get('id')
}
account = accounts.save(data_, account=account)
db.session.commit()
session['id'] = account.id
return redirect(next_url)
@blueprint.route('/api/1/sessions/login/facebook', methods=['GET'])
def facebook_login():
if not app.config.get('FACEBOOK_APP_ID'):
return provider_not_enabled('facebook')
callback=url_for('sessions_api.facebook_authorized')
session.clear()
if not request.args.get('next_url'):
raise BadRequest("No 'next_url' is specified.")
session['next_url'] = request.args.get('next_url')
return facebook.authorize(callback=callback)
@blueprint.route('/api/1/sessions/callback/facebook', methods=['GET'])
@facebook.authorized_handler
def facebook_authorized(resp):
next_url = session.get('next_url', '/')
if resp is None or not 'access_token' in resp:
return redirect(next_url)
session['facebook_token'] = (resp.get('access_token'), '')
data = facebook.get('/me').data
account = Account.by_facebook_id(data.get('id'))
data_ = {
'full_name': data.get('name'),
'login': data.get('username'),
'email': data.get('email'),
'facebook_id': data.get('id')
}
account = accounts.save(data_, account=account)
db.session.commit()
session['id'] = account.id
return redirect(next_url)
| clkao/grano | grano/views/sessions_api.py | Python | mit | 5,328 |
import json
import logging
from foxglove import glove
from httpx import Response
from .settings import Settings
logger = logging.getLogger('ext')
def lenient_json(v):
if isinstance(v, (str, bytes)):
try:
return json.loads(v)
except (ValueError, TypeError):
pass
return v
class ApiError(RuntimeError):
def __init__(self, method, url, status, response_text):
self.method = method
self.url = url
self.status = status
self.body = response_text
def __str__(self):
return f'{self.method} {self.url}, unexpected response {self.status}'
class ApiSession:
def __init__(self, root_url, settings: Settings):
self.settings = settings
self.root = root_url.rstrip('/') + '/'
async def get(self, uri, *, allowed_statuses=(200,), **data) -> Response:
return await self._request('GET', uri, allowed_statuses=allowed_statuses, **data)
async def delete(self, uri, *, allowed_statuses=(200,), **data) -> Response:
return await self._request('DELETE', uri, allowed_statuses=allowed_statuses, **data)
async def post(self, uri, *, allowed_statuses=(200, 201), **data) -> Response:
return await self._request('POST', uri, allowed_statuses=allowed_statuses, **data)
async def put(self, uri, *, allowed_statuses=(200, 201), **data) -> Response:
return await self._request('PUT', uri, allowed_statuses=allowed_statuses, **data)
async def _request(self, method, uri, allowed_statuses=(200, 201), **data) -> Response:
method, url, data = self._modify_request(method, self.root + str(uri).lstrip('/'), data)
kwargs = {}
headers = data.pop('headers_', None)
if headers is not None:
kwargs['headers'] = headers
if timeout := data.pop('timeout_', None):
kwargs['timeout'] = timeout
r = await glove.http.request(method, url, json=data or None, **kwargs)
if isinstance(allowed_statuses, int):
allowed_statuses = (allowed_statuses,)
if allowed_statuses != '*' and r.status_code not in allowed_statuses:
data = {
'request_real_url': str(r.request.url),
'request_headers': dict(r.request.headers),
'request_data': data,
'response_headers': dict(r.headers),
'response_content': lenient_json(r.text),
}
logger.warning(
'%s unexpected response %s /%s -> %s',
self.__class__.__name__,
method,
uri,
r.status_code,
extra={'data': data} if self.settings.verbose_http_errors else {},
)
raise ApiError(method, url, r.status_code, r.text)
else:
logger.debug('%s /%s -> %s', method, uri, r.status_code)
return r
def _modify_request(self, method, url, data):
return method, url, data
class Mandrill(ApiSession):
def __init__(self, settings):
super().__init__(settings.mandrill_url, settings)
def _modify_request(self, method, url, data):
data['key'] = self.settings.mandrill_key
return method, url, data
class MessageBird(ApiSession):
def __init__(self, settings):
super().__init__(settings.messagebird_url, settings)
def _modify_request(self, method, url, data):
data['headers_'] = {'Authorization': f'AccessKey {self.settings.messagebird_key}'}
return method, url, data
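# Illustrative sketch (not part of the original module): a hypothetical subclass showing
# the intended extension point. `_modify_request` injects an auth header before the
# request is sent. The settings attributes `example_url` and `example_key` are invented
# for this sketch, and calling it requires the application to have set up `glove.http`.
class ExampleApi(ApiSession):
    def __init__(self, settings):
        super().__init__(settings.example_url, settings)
    def _modify_request(self, method, url, data):
        data['headers_'] = {'Authorization': f'Bearer {self.settings.example_key}'}
        return method, url, data
async def example_ping(settings: Settings) -> Response:
    # POST <example_url>/ping with a small JSON body; only 200/201 responses are accepted
    api = ExampleApi(settings)
    return await api.post('ping', message='hello')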
| tutorcruncher/morpheus | src/ext.py | Python | mit | 3,555 |
from distutils.core import setup
setup(
name='sequencehelpers',
py_modules=['sequencehelpers'],
version='0.2.1',
description="A library consisting of functions for interacting with sequences and iterables.",
author='Zach Swift',
author_email='cras.zswift@gmail.com',
url='https://github.com/2achary/sequencehelpers',
download_url='https://github.com/2achary/sequence/tarball/0.2.1',
keywords=['sequence', 'single', 'distinct'],
classifiers=[],
)
| 2achary/sequencehelpers | setup.py | Python | mit | 467 |
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('kirppu', '0039_counter_private_key'),
]
operations = [
migrations.AlterUniqueTogether(
name='itemtype',
unique_together={('event', 'order')},
),
migrations.RemoveField(
model_name='itemtype',
name='key',
),
]
| jlaunonen/kirppu | kirppu/migrations/0040_remove_itemtype_key.py | Python | mit | 408 |
from django.contrib import admin
from .models import BackgroundImages, Widget
class WidgetAdmin(admin.ModelAdmin):
list_display = ('name', 'link', 'is_featured')
ordering = ('-id',)
class BackgroundAdmin(admin.ModelAdmin):
list_display = ('name', 'created_at')
ordering = ('-id',)
admin.site.register(Widget, WidgetAdmin)
admin.site.register(BackgroundImages, BackgroundAdmin)
| malikshahzad228/widget-jack | widgets/admin.py | Python | mit | 400 |
from pandac.PandaModules import *
from toontown.toonbase.ToonBaseGlobal import *
from DistributedMinigame import *
from direct.interval.IntervalGlobal import *
from direct.fsm import ClassicFSM, State
from direct.fsm import State
from toontown.safezone import Walk
from toontown.toonbase import ToontownTimer
from direct.gui import OnscreenText
import MinigameAvatarScorePanel
from direct.distributed import DistributedSmoothNode
import random
from toontown.toonbase import ToontownGlobals
from toontown.toonbase import TTLocalizer
from otp.otpbase import OTPGlobals
import TagGameGlobals
import Trajectory
class DistributedTagGame(DistributedMinigame):
DURATION = TagGameGlobals.DURATION
IT_SPEED_INCREASE = 1.3
IT_ROT_INCREASE = 1.3
def __init__(self, cr):
DistributedMinigame.__init__(self, cr)
self.gameFSM = ClassicFSM.ClassicFSM('DistributedTagGame', [State.State('off', self.enterOff, self.exitOff, ['play']), State.State('play', self.enterPlay, self.exitPlay, ['cleanup']), State.State('cleanup', self.enterCleanup, self.exitCleanup, ['off'])], 'off', 'off')
self.addChildGameFSM(self.gameFSM)
self.walkStateData = Walk.Walk('walkDone')
self.scorePanels = []
self.initialPositions = ((0, 10, 0, 180, 0, 0),
(10, 0, 0, 90, 0, 0),
(0, -10, 0, 0, 0, 0),
(-10, 0, 0, -90, 0, 0))
base.localAvatar.isIt = 0
self.modelCount = 4
def getTitle(self):
return TTLocalizer.TagGameTitle
def getInstructions(self):
return TTLocalizer.TagGameInstructions
def getMaxDuration(self):
return self.DURATION
def load(self):
self.notify.debug('load')
DistributedMinigame.load(self)
self.itText = OnscreenText.OnscreenText('itText', fg=(0.95, 0.95, 0.65, 1), scale=0.14, font=ToontownGlobals.getSignFont(), pos=(0.0, -0.8), wordwrap=15, mayChange=1)
self.itText.hide()
self.sky = loader.loadModel('phase_3.5/models/props/TT_sky')
self.ground = loader.loadModel('phase_4/models/minigames/tag_arena')
self.music = base.loadMusic('phase_4/audio/bgm/MG_toontag.ogg')
self.tagSfx = base.loadSfx('phase_4/audio/sfx/MG_Tag_C.ogg')
self.itPointer = loader.loadModel('phase_4/models/minigames/bboard-pointer')
self.tracks = []
self.IT = None
return
def unload(self):
self.notify.debug('unload')
DistributedMinigame.unload(self)
self.ignoreAll()
del self.tracks
del self.IT
self.sky.removeNode()
del self.sky
self.itPointer.removeNode()
del self.itPointer
self.ground.removeNode()
del self.ground
del self.music
del self.tagSfx
self.itText.cleanup()
del self.itText
self.removeChildGameFSM(self.gameFSM)
del self.gameFSM
def onstage(self):
self.notify.debug('onstage')
DistributedMinigame.onstage(self)
self.ground.reparentTo(render)
self.sky.reparentTo(render)
myPos = self.avIdList.index(self.localAvId)
base.localAvatar.setPosHpr(*self.initialPositions[myPos])
base.localAvatar.reparentTo(render)
base.localAvatar.loop('neutral')
camera.reparentTo(render)
camera.setPosHpr(0, -24, 16, 0, -30, 0)
base.camLens.setFar(450.0)
base.transitions.irisIn(0.4)
NametagGlobals.setMasterArrowsOn(1)
DistributedSmoothNode.activateSmoothing(1, 1)
self.IT = None
return
def offstage(self):
self.notify.debug('offstage')
DistributedSmoothNode.activateSmoothing(1, 0)
NametagGlobals.setMasterArrowsOn(0)
DistributedMinigame.offstage(self)
self.sky.reparentTo(hidden)
self.ground.reparentTo(hidden)
base.camLens.setFar(ToontownGlobals.DefaultCameraFar)
self.itText.hide()
def setGameReady(self):
if not self.hasLocalToon:
return
self.notify.debug('setGameReady')
if DistributedMinigame.setGameReady(self):
return
for avId in self.avIdList:
self.acceptTagEvent(avId)
myPos = self.avIdList.index(self.localAvId)
for i in xrange(self.numPlayers):
avId = self.avIdList[i]
avatar = self.getAvatar(avId)
if avatar:
avatar.startSmooth()
base.localAvatar.setPosHpr(*self.initialPositions[myPos])
base.localAvatar.d_clearSmoothing()
base.localAvatar.sendCurrentPosition()
base.localAvatar.b_setAnimState('neutral', 1)
base.localAvatar.b_setParent(ToontownGlobals.SPRender)
def setGameStart(self, timestamp):
if not self.hasLocalToon:
return
self.notify.debug('setGameStart')
DistributedMinigame.setGameStart(self, timestamp)
self.gameFSM.request('play')
def enterOff(self):
self.notify.debug('enterOff')
def exitOff(self):
pass
def enterPlay(self):
self.notify.debug('enterPlay')
for i in xrange(self.numPlayers):
avId = self.avIdList[i]
avName = self.getAvatarName(avId)
scorePanel = MinigameAvatarScorePanel.MinigameAvatarScorePanel(avId, avName)
scorePanel.setPos(-0.213, 0.0, 0.28 * i + 0.66)
scorePanel.reparentTo(base.a2dBottomRight)
self.scorePanels.append(scorePanel)
base.setCellsAvailable(base.rightCells, 0)
self.walkStateData.enter()
self.walkStateData.fsm.request('walking')
if base.localAvatar.isIt:
base.mouseInterfaceNode.setForwardSpeed(ToontownGlobals.ToonForwardSpeed * self.IT_SPEED_INCREASE)
base.mouseInterfaceNode.setRotateSpeed(ToontownGlobals.ToonRotateSpeed * self.IT_ROT_INCREASE)
self.timer = ToontownTimer.ToontownTimer()
self.timer.posInTopRightCorner()
self.timer.setTime(self.DURATION)
self.timer.countdown(self.DURATION, self.timerExpired)
base.playMusic(self.music, looping=1, volume=0.9)
base.localAvatar.setIdealCameraPos(Point3(0, -24, 8))
def exitPlay(self):
for task in self.tracks:
task.finish()
self.tracks = []
for avId in self.avIdList:
toon = self.getAvatar(avId)
if toon:
toon.getGeomNode().clearMat()
toon.scale = 1.0
toon.rescaleToon()
self.walkStateData.exit()
self.music.stop()
self.timer.destroy()
del self.timer
for panel in self.scorePanels:
panel.cleanup()
self.scorePanels = []
base.setCellsAvailable(base.rightCells, 1)
base.mouseInterfaceNode.setForwardSpeed(ToontownGlobals.ToonForwardSpeed)
base.mouseInterfaceNode.setRotateSpeed(ToontownGlobals.ToonRotateSpeed)
self.itPointer.reparentTo(hidden)
base.localAvatar.cameraIndex = 0
base.localAvatar.setCameraPositionByIndex(0)
def timerExpired(self):
self.notify.debug('local timer expired')
self.gameOver()
def enterCleanup(self):
self.notify.debug('enterCleanup')
self.gameFSM.request('off')
def exitCleanup(self):
pass
def setIt(self, avId):
if not self.hasLocalToon:
return
if self.gameFSM.getCurrentState().getName() != 'play':
self.notify.debug('Ignoring setIt after done playing')
return
self.itText.show()
self.notify.debug(str(avId) + ' is now it')
if avId == self.localAvId:
self.itText.setText(TTLocalizer.TagGameYouAreIt)
base.localAvatar.isIt = 1
base.localAvatar.controlManager.setSpeeds(OTPGlobals.ToonForwardSpeed * self.IT_SPEED_INCREASE, OTPGlobals.ToonJumpForce, OTPGlobals.ToonReverseSpeed * self.IT_SPEED_INCREASE, OTPGlobals.ToonRotateSpeed * self.IT_ROT_INCREASE)
else:
self.itText.setText(TTLocalizer.TagGameSomeoneElseIsIt % self.getAvatarName(avId))
base.localAvatar.isIt = 0
base.localAvatar.setWalkSpeedNormal()
avatar = self.getAvatar(avId)
if avatar:
self.itPointer.reparentTo(avatar)
self.itPointer.setZ(avatar.getHeight())
base.playSfx(self.tagSfx)
toon = self.getAvatar(avId)
duration = 0.6
if not toon:
return
spinTrack = LerpHprInterval(toon.getGeomNode(), duration, Point3(0, 0, 0), startHpr=Point3(-5.0 * 360.0, 0, 0), blendType='easeOut')
growTrack = Parallel()
gs = 2.5
for hi in xrange(toon.headParts.getNumPaths()):
head = toon.headParts[hi]
growTrack.append(LerpScaleInterval(head, duration, Point3(gs, gs, gs)))
def bounceFunc(t, trajectory, node = toon.getGeomNode()):
node.setZ(trajectory.calcZ(t))
def bounceCleanupFunc(node = toon.getGeomNode(), z = toon.getGeomNode().getZ()):
node.setZ(z)
bounceTrack = Sequence()
startZ = toon.getGeomNode().getZ()
tLen = 0
zVel = 30
decay = 0.6
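        # Build the bounce as a chain of ballistic arcs: each Trajectory launches from
        # startZ with vertical speed zVel, runs until it lands back on that plane, then
        # the next arc starts with zVel reduced by `decay`, until the arcs fill `duration`.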
while tLen < duration:
trajectory = Trajectory.Trajectory(0, Point3(0, 0, startZ), Point3(0, 0, zVel), gravMult=5.0)
dur = trajectory.calcTimeOfImpactOnPlane(startZ)
if dur <= 0:
break
bounceTrack.append(LerpFunctionInterval(bounceFunc, fromData=0.0, toData=dur, duration=dur, extraArgs=[trajectory]))
tLen += dur
zVel *= decay
bounceTrack.append(Func(bounceCleanupFunc))
tagTrack = Sequence(Func(toon.animFSM.request, 'off'), Parallel(spinTrack, growTrack, bounceTrack), Func(toon.animFSM.request, 'Happy'))
self.tracks.append(tagTrack)
tagTrack.start()
if self.IT:
it = self.getAvatar(self.IT)
shrinkTrack = Parallel()
for hi in xrange(it.headParts.getNumPaths()):
head = it.headParts[hi]
scale = ToontownGlobals.toonHeadScales[it.style.getAnimal()]
shrinkTrack.append(LerpScaleInterval(head, duration, scale))
self.tracks.append(shrinkTrack)
shrinkTrack.start()
self.IT = avId
def acceptTagEvent(self, avId):
self.accept('enterdistAvatarCollNode-' + str(avId), self.sendTagIfIt, [avId])
def sendTagIfIt(self, avId, collisionEntry):
if base.localAvatar.isIt:
self.notify.debug('Tagging ' + str(avId))
self.sendUpdate('tag', [avId])
else:
self.notify.debug('Bumped ' + str(avId))
def setTreasureScore(self, scores):
if not self.hasLocalToon:
return
self.notify.debug('setTreasureScore: %s' % scores)
for i in xrange(len(self.scorePanels)):
self.scorePanels[i].setScore(scores[i])
| ToonTownInfiniteRepo/ToontownInfinite | toontown/minigame/DistributedTagGame.py | Python | mit | 10,955 |
"""Pipeline configuration parameters."""
from os.path import dirname, abspath, join
from sqlalchemy import create_engine
OS_TYPES_URL = ('https://raw.githubusercontent.com/'
'openspending/os-types/master/src/os-types.json')
PIPELINE_FILE = 'pipeline-spec.yaml'
SOURCE_DATAPACKAGE_FILE = 'source.datapackage.json'
SOURCE_FILE = 'source.description.yaml'
STATUS_FILE = 'pipeline-status.json'
SCRAPER_FILE = 'scraper.py'
SOURCE_ZIP = 'source.datapackage.zip'
FISCAL_ZIP_FILE = 'fiscal.datapackage.zip'
SOURCE_DB = 'source.db.xlsx'
DATAPACKAGE_FILE = 'datapackage.json'
ROOT_DIR = abspath(join(dirname(__file__), '..'))
DATA_DIR = join(ROOT_DIR, 'data')
SPECIFICATIONS_DIR = join(ROOT_DIR, 'specifications')
PROCESSORS_DIR = join(ROOT_DIR, 'common', 'processors')
CODELISTS_DIR = join(ROOT_DIR, 'codelists')
DROPBOX_DIR = join(ROOT_DIR, 'dropbox')
GEOCODES_FILE = join(ROOT_DIR, 'geography', 'geocodes.nuts.csv')
FISCAL_SCHEMA_FILE = join(SPECIFICATIONS_DIR, 'fiscal.schema.yaml')
FISCAL_MODEL_FILE = join(SPECIFICATIONS_DIR, 'fiscal.model.yaml')
FISCAL_METADATA_FILE = join(SPECIFICATIONS_DIR, 'fiscal.metadata.yaml')
DEFAULT_PIPELINE_FILE = join(SPECIFICATIONS_DIR, 'default-pipeline-spec.yaml')
TEMPLATE_SCRAPER_FILE = join(PROCESSORS_DIR, 'scraper_template.py')
DESCRIPTION_SCHEMA_FILE = join(SPECIFICATIONS_DIR, 'source.schema.json')
TEMPLATE_SOURCE_FILE = join(SPECIFICATIONS_DIR, SOURCE_FILE)
LOCAL_PATH_EXTRACTOR = 'ingest_local_file'
REMOTE_CSV_EXTRACTOR = 'simple_remote_source'
REMOTE_EXCEL_EXTRACTOR = 'stream_remote_excel'
DATAPACKAGE_MUTATOR = 'mutate_datapackage'
DB_URI = 'sqlite:///{}/metrics.sqlite'
DB_ENGINE = create_engine(DB_URI.format(ROOT_DIR))
VERBOSE = False
LOG_SAMPLE_SIZE = 15
JSON_FORMAT = dict(indent=4, ensure_ascii=False, default=repr)
SNIFFER_SAMPLE_SIZE = 5000
SNIFFER_MAX_FAILURE_RATIO = 0.01
IGNORED_FIELD_TAG = '_ignored'
UNKNOWN_FIELD_TAG = '_unknown'
WARNING_CUTOFF = 10
NUMBER_FORMATS = [
{'format': 'default', 'bareNumber': False, 'decimalChar': '.', 'groupChar': ','},
{'format': 'default', 'bareNumber': False, 'decimalChar': ',', 'groupChar': '.'},
{'format': 'default', 'bareNumber': False, 'decimalChar': '.', 'groupChar': ' '},
{'format': 'default', 'bareNumber': False, 'decimalChar': ',', 'groupChar': ' '},
{'format': 'default', 'bareNumber': False, 'decimalChar': '.', 'groupChar': ''},
{'format': 'default', 'bareNumber': False, 'decimalChar': '.', 'groupChar': '`'},
{'format': 'default', 'bareNumber': False, 'decimalChar': ',', 'groupChar': '\''},
{'format': 'default', 'bareNumber': False, 'decimalChar': ',', 'groupChar': ' '},
]
DATE_FORMATS = [
{'format': '%Y'},
{'format': '%d/%m/%Y'},
{'format': '%d//%m/%Y'},
{'format': '%d-%b-%Y'}, # abbreviated month
{'format': '%d-%b-%y'}, # abbreviated month
{'format': '%d. %b %y'}, # abbreviated month
{'format': '%b %y'}, # abbreviated month
{'format': '%d/%m/%y'},
{'format': '%d-%m-%Y'},
{'format': '%Y-%m-%d'},
{'format': '%y-%m-%d'},
{'format': '%y.%m.%d'},
{'format': '%Y.%m.%d'},
{'format': '%d.%m.%Y'},
{'format': '%d.%m.%y'},
{'format': '%d.%m.%Y %H:%M'},
{'format': '%Y-%m-%d %H:%M:%S'},
{'format': '%Y-%m-%d %H:%M:%S.%f'},
{'format': '%Y-%m-%dT%H:%M:%SZ'},
{'format': '%m/%d/%Y'},
{'format': '%m/%Y'},
{'format': '%y'},
]
| Victordeleon/os-data-importers | eu-structural-funds/common/config.py | Python | mit | 3,382 |
from pokemongo_bot.human_behaviour import sleep
from pokemongo_bot.base_task import BaseTask
class IncubateEggs(BaseTask):
SUPPORTED_TASK_API_VERSION = 1
last_km_walked = 0
def initialize(self):
self.ready_incubators = []
self.used_incubators = []
self.eggs = []
self.km_walked = 0
self.hatching_animation_delay = 4.20
self.max_iv = 45.0
self._process_config()
def _process_config(self):
self.longer_eggs_first = self.config.get("longer_eggs_first", True)
def work(self):
try:
self._check_inventory()
except:
return
if self.used_incubators and IncubateEggs.last_km_walked != self.km_walked:
self.used_incubators.sort(key=lambda x: x.get("km"))
km_left = self.used_incubators[0]['km']-self.km_walked
if km_left <= 0:
self._hatch_eggs()
else:
self.emit_event(
'next_egg_incubates',
formatted='Next egg ({km_needed} km) incubates in {distance_in_km:.2f} km',
data={
'km_needed': self.used_incubators[0]['km_needed'],
'distance_in_km': km_left
}
)
IncubateEggs.last_km_walked = self.km_walked
sorting = self.longer_eggs_first
self.eggs.sort(key=lambda x: x.get("km"), reverse=sorting)
if self.ready_incubators:
self._apply_incubators()
def _apply_incubators(self):
for incubator in self.ready_incubators:
if incubator.get('used', False):
continue
for egg in self.eggs:
if egg["used"] or egg["km"] == -1:
continue
self.emit_event(
'incubate_try',
level='debug',
formatted="Attempting to apply incubator {incubator_id} to egg {egg_id}",
data={
'incubator_id': incubator['id'],
'egg_id': egg['id']
}
)
ret = self.bot.api.use_item_egg_incubator(
item_id=incubator["id"],
pokemon_id=egg["id"]
)
if ret:
code = ret.get("responses", {}).get("USE_ITEM_EGG_INCUBATOR", {}).get("result", 0)
if code == 1:
self.emit_event(
'incubate',
formatted='Incubating a {distance_in_km} egg.',
data={
'distance_in_km': str(egg['km'])
}
)
egg["used"] = True
incubator["used"] = True
break
elif code == 5 or code == 7:
self.emit_event(
'incubator_already_used',
level='debug',
formatted='Incubator in use.',
)
incubator["used"] = True
break
elif code == 6:
self.emit_event(
'egg_already_incubating',
level='debug',
formatted='Egg already incubating',
)
egg["used"] = True
def _check_inventory(self, lookup_ids=[]):
inv = {}
response_dict = self.bot.get_inventory()
matched_pokemon = []
temp_eggs = []
temp_used_incubators = []
temp_ready_incubators = []
inv = reduce(
dict.__getitem__,
["responses", "GET_INVENTORY", "inventory_delta", "inventory_items"],
response_dict
)
for inv_data in inv:
inv_data = inv_data.get("inventory_item_data", {})
if "egg_incubators" in inv_data:
temp_used_incubators = []
temp_ready_incubators = []
incubators = inv_data.get("egg_incubators", {}).get("egg_incubator",[])
if isinstance(incubators, basestring): # checking for old response
incubators = [incubators]
for incubator in incubators:
if 'pokemon_id' in incubator:
start_km = incubator.get('start_km_walked', 9001)
km_walked = incubator.get('target_km_walked', 9001)
temp_used_incubators.append({
"id": incubator.get('id', -1),
"km": km_walked,
"km_needed": (km_walked - start_km)
})
else:
temp_ready_incubators.append({
"id": incubator.get('id', -1)
})
continue
if "pokemon_data" in inv_data:
pokemon = inv_data.get("pokemon_data", {})
if pokemon.get("is_egg", False) and "egg_incubator_id" not in pokemon:
temp_eggs.append({
"id": pokemon.get("id", -1),
"km": pokemon.get("egg_km_walked_target", -1),
"used": False
})
elif 'is_egg' not in pokemon and pokemon['id'] in lookup_ids:
pokemon.update({
"iv": [
pokemon.get('individual_attack', 0),
pokemon.get('individual_defense', 0),
pokemon.get('individual_stamina', 0)
]})
matched_pokemon.append(pokemon)
continue
if "player_stats" in inv_data:
self.km_walked = inv_data.get("player_stats", {}).get("km_walked", 0)
if temp_used_incubators:
self.used_incubators = temp_used_incubators
if temp_ready_incubators:
self.ready_incubators = temp_ready_incubators
if temp_eggs:
self.eggs = temp_eggs
return matched_pokemon
def _hatch_eggs(self):
response_dict = self.bot.api.get_hatched_eggs()
log_color = 'green'
try:
result = reduce(dict.__getitem__, ["responses", "GET_HATCHED_EGGS"], response_dict)
except KeyError:
return
pokemon_ids = []
if 'pokemon_id' in result:
pokemon_ids = [id for id in result['pokemon_id']]
stardust = result.get('stardust_awarded', "error")
candy = result.get('candy_awarded', "error")
xp = result.get('experience_awarded', "error")
sleep(self.hatching_animation_delay)
self.bot.latest_inventory = None
try:
pokemon_data = self._check_inventory(pokemon_ids)
for pokemon in pokemon_data:
# pokemon ids seem to be offset by one
if pokemon['pokemon_id']!=-1:
pokemon['name'] = self.bot.pokemon_list[(pokemon.get('pokemon_id')-1)]['Name']
else:
pokemon['name'] = "error"
except:
pokemon_data = [{"name":"error","cp":"error","iv":"error"}]
if not pokemon_ids or pokemon_data[0]['name'] == "error":
self.emit_event(
'egg_hatched',
data={
'pokemon': 'error',
'cp': 'error',
'iv': 'error',
'exp': 'error',
'stardust': 'error',
'candy': 'error',
}
)
return
for i in range(len(pokemon_data)):
msg = "Egg hatched with a {pokemon} (CP {cp} - IV {iv}), {exp} exp, {stardust} stardust and {candy} candies."
self.emit_event(
'egg_hatched',
formatted=msg,
data={
'pokemon': pokemon_data[i]['name'],
'cp': pokemon_data[i]['cp'],
'iv': "{} {}".format(
"/".join(map(str, pokemon_data[i]['iv'])),
sum(pokemon_data[i]['iv'])/self.max_iv
),
'exp': xp[i],
'stardust': stardust[i],
'candy': candy[i],
}
)
| Compjeff/PokemonGo-Bot | pokemongo_bot/cell_workers/incubate_eggs.py | Python | mit | 8,649 |
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
"""
Verify data doesn't have basic mistakes, like empty text fields or empty label
candidates.
## Examples
```shell
parlai verify_data --task convai2 --datatype valid
```
"""
from parlai.agents.repeat_label.repeat_label import RepeatLabelAgent
from parlai.core.message import Message
from parlai.core.params import ParlaiParser
from parlai.utils.misc import TimeLogger, warn_once
from parlai.core.worlds import create_task
from parlai.core.script import ParlaiScript, register_script
import parlai.utils.logging as logging
def setup_args(parser=None):
if parser is None:
parser = ParlaiParser(True, True, 'Check tasks for common errors')
# Get command line arguments
parser.add_argument('-ltim', '--log-every-n-secs', type=float, default=2)
parser.add_argument('-d', '--display-examples', type='bool', default=False)
parser.set_defaults(datatype='train:stream:ordered')
return parser
def report(world, counts, log_time):
report = world.report()
log = {
'missing_text': counts['missing_text'],
'missing_labels': counts['missing_labels'],
'missing_label_candidates': counts['missing_label_candidates'],
'empty_string_label_candidates': counts['empty_string_label_candidates'],
'label_candidates_with_missing_label': counts[
'label_candidates_with_missing_label'
],
'did_not_return_message': counts['did_not_return_message'],
}
text, log = log_time.log(report['exs'], world.num_examples(), log)
return text, log
def warn(txt, act, opt):
if opt.get('display_examples'):
print(txt + ":\n" + str(act))
else:
warn_once(txt)
def verify(opt):
if opt['datatype'] == 'train':
logging.warning("changing datatype from train to train:ordered")
opt['datatype'] = 'train:ordered'
opt.log()
# create repeat label agent and assign it to the specified task
agent = RepeatLabelAgent(opt)
world = create_task(opt, agent)
log_every_n_secs = opt.get('log_every_n_secs', -1)
if log_every_n_secs <= 0:
log_every_n_secs = float('inf')
log_time = TimeLogger()
counts = {}
counts['missing_text'] = 0
counts['missing_labels'] = 0
counts['missing_label_candidates'] = 0
counts['empty_string_label_candidates'] = 0
counts['label_candidates_with_missing_label'] = 0
counts['did_not_return_message'] = 0
# Show some example dialogs.
while not world.epoch_done():
world.parley()
act = world.acts[0]
if not isinstance(act, Message):
counts['did_not_return_message'] += 1
if 'text' not in act and 'image' not in act:
warn("warning: missing text field:\n", act, opt)
counts['missing_text'] += 1
if 'labels' not in act and 'eval_labels' not in act:
warn("warning: missing labels/eval_labels field:\n", act, opt)
counts['missing_labels'] += 1
else:
if 'label_candidates' not in act:
counts['missing_label_candidates'] += 1
else:
labels = act.get('labels', act.get('eval_labels'))
is_label_cand = {}
for l in labels:
is_label_cand[l] = False
for c in act['label_candidates']:
if c == '':
warn("warning: empty string label_candidate:\n", act, opt)
counts['empty_string_label_candidates'] += 1
if c in is_label_cand:
if is_label_cand[c] is True:
warn(
"warning: label mentioned twice in candidate_labels:\n",
act,
opt,
)
is_label_cand[c] = True
for _, has in is_label_cand.items():
if has is False:
warn("warning: label missing in candidate_labels:\n", act, opt)
counts['label_candidates_with_missing_label'] += 1
if log_time.time() > log_every_n_secs:
text, log = report(world, counts, log_time)
print(text)
try:
# print dataset size if available
logging.info(
f'Loaded {world.num_episodes()} episodes with a '
f'total of {world.num_examples()} examples'
)
except AttributeError:
pass
counts['exs'] = int(world.report()['exs'])
return counts
def verify_data(opt):
counts = verify(opt)
print(counts)
return counts
@register_script('verify_data', hidden=True)
class VerifyData(ParlaiScript):
@classmethod
def setup_args(cls):
return setup_args()
def run(self):
return verify_data(self.opt)
if __name__ == '__main__':
VerifyData.main()
| facebookresearch/ParlAI | parlai/scripts/verify_data.py | Python | mit | 5,106 |
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
from frappe.desk.notifications import delete_notification_count_for
from frappe.core.doctype.user.user import STANDARD_USERS
from frappe.utils.user import get_enabled_system_users
from frappe.utils import cint
@frappe.whitelist()
def get_list(arg=None):
"""get list of messages"""
frappe.form_dict['limit_start'] = int(frappe.form_dict['limit_start'])
frappe.form_dict['limit_page_length'] = int(frappe.form_dict['limit_page_length'])
frappe.form_dict['user'] = frappe.session['user']
# set all messages as read
frappe.db.begin()
frappe.db.sql("""UPDATE `tabCommunication` set seen = 1
where
communication_type in ('Chat', 'Notification')
and reference_doctype = 'User'
and reference_name = %s""", frappe.session.user)
delete_notification_count_for("Messages")
frappe.local.flags.commit = True
if frappe.form_dict['contact'] == frappe.session['user']:
# return messages
return frappe.db.sql("""select * from `tabCommunication`
where
communication_type in ('Chat', 'Notification')
and reference_doctype ='User'
and (owner=%(contact)s
or reference_name=%(user)s
or owner=reference_name)
order by creation desc
limit %(limit_start)s, %(limit_page_length)s""", frappe.local.form_dict, as_dict=1)
else:
return frappe.db.sql("""select * from `tabCommunication`
where
communication_type in ('Chat', 'Notification')
and reference_doctype ='User'
and ((owner=%(contact)s and reference_name=%(user)s)
or (owner=%(contact)s and reference_name=%(contact)s))
order by creation desc
limit %(limit_start)s, %(limit_page_length)s""", frappe.local.form_dict, as_dict=1)
@frappe.whitelist()
def get_active_users():
data = frappe.db.sql("""select name,
(select count(*) from tabSessions where user=tabUser.name
and timediff(now(), lastupdate) < time("01:00:00")) as has_session
from tabUser
where enabled=1 and
ifnull(user_type, '')!='Website User' and
name not in ({})
order by first_name""".format(", ".join(["%s"]*len(STANDARD_USERS))), STANDARD_USERS, as_dict=1)
# make sure current user is at the top, using has_session = 100
users = [d.name for d in data]
if frappe.session.user in users:
data[users.index(frappe.session.user)]["has_session"] = 100
else:
# in case of administrator
data.append({"name": frappe.session.user, "has_session": 100})
return data
@frappe.whitelist()
def post(txt, contact, parenttype=None, notify=False, subject=None):
"""post message"""
d = frappe.new_doc('Communication')
d.communication_type = 'Notification' if parenttype else 'Chat'
d.subject = subject
d.content = txt
d.reference_doctype = 'User'
d.reference_name = contact
d.sender = frappe.session.user
d.insert(ignore_permissions=True)
delete_notification_count_for("Messages")
if notify and cint(notify):
if contact==frappe.session.user:
_notify([user.name for user in get_enabled_system_users()], txt)
else:
_notify(contact, txt, subject)
return d
@frappe.whitelist()
def delete(arg=None):
frappe.get_doc("Communication", frappe.form_dict['name']).delete()
def _notify(contact, txt, subject=None):
from frappe.utils import get_fullname, get_url
try:
if not isinstance(contact, list):
contact = [frappe.db.get_value("User", contact, "email") or contact]
frappe.sendmail(\
recipients=contact,
sender= frappe.db.get_value("User", frappe.session.user, "email"),
subject=subject or "New Message from " + get_fullname(frappe.session.user),
message=frappe.get_template("templates/emails/new_message.html").render({
"from": get_fullname(frappe.session.user),
"message": txt,
"link": get_url()
}),
bulk=True)
except frappe.OutgoingEmailError:
pass
| vCentre/vFRP-6233 | frappe/desk/page/messages/messages.py | Python | mit | 3,886 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# vim: ai ts=4 sts=4 et sw=4 nu
from __future__ import (unicode_literals, absolute_import,
division, print_function)
import logging
from django.core.management.base import BaseCommand
from optparse import make_option
from py3compat import PY2
from snisi_core.models.Entities import AdministrativeEntity as AEntity
if PY2:
import unicodecsv as csv
else:
import csv
logger = logging.getLogger(__name__)
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('-f',
help='CSV file',
action='store',
dest='filename'),
)
def handle(self, *args, **options):
headers = ['name', 'region', 'cercle_commune', 'commune_quartier']
f = open(options.get('filename'), 'w')
csv_writer = csv.DictWriter(f, fieldnames=headers)
csv_writer.writeheader()
csv_writer.writerow({
'name': "label",
'region': "Région",
'cercle_commune': "Cercle",
'commune_quartier': "Commune",
})
for region in AEntity.objects.filter(type__slug='region'):
logger.info(region)
is_bko = region.name == 'BAMAKO'
for cercle in AEntity.objects.filter(parent=region):
logger.info(cercle)
for commune in AEntity.objects.filter(parent=cercle):
logger.info(commune)
if not is_bko:
csv_writer.writerow({
'name': "choice_label",
'region': region.name,
'cercle_commune': cercle.name,
'commune_quartier': commune.name
})
continue
for vfq in AEntity.objects.filter(parent=commune):
for v in (region, cercle, commune, vfq):
if not len(v.name.strip()):
continue
csv_writer.writerow({
'name': "choice_label",
'region': region.name,
'cercle_commune': commune.name,
'commune_quartier': vfq.name
})
f.close()
| yeleman/snisi | snisi_maint/management/commands/entities_to_cascades.py | Python | mit | 2,414 |
#!/usr/bin/env python3
# -*- coding:utf-8 -*-
#
# sl-ls.py: get information utility
# Created by NAKAJIMA Takaaki
# Last modified: Apr 16, 2014.
#
# Require: Python v3
#
# See also https://softlayer-api-python-client.readthedocs.org
#
# You should set env variables
# SL_USERNAME = YOUR_USERNAME
# SL_API_KEY = YOUR_API_KEY
import logging
import SoftLayer
client = SoftLayer.Client()
class IterableItems:
    u"""Allow iterating over an entire paginated list."""
def __init__(self, client, limit=10):
self.master_account = client['Account']
self.offset = 0
self.limit = limit
self.define_fetch_method()
self.fetched = self.fetch()
def define_fetch_method(self):
        u"""Must be implemented by the subclass."""
        # set self.fetch_method to the appropriate paginated fetch method
        raise NotImplementedError("Not implemented yet.")
def fetch(self):
items = self.fetch_method(limit=self.limit, offset=self.offset)
self.offset += self.limit
return items
def __iter__(self):
return self
def __next__(self):
if len(self.fetched) < 1:
raise StopIteration
item = self.fetched.pop()
if len(self.fetched) < 1: # prefetch for next
self.fetched = self.fetch()
return item
class Users(IterableItems):
u"""List of SoftLayer_User_Customer"""
def define_fetch_method(self):
self.fetch_method = self.master_account.getUsers
class VirtualGuests(IterableItems):
u"""List of SoftLayer_Virtual_Guest"""
def define_fetch_method(self):
self.fetch_method = self.master_account.getVirtualGuests
# --------------------------------------------------------------
try:
master_account = client['Account']
print("## Account information ##")
user_mask="id, firstName, lastName, email"
account_info = master_account.getObject(mask=user_mask)
print(account_info)
# all child users
#for user in master_account.getUsers(limit=10, offset=0):
print("## Users ##");
for user in Users(client):
print("id:%d, %s" % (user['id'], user['username']))
# Virtual guest OSes
# for vg in client['Account'].getVirtualGuests(limit=10, offset=0):
print("## Virtual guests ##");
for vg in VirtualGuests(client):
print("AccountId=%s, ID=%d, hostname=%s"
% (vg['accountId'], vg['id'], vg['hostname']))
print("## Instances ##");
cci_manager = SoftLayer.CCIManager(client)
for cci in cci_manager.list_instances():
print("FQDN=%s, IP_addrs=%s, %s"
% (cci['fullyQualifiedDomainName'], cci['primaryIpAddress'], cci['primaryBackendIpAddress']))
print("## Billing items ##")
billing_mask = "id, parentId, description, currentHourlyCharge"
print(master_account.getAllBillingItems(mask=billing_mask))
except SoftLayer.SoftLayerAPIError as e:
    print("Unable to retrieve account information: faultCode=%s, faultString=%s"
% (e.faultCode, e.faultString))
exit(1)
| ryumei/softlayer-utility | sl-ls.py | Python | mit | 3,178 |
from .View import View
class MethuselahView(View):
type = "Methuselah"
trans = {
"stableAfter": {"pick": "l"}
}
| mir3z/life.js | library-scrapper/views/Methuselah.py | Python | mit | 134 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from runner.koan import *
# Greed is a dice game where you roll up to five dice to accumulate
# points. The following "score" function will be used calculate the
# score of a single roll of the dice.
#
# A greed roll is scored as follows:
#
# * A set of three ones is 1000 points
#
# * A set of three numbers (other than ones) is worth 100 times the
# number. (e.g. three fives is 500 points).
#
# * A one (that is not part of a set of three) is worth 100 points.
#
# * A five (that is not part of a set of three) is worth 50 points.
#
# * Everything else is worth 0 points.
#
#
# Examples:
#
# score([1,1,1,5,1]) => 1150 points
# score([2,3,4,6,2]) => 0 points
# score([3,4,5,3,3]) => 350 points
# score([1,5,1,2,4]) => 250 points
#
# More scoring examples are given in the tests below:
#
# Your goal is to write the score method.
from collections import Counter
def score(dice):
'''
    Calculate the score for a roll of up to five dice
'''
return sum((score_of_three(k) * (v//3) + score_of_one(k) * (v%3) for k, v in Counter(dice).items()))
def score_of_three(num):
'''
Calculate score for set of three
'''
if num == 1:
return 1000
else:
return num*100
def score_of_one(num):
'''
Calculate score for a roll not in a set of three
'''
if num == 1:
return 100
elif num == 5:
return 50
else:
return 0
class AboutScoringProject(Koan):
def test_score_of_an_empty_list_is_zero(self):
self.assertEqual(0, score([]))
def test_score_of_a_single_roll_of_5_is_50(self):
self.assertEqual(50, score([5]))
def test_score_of_a_single_roll_of_1_is_100(self):
self.assertEqual(100, score([1]))
def test_score_of_multiple_1s_and_5s_is_the_sum_of_individual_scores(self):
self.assertEqual(300, score([1,5,5,1]))
def test_score_of_single_2s_3s_4s_and_6s_are_zero(self):
self.assertEqual(0, score([2,3,4,6]))
def test_score_of_a_triple_1_is_1000(self):
self.assertEqual(1000, score([1,1,1]))
def test_score_of_other_triples_is_100x(self):
self.assertEqual(200, score([2,2,2]))
self.assertEqual(300, score([3,3,3]))
self.assertEqual(400, score([4,4,4]))
self.assertEqual(500, score([5,5,5]))
self.assertEqual(600, score([6,6,6]))
def test_score_of_mixed_is_sum(self):
self.assertEqual(250, score([2,5,2,2,3]))
self.assertEqual(550, score([5,5,5,5]))
self.assertEqual(1150, score([1,1,1,5,1]))
def test_ones_not_left_out(self):
self.assertEqual(300, score([1,2,2,2]))
self.assertEqual(350, score([1,5,2,2,2])) | kimegitee/python-koans | python3/koans/about_scoring_project.py | Python | mit | 2,731 |
# coding=utf-8
"""
This code was generated by
\ / _ _ _| _ _
| (_)\/(_)(_|\/| |(/_ v1.0.0
/ /
"""
from twilio.base import deserialize
from twilio.base import values
from twilio.base.instance_resource import InstanceResource
from twilio.base.list_resource import ListResource
from twilio.base.page import Page
class FeedbackList(ListResource):
def __init__(self, version, account_sid, message_sid):
"""
Initialize the FeedbackList
:param Version version: Version that contains the resource
:param account_sid: The account_sid
:param message_sid: The message_sid
:returns: twilio.rest.api.v2010.account.message.feedback.FeedbackList
:rtype: twilio.rest.api.v2010.account.message.feedback.FeedbackList
"""
super(FeedbackList, self).__init__(version)
# Path Solution
self._solution = {
'account_sid': account_sid,
'message_sid': message_sid,
}
self._uri = '/Accounts/{account_sid}/Messages/{message_sid}/Feedback.json'.format(**self._solution)
def create(self, outcome=values.unset):
"""
Create a new FeedbackInstance
:param FeedbackInstance.Outcome outcome: The outcome
:returns: Newly created FeedbackInstance
:rtype: twilio.rest.api.v2010.account.message.feedback.FeedbackInstance
"""
data = values.of({
'Outcome': outcome,
})
payload = self._version.create(
'POST',
self._uri,
data=data,
)
return FeedbackInstance(
self._version,
payload,
account_sid=self._solution['account_sid'],
message_sid=self._solution['message_sid'],
)
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
return '<Twilio.Api.V2010.FeedbackList>'
class FeedbackPage(Page):
def __init__(self, version, response, solution):
"""
Initialize the FeedbackPage
:param Version version: Version that contains the resource
:param Response response: Response from the API
:param account_sid: The account_sid
:param message_sid: The message_sid
:returns: twilio.rest.api.v2010.account.message.feedback.FeedbackPage
:rtype: twilio.rest.api.v2010.account.message.feedback.FeedbackPage
"""
super(FeedbackPage, self).__init__(version, response)
# Path Solution
self._solution = solution
def get_instance(self, payload):
"""
Build an instance of FeedbackInstance
:param dict payload: Payload response from the API
:returns: twilio.rest.api.v2010.account.message.feedback.FeedbackInstance
:rtype: twilio.rest.api.v2010.account.message.feedback.FeedbackInstance
"""
return FeedbackInstance(
self._version,
payload,
account_sid=self._solution['account_sid'],
message_sid=self._solution['message_sid'],
)
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
return '<Twilio.Api.V2010.FeedbackPage>'
class FeedbackInstance(InstanceResource):
class Outcome(object):
CONFIRMED = "confirmed"
UMCONFIRMED = "umconfirmed"
def __init__(self, version, payload, account_sid, message_sid):
"""
Initialize the FeedbackInstance
:returns: twilio.rest.api.v2010.account.message.feedback.FeedbackInstance
:rtype: twilio.rest.api.v2010.account.message.feedback.FeedbackInstance
"""
super(FeedbackInstance, self).__init__(version)
# Marshaled Properties
self._properties = {
'account_sid': payload['account_sid'],
'message_sid': payload['message_sid'],
'outcome': payload['outcome'],
'date_created': deserialize.rfc2822_datetime(payload['date_created']),
'date_updated': deserialize.rfc2822_datetime(payload['date_updated']),
'uri': payload['uri'],
}
# Context
self._context = None
self._solution = {
'account_sid': account_sid,
'message_sid': message_sid,
}
@property
def account_sid(self):
"""
:returns: The account_sid
:rtype: unicode
"""
return self._properties['account_sid']
@property
def message_sid(self):
"""
:returns: The message_sid
:rtype: unicode
"""
return self._properties['message_sid']
@property
def outcome(self):
"""
:returns: The outcome
:rtype: FeedbackInstance.Outcome
"""
return self._properties['outcome']
@property
def date_created(self):
"""
:returns: The date_created
:rtype: datetime
"""
return self._properties['date_created']
@property
def date_updated(self):
"""
:returns: The date_updated
:rtype: datetime
"""
return self._properties['date_updated']
@property
def uri(self):
"""
:returns: The uri
:rtype: unicode
"""
return self._properties['uri']
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
return '<Twilio.Api.V2010.FeedbackInstance>'
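# Illustrative usage (not part of the generated code): with an initialised Twilio REST
# client, message feedback is reached through the message resource, along the lines of
#     client.messages('MMXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX').feedback.create(outcome='confirmed')
# where the message SID above is only a placeholder.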
| angadpc/Alexa-Project- | twilio/rest/api/v2010/account/message/feedback.py | Python | mit | 5,676 |
""" Tests barbante.api.generate_product_templates_tfidf.
"""
import json
import nose.tools
import barbante.api.generate_product_templates_tfidf as script
import barbante.utils.logging as barbante_logging
import barbante.tests as tests
log = barbante_logging.get_logger(__name__)
def test_script():
""" Tests a call to script barbante.api.generate_product_templates_tfidf.
"""
result = script.main([tests.TEST_ENV])
log.debug(result)
result_json = json.dumps(result)
nose.tools.ok_(result_json) # a well-formed json is enough
if __name__ == '__main__':
test_script() | hypermindr/barbante | barbante/api/tests/test_generate_product_templates_tfidf_api.py | Python | mit | 603 |
param = dict(
useAIon=True,
verbose=False,
chargePreXlinkIons=[1, 3],
chargePostXlinkIons=[2, 5],
basepeakint = 100.0,
dynamicrange = 0.001,
missedsites = 2,
minlength = 4,
maxlength = 51,
modRes = '',
modMass = 0.0,
linkermass = 136.10005,
ms1tol = dict(measure='ppm', val=5),
ms2tol = dict(measure='da', val=0.01),
minmz = 200,
maxmz = 2000,
mode = 'conservative',
patternstring = '^[ACDEFGHIKLMNPQRSTVWY]*K[ACDEFGHIKLMNPQRSTVWY]+$',
fixedMod = [],
neutralloss=dict(
h2oLoss=dict(
mass=-18.010565,
aa=set('ACDEFGHIKLMNPQRSTVWY')),
nh3Loss=dict(
mass=-17.026549,
aa=set('ACDEFGHIKLMNPQRSTVWY')),
h2oGain=dict(
mass=18.010565,
aa=set('ACDEFGHIKLMNPQRSTVWY'))))
mass = dict(
A=71.037114,
R=156.101111,
N=114.042927,
D=115.026943,
C=103.009184,
E=129.042593,
Q=128.058578,
G=57.021464,
H=137.058912,
I=113.084064,
L=113.084064,
K=128.094963,
M=131.040485,
F=147.068414,
P=97.052764,
S=87.032028,
T=101.047678,
W=186.079313,
Y=163.063329,
V=99.068414,
Hatom=1.007825032,
Oatom=15.99491462,
neutronmass = 1.008701,
BIonRes=1.0078246,
AIonRes=-26.9870904,
YIonRes=19.0183888,
isotopeInc = [1.008701/4, 1.008701/3, 1.008701/2, 1.008701/1])
modification = dict(
position=[],
deltaMass=[])
for i in range(len(param['fixedMod'])):
aa = param['fixedMod'][i][0]
delta = param['fixedMod'][i][1]
mass[aa] += delta
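# Illustrative note (not part of the original file): each fixed modification is a
# (residue, delta mass) pair, e.g. fixedMod = [('C', 57.021464)] would apply
# carbamidomethylation to every cysteine before the residue masses above are used.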
| COL-IU/XLSearch | library/Parameter.py | Python | mit | 1,380 |
# coding: utf-8
"""
Talon.One API
The Talon.One API is used to manage applications and campaigns, as well as to integrate with your application. The operations in the _Integration API_ section are used to integrate with our platform, while the other operations are used to manage applications and campaigns. ### Where is the API? The API is available at the same hostname as these docs. For example, if you are reading this page at `https://mycompany.talon.one/docs/api/`, the URL for the [updateCustomerProfile][] operation is `https://mycompany.talon.one/v1/customer_profiles/id` [updateCustomerProfile]: #operation--v1-customer_profiles--integrationId--put # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from talon_one.configuration import Configuration
class IntegrationEntity(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'integration_id': 'str',
'created': 'datetime'
}
attribute_map = {
'integration_id': 'integrationId',
'created': 'created'
}
def __init__(self, integration_id=None, created=None, local_vars_configuration=None): # noqa: E501
"""IntegrationEntity - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._integration_id = None
self._created = None
self.discriminator = None
self.integration_id = integration_id
self.created = created
@property
def integration_id(self):
"""Gets the integration_id of this IntegrationEntity. # noqa: E501
The integration ID for this entity sent to and used in the Talon.One system. # noqa: E501
:return: The integration_id of this IntegrationEntity. # noqa: E501
:rtype: str
"""
return self._integration_id
@integration_id.setter
def integration_id(self, integration_id):
"""Sets the integration_id of this IntegrationEntity.
The integration ID for this entity sent to and used in the Talon.One system. # noqa: E501
:param integration_id: The integration_id of this IntegrationEntity. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and integration_id is None: # noqa: E501
raise ValueError("Invalid value for `integration_id`, must not be `None`") # noqa: E501
self._integration_id = integration_id
@property
def created(self):
"""Gets the created of this IntegrationEntity. # noqa: E501
The exact moment this entity was created. # noqa: E501
:return: The created of this IntegrationEntity. # noqa: E501
:rtype: datetime
"""
return self._created
@created.setter
def created(self, created):
"""Sets the created of this IntegrationEntity.
The exact moment this entity was created. # noqa: E501
:param created: The created of this IntegrationEntity. # noqa: E501
:type: datetime
"""
if self.local_vars_configuration.client_side_validation and created is None: # noqa: E501
raise ValueError("Invalid value for `created`, must not be `None`") # noqa: E501
self._created = created
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, IntegrationEntity):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, IntegrationEntity):
return True
return self.to_dict() != other.to_dict()
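# Illustrative usage (not part of the generated code): instances are normally built by
# the API client from response payloads, but can also be constructed directly, e.g.
#     IntegrationEntity(integration_id='customer-123', created=datetime.datetime.utcnow())
# where 'customer-123' is only a placeholder integration ID.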
| talon-one/talon_one.py | talon_one/models/integration_entity.py | Python | mit | 5,337 |
from math import radians, cos, sin, asin, sqrt
def haversine(lon1, lat1, lon2, lat2):
"""
Calculate the great circle distance between two points
on the earth (specified in decimal degrees)
"""
# convert decimal degrees to radians
lon1, lat1, lon2, lat2 = map(radians, [lon1, lat1, lon2, lat2])
# haversine formula
dlon = lon2 - lon1
dlat = lat2 - lat1
a = sin(dlat/2)**2 + cos(lat1) * cos(lat2) * sin(dlon/2)**2
c = 2 * asin(sqrt(a))
    # 6367 km is an approximate radius of the Earth
km = 6367 * c
return km
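if __name__ == '__main__':
    # Illustrative check (not part of the original module): Paris (lon 2.3522, lat 48.8566)
    # to London (lon -0.1276, lat 51.5072) should come out at roughly 340 km.
    print("%.1f km" % haversine(2.3522, 48.8566, -0.1276, 51.5072))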
| anduslim/codex | codex_project/actors/haversine.py | Python | mit | 554 |
# -*- coding: utf-8 -*-
#
# This file is part of Karesansui.
#
# Copyright (C) 2009-2012 HDE, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
import os
import web
import simplejson as json
import karesansui
from karesansui.lib.rest import Rest, auth
from karesansui.lib.const import VIRT_COMMAND_APPLY_SNAPSHOT
from karesansui.lib.utils import is_param, is_int
from karesansui.lib.virt.snapshot import KaresansuiVirtSnapshot
from karesansui.db.access.machine import findbyguest1
from karesansui.db.access.snapshot import findbyname_guestby1 as s_findbyname_guestby1
from karesansui.db.access._2pysilhouette import save_job_collaboration
from karesansui.db.access.machine2jobgroup import new as m2j_new
from karesansui.db.model._2pysilhouette import Job, JobGroup
from pysilhouette.command import dict2command
class GuestBy1CurrentSnapshot(Rest):
@auth
def _PUT(self, *param, **params):
(host_id, guest_id) = self.chk_guestby1(param)
if guest_id is None: return web.notfound()
if is_param(self.input, 'id') is False \
or is_int(self.input.id) is False:
return web.badrequest("Request data is invalid.")
snapshot_id = str(self.input.id)
snapshot = s_findbyname_guestby1(self.orm, snapshot_id, guest_id)
if snapshot is None:
pass
# ignore snapshots that is not in database.
#return web.badrequest("Request data is invalid.")
model = findbyguest1(self.orm, guest_id)
kvs = KaresansuiVirtSnapshot(readonly=False)
snapshot_list = []
try:
domname = kvs.kvc.uuid_to_domname(model.uniq_key)
if not domname: return web.notfound()
self.view.is_creatable = kvs.isSupportedDomain(domname)
try:
snapshot_list = kvs.listNames(domname)[domname]
except:
pass
finally:
kvs.finish()
if not snapshot_id in snapshot_list:
self.logger.debug(_("The specified snapshot does not exist in database. - %s") % snapshot_id)
# ignore snapshots that is not in database.
#return web.notfound()
action_cmd = dict2command(
"%s/%s" % (karesansui.config['application.bin.dir'],
VIRT_COMMAND_APPLY_SNAPSHOT),
{"name" : domname, "id" : snapshot_id})
cmdname = 'Apply Snapshot'
_jobgroup = JobGroup(cmdname, karesansui.sheconf['env.uniqkey'])
_job = Job('%s command' % cmdname, 0, action_cmd)
_jobgroup.jobs.append(_job)
_machine2jobgroup = m2j_new(machine=model,
jobgroup_id=-1,
uniq_key=karesansui.sheconf['env.uniqkey'],
created_user=self.me,
modified_user=self.me,
)
save_job_collaboration(self.orm,
self.pysilhouette.orm,
_machine2jobgroup,
_jobgroup,
)
self.view.currentsnapshot = snapshot
return web.accepted(url=web.ctx.path)
urls = (
'/host/(\d+)/guest/(\d+)/currentsnapshot/?(\.part)?$', GuestBy1CurrentSnapshot,
)
| karesansui/karesansui | karesansui/gadget/guestby1currentsnapshot.py | Python | mit | 4,377 |
# Stack implementation
class Stack (object):
def __init__ (self):
self.stack = []
def push (self, data):
self.stack.append(data)
def peek (self):
if self.isEmpty():
return None
return self.stack[-1]
def pop (self):
if self.isEmpty():
return None
return self.stack.pop()
def isEmpty (self):
return len(self.stack) == 0
def __str__ (self):
return ' '.join(str(x) for x in self.stack)
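if __name__ == '__main__':
    # Illustrative usage (not part of the original file): exercise the stack API.
    s = Stack()
    for value in (1, 2, 3):
        s.push(value)
    print(s)            # 1 2 3
    print(s.peek())     # 3
    print(s.pop())      # 3
    print(s.isEmpty())  # False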
| mag6367/Cracking_the_Coding_Interview_Python_Solutions | chapter3/stack.py | Python | mit | 418 |
#!/usr/bin/env python
import os,sys
folder = "/media/kentir1/Development/Linux_Program/Fundkeep/"
def makinGetYear():
return os.popen("date +'%Y'").read()[:-1]
def makinGetMonth():
return os.popen("date +'%m'").read()[:-1]
def makinGetDay():
return os.popen("date +'%d'").read()[:-1]
def makinGetPrevYear(daypassed):
return os.popen("date --date='"+str(daypassed)+" day ago' +'%Y'").read()[:-1]
def makinGetPrevMonth(daypassed):
return os.popen("date --date='"+str(daypassed)+" day ago' +'%m'").read()[:-1]
def makinGetPrevDay(daypassed):
return os.popen("date --date='"+str(daypassed)+" day ago' +'%d'").read()[:-1]
#last entry
f = open(folder+"data/last_entry","r")
le = f.read()
le_y=le[:4]
le_m=le[4:6]
le_d=le[6:]
#input
os.system("gedit "+folder+"var/input")
f = open(folder+"var/input","r")
data = f.read()
f.close()
balance_out = int(data[:data.find(" ")])
balance_ket = data[data.find(" ")+1:-1]
print balance_ket
os.system("mkdir "+folder+"data")
os.system("mkdir "+folder+"data/"+makinGetYear())
os.system("mkdir "+folder+"data/"+makinGetYear()+"/"+makinGetMonth())
os.system("mkdir "+folder+"data/"+makinGetYear()+"/"+makinGetMonth()+"/"+makinGetDay())
balance_before = 0
# take the balance from the previous day
dapet = 0
while (dapet == 0):
dpassed = 1
try:
f = open(folder+"data/"
+makinGetPrevYear(dpassed)
+"/"
+makinGetPrevMonth(dpassed)
+"/"
+makinGetPrevDay(dpassed)
+"/balance_after","r")
if (makinGetDay()=="01"):
t_day = 31
t_bulan = ("0"+str(int(makinGetMonth())-1))[-2:]
t_tahun = makinGetYear()
			if (int(makinGetMonth()) == 1):
				t_bulan = "12"
				t_tahun = str(int(makinGetYear()) - 1)
print t_bulan
dapet = 0
while (dapet==0):
try:
f = open(folder+"data/"+t_tahun+"/"+t_bulan+"/"+("0"+str(t_day))[-2:]+"/balance_after","r")
print t_day
dapet = 1
balance_before = int(f.read())
except:
t_day = t_day - 1
f.close()
else:
t_day = int(makinGetDay())-1
#~ t_bulan = ("0"+str(int(makinGetMonth())))[-2:]
t_bulan = makinGetMonth()
f = open(folder+"data/"+makinGetYear()+"/"+t_bulan+"/"+("0"+str(t_day))[-2:]+"/balance_after","r")
			balance_before = int(f.read())
	except:
		# assumed fallback (added so the unclosed try above parses): if the previous
		# day's balance cannot be read, balance_before keeps its default of 0
		pass
	# when this is a fresh input
try:
f = open(folder+"data/"+t_tahun+"/"+t_bulan+"/"+("0"+str(t_day))[-2:]+"/balance_after","r")
except:
		# when only updating the balance_out contents (today's spending)
		pass
| imakin/PersonalAssistant | Fundkeep/modul/b__main_backu.py | Python | mit | 2,347 |
# Created by PyCharm Community Edition
# User: Kaushik Talukdar
# Date: 22-03-2017
# Time: 03:52 PM
# Python doesn't allow you to mix strings and numbers in concatenation; numbers must be converted to strings first.
age = 28
print("Greetings on your " + str(age) + "th birthday") | KT26/PythonCourse | 1. Getting Started/11.py | Python | mit | 322 |
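# An alternative worth knowing (illustrative, not part of the lesson): str.format()
# performs the conversion for you, e.g.
# print("Greetings on your {}th birthday".format(age))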
# -*- coding: utf-8 -*-
from cachy import CacheManager
from cachy.serializers import PickleSerializer
class Cache(CacheManager):
_serializers = {
'pickle': PickleSerializer()
}
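# Illustrative usage (not part of the original module), assuming cachy's standard
# store-configuration layout; the cache path below is only a placeholder:
#     cache = Cache({
#         'default': 'file',
#         'serializer': 'pickle',
#         'stores': {'file': {'driver': 'file', 'path': '/tmp/orator-cache'}},
#     })
#     cache.put('key', {'some': 'value'}, 5)  # cache for five minutes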
| sdispater/orator-cache | orator_cache/cache.py | Python | mit | 197 |
import unittest
import os
import os.path
import json
# The folder holding the test data
data_path = os.path.dirname(__file__)
# Set the temporal config for testing
os.environ['TIMEVIS_CONFIG'] = os.path.join(data_path, 'config.py')
import timevis
class TestExperiment(unittest.TestCase):
def setUp(self):
self.app = timevis.app.test_client()
self.url = '/api/v2/experiment'
def test_post(self):
name = os.path.join(data_path, 'post_exp.json')
with open(name) as file:
obj = json.load(file)
resp = self.app.post(self.url, data=json.dumps(obj),
content_type='application/json')
self.assertIsNotNone(resp.data)
def test_get(self):
resp = self.app.get(self.url)
self.assertIsNotNone(resp.data)
def test_put(self):
name = os.path.join(data_path, 'put_exp.json')
with open(name) as file:
obj = json.load(file)
resp = self.app.put(self.url, data=json.dumps(obj),
content_type='application/json')
self.assertIsNotNone(resp.data)
if __name__ == '__main__':
unittest.main()
| gaoce/TimeVis | tests/test_api.py | Python | mit | 1,185 |
# coding=utf-8
import os
import unittest
from hashlib import md5
from django.conf import settings
from djblets.testing.decorators import add_fixtures
from kgb import SpyAgency
from reviewboard.diffviewer.diffutils import patch
from reviewboard.diffviewer.testing.mixins import DiffParserTestingMixin
from reviewboard.scmtools.core import (Branch, Commit, Revision, HEAD,
PRE_CREATION)
from reviewboard.scmtools.errors import SCMError, FileNotFoundError
from reviewboard.scmtools.models import Repository, Tool
from reviewboard.scmtools.svn import SVNTool, recompute_svn_backend
from reviewboard.scmtools.svn.utils import (collapse_svn_keywords,
has_expanded_svn_keywords)
from reviewboard.scmtools.tests.testcases import SCMTestCase
from reviewboard.testing.testcase import TestCase
class _CommonSVNTestCase(DiffParserTestingMixin, SpyAgency, SCMTestCase):
"""Common unit tests for Subversion.
This is meant to be subclassed for each backend that wants to run
the common set of tests.
"""
backend = None
backend_name = None
fixtures = ['test_scmtools']
__test__ = False
def setUp(self):
super(_CommonSVNTestCase, self).setUp()
self._old_backend_setting = settings.SVNTOOL_BACKENDS
settings.SVNTOOL_BACKENDS = [self.backend]
recompute_svn_backend()
self.svn_repo_path = os.path.abspath(
os.path.join(os.path.dirname(__file__),
'..', 'testdata', 'svn_repo'))
self.svn_ssh_path = ('svn+ssh://localhost%s'
% self.svn_repo_path.replace('\\', '/'))
self.repository = Repository.objects.create(
name='Subversion SVN',
path='file://%s' % self.svn_repo_path,
tool=Tool.objects.get(name='Subversion'))
try:
self.tool = self.repository.get_scmtool()
except ImportError:
raise unittest.SkipTest('The %s backend could not be used. A '
'dependency may be missing.'
% self.backend)
assert self.tool.client.__class__.__module__ == self.backend
def tearDown(self):
super(_CommonSVNTestCase, self).tearDown()
settings.SVNTOOL_BACKENDS = self._old_backend_setting
recompute_svn_backend()
def shortDescription(self):
desc = super(_CommonSVNTestCase, self).shortDescription()
desc = desc.replace('<backend>', self.backend_name)
return desc
def test_get_repository_info(self):
"""Testing SVN (<backend>) get_repository_info"""
info = self.tool.get_repository_info()
self.assertIn('uuid', info)
self.assertIsInstance(info['uuid'], str)
self.assertEqual(info['uuid'], '41215d38-f5a5-421f-ba17-e0be11e6c705')
self.assertIn('root_url', info)
self.assertIsInstance(info['root_url'], str)
self.assertEqual(info['root_url'], self.repository.path)
self.assertIn('url', info)
self.assertIsInstance(info['url'], str)
self.assertEqual(info['url'], self.repository.path)
def test_ssh(self):
"""Testing SVN (<backend>) with a SSH-backed Subversion repository"""
self._test_ssh(self.svn_ssh_path, 'trunk/doc/misc-docs/Makefile')
def test_ssh_with_site(self):
"""Testing SVN (<backend>) with a SSH-backed Subversion repository
with a LocalSite
"""
self._test_ssh_with_site(self.svn_ssh_path,
'trunk/doc/misc-docs/Makefile')
def test_get_file(self):
"""Testing SVN (<backend>) get_file"""
tool = self.tool
expected = (b'include ../tools/Makefile.base-vars\n'
b'NAME = misc-docs\n'
b'OUTNAME = svn-misc-docs\n'
b'INSTALL_DIR = $(DESTDIR)/usr/share/doc/subversion\n'
b'include ../tools/Makefile.base-rules\n')
# There are 3 versions of this test in order to get 100% coverage of
# the svn module.
rev = Revision('2')
filename = 'trunk/doc/misc-docs/Makefile'
value = tool.get_file(filename, rev)
self.assertIsInstance(value, bytes)
self.assertEqual(value, expected)
value = tool.get_file('/%s' % filename, rev)
self.assertIsInstance(value, bytes)
self.assertEqual(value, expected)
value = tool.get_file('%s/%s' % (self.repository.path, filename), rev)
self.assertIsInstance(value, bytes)
self.assertEqual(value, expected)
with self.assertRaises(FileNotFoundError):
tool.get_file('')
def test_file_exists(self):
"""Testing SVN (<backend>) file_exists"""
tool = self.tool
self.assertTrue(tool.file_exists('trunk/doc/misc-docs/Makefile'))
self.assertFalse(tool.file_exists('trunk/doc/misc-docs/Makefile2'))
with self.assertRaises(FileNotFoundError):
tool.get_file('hello', PRE_CREATION)
def test_get_file_with_special_url_chars(self):
"""Testing SVN (<backend>) get_file with filename containing
characters that are special in URLs and repository path as a URI
"""
value = self.tool.get_file('trunk/crazy& ?#.txt', Revision('12'))
self.assertTrue(isinstance(value, bytes))
self.assertEqual(value, b'Lots of characters in this one.\n')
def test_file_exists_with_special_url_chars(self):
"""Testing SVN (<backend>) file_exists with filename containing
characters that are special in URLs
"""
self.assertTrue(self.tool.file_exists('trunk/crazy& ?#.txt',
Revision('12')))
# These should not crash. We'll be testing both file:// URLs
# (which fail for anything lower than ASCII code 32) and for actual
# URLs (which support all characters).
self.assertFalse(self.tool.file_exists('trunk/%s.txt' % ''.join(
chr(c)
for c in range(32, 128)
)))
self.tool.client.repopath = 'svn+ssh://localhost:0/svn'
try:
self.assertFalse(self.tool.file_exists('trunk/%s.txt' % ''.join(
chr(c)
for c in range(128)
)))
except SCMError:
# Couldn't connect. Valid result.
pass
def test_normalize_path_with_special_chars_and_remote_url(self):
"""Testing SVN (<backend>) normalize_path with special characters
and remote URL
"""
client = self.tool.client
client.repopath = 'svn+ssh://example.com/svn'
path = client.normalize_path(''.join(
chr(c)
for c in range(128)
))
# This URL was generated based on modified code that directly used
        # Subversion's lookup table explicitly, ensuring we're getting the
# results we want from urllib.quote() and our list of safe characters.
self.assertEqual(
path,
"svn+ssh://example.com/svn/%00%01%02%03%04%05%06%07%08%09%0A"
"%0B%0C%0D%0E%0F%10%11%12%13%14%15%16%17%18%19%1A%1B%1C%1D%1E"
"%1F%20!%22%23$%25&'()*+,-./0123456789:%3B%3C=%3E%3F@ABCDEFGH"
"IJKLMNOPQRSTUVWXYZ%5B%5C%5D%5E_%60abcdefghijklmnopqrstuvwxyz"
"%7B%7C%7D~%7F")
def test_normalize_path_with_special_chars_and_file_url(self):
"""Testing SVN (<backend>) normalize_path with special characters
and local file:// URL
"""
client = self.tool.client
client.repopath = 'file:///tmp/svn'
path = client.normalize_path(''.join(
chr(c)
for c in range(32, 128)
))
# This URL was generated based on modified code that directly used
# Subversion's lookup take explicitly, ensuring we're getting the
# results we want from urllib.quote() and our list of safe characters.
self.assertEqual(
path,
"file:///tmp/svn/%20!%22%23$%25&'()*+,-./0123456789:%3B%3C=%3E"
"%3F@ABCDEFGHIJKLMNOPQRSTUVWXYZ%5B%5C%5D%5E_%60abcdefghijklmno"
"pqrstuvwxyz%7B%7C%7D~%7F")
# This should provide a reasonable error for each code in 0..32.
for i in range(32):
c = chr(i)
message = (
'Invalid character code %s found in path %r.'
% (i, c)
)
with self.assertRaisesMessage(SCMError, message):
client.normalize_path(c)
def test_normalize_path_with_absolute_repo_path(self):
"""Testing SVN (<backend>) normalize_path with absolute path"""
client = self.tool.client
client.repopath = '/var/lib/svn'
path = '/var/lib/svn/foo/bar'
self.assertEqual(client.normalize_path(path), path)
client.repopath = 'svn+ssh://example.com/svn/'
path = 'svn+ssh://example.com/svn/foo/bar'
self.assertEqual(client.normalize_path(path), path)
def test_normalize_path_with_rel_path(self):
"""Testing SVN (<backend>) normalize_path with relative path"""
client = self.tool.client
client.repopath = 'svn+ssh://example.com/svn'
self.assertEqual(client.normalize_path('foo/bar'),
'svn+ssh://example.com/svn/foo/bar')
self.assertEqual(client.normalize_path('/foo/bar'),
'svn+ssh://example.com/svn/foo/bar')
self.assertEqual(client.normalize_path('//foo/bar'),
'svn+ssh://example.com/svn/foo/bar')
self.assertEqual(client.normalize_path('foo&/b ar?/#file#.txt'),
'svn+ssh://example.com/svn/foo&/b%20ar%3F/'
'%23file%23.txt')
def test_revision_parsing(self):
"""Testing SVN (<backend>) revision number parsing"""
self.assertEqual(
self.tool.parse_diff_revision(filename=b'',
revision=b'(working copy)'),
(b'', HEAD))
self.assertEqual(
self.tool.parse_diff_revision(filename=b'',
revision=b' (revision 0)'),
(b'', PRE_CREATION))
self.assertEqual(
self.tool.parse_diff_revision(filename=b'',
revision=b'(revision 1)'),
(b'', b'1'))
self.assertEqual(
self.tool.parse_diff_revision(filename=b'',
revision=b'(revision 23)'),
(b'', b'23'))
# Fix for bug 2176
self.assertEqual(
self.tool.parse_diff_revision(filename=b'',
revision=b'\t(revision 4)'),
(b'', b'4'))
self.assertEqual(
self.tool.parse_diff_revision(
filename=b'',
revision=b'2007-06-06 15:32:23 UTC (rev 10958)'),
(b'', b'10958'))
# Fix for bug 2632
self.assertEqual(
self.tool.parse_diff_revision(filename=b'',
revision=b'(revision )'),
(b'', PRE_CREATION))
with self.assertRaises(SCMError):
self.tool.parse_diff_revision(filename=b'',
revision=b'hello')
# Verify that 'svn diff' localized revision strings parse correctly.
self.assertEqual(
self.tool.parse_diff_revision(
filename=b'',
revision='(revisión: 5)'.encode('utf-8')),
(b'', b'5'))
self.assertEqual(
self.tool.parse_diff_revision(
filename=b'',
revision='(リビジョン 6)'.encode('utf-8')),
(b'', b'6'))
self.assertEqual(
self.tool.parse_diff_revision(
filename=b'',
revision='(版本 7)'.encode('utf-8')),
(b'', b'7'))
def test_revision_parsing_with_nonexistent(self):
"""Testing SVN (<backend>) revision parsing with "(nonexistent)"
revision indicator
"""
# English
self.assertEqual(
self.tool.parse_diff_revision(filename=b'',
revision=b'(nonexistent)'),
(b'', PRE_CREATION))
# German
self.assertEqual(
self.tool.parse_diff_revision(filename=b'',
revision=b'(nicht existent)'),
(b'', PRE_CREATION))
# Simplified Chinese
self.assertEqual(
self.tool.parse_diff_revision(
filename=b'',
revision='(不存在的)'.encode('utf-8')),
(b'', PRE_CREATION))
def test_revision_parsing_with_nonexistent_and_branches(self):
"""Testing SVN (<backend>) revision parsing with relocation
information and nonexistent revision specifier
"""
self.assertEqual(
self.tool.parse_diff_revision(
filename=b'',
revision=b'(.../trunk) (nonexistent)'),
(b'trunk/', PRE_CREATION))
self.assertEqual(
self.tool.parse_diff_revision(
filename=b'',
revision=b'(.../branches/branch-1.0) (nicht existent)'),
(b'branches/branch-1.0/', PRE_CREATION))
self.assertEqual(
self.tool.parse_diff_revision(
filename=b'',
revision=' (.../trunk) (不存在的)'.encode('utf-8')),
(b'trunk/', PRE_CREATION))
def test_interface(self):
"""Testing SVN (<backend>) with basic SVNTool API"""
self.assertFalse(self.tool.diffs_use_absolute_paths)
self.assertRaises(NotImplementedError,
lambda: self.tool.get_changeset(1))
def test_binary_diff(self):
"""Testing SVN (<backend>) parsing SVN diff with binary file"""
diff = (
b'Index: binfile\n'
b'============================================================'
b'=======\n'
b'Cannot display: file marked as a binary type.\n'
b'svn:mime-type = application/octet-stream\n'
)
parsed_files = self.tool.get_parser(diff).parse()
self.assertEqual(len(parsed_files), 1)
self.assert_parsed_diff_file(
parsed_files[0],
orig_filename=b'binfile',
orig_file_details=b'(unknown)',
modified_filename=b'binfile',
modified_file_details=b'(working copy)',
index_header_value=b'binfile',
binary=True,
data=diff)
def test_binary_diff_with_property_change(self):
"""Testing SVN (<backend>) parsing SVN diff with binary file with
property change
"""
diff = (
b'Index: binfile\n'
b'============================================================'
b'=======\n'
b'Cannot display: file marked as a binary type.\n'
b'svn:mime-type = application/octet-stream\n'
b'\n'
b'Property changes on: binfile\n'
b'____________________________________________________________'
b'_______\n'
b'Added: svn:mime-type\n'
b'## -0,0 +1 ##\n'
b'+application/octet-stream\n'
b'\\ No newline at end of property\n'
)
parsed_files = self.tool.get_parser(diff).parse()
self.assertEqual(len(parsed_files), 1)
self.assert_parsed_diff_file(
parsed_files[0],
orig_filename=b'binfile',
orig_file_details=b'(unknown)',
modified_filename=b'binfile',
modified_file_details=b'(working copy)',
index_header_value=b'binfile',
binary=True,
insert_count=1,
data=diff)
def test_keyword_diff(self):
"""Testing SVN (<backend>) parsing diff with keywords"""
# 'svn cat' will expand special variables in svn:keywords,
# but 'svn diff' doesn't expand anything. This causes the
# patch to fail if those variables appear in the patch context.
diff = (b'Index: Makefile\n'
b'==========================================================='
b'========\n'
b'--- Makefile (revision 4)\n'
b'+++ Makefile (working copy)\n'
b'@@ -1,6 +1,7 @@\n'
b' # $Id$\n'
b' # $Rev$\n'
b' # $Revision:: $\n'
b'+# foo\n'
b' include ../tools/Makefile.base-vars\n'
b' NAME = misc-docs\n'
b' OUTNAME = svn-misc-docs\n')
filename = 'trunk/doc/misc-docs/Makefile'
rev = Revision('4')
file = self.tool.get_file(filename, rev)
patch(diff, file, filename)
def test_unterminated_keyword_diff(self):
"""Testing SVN (<backend>) parsing diff with unterminated keywords"""
diff = (b'Index: Makefile\n'
b'==========================================================='
b'========\n'
b'--- Makefile (revision 4)\n'
b'+++ Makefile (working copy)\n'
b'@@ -1,6 +1,7 @@\n'
b' # $Id$\n'
b' # $Id:\n'
b' # $Rev$\n'
b' # $Revision:: $\n'
b'+# foo\n'
b' include ../tools/Makefile.base-vars\n'
b' NAME = misc-docs\n'
b' OUTNAME = svn-misc-docs\n')
filename = 'trunk/doc/misc-docs/Makefile'
rev = Revision('5')
file = self.tool.get_file(filename, rev)
patch(diff, file, filename)
def test_svn16_property_diff(self):
"""Testing SVN (<backend>) parsing SVN 1.6 diff with property changes
"""
diff = (
b'Index:\n'
b'======================================================'
b'=============\n'
b'--- (revision 123)\n'
b'+++ (working copy)\n'
b'Property changes on: .\n'
b'______________________________________________________'
b'_____________\n'
b'Modified: reviewboard:url\n'
b'## -1 +1 ##\n'
b'-http://reviews.reviewboard.org\n'
b'+http://reviews.reviewboard.org\n'
b'Index: binfile\n'
b'======================================================='
b'============\nCannot display: file marked as a '
b'binary type.\nsvn:mime-type = application/octet-stream\n'
)
parsed_files = self.tool.get_parser(diff).parse()
self.assertEqual(len(parsed_files), 1)
self.assert_parsed_diff_file(
parsed_files[0],
orig_filename=b'binfile',
orig_file_details=b'(unknown)',
modified_filename=b'binfile',
modified_file_details=b'(working copy)',
index_header_value=b'binfile',
binary=True,
data=diff)
def test_svn17_property_diff(self):
"""Testing SVN (<backend>) parsing SVN 1.7+ diff with property changes
"""
diff = (
b'Index .:\n'
b'======================================================'
b'=============\n'
b'--- . (revision 123)\n'
b'+++ . (working copy)\n'
b'\n'
b'Property changes on: .\n'
b'______________________________________________________'
b'_____________\n'
b'Modified: reviewboard:url\n'
b'## -0,0 +1,3 ##\n'
b'-http://reviews.reviewboard.org\n'
b'+http://reviews.reviewboard.org\n'
b'Added: myprop\n'
b'## -0,0 +1 ##\n'
b'+Property test.\n'
b'Index: binfile\n'
b'======================================================='
b'============\nCannot display: file marked as a '
b'binary type.\nsvn:mime-type = application/octet-stream\n'
)
parsed_files = self.tool.get_parser(diff).parse()
self.assertEqual(len(parsed_files), 1)
self.assert_parsed_diff_file(
parsed_files[0],
orig_filename=b'binfile',
orig_file_details=b'(unknown)',
modified_filename=b'binfile',
modified_file_details=b'(working copy)',
index_header_value=b'binfile',
binary=True,
data=diff)
def test_unicode_diff(self):
"""Testing SVN (<backend>) parsing diff with unicode characters"""
diff = (
'Index: Filé\n'
'==========================================================='
'========\n'
'--- Filé (revision 4)\n'
'+++ Filé (working copy)\n'
'@@ -1,6 +1,7 @@\n'
'+# foó\n'
' include ../tools/Makefile.base-vars\n'
' NAME = misc-docs\n'
' OUTNAME = svn-misc-docs\n'
).encode('utf-8')
parsed_files = self.tool.get_parser(diff).parse()
self.assertEqual(len(parsed_files), 1)
self.assert_parsed_diff_file(
parsed_files[0],
orig_filename='Filé'.encode('utf-8'),
orig_file_details=b'(revision 4)',
modified_filename='Filé'.encode('utf-8'),
modified_file_details=b'(working copy)',
index_header_value='Filé'.encode('utf-8'),
insert_count=1,
data=diff)
def test_diff_with_spaces_in_filenames(self):
"""Testing SVN (<backend>) parsing diff with spaces in filenames"""
diff = (
b'Index: File with spaces\n'
b'==========================================================='
b'========\n'
b'--- File with spaces (revision 4)\n'
b'+++ File with spaces (working copy)\n'
b'@@ -1,6 +1,7 @@\n'
b'+# foo\n'
b' include ../tools/Makefile.base-vars\n'
b' NAME = misc-docs\n'
b' OUTNAME = svn-misc-docs\n'
)
parsed_files = self.tool.get_parser(diff).parse()
self.assertEqual(len(parsed_files), 1)
self.assert_parsed_diff_file(
parsed_files[0],
orig_filename=b'File with spaces',
orig_file_details=b'(revision 4)',
modified_filename=b'File with spaces',
modified_file_details=b'(working copy)',
index_header_value=b'File with spaces',
insert_count=1,
data=diff)
def test_diff_with_added_empty_file(self):
"""Testing parsing SVN diff with added empty file"""
diff = (
b'Index: empty-file\t(added)\n'
b'==========================================================='
b'========\n'
b'--- empty-file\t(revision 0)\n'
b'+++ empty-file\t(revision 0)\n'
)
parsed_files = self.tool.get_parser(diff).parse()
self.assertEqual(len(parsed_files), 1)
self.assert_parsed_diff_file(
parsed_files[0],
orig_filename=b'empty-file',
orig_file_details=b'(revision 0)',
modified_filename=b'empty-file',
modified_file_details=b'(revision 0)',
index_header_value=b'empty-file\t(added)',
data=diff)
def test_diff_with_deleted_empty_file(self):
"""Testing parsing SVN diff with deleted empty file"""
diff = (
b'Index: empty-file\t(deleted)\n'
b'==========================================================='
b'========\n'
b'--- empty-file\t(revision 4)\n'
b'+++ empty-file\t(working copy)\n'
)
parsed_files = self.tool.get_parser(diff).parse()
self.assertEqual(len(parsed_files), 1)
self.assert_parsed_diff_file(
parsed_files[0],
orig_filename=b'empty-file',
orig_file_details=b'(revision 4)',
modified_filename=b'empty-file',
modified_file_details=b'(working copy)',
index_header_value=b'empty-file\t(deleted)',
deleted=True,
data=diff)
def test_diff_with_nonexistent_revision_for_dest_file(self):
"""Testing parsing SVN diff with deleted file using "nonexistent"
destination revision
"""
diff = (
b'Index: deleted-file\n'
b'==========================================================='
b'========\n'
b'--- deleted-file\t(revision 4)\n'
b'+++ deleted-file\t(nonexistent)\n'
b'@@ -1,2 +0,0 @@\n'
b'-line 1\n'
b'-line 2\n'
)
parsed_files = self.tool.get_parser(diff).parse()
self.assertEqual(len(parsed_files), 1)
self.assert_parsed_diff_file(
parsed_files[0],
orig_filename=b'deleted-file',
orig_file_details=b'(revision 4)',
modified_filename=b'deleted-file',
modified_file_details=b'(nonexistent)',
index_header_value=b'deleted-file',
deleted=True,
delete_count=2,
data=diff)
def test_idea_diff(self):
"""Testing parsing SVN diff with multi-file diff generated by IDEA
IDEs
"""
diff1 = (
b'Index: path/to/README\n'
b'IDEA additional info:\n'
b'Subsystem: org.reviewboard.org.test\n'
b'<+>ISO-8859-1\n'
b'=============================================================='
b'=====\n'
b'--- path/to/README\t(revision 4)\n'
b'+++ path/to/README\t(revision )\n'
b'@@ -1,6 +1,7 @@\n'
b' #\n'
b' #\n'
b' #\n'
b'+# test\n'
b' #\n'
b' #\n'
b' #\n'
)
diff2 = (
b'Index: path/to/README2\n'
b'IDEA additional info:\n'
b'Subsystem: org.reviewboard.org.test\n'
b'<+>ISO-8859-1\n'
b'=============================================================='
b'=====\n'
b'--- path/to/README2\t(revision 4)\n'
b'+++ path/to/README2\t(revision )\n'
b'@@ -1,6 +1,7 @@\n'
b' #\n'
b' #\n'
b' #\n'
b'+# test\n'
b' #\n'
b' #\n'
b' #\n'
)
diff = diff1 + diff2
parsed_files = self.tool.get_parser(diff).parse()
self.assertEqual(len(parsed_files), 2)
self.assert_parsed_diff_file(
parsed_files[0],
orig_filename=b'path/to/README',
orig_file_details=b'(revision 4)',
modified_filename=b'path/to/README',
modified_file_details=b'(revision )',
index_header_value=b'path/to/README',
insert_count=1,
data=diff1)
self.assert_parsed_diff_file(
parsed_files[1],
orig_filename=b'path/to/README2',
orig_file_details=b'(revision 4)',
modified_filename=b'path/to/README2',
modified_file_details=b'(revision )',
index_header_value=b'path/to/README2',
insert_count=1,
data=diff2)
def test_get_branches(self):
"""Testing SVN (<backend>) get_branches"""
branches = self.tool.get_branches()
self.assertEqual(len(branches), 3)
self.assertEqual(branches[0], Branch(id='trunk', name='trunk',
commit='12', default=True))
self.assertEqual(branches[1], Branch(id='branches/branch1',
name='branch1',
commit='7', default=False))
self.assertEqual(branches[2], Branch(id='top-level-branch',
name='top-level-branch',
commit='10', default=False))
def test_get_commits(self):
"""Testing SVN (<backend>) get_commits"""
commits = self.tool.get_commits(start='5')
self.assertEqual(len(commits), 5)
self.assertEqual(
commits[0],
Commit('chipx86',
'5',
'2010-05-21T09:33:40.893946',
'Add an unterminated keyword for testing bug #1523\n',
'4'))
commits = self.tool.get_commits(start='7')
self.assertEqual(len(commits), 7)
self.assertEqual(
commits[1],
Commit('david',
'6',
'2013-06-13T07:43:04.725088',
'Add a branches directory',
'5'))
def test_get_commits_with_branch(self):
"""Testing SVN (<backend>) get_commits with branch"""
commits = self.tool.get_commits(branch='/branches/branch1', start='5')
self.assertEqual(len(commits), 5)
self.assertEqual(
commits[0],
Commit('chipx86',
'5',
'2010-05-21T09:33:40.893946',
'Add an unterminated keyword for testing bug #1523\n',
'4'))
commits = self.tool.get_commits(branch='/branches/branch1', start='7')
self.assertEqual(len(commits), 6)
self.assertEqual(
commits[0],
Commit('david',
'7',
'2013-06-13T07:43:27.259554',
'Add a branch',
'5'))
self.assertEqual(
commits[1],
Commit('chipx86',
'5',
'2010-05-21T09:33:40.893946',
'Add an unterminated keyword for testing bug #1523\n',
'4'))
def test_get_commits_with_no_date(self):
"""Testing SVN (<backend>) get_commits with no date in commit"""
def _get_log(*args, **kwargs):
return [
{
'author': 'chipx86',
'revision': '5',
'message': 'Commit 1',
},
]
self.spy_on(self.tool.client.get_log, _get_log)
commits = self.tool.get_commits(start='5')
self.assertEqual(len(commits), 1)
self.assertEqual(
commits[0],
Commit('chipx86',
'5',
'',
'Commit 1'))
def test_get_commits_with_exception(self):
"""Testing SVN (<backend>) get_commits with exception"""
def _get_log(*args, **kwargs):
raise Exception('Bad things happened')
self.spy_on(self.tool.client.get_log, _get_log)
with self.assertRaisesMessage(SCMError, 'Bad things happened'):
self.tool.get_commits(start='5')
def test_get_change(self):
"""Testing SVN (<backend>) get_change"""
commit = self.tool.get_change('5')
self.assertEqual(md5(commit.message.encode('utf-8')).hexdigest(),
'928336c082dd756e3f7af4cde4724ebf')
self.assertEqual(md5(commit.diff).hexdigest(),
'56e50374056931c03a333f234fa63375')
def test_utf8_keywords(self):
"""Testing SVN (<backend>) with UTF-8 files with keywords"""
self.repository.get_file('trunk/utf8-file.txt', '9')
def test_normalize_patch_with_svn_and_expanded_keywords(self):
"""Testing SVN (<backend>) normalize_patch with expanded keywords"""
diff = (
b'Index: Makefile\n'
b'==========================================================='
b'========\n'
b'--- Makefile (revision 4)\n'
b'+++ Makefile (working copy)\n'
b'@@ -1,6 +1,7 @@\n'
b' # $Id$\n'
b' # $Rev: 123$\n'
b' # $Revision:: 123 $\n'
b'+# foo\n'
b' include ../tools/Makefile.base-vars\n'
b' NAME = misc-docs\n'
b' OUTNAME = svn-misc-docs\n'
)
normalized = self.tool.normalize_patch(
patch=diff,
filename='trunk/doc/misc-docs/Makefile',
revision='4')
self.assertEqual(
normalized,
b'Index: Makefile\n'
b'==========================================================='
b'========\n'
b'--- Makefile (revision 4)\n'
b'+++ Makefile (working copy)\n'
b'@@ -1,6 +1,7 @@\n'
b' # $Id$\n'
b' # $Rev$\n'
b' # $Revision:: $\n'
b'+# foo\n'
b' include ../tools/Makefile.base-vars\n'
b' NAME = misc-docs\n'
b' OUTNAME = svn-misc-docs\n')
def test_normalize_patch_with_svn_and_no_expanded_keywords(self):
"""Testing SVN (<backend>) normalize_patch with no expanded keywords"""
diff = (
b'Index: Makefile\n'
b'==========================================================='
b'========\n'
b'--- Makefile (revision 4)\n'
b'+++ Makefile (working copy)\n'
b'@@ -1,6 +1,7 @@\n'
b' # $Id$\n'
b' # $Rev$\n'
b' # $Revision:: $\n'
b'+# foo\n'
b' include ../tools/Makefile.base-vars\n'
b' NAME = misc-docs\n'
b' OUTNAME = svn-misc-docs\n'
)
normalized = self.tool.normalize_patch(
patch=diff,
filename='trunk/doc/misc-docs/Makefile',
revision='4')
self.assertEqual(
normalized,
b'Index: Makefile\n'
b'==========================================================='
b'========\n'
b'--- Makefile (revision 4)\n'
b'+++ Makefile (working copy)\n'
b'@@ -1,6 +1,7 @@\n'
b' # $Id$\n'
b' # $Rev$\n'
b' # $Revision:: $\n'
b'+# foo\n'
b' include ../tools/Makefile.base-vars\n'
b' NAME = misc-docs\n'
b' OUTNAME = svn-misc-docs\n')
class PySVNTests(_CommonSVNTestCase):
backend = 'reviewboard.scmtools.svn.pysvn'
backend_name = 'pysvn'
class SubvertpyTests(_CommonSVNTestCase):
backend = 'reviewboard.scmtools.svn.subvertpy'
backend_name = 'subvertpy'
class UtilsTests(SCMTestCase):
"""Unit tests for reviewboard.scmtools.svn.utils."""
def test_collapse_svn_keywords(self):
"""Testing collapse_svn_keywords"""
keyword_test_data = [
(b'Id',
b'/* $Id: test2.c 3 2014-08-04 22:55:09Z david $ */',
b'/* $Id$ */'),
(b'id',
b'/* $Id: test2.c 3 2014-08-04 22:55:09Z david $ */',
b'/* $Id$ */'),
(b'id',
b'/* $id: test2.c 3 2014-08-04 22:55:09Z david $ */',
b'/* $id$ */'),
(b'Id',
b'/* $id: test2.c 3 2014-08-04 22:55:09Z david $ */',
b'/* $id$ */')
]
for keyword, data, result in keyword_test_data:
self.assertEqual(collapse_svn_keywords(data, keyword),
result)
def test_has_expanded_svn_keywords(self):
"""Testing has_expanded_svn_keywords"""
self.assertTrue(has_expanded_svn_keywords(b'.. $ID: 123$ ..'))
self.assertTrue(has_expanded_svn_keywords(b'.. $id:: 123$ ..'))
self.assertFalse(has_expanded_svn_keywords(b'.. $Id:: $ ..'))
self.assertFalse(has_expanded_svn_keywords(b'.. $Id$ ..'))
self.assertFalse(has_expanded_svn_keywords(b'.. $Id ..'))
self.assertFalse(has_expanded_svn_keywords(b'.. $Id Here$ ..'))
class SVNAuthFormTests(TestCase):
"""Unit tests for SVNTool's authentication form."""
def test_fields(self):
"""Testing SVNTool authentication form fields"""
form = SVNTool.create_auth_form()
self.assertEqual(list(form.fields), ['username', 'password'])
self.assertEqual(form['username'].help_text, '')
self.assertEqual(form['username'].label, 'Username')
self.assertEqual(form['password'].help_text, '')
self.assertEqual(form['password'].label, 'Password')
@add_fixtures(['test_scmtools'])
def test_load(self):
"""Tetting SVNTool authentication form load"""
repository = self.create_repository(
tool_name='Subversion',
username='test-user',
password='test-pass')
form = SVNTool.create_auth_form(repository=repository)
form.load()
self.assertEqual(form['username'].value(), 'test-user')
self.assertEqual(form['password'].value(), 'test-pass')
@add_fixtures(['test_scmtools'])
def test_save(self):
"""Tetting SVNTool authentication form save"""
repository = self.create_repository(tool_name='Subversion')
form = SVNTool.create_auth_form(
repository=repository,
data={
'username': 'test-user',
'password': 'test-pass',
})
self.assertTrue(form.is_valid())
form.save()
self.assertEqual(repository.username, 'test-user')
self.assertEqual(repository.password, 'test-pass')
class SVNRepositoryFormTests(TestCase):
"""Unit tests for SVNTool's repository form."""
def test_fields(self):
"""Testing SVNTool repository form fields"""
form = SVNTool.create_repository_form()
self.assertEqual(list(form.fields), ['path', 'mirror_path'])
self.assertEqual(form['path'].help_text,
'The path to the repository. This will generally be '
'the URL you would use to check out the repository.')
self.assertEqual(form['path'].label, 'Path')
self.assertEqual(form['mirror_path'].help_text, '')
self.assertEqual(form['mirror_path'].label, 'Mirror Path')
@add_fixtures(['test_scmtools'])
def test_load(self):
"""Tetting SVNTool repository form load"""
repository = self.create_repository(
tool_name='Subversion',
path='https://svn.example.com/',
mirror_path='https://svn.mirror.example.com')
form = SVNTool.create_repository_form(repository=repository)
form.load()
self.assertEqual(form['path'].value(), 'https://svn.example.com/')
self.assertEqual(form['mirror_path'].value(),
'https://svn.mirror.example.com')
@add_fixtures(['test_scmtools'])
def test_save(self):
"""Tetting SVNTool repository form save"""
repository = self.create_repository(tool_name='Subversion')
form = SVNTool.create_repository_form(
repository=repository,
data={
'path': 'https://svn.example.com/',
'mirror_path': 'https://svn.mirror.example.com',
})
self.assertTrue(form.is_valid())
form.save()
self.assertEqual(repository.path, 'https://svn.example.com/')
self.assertEqual(repository.mirror_path,
'https://svn.mirror.example.com')
| reviewboard/reviewboard | reviewboard/scmtools/tests/test_svn.py | Python | mit | 39,691 |
import os
from setuptools import setup
import sys
if sys.version_info < (2, 6):
raise Exception('Wiggelen requires Python 2.6 or higher.')
install_requires = []
# Python 2.6 does not include the argparse module.
try:
import argparse
except ImportError:
install_requires.append('argparse')
# Python 2.6 does not include OrderedDict.
try:
from collections import OrderedDict
except ImportError:
install_requires.append('ordereddict')
try:
with open('README.rst') as readme:
long_description = readme.read()
except IOError:
long_description = 'See https://pypi.python.org/pypi/wiggelen'
# This is quite the hack, but we don't want to import our package from here
# since that's a recipe for disaster (it might have some uninstalled
# dependencies, or we might import another already installed version).
distmeta = {}
for line in open(os.path.join('wiggelen', '__init__.py')):
try:
field, value = (x.strip() for x in line.split('='))
except ValueError:
continue
if field == '__version_info__':
value = value.strip('[]()')
value = '.'.join(x.strip(' \'"') for x in value.split(','))
else:
value = value.strip('\'"')
distmeta[field] = value
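# For illustration: the loop above expects simple assignments in
# wiggelen/__init__.py, e.g. a line such as
#     __version_info__ = (0, 1, 0)
# ends up as distmeta['__version_info__'] == '0.1.0', while quoted strings like
#     __author__ = 'Some Author'
# are stored with their quotes stripped (the author name here is a placeholder).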
setup(
name='wiggelen',
version=distmeta['__version_info__'],
description='Working with wiggle tracks in Python',
long_description=long_description,
author=distmeta['__author__'],
author_email=distmeta['__contact__'],
url=distmeta['__homepage__'],
license='MIT License',
platforms=['any'],
packages=['wiggelen'],
install_requires=install_requires,
entry_points = {
'console_scripts': ['wiggelen = wiggelen.commands:main']
},
classifiers = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Scientific/Engineering',
],
keywords='bioinformatics'
)
| martijnvermaat/wiggelen | setup.py | Python | mit | 2,054 |
#!/usr/bin/env python3
"""Download GTFS file and generate JSON file.
Author: Panu Ranta, panu.ranta@iki.fi, https://14142.net/kartalla/about.html
"""
import argparse
import datetime
import hashlib
import json
import logging
import os
import resource
import shutil
import sys
import tempfile
import time
import zipfile
def _main():
parser = argparse.ArgumentParser()
parser.add_argument('config', help='JSON configuration file')
parser.add_argument('--only-download', action='store_true', help='Only download GTFS file')
parser.add_argument('--use-no-q-dirs', action='store_true', help='Do not use Q dirs')
args = parser.parse_args()
_init_logging()
start_time = time.time()
logging.debug('started {}'.format(sys.argv))
config = _load_config(args.config)
gtfs_name = config['name']
downloaded_gtfs_zip = _download_gtfs(config['url'])
modify_date = _get_modify_date(downloaded_gtfs_zip)
gtfs_dir = _get_q_dir(config['gtfs_dir'], modify_date, not args.use_no_q_dirs)
gtfs_zip = _rename_gtfs_zip(gtfs_dir, downloaded_gtfs_zip, gtfs_name, modify_date)
if gtfs_zip and (not args.only_download):
log_dir = _get_q_dir(config['log_dir'], modify_date, not args.use_no_q_dirs)
_generate_json(gtfs_name, modify_date, gtfs_zip, config['json_dir'], log_dir)
logging.debug('took {} seconds, max mem: {} megabytes'.format(
int(time.time() - start_time), resource.getrusage(resource.RUSAGE_SELF).ru_maxrss / 1024))
def _init_logging():
log_format = '%(asctime)s %(levelname)s %(filename)s:%(lineno)d %(funcName)s: %(message)s'
logging.basicConfig(filename='generate.log', format=log_format, level=logging.DEBUG)
def _progress(text):
print(text)
logging.debug(text)
def _progress_warning(text):
print('\033[31m{}\033[0m'.format(text))
logging.warning(text)
def _load_config(config_path):
with open(config_path) as config_file:
return json.load(config_file)
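# Illustrative example of the expected JSON configuration. The keys are taken
# from their use in _main(); the values are placeholders, not real endpoints:
#
# {
#     "name": "example",
#     "url": "https://example.com/gtfs.zip",
#     "gtfs_dir": "gtfs",
#     "log_dir": "log",
#     "json_dir": "json"
# }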
def _download_gtfs(url):
output_file, output_filename = tempfile.mkstemp(dir='.')
os.close(output_file)
curl_options = '--header "Accept-Encoding: gzip" --location'
command = 'curl {} "{}" > {}'.format(curl_options, url, output_filename)
_progress('downloading gtfs file into: {}'.format(os.path.relpath(output_filename)))
_execute_command(command)
return output_filename
def _execute_command(command):
if os.system(command) != 0:
raise SystemExit('failed to execute: {}'.format(command))
def _get_modify_date(zip_filename):
modify_times = _get_modify_times(zip_filename)
if len(modify_times) > 1:
_progress_warning('multiple modify times: {}'.format(modify_times))
return sorted(modify_times)[-1]
def _get_modify_times(zip_filename):
modify_times = set()
with zipfile.ZipFile(zip_filename) as zip_file:
for info in zip_file.infolist():
modify_times.add(datetime.datetime(*info.date_time).strftime('%Y%m%d'))
return modify_times
def _get_q_dir(base_dir, modify_date, create_q_dir):
if create_q_dir:
modify_month = int(modify_date[4:6])
q_dir = '{}_q{}'.format(modify_date[:4], 1 + ((modify_month - 1) // 3))
return os.path.join(base_dir, q_dir)
return base_dir
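# For example, _get_q_dir('gtfs', '20240215', True) returns 'gtfs/2024_q1' with a
# POSIX path separator, since month 02 falls in the first quarter; with
# create_q_dir=False the base_dir is returned unchanged.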
def _rename_gtfs_zip(gtfs_dir, old_filename, gtfs_name, modify_date):
_create_dir(gtfs_dir)
new_filename = os.path.join(gtfs_dir, '{}_{}.zip'.format(gtfs_name, modify_date))
if os.path.isfile(new_filename):
if _compare_files(old_filename, new_filename):
_progress('downloaded gtfs file is identical to: {}'.format(new_filename))
os.remove(old_filename)
return None
_rename_existing_file(new_filename)
os.rename(old_filename, new_filename)
_progress('renamed: {} -> {}'.format(old_filename, new_filename))
return new_filename
def _create_dir(new_dir):
if not os.path.isdir(new_dir):
os.makedirs(new_dir)
def _compare_files(filename_a, filename_b):
return _get_hash(filename_a) == _get_hash(filename_b)
def _get_hash(filename):
file_hash = hashlib.sha256()
with open(filename, 'rb') as input_file:
file_hash.update(input_file.read())
return file_hash.digest()
def _generate_json(gtfs_name, modify_date, gtfs_zip, json_dir, log_dir):
_create_dir(json_dir)
date_output_file = os.path.join(json_dir, '{}_{}.json'.format(gtfs_name, modify_date))
_rename_existing_file(date_output_file)
_create_dir(log_dir)
log_path = os.path.join(log_dir, 'gtfs2json_{}_{}_{}.log'.format(gtfs_name, modify_date,
_get_now_timestamp()))
_progress('generating json for {}'.format(gtfs_zip))
command = '{}/gtfs2json.py --log-file {} {} {}'.format(os.path.dirname(__file__), log_path,
gtfs_zip, date_output_file)
_execute_command(command)
_create_base_output_file(date_output_file, os.path.join(json_dir, '{}.json'.format(gtfs_name)))
def _create_base_output_file(date_output_file, base_output_file):
if os.path.isfile(base_output_file):
_progress('deleting {}'.format(base_output_file))
os.remove(base_output_file)
_progress('copying {} to {}'.format(date_output_file, base_output_file))
shutil.copyfile(date_output_file, base_output_file)
def _rename_existing_file(filename):
if os.path.isfile(filename):
suffix = filename.split('.')[-1]
new_filename = filename.replace('.{}'.format(suffix),
'_{}.{}'.format(_get_now_timestamp(), suffix))
os.rename(filename, new_filename)
_progress_warning('renamed existing {} file {} -> {}'.format(suffix, filename,
new_filename))
def _get_now_timestamp():
return datetime.datetime.now().strftime('%Y%m%d_%H%M%S')
if __name__ == "__main__":
_main()
| panur/kartalla | gtfs2json/generate.py | Python | mit | 6,008 |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
"""Student CNN encoder for XE training."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf
from models.encoders.core.cnn_util import conv_layer, max_pool, batch_normalization
############################################################
# Architecture: (feature map, kernel(f*t), stride(f,t))
# CNN1: (128, 9*9, (1,1)) * 1 layers
# Batch normalization
# ReLU
# Max pool (3,1)
# CNN2: (256, 3*4, (1,1)) * 1 layers
# Batch normalization
# ReLU
# Max pool (1,1)
# fc: 2048 (ReLU) * 4 layers
############################################################
class StudentCNNXEEncoder(object):
"""Student CNN encoder for XE training.
Args:
input_size (int): the dimensions of input vectors.
This is expected to be num_channels * 3 (static + Δ + ΔΔ)
splice (int): frames to splice
num_stack (int): the number of frames to stack
parameter_init (float, optional): the range of uniform distribution to
initialize weight parameters (>= 0)
name (string, optional): the name of encoder
"""
def __init__(self,
input_size,
splice,
num_stack,
parameter_init,
name='cnn_student_xe_encoder'):
assert input_size % 3 == 0
self.num_channels = (input_size // 3) // num_stack // splice
self.splice = splice
self.num_stack = num_stack
self.parameter_init = parameter_init
self.name = name
def __call__(self, inputs, keep_prob, is_training):
"""Construct model graph.
Args:
inputs (placeholder): A tensor of size
`[B, input_size (num_channels * splice * num_stack * 3)]`
keep_prob (placeholder, float): A probability to keep nodes
in the hidden-hidden connection
is_training (bool):
Returns:
outputs: Encoder states.
if time_major is True, a tensor of size `[T, B, output_dim]`
otherwise, `[B, output_dim]`
"""
# inputs: 2D tensor `[B, input_dim]`
batch_size = tf.shape(inputs)[0]
input_dim = inputs.shape.as_list()[-1]
# NOTE: input_dim: num_channels * splice * num_stack * 3
# for debug
# print(input_dim) # 1200
# print(self.num_channels) # 40
# print(self.splice) # 5
# print(self.num_stack) # 2
assert input_dim == self.num_channels * self.splice * self.num_stack * 3
# Reshape to 4D tensor `[B, num_channels, splice * num_stack, 3]`
inputs = tf.reshape(
inputs,
shape=[batch_size, self.num_channels, self.splice * self.num_stack, 3])
# NOTE: filter_size: `[H, W, C_in, C_out]`
with tf.variable_scope('CNN1'):
inputs = conv_layer(inputs,
filter_size=[9, 9, 3, 128],
stride=[1, 1],
parameter_init=self.parameter_init,
activation='relu')
inputs = batch_normalization(inputs, is_training=is_training)
inputs = max_pool(inputs,
pooling_size=[3, 1],
stride=[3, 1],
name='max_pool')
with tf.variable_scope('CNN2'):
inputs = conv_layer(inputs,
filter_size=[3, 4, 128, 256],
stride=[1, 1],
parameter_init=self.parameter_init,
activation='relu')
inputs = batch_normalization(inputs, is_training=is_training)
inputs = max_pool(inputs,
pooling_size=[1, 1],
stride=[1, 1],
name='max_pool')
# Reshape to 2D tensor `[B, new_h * new_w * C_out]`
outputs = tf.reshape(
inputs, shape=[batch_size, np.prod(inputs.shape.as_list()[-3:])])
for i in range(1, 5, 1):
with tf.variable_scope('fc%d' % (i)) as scope:
outputs = tf.contrib.layers.fully_connected(
inputs=outputs,
num_outputs=2048,
activation_fn=tf.nn.relu,
weights_initializer=tf.truncated_normal_initializer(
stddev=self.parameter_init),
biases_initializer=tf.zeros_initializer(),
scope=scope)
return outputs
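# Illustrative usage sketch; assumes TensorFlow 1.x graph mode, and the feature
# sizes are examples only (40 channels, splice of 5, stack of 2, so
# input_size = 40 * 5 * 2 * 3 = 1200):
#
#     encoder = StudentCNNXEEncoder(input_size=1200, splice=5, num_stack=2,
#                                   parameter_init=0.1)
#     inputs = tf.placeholder(tf.float32, shape=[None, 1200])
#     keep_prob = tf.placeholder(tf.float32)
#     outputs = encoder(inputs, keep_prob, is_training=True)  # shape [B, 2048]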
| hirofumi0810/tensorflow_end2end_speech_recognition | models/encoders/core/student_cnn_xe.py | Python | mit | 4,732 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import pytest
from structures.insertion_sort import insertion_sort
@pytest.fixture
def sorted_list():
return [i for i in xrange(10)]
@pytest.fixture
def reverse_list():
return [i for i in xrange(9, -1, -1)]
@pytest.fixture
def average_list():
return [5, 9, 2, 4, 1, 6, 8, 7, 0, 3]
def test_sorted(sorted_list):
insertion_sort(sorted_list)
assert sorted_list == [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
def test_worst(reverse_list):
insertion_sort(reverse_list)
assert reverse_list == [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
def test_average(average_list):
insertion_sort(average_list)
assert average_list == [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
def test_repeats():
l = [3, 6, 7, 3, 9, 5, 2, 7]
insertion_sort(l)
assert l == [2, 3, 3, 5, 6, 7, 7, 9]
def test_multiple_types():
l = [3, 'foo', 2.8, True, []]
    # Python 2 sorts mixed types with an arbitrary but consistent cross-type ordering
insertion_sort(l)
assert l == [True, 2.8, 3, [], 'foo']
| tlake/data-structures-mk2 | tests/test_insertion_sort.py | Python | mit | 1,008 |
from flask import Flask
app = Flask(__name__)
app.config.from_object('blog.config')
from blog import views
| t4ec/blog | blog/__init__.py | Python | mit | 110 |
import fileinput
def str_to_int(s):
return([ int(x) for x in s.split() ])
# args = [ 'line 1', 'line 2', ... ]
def proc_input(args):
(n, l) = str_to_int(args[0])
a = tuple(str_to_int(args[1]))
return(l, a)
def solve(args, verbose=False):
(l, a) = proc_input(args)
list_a = list(a)
list_a.sort()
max_dist = max(list_a[0] * 2, (l - list_a[-1]) * 2)
for x in xrange(len(a) - 1):
max_dist = max(max_dist, list_a[x + 1] - list_a[x])
if verbose:
print max_dist / float(2)
return max_dist / float(2)
def test():
assert(str_to_int('1 2 3') == [ 1, 2, 3 ])
assert(proc_input([ '2 5', '2 5' ]) == (5, (2, 5)))
assert(solve([ '2 5', '2 5' ]) == 2.0)
assert(solve([ '4 5', '0 1 2 3' ]) == 2.0)
assert(solve([ '7 15', '15 5 3 7 9 14 0' ]) == 2.5)
if __name__ == '__main__':
from sys import argv
if argv.pop() == 'test':
test()
else:
solve(list(fileinput.input()), verbose=True)
| cripplet/practice | codeforces/492/attempt/b_lanterns.py | Python | mit | 897 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Copyright (C), 2013, The Schilduil Team. All rights reserved.
"""
import sys
import pony.orm
import suapp.orm
from suapp.logdecorator import loguse, logging
__all__ = ["Wooster", "Drone", "Jeeves"]
class FlowException(Exception):
pass
class ApplicationClosed(FlowException):
pass
class Wooster:
"""
A Wooster represents a UI window/page.
GENERALLY THESE THINGS ARE REUSED SO YOU NEED TO BE VERY CAREFUL ABOUT SIDE EFFECTS.
In case you have something that cannot be reused do something like:
1/ Create a new class instance of a subclass of Wooster
2/ Call inflow on that
"""
def lock(self):
pass
def unlock(self):
pass
def inflow(self, jeeves, drone):
# The only thing it does is store the Jeeves object.
self.jeeves = jeeves
# MODE: Modal=1, Replace=2, Both=3
# jeeves.drone(self, name, mode, dataobject)
def close(self):
pass
def toJSON(self):
return "Wooster %s" % (hex(self.__hash__()))
class Drone(object):
"""
A drone is the connection between two vertices.
"""
def __init__(self, name, tovertex):
self.name = name
self.tovertex = tovertex
@loguse
def get_new_instance_clone(self, dataobject, mode):
"""
Clone the drone and add the dataobject and mode.
"""
drone = Drone(self.name, self.tovertex)
drone.dataobject = dataobject
drone.mode = mode
return drone
def toJSON(self):
return "Drone %s > %s" % (self.name, self.tovertex)
class Jeeves(object):
"""
    Jeeves is the controller that determines the flow.
It uses Drones to go from Wooster to Wooster.
"""
MODE_OPEN = 3
MODE_REPLACE = 2
MODE_MODAL = 1
@loguse
def __init__(self, app=None):
"""
Initializes the Jeeves with an empty flow and app name.
"""
self.flow = {"": {}}
self.app = app
self.views = {}
self.queries = {}
# TODO: I have no idea why I added ormscope: get rid of it?
self.ormscope = {}
def toJSON(self):
"""
Makes this object be made into json.
"""
return "Jeeves %s" % (hex(self.__hash__()))
@loguse
def whichDrone(self, fromname, outmessage, **kwargs):
"""
Finding the drone matching the outmessage.
"""
logging.getLogger(__name__).debug(
": Jeeves[%r].whichDrone : Flow: %s", self, self.flow
)
drone = None
try:
drone = self.flow[fromname][outmessage]
except:
try:
drone = self.flow[""][outmessage]
except:
                # TODO: do something other than bluntly exiting.
logging.getLogger(__name__).error(
": Jeeves[%r].whichDrone : Not found '%s' - exiting.",
self,
outmessage,
)
if outmessage == "EXIT":
raise ApplicationClosed()
else:
raise FlowException("Unknown outmessage: %s" % (outmessage))
return drone
@loguse("@") # Not logging the return value.
def _do_query_str(self, query_template, scope, parameters):
"""
Execute a query that is a string.
DEPRECATED
"""
query = query_template % parameters
exec("result = %s" % (query), scope)
return scope["result"]
@loguse("@") # Not logging the return value.
def pre_query(self, name, scope=None, params=None):
"""
        Returns the query and parameters.
The query and the default parameters are looked up in self.queries.
The parameters are next updated with the passed params.
The self.queries is filled by moduleloader from the loaded modlib's
view_definitions() function.
"""
if scope is None:
scope = {}
query_template, defaults = self.queries[name]
# Start with the default defined.
parameters = defaults.copy()
parameters.update(params)
# Making sure the paging parameters are integers.
try:
parameters["pagenum"] = int(parameters["pagenum"])
except:
parameters["pagenum"] = 1
try:
parameters["pagesize"] = int(parameters["pagesize"])
except:
parameters["pagesize"] = 10
logging.getLogger(__name__).debug(
"Paging #%s (%s)", parameters["pagenum"], parameters["pagesize"]
)
return (query_template, parameters)
@loguse("@") # Not loggin the return value.
def do_query(self, name, scope=None, params=None):
"""
        Executes a query by name and returns the result.
The result is always a UiOrmObject by using UiOrmObject.uize on the
results of the query.
"""
query_template, parameters = self.pre_query(name, scope, params)
if callable(query_template):
# A callable, so just call it.
result = query_template(params=parameters)
else:
# DEPRECATED: python code as a string.
result = self._do_query_str(query_template, scope, parameters)
return (suapp.orm.UiOrmObject.uize(r) for r in result)
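    # Illustrative usage sketch; the query name and its parameters are hypothetical
    # examples of entries a modlib registers in self.queries via
    # view_definitions(), not queries shipped with this module:
    #
    #     rows = jeeves.do_query('individuals_by_name',
    #                            params={'name': 'Blue', 'pagenum': 1, 'pagesize': 25})
    #     for row in rows:  # each row is wrapped as a suapp.orm.UiOrmObject
    #         ...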
@loguse
def do_fetch_set(self, module, table, primarykey, link):
"""
Fetches the result from a foreign key that is a set.
This will return the list of objects representing the rows in the
        database pointed to by the foreign key (whose name should be passed in
        link). The return value is a generator of suapp.orm.UiOrmObject instances.
Usually you can follow the foreign key directly, but not in an
asynchronous target (UI) like the web where you need to fetch it anew.
For foreign keys that are not sets you can use do_fetch.
The module, table and primarykey are those from the object having the
foreign key and behave the same as with do_fetch. The extra parameter
link is the foreign key that is pointing to the set.
"""
origin = self.do_fetch(module, table, primarykey)
result = getattr(origin, link)
return (suapp.orm.UiOrmObject.uize(r) for r in result)
@loguse
def do_fetch(self, module, table, primarykey):
"""
Fetches a specific object from the database.
This will return the object representing a row in the
specified table from the database. The return type is
either a pony.orm.core.Entity or suapp.orm.UiOrmObject
subclass, depending on the class name specified in table.
Parameters:
- module: In what module the table is defined.
This should start with modlib.
- table: Class name of the object representing the table.
The class should be a subclass of either
- pony.orm.core.Entity
- suapp.orm.UiOrmObject
- primarykey: A string representing the primary key value
or a list of values (useful in case of a
multi variable primary key).
"""
if isinstance(primarykey, str):
primarykey = [primarykey]
module = sys.modules[module]
table_class = getattr(module, table)
params = {}
if issubclass(table_class, pony.orm.core.Entity):
pk_columns = table_class._pk_columns_
elif issubclass(table_class, suapp.orm.UiOrmObject):
pk_columns = table_class._ui_class._pk_columns_
else:
return None
if len(pk_columns) == 1:
if len(primarykey) == 1:
params[pk_columns[0]] = primarykey[0]
else:
i = 0
for column in pk_columns:
params[column] = primarykey[i]
i += 1
# Checking if the primary key is a foreign key.
for column in pk_columns:
logging.getLogger(__name__).debug(
"Primary key column: %s = %s", column, params[column]
)
logging.getLogger(__name__).debug("Fetching %s (%s)", table_class, params)
if issubclass(table_class, suapp.orm.UiOrmObject):
return table_class(**params)
else:
return table_class.get(**params)
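    # Illustrative usage sketch; the module, table and link names below are
    # hypothetical placeholders for a real modlib table:
    #
    #     animal = jeeves.do_fetch('modlib.zoo', 'Animal', '42')
    #     # and for a to-many foreign key named 'offspring' on that table:
    #     children = jeeves.do_fetch_set('modlib.zoo', 'Animal', '42', 'offspring')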
@loguse("@") # Not logging the return value.
def drone(self, fromvertex, name, mode, dataobject, **kwargs):
"""
Find the drone and execute it.
"""
# Find the drone
fromname = ""
result = None
if isinstance(fromvertex, Wooster):
fromname = fromvertex.name
else:
fromname = str(fromvertex)
drone_type = self.whichDrone(fromname, name, **kwargs)
        # Clone a new instance of the drone, setting dataobject & mode.
drone = drone_type.get_new_instance_clone(dataobject, mode)
# If there is a callback, call it.
if "callback_drone" in kwargs:
try:
kwargs["callback_drone"](drone)
except:
pass
# Depending on the mode
# Some targets depend on what is returned from inflow.
if mode == self.MODE_MODAL:
if isinstance(fromvertex, Wooster):
fromvertex.lock()
drone.fromvertex = fromvertex
result = drone.tovertex.inflow(self, drone)
if isinstance(fromvertex, Wooster):
fromvertex.unlock()
elif mode == self.MODE_REPLACE:
drone.fromvertex = None
fromvertex.close()
result = drone.tovertex.inflow(self, drone)
elif mode == self.MODE_OPEN:
drone.fromvertex = fromvertex
result = drone.tovertex.inflow(self, drone)
return result
@loguse
def start(self, dataobject=None):
"""
Start the Jeeves flow.
"""
self.drone("", "START", self.MODE_MODAL, dataobject)
if __name__ == "__main__":
logging.basicConfig(
format="%(asctime)s %(levelname)s %(name)s %(message)s", level=logging.DEBUG
)
logging.getLogger("__main__").setLevel(logging.DEBUG)
modulename = "__main__"
print(
"__main__: %s (%s)"
% (
modulename,
logging.getLevelName(logging.getLogger(modulename).getEffectiveLevel()),
)
)
class Application(Wooster):
name = "APP"
def inflow(self, jeeves, drone):
self.jeeves = jeeves
print(
"""This is the Jeeves and Wooster library!
            Jeeves is Wooster's indispensable valet: a gentleman's personal
            gentleman. In fact this Jeeves can manage more than one Wooster
(so he might not be that personal) and guide information from one
Wooster to another in an organised way making all the Woosters
march to the drones.
"""
)
def lock(self):
pass
def unlock(self):
pass
def close(self):
pass
flow = Jeeves()
flow.flow = {"": {"START": Drone("START", Application())}}
flow.start()
| schilduil/suapp | suapp/jandw.py | Python | mit | 11,304 |
import numpy
from chainer import cuda
from chainer import function
from chainer.utils import array
from chainer.utils import type_check
class BilinearFunction(function.Function):
def check_type_forward(self, in_types):
n_in = type_check.eval(in_types.size())
if n_in != 3 and n_in != 6:
raise type_check.InvalidType(
'%s or %s' % (in_types.size() == 3, in_types.size() == 6),
'%s == %s' % (in_types.size(), n_in))
e1_type, e2_type, W_type = in_types[:3]
type_check_prod = type_check.make_variable(numpy.prod, 'prod')
type_check.expect(
e1_type.dtype == numpy.float32,
e1_type.ndim >= 2,
e2_type.dtype == numpy.float32,
e2_type.ndim >= 2,
e1_type.shape[0] == e2_type.shape[0],
W_type.dtype == numpy.float32,
W_type.ndim == 3,
type_check_prod(e1_type.shape[1:]) == W_type.shape[0],
type_check_prod(e2_type.shape[1:]) == W_type.shape[1],
)
if n_in == 6:
out_size = W_type.shape[2]
V1_type, V2_type, b_type = in_types[3:]
type_check.expect(
V1_type.dtype == numpy.float32,
V1_type.ndim == 2,
V1_type.shape[0] == W_type.shape[0],
V1_type.shape[1] == out_size,
V2_type.dtype == numpy.float32,
V2_type.ndim == 2,
V2_type.shape[0] == W_type.shape[1],
V2_type.shape[1] == out_size,
b_type.dtype == numpy.float32,
b_type.ndim == 1,
b_type.shape[0] == out_size,
)
def forward(self, inputs):
e1 = array.as_mat(inputs[0])
e2 = array.as_mat(inputs[1])
W = inputs[2]
if not type_check.same_types(*inputs):
raise ValueError('numpy and cupy must not be used together\n'
'type(W): {0}, type(e1): {1}, type(e2): {2}'
.format(type(W), type(e1), type(e2)))
xp = cuda.get_array_module(*inputs)
if xp is numpy:
y = numpy.einsum('ij,ik,jkl->il', e1, e2, W)
else:
i_len, j_len = e1.shape
k_len = e2.shape[1]
# 'ij,ik->ijk'
e1e2 = e1[:, :, None] * e2[:, None, :]
# ijk->i[jk]
e1e2 = e1e2.reshape(i_len, j_len * k_len)
# jkl->[jk]l
W_mat = W.reshape(-1, W.shape[2])
# 'i[jk],[jk]l->il'
y = e1e2.dot(W_mat)
if len(inputs) == 6:
V1, V2, b = inputs[3:]
y += e1.dot(V1)
y += e2.dot(V2)
y += b
return y,
def backward(self, inputs, grad_outputs):
e1 = array.as_mat(inputs[0])
e2 = array.as_mat(inputs[1])
W = inputs[2]
gy = grad_outputs[0]
xp = cuda.get_array_module(*inputs)
if xp is numpy:
gW = numpy.einsum('ij,ik,il->jkl', e1, e2, gy)
ge1 = numpy.einsum('ik,jkl,il->ij', e2, W, gy)
ge2 = numpy.einsum('ij,jkl,il->ik', e1, W, gy)
else:
kern = cuda.reduce('T in0, T in1, T in2', 'T out',
'in0 * in1 * in2', 'a + b', 'out = a', 0,
'bilinear_product')
e1_b = e1[:, :, None, None] # ij
e2_b = e2[:, None, :, None] # ik
gy_b = gy[:, None, None, :] # il
W_b = W[None, :, :, :] # jkl
gW = kern(e1_b, e2_b, gy_b, axis=0) # 'ij,ik,il->jkl'
ge1 = kern(e2_b, W_b, gy_b, axis=(2, 3)) # 'ik,jkl,il->ij'
ge2 = kern(e1_b, W_b, gy_b, axis=(1, 3)) # 'ij,jkl,il->ik'
ret = ge1.reshape(inputs[0].shape), ge2.reshape(inputs[1].shape), gW
if len(inputs) == 6:
V1, V2, b = inputs[3:]
gV1 = e1.T.dot(gy)
gV2 = e2.T.dot(gy)
gb = gy.sum(0)
ge1 += gy.dot(V1.T)
ge2 += gy.dot(V2.T)
ret += gV1, gV2, gb
return ret
def bilinear(e1, e2, W, V1=None, V2=None, b=None):
"""Applies a bilinear function based on given parameters.
This is a building block of Neural Tensor Network (see the reference paper
below). It takes two input variables and one or four parameters, and
outputs one variable.
To be precise, denote six input arrays mathematically by
:math:`e^1\\in \\mathbb{R}^{I\\cdot J}`,
:math:`e^2\\in \\mathbb{R}^{I\\cdot K}`,
:math:`W\\in \\mathbb{R}^{J \\cdot K \\cdot L}`,
:math:`V^1\\in \\mathbb{R}^{J \\cdot L}`,
:math:`V^2\\in \\mathbb{R}^{K \\cdot L}`, and
:math:`b\\in \\mathbb{R}^{L}`,
where :math:`I` is mini-batch size.
In this document, we call :math:`V^1`, :math:`V^2`, and :math:`b` linear
parameters.
The output of forward propagation is calculated as
.. math::
y_{il} = \\sum_{jk} e^1_{ij} e^2_{ik} W_{jkl} + \\
\\sum_{j} e^1_{ij} V^1_{jl} + \\sum_{k} e^2_{ik} V^2_{kl} + b_{l}.
Note that V1, V2, b are optional. If these are not given, then this
function omits the last three terms in the above equation.
.. note::
This function accepts an input variable ``e1`` or ``e2`` of a non-matrix
array. In this case, the leading dimension is treated as the batch
dimension, and the other dimensions are reduced to one dimension.
.. note::
In the original paper, :math:`J` and :math:`K`
must be equal and the author denotes :math:`[V^1 V^2]`
(concatenation of matrices) by :math:`V`.
Args:
e1 (~chainer.Variable): Left input variable.
e2 (~chainer.Variable): Right input variable.
W (~chainer.Variable): Quadratic weight variable.
V1 (~chainer.Variable): Left coefficient variable.
V2 (~chainer.Variable): Right coefficient variable.
b (~chainer.Variable): Bias variable.
Returns:
~chainer.Variable: Output variable.
See:
`Reasoning With Neural Tensor Networks for Knowledge Base Completion
<http://papers.nips.cc/paper/5028-reasoning-with-neural-tensor-
networks-for-knowledge-base-completion>`_ [Socher+, NIPS2013].
"""
flags = [V1 is None, V2 is None, b is None]
if any(flags):
if not all(flags):
raise ValueError('All coefficients and bias for bilinear() must '
'be None, if at least one of them is None.')
return BilinearFunction()(e1, e2, W)
else:
return BilinearFunction()(e1, e2, W, V1, V2, b)
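# Shape-level usage sketch; the sizes are arbitrary examples, and plain ndarrays
# are wrapped into Variables by the function call:
#
#     import numpy as np
#     e1 = np.random.rand(5, 3).astype(np.float32)    # I=5, J=3
#     e2 = np.random.rand(5, 4).astype(np.float32)    # I=5, K=4
#     W = np.random.rand(3, 4, 2).astype(np.float32)  # J=3, K=4, L=2
#     y = bilinear(e1, e2, W)                          # output shape: (5, 2)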
| kashif/chainer | chainer/functions/connection/bilinear.py | Python | mit | 6,625 |
# coding: utf-8
import datetime
from sqlalchemy import bindparam
from sqlalchemy import Column
from sqlalchemy import DateTime
from sqlalchemy import func
from sqlalchemy import Integer
from sqlalchemy import MetaData
from sqlalchemy import Table
from sqlalchemy import testing
from sqlalchemy.dialects import mysql
from sqlalchemy.engine.url import make_url
from sqlalchemy.testing import engines
from sqlalchemy.testing import eq_
from sqlalchemy.testing import expect_warnings
from sqlalchemy.testing import fixtures
from sqlalchemy.testing import mock
from ...engine import test_execute
class DialectTest(fixtures.TestBase):
__backend__ = True
__only_on__ = "mysql"
@testing.combinations(
(None, "cONnection was kILLEd", "InternalError", "pymysql", True),
(None, "cONnection aLREady closed", "InternalError", "pymysql", True),
(None, "something broke", "InternalError", "pymysql", False),
(2006, "foo", "OperationalError", "mysqldb", True),
(2006, "foo", "OperationalError", "pymysql", True),
(2007, "foo", "OperationalError", "mysqldb", False),
(2007, "foo", "OperationalError", "pymysql", False),
)
def test_is_disconnect(
self, arg0, message, exc_cls_name, dialect_name, is_disconnect
):
class Error(Exception):
pass
dbapi = mock.Mock()
dbapi.Error = Error
dbapi.ProgrammingError = type("ProgrammingError", (Error,), {})
dbapi.OperationalError = type("OperationalError", (Error,), {})
dbapi.InterfaceError = type("InterfaceError", (Error,), {})
dbapi.InternalError = type("InternalError", (Error,), {})
dialect = getattr(mysql, dialect_name).dialect(dbapi=dbapi)
error = getattr(dbapi, exc_cls_name)(arg0, message)
eq_(dialect.is_disconnect(error, None, None), is_disconnect)
def test_ssl_arguments_mysqldb(self):
from sqlalchemy.dialects.mysql import mysqldb
dialect = mysqldb.dialect()
self._test_ssl_arguments(dialect)
def test_ssl_arguments_oursql(self):
from sqlalchemy.dialects.mysql import oursql
dialect = oursql.dialect()
self._test_ssl_arguments(dialect)
def _test_ssl_arguments(self, dialect):
kwarg = dialect.create_connect_args(
make_url(
"mysql://scott:tiger@localhost:3306/test"
"?ssl_ca=/ca.pem&ssl_cert=/cert.pem&ssl_key=/key.pem"
)
)[1]
# args that differ among mysqldb and oursql
for k in ("use_unicode", "found_rows", "client_flag"):
kwarg.pop(k, None)
eq_(
kwarg,
{
"passwd": "tiger",
"db": "test",
"ssl": {
"ca": "/ca.pem",
"cert": "/cert.pem",
"key": "/key.pem",
},
"host": "localhost",
"user": "scott",
"port": 3306,
},
)
@testing.combinations(
("compress", True),
("connect_timeout", 30),
("read_timeout", 30),
("write_timeout", 30),
("client_flag", 1234),
("local_infile", 1234),
("use_unicode", False),
("charset", "hello"),
)
def test_normal_arguments_mysqldb(self, kwarg, value):
from sqlalchemy.dialects.mysql import mysqldb
dialect = mysqldb.dialect()
connect_args = dialect.create_connect_args(
make_url(
"mysql://scott:tiger@localhost:3306/test"
"?%s=%s" % (kwarg, value)
)
)
eq_(connect_args[1][kwarg], value)
def test_mysqlconnector_buffered_arg(self):
from sqlalchemy.dialects.mysql import mysqlconnector
dialect = mysqlconnector.dialect()
kw = dialect.create_connect_args(
make_url("mysql+mysqlconnector://u:p@host/db?buffered=true")
)[1]
eq_(kw["buffered"], True)
kw = dialect.create_connect_args(
make_url("mysql+mysqlconnector://u:p@host/db?buffered=false")
)[1]
eq_(kw["buffered"], False)
kw = dialect.create_connect_args(
make_url("mysql+mysqlconnector://u:p@host/db")
)[1]
eq_(kw["buffered"], True)
def test_mysqlconnector_raise_on_warnings_arg(self):
from sqlalchemy.dialects.mysql import mysqlconnector
dialect = mysqlconnector.dialect()
kw = dialect.create_connect_args(
make_url(
"mysql+mysqlconnector://u:p@host/db?raise_on_warnings=true"
)
)[1]
eq_(kw["raise_on_warnings"], True)
kw = dialect.create_connect_args(
make_url(
"mysql+mysqlconnector://u:p@host/db?raise_on_warnings=false"
)
)[1]
eq_(kw["raise_on_warnings"], False)
kw = dialect.create_connect_args(
make_url("mysql+mysqlconnector://u:p@host/db")
)[1]
assert "raise_on_warnings" not in kw
@testing.only_on("mysql")
def test_random_arg(self):
dialect = testing.db.dialect
kw = dialect.create_connect_args(
make_url("mysql://u:p@host/db?foo=true")
)[1]
eq_(kw["foo"], "true")
@testing.only_on("mysql")
@testing.skip_if("mysql+mysqlconnector", "totally broken for the moment")
@testing.fails_on("mysql+oursql", "unsupported")
def test_special_encodings(self):
for enc in ["utf8mb4", "utf8"]:
eng = engines.testing_engine(
options={"connect_args": {"charset": enc, "use_unicode": 0}}
)
conn = eng.connect()
eq_(conn.dialect._connection_charset, enc)
def test_no_show_variables(self):
from sqlalchemy.testing import mock
engine = engines.testing_engine()
def my_execute(self, statement, *args, **kw):
if statement.startswith("SHOW VARIABLES"):
statement = "SELECT 1 FROM DUAL WHERE 1=0"
return real_exec(self, statement, *args, **kw)
real_exec = engine._connection_cls.exec_driver_sql
with mock.patch.object(
engine._connection_cls, "exec_driver_sql", my_execute
):
with expect_warnings(
"Could not retrieve SQL_MODE; please ensure the "
"MySQL user has permissions to SHOW VARIABLES"
):
engine.connect()
def test_no_default_isolation_level(self):
from sqlalchemy.testing import mock
engine = engines.testing_engine()
real_isolation_level = testing.db.dialect.get_isolation_level
def fake_isolation_level(connection):
connection = mock.Mock(
cursor=mock.Mock(
return_value=mock.Mock(
fetchone=mock.Mock(return_value=None)
)
)
)
return real_isolation_level(connection)
with mock.patch.object(
engine.dialect, "get_isolation_level", fake_isolation_level
):
with expect_warnings(
"Could not retrieve transaction isolation level for MySQL "
"connection."
):
engine.connect()
def test_autocommit_isolation_level(self):
c = testing.db.connect().execution_options(
isolation_level="AUTOCOMMIT"
)
assert c.exec_driver_sql("SELECT @@autocommit;").scalar()
c = c.execution_options(isolation_level="READ COMMITTED")
assert not c.exec_driver_sql("SELECT @@autocommit;").scalar()
def test_isolation_level(self):
values = [
"READ UNCOMMITTED",
"READ COMMITTED",
"REPEATABLE READ",
"SERIALIZABLE",
]
for value in values:
c = testing.db.connect().execution_options(isolation_level=value)
eq_(testing.db.dialect.get_isolation_level(c.connection), value)
class ParseVersionTest(fixtures.TestBase):
@testing.combinations(
((10, 2, 7), "10.2.7-MariaDB", (10, 2, 7, "MariaDB"), True),
(
(10, 2, 7),
"5.6.15.10.2.7-MariaDB",
(5, 6, 15, 10, 2, 7, "MariaDB"),
True,
),
((10, 2, 10), "10.2.10-MariaDB", (10, 2, 10, "MariaDB"), True),
((5, 7, 20), "5.7.20", (5, 7, 20), False),
((5, 6, 15), "5.6.15", (5, 6, 15), False),
(
(10, 2, 6),
"10.2.6.MariaDB.10.2.6+maria~stretch-log",
(10, 2, 6, "MariaDB", 10, 2, "6+maria~stretch", "log"),
True,
),
(
(10, 1, 9),
"10.1.9-MariaDBV1.0R050D002-20170809-1522",
(10, 1, 9, "MariaDB", "V1", "0R050D002", 20170809, 1522),
True,
),
)
def test_mariadb_normalized_version(
self, expected, raw_version, version, is_mariadb
):
dialect = mysql.dialect()
eq_(dialect._parse_server_version(raw_version), version)
dialect.server_version_info = version
eq_(dialect._mariadb_normalized_version_info, expected)
assert dialect._is_mariadb is is_mariadb
@testing.combinations(
(True, (10, 2, 7, "MariaDB")),
(True, (5, 6, 15, 10, 2, 7, "MariaDB")),
(False, (10, 2, 10, "MariaDB")),
(False, (5, 7, 20)),
(False, (5, 6, 15)),
(True, (10, 2, 6, "MariaDB", 10, 2, "6+maria~stretch", "log")),
)
def test_mariadb_check_warning(self, expect_, version):
dialect = mysql.dialect()
dialect.server_version_info = version
if expect_:
with expect_warnings(
".*before 10.2.9 has known issues regarding "
"CHECK constraints"
):
dialect._warn_for_known_db_issues()
else:
dialect._warn_for_known_db_issues()
class RemoveUTCTimestampTest(fixtures.TablesTest):
"""This test exists because we removed the MySQL dialect's
override of the UTC_TIMESTAMP() function, where the commit message
for this feature stated that "it caused problems with executemany()".
Since no example was provided, we are trying lots of combinations
here.
[ticket:3966]
"""
__only_on__ = "mysql"
__backend__ = True
@classmethod
def define_tables(cls, metadata):
Table(
"t",
metadata,
Column("id", Integer, primary_key=True),
Column("x", Integer),
Column("data", DateTime),
)
Table(
"t_default",
metadata,
Column("id", Integer, primary_key=True),
Column("x", Integer),
Column("idata", DateTime, default=func.utc_timestamp()),
Column("udata", DateTime, onupdate=func.utc_timestamp()),
)
def test_insert_executemany(self):
with testing.db.connect() as conn:
conn.execute(
self.tables.t.insert().values(data=func.utc_timestamp()),
[{"x": 5}, {"x": 6}, {"x": 7}],
)
def test_update_executemany(self):
with testing.db.connect() as conn:
timestamp = datetime.datetime(2015, 4, 17, 18, 5, 2)
conn.execute(
self.tables.t.insert(),
[
{"x": 5, "data": timestamp},
{"x": 6, "data": timestamp},
{"x": 7, "data": timestamp},
],
)
conn.execute(
self.tables.t.update()
.values(data=func.utc_timestamp())
.where(self.tables.t.c.x == bindparam("xval")),
[{"xval": 5}, {"xval": 6}, {"xval": 7}],
)
def test_insert_executemany_w_default(self):
with testing.db.connect() as conn:
conn.execute(
self.tables.t_default.insert(), [{"x": 5}, {"x": 6}, {"x": 7}]
)
def test_update_executemany_w_default(self):
with testing.db.connect() as conn:
timestamp = datetime.datetime(2015, 4, 17, 18, 5, 2)
conn.execute(
self.tables.t_default.insert(),
[
{"x": 5, "idata": timestamp},
{"x": 6, "idata": timestamp},
{"x": 7, "idata": timestamp},
],
)
conn.execute(
self.tables.t_default.update()
.values(idata=func.utc_timestamp())
.where(self.tables.t_default.c.x == bindparam("xval")),
[{"xval": 5}, {"xval": 6}, {"xval": 7}],
)
class SQLModeDetectionTest(fixtures.TestBase):
__only_on__ = "mysql"
__backend__ = True
def _options(self, modes):
def connect(con, record):
cursor = con.cursor()
cursor.execute("set sql_mode='%s'" % (",".join(modes)))
e = engines.testing_engine(
options={
"pool_events": [
(connect, "first_connect"),
(connect, "connect"),
]
}
)
return e
def test_backslash_escapes(self):
engine = self._options(["NO_BACKSLASH_ESCAPES"])
c = engine.connect()
assert not engine.dialect._backslash_escapes
c.close()
engine.dispose()
engine = self._options([])
c = engine.connect()
assert engine.dialect._backslash_escapes
c.close()
engine.dispose()
def test_ansi_quotes(self):
engine = self._options(["ANSI_QUOTES"])
c = engine.connect()
assert engine.dialect._server_ansiquotes
c.close()
engine.dispose()
def test_combination(self):
engine = self._options(["ANSI_QUOTES,NO_BACKSLASH_ESCAPES"])
c = engine.connect()
assert engine.dialect._server_ansiquotes
assert not engine.dialect._backslash_escapes
c.close()
engine.dispose()
class ExecutionTest(fixtures.TestBase):
"""Various MySQL execution special cases."""
__only_on__ = "mysql"
__backend__ = True
def test_charset_caching(self):
engine = engines.testing_engine()
cx = engine.connect()
meta = MetaData()
charset = engine.dialect._detect_charset(cx)
meta.reflect(cx)
eq_(cx.dialect._connection_charset, charset)
cx.close()
def test_sysdate(self):
d = testing.db.scalar(func.sysdate())
assert isinstance(d, datetime.datetime)
class AutocommitTextTest(test_execute.AutocommitTextTest):
__only_on__ = "mysql"
def test_load_data(self):
self._test_keyword("LOAD DATA STUFF")
def test_replace(self):
self._test_keyword("REPLACE THING")
| graingert/sqlalchemy | test/dialect/mysql/test_dialect.py | Python | mit | 14,932 |
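# ----------------------------------------------------------------------------
# Editor's note: a small sketch (not part of the original test module) of the
# URL-to-DBAPI-argument translation exercised by the tests above. It assumes
# SQLAlchemy with the mysqldb dialect is importable; the credentials are the
# same placeholder ones the tests use.
from sqlalchemy.dialects.mysql import mysqldb
from sqlalchemy.engine.url import make_url

_dialect = mysqldb.dialect()
_args, _kwargs = _dialect.create_connect_args(
    make_url("mysql://scott:tiger@localhost:3306/test?charset=utf8")
)
# _kwargs now holds the keyword arguments handed to the DBAPI connect(),
# e.g. host, port, user, passwd, db and charset, as checked in
# _test_ssl_arguments() and test_normal_arguments_mysqldb() above.
# ----------------------------------------------------------------------------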
from accounts.models import Practice
def create_practice(request, strategy, backend, uid, response={}, details={}, user=None, social=None, *args, **kwargs):
"""
    If the user already has a practice, leave it unchanged; otherwise create a new one.
"""
practice, created = Practice.objects.update_or_create(user=user)
return None
| TimothyBest/Appointment_Booking_drchrono | appointment_booking_drchrono/accounts/pipeline.py | Python | mit | 321 |
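# ----------------------------------------------------------------------------
# Editor's note: a hedged sketch (not part of the original file) of how a
# custom step like create_practice is typically wired into a python-social-auth
# pipeline in Django settings. The step names follow the social-core defaults,
# and the dotted path 'accounts.pipeline' is assumed from this file's location;
# adjust both to the real project layout.
SOCIAL_AUTH_PIPELINE = (
    'social_core.pipeline.social_auth.social_details',
    'social_core.pipeline.social_auth.social_uid',
    'social_core.pipeline.user.create_user',
    'accounts.pipeline.create_practice',   # runs after the user exists
)
# ----------------------------------------------------------------------------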
from flask import Flask
from flask import render_template, request
app = Flask(__name__)
@app.route("/")
def main():
room = request.args.get('room', '')
if room:
return render_template('watch.html')
return render_template('index.html')
if __name__ == "__main__":
app.run(host='0.0.0.0', debug=True)
| victorpoluceno/webrtc-sample-client | app/__init__.py | Python | mit | 314 |
from nose.tools import with_setup
import os
import hk_glazer as js2deg
import subprocess
import json
class TestClass:
@classmethod
def setup_class(cls):
cls.here = os.path.dirname(__file__)
cls.data = cls.here + '/data'
def test_1(self):
'''Test 1: Check that json_to_degree works when imported'''
with open(self.data + "/json_test_in.json") as config_file:
config_dict = json.load(config_file)
gen_str = js2deg.dict_to_dat(config_dict)
with open(self.data + "/json_test_out.txt") as verif_file:
test_str = verif_file.read()
assert(test_str == gen_str)
pass
def test_2(self):
'''Test 2: Check command line execution when saving to file'''
cmd = os.path.abspath(self.here + '/../../bin/hk_glazer')
print(cmd)
subprocess.check_call([cmd, "js2degree", self.data + "/json_test_in.json", "-o=test2.txt", "-s"])
with open("test2.txt") as file:
gen_str = file.read()
with open(self.data + "/json_test_out.txt") as file:
test_str = file.read()
assert(test_str == gen_str)
os.remove("test2.txt")
pass
def test_3(self):
'''Test 3: Command line execution when outfile already exists'''
cmd = os.path.abspath(self.here + '/../../bin/hk_glazer')
subprocess.check_call([cmd, "js2degree", self.data + "/json_test_in.json", "-o=test3.txt", "-s"])
try:
subprocess.check_call([cmd,"js2degree", self.data + "/json_test_in.json", "-o=test3.txt"])
except Exception as e:
#print(type(e))
assert(type(e) == subprocess.CalledProcessError)
pass
else:
assert(False)
finally:
os.remove("test3.txt")
| fmuzf/python_hk_glazer | hk_glazer/test/test.py | Python | mit | 1,705 |
from django.conf.urls import url
from . import views
from django.views.decorators.cache import cache_page
app_name = 'webinter'
urlpatterns = [
url(r'^$', views.IndexView.as_view(), name='index'),
url(r'^logout/$', views.logout_view, name='logout'),
]
| ipriver/0x71aBot-Web-Interface | webinter/urls.py | Python | mit | 262 |
# -*- coding: utf-8 -*-
"""
Money doctests as unittest Suite
"""
# RADAR: Python2
from __future__ import absolute_import
import doctest
import unittest
# RADAR: Python2
import money.six
FILES = (
'../../README.rst',
)
def load_tests(loader, tests, pattern):
# RADAR Python 2.x
if money.six.PY2:
# Doc tests are Python 3.x
return unittest.TestSuite()
return doctest.DocFileSuite(*FILES)
| carlospalol/money | money/tests/test_docs.py | Python | mit | 424 |
# encoding: UTF-8
"""
An ATR-RSI combination trading strategy, suited to 1-minute and 5-minute bars on stock index futures.
Notes:
1. The author makes no guarantee of trading profits; the strategy code is for reference only.
2. This strategy requires talib; users who have not installed it should first follow the tutorial at www.vnpy.org.
3. Import IF0000_1min.csv into MongoDB with ctaHistoryData.py, then run this file directly to backtest the strategy.
"""
from ctaBase import *
from ctaTemplate import CtaTemplate
import talib
import numpy as np
########################################################################
class AtrRsiStrategy(CtaTemplate):
"""结合ATR和RSI指标的一个分钟线交易策略"""
className = 'AtrRsiStrategy'
author = u'用Python的交易员'
    # Strategy parameters
    atrLength = 22          # window size for computing the ATR indicator
    atrMaLength = 10        # window size for the ATR moving average
    rsiLength = 5           # window size for computing the RSI
    rsiEntry = 16           # RSI entry signal threshold
    trailingPercent = 0.8   # trailing stop, in percent
    initDays = 10           # number of days of data used for initialization
    # Strategy variables
    bar = None                  # bar (K-line) object
    barMinute = EMPTY_STRING    # current minute of the bar
    bufferSize = 100                    # size of the data buffer
    bufferCount = 0                     # number of data points buffered so far
    highArray = np.zeros(bufferSize)    # array of bar highs
    lowArray = np.zeros(bufferSize)     # array of bar lows
    closeArray = np.zeros(bufferSize)   # array of bar closes
    atrCount = 0                        # number of ATR values buffered so far
    atrArray = np.zeros(bufferSize)     # array of ATR values
    atrValue = 0                        # latest ATR value
    atrMa = 0                           # moving average of the ATR
    rsiValue = 0                        # latest RSI value
    rsiBuy = 0                          # RSI buy-entry threshold
    rsiSell = 0                         # RSI sell-entry threshold
    intraTradeHigh = 0                  # highest price during the holding period, for the trailing stop
    intraTradeLow = 0                   # lowest price during the holding period, for the trailing stop
    orderList = []                      # list of issued order IDs
    # List of parameter names
paramList = ['name',
'className',
'author',
'vtSymbol',
'atrLength',
'atrMaLength',
'rsiLength',
'rsiEntry',
'trailingPercent']
    # List of variable names
varList = ['inited',
'trading',
'pos',
'atrValue',
'atrMa',
'rsiValue',
'rsiBuy',
'rsiSell']
#----------------------------------------------------------------------
def __init__(self, ctaEngine, setting):
"""Constructor"""
super(AtrRsiStrategy, self).__init__(ctaEngine, setting)
#----------------------------------------------------------------------
def onInit(self):
"""初始化策略(必须由用户继承实现)"""
self.writeCtaLog(u'%s策略初始化' %self.name)
# 初始化RSI入场阈值
self.rsiBuy = 50 + self.rsiEntry
self.rsiSell = 50 - self.rsiEntry
# 载入历史数据,并采用回放计算的方式初始化策略数值
initData = self.loadBar(self.initDays)
for bar in initData:
self.onBar(bar)
self.putEvent()
#----------------------------------------------------------------------
def onStart(self):
"""启动策略(必须由用户继承实现)"""
self.writeCtaLog(u'%s策略启动' %self.name)
self.putEvent()
#----------------------------------------------------------------------
def onStop(self):
"""停止策略(必须由用户继承实现)"""
self.writeCtaLog(u'%s策略停止' %self.name)
self.putEvent()
#----------------------------------------------------------------------
def onTick(self, tick):
"""收到行情TICK推送(必须由用户继承实现)"""
# 计算K线
tickMinute = tick.datetime.minute
if tickMinute != self.barMinute:
if self.bar:
self.onBar(self.bar)
bar = CtaBarData()
bar.vtSymbol = tick.vtSymbol
bar.symbol = tick.symbol
bar.exchange = tick.exchange
bar.open = tick.lastPrice
bar.high = tick.lastPrice
bar.low = tick.lastPrice
bar.close = tick.lastPrice
bar.date = tick.date
bar.time = tick.time
            bar.datetime = tick.datetime    # the bar time is the time of its first tick
            self.bar = bar                  # written this way to save an attribute lookup and run faster
            self.barMinute = tickMinute     # update the current minute
        else:                               # otherwise keep accumulating into the current bar
            bar = self.bar                  # same trick, for speed
bar.high = max(bar.high, tick.lastPrice)
bar.low = min(bar.low, tick.lastPrice)
bar.close = tick.lastPrice
#----------------------------------------------------------------------
def onBar(self, bar):
"""收到Bar推送(必须由用户继承实现)"""
# 撤销之前发出的尚未成交的委托(包括限价单和停止单)
for orderID in self.orderList:
self.cancelOrder(orderID)
self.orderList = []
        # Store the bar data
self.closeArray[0:self.bufferSize-1] = self.closeArray[1:self.bufferSize]
self.highArray[0:self.bufferSize-1] = self.highArray[1:self.bufferSize]
self.lowArray[0:self.bufferSize-1] = self.lowArray[1:self.bufferSize]
self.closeArray[-1] = bar.close
self.highArray[-1] = bar.high
self.lowArray[-1] = bar.low
self.bufferCount += 1
if self.bufferCount < self.bufferSize:
return
        # Compute the indicator values
self.atrValue = talib.ATR(self.highArray,
self.lowArray,
self.closeArray,
self.atrLength)[-1]
self.atrArray[0:self.bufferSize-1] = self.atrArray[1:self.bufferSize]
self.atrArray[-1] = self.atrValue
self.atrCount += 1
if self.atrCount < self.bufferSize:
return
self.atrMa = talib.MA(self.atrArray,
self.atrMaLength)[-1]
self.rsiValue = talib.RSI(self.closeArray,
self.rsiLength)[-1]
        # Decide whether to trade
        # Currently flat (no position)
if self.pos == 0:
self.intraTradeHigh = bar.high
self.intraTradeLow = bar.low
            # The ATR crossing above its moving average means short-term volatility is increasing,
            # i.e. a trend is more likely, which suits opening a CTA position
            if self.atrValue > self.atrMa:
                # In a trending market the RSI saturates in the overbought/oversold zone; use that as the entry signal
                if self.rsiValue > self.rsiBuy:
                    # To make sure the order fills, place it 5 whole index points beyond the market price
self.buy(bar.close+5, 1)
return
if self.rsiValue < self.rsiSell:
self.short(bar.close-5, 1)
return
        # Holding a long position
        if self.pos == 1:
            # Track the highest price while long, and reset the lowest price
self.intraTradeHigh = max(self.intraTradeHigh, bar.high)
self.intraTradeLow = bar.low
            # Compute the trailing stop for the long position
longStop = self.intraTradeHigh * (1-self.trailingPercent/100)
            # Issue a local stop order and record its ID for later cancellation
orderID = self.sell(longStop, 1, stop=True)
self.orderList.append(orderID)
return
        # Holding a short position
if self.pos == -1:
self.intraTradeLow = min(self.intraTradeLow, bar.low)
self.intraTradeHigh = bar.high
shortStop = self.intraTradeLow * (1+self.trailingPercent/100)
orderID = self.cover(shortStop, 1, stop=True)
self.orderList.append(orderID)
return
        # Emit a status update event
self.putEvent()
#----------------------------------------------------------------------
def onOrder(self, order):
"""收到委托变化推送(必须由用户继承实现)"""
pass
#----------------------------------------------------------------------
def onTrade(self, trade):
pass
if __name__ == '__main__':
    # Allow backtesting by running this file directly
    # PyQt4 is imported to make sure matplotlib uses PyQt4 rather than PySide, avoiding initialization errors
    from ctaBacktesting import *
    from PyQt4 import QtCore, QtGui
    # Create the backtesting engine
    engine = BacktestingEngine()
    # Set the engine's backtesting mode to bar mode
    engine.setBacktestingMode(engine.BAR_MODE)
    # Set the start date of the backtest data
    engine.setStartDate('20120101')
    # Load historical data into the engine
    engine.loadHistoryData(MINUTE_DB_NAME, 'IF0000')
    # Set product-related parameters
    engine.setSlippage(0.2)     # one tick of the index future
    engine.setRate(0.3/10000)   # commission of 0.3 per 10,000
    engine.setSize(300)         # contract multiplier of the index future
    # Create the strategy object inside the engine
    engine.initStrategy(AtrRsiStrategy, {})
    # Run the backtest
    engine.runBacktesting()
    # Show the backtest results
engine.showBacktestingResult()
| lukesummer/vnpy | vn.trader/ctaAlgo/strategyAtrRsi.py | Python | mit | 9,927 |
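# ----------------------------------------------------------------------------
# Editor's note: a self-contained sketch (not part of the original strategy) of
# the entry rule described in onBar() above: trade only when the ATR is above
# its moving average, then use the RSI thresholds rsiBuy/rsiSell to pick the
# direction. The numbers below are illustrative.
def atr_rsi_entry_signal(atr_value, atr_ma, rsi_value, rsi_entry=16):
    """Return +1 for a long entry, -1 for a short entry, 0 for no trade."""
    rsi_buy = 50 + rsi_entry
    rsi_sell = 50 - rsi_entry
    if atr_value <= atr_ma:      # volatility not expanding: stay out
        return 0
    if rsi_value > rsi_buy:      # strong upward momentum
        return 1
    if rsi_value < rsi_sell:     # strong downward momentum
        return -1
    return 0

assert atr_rsi_entry_signal(atr_value=12.0, atr_ma=10.0, rsi_value=70.0) == 1
assert atr_rsi_entry_signal(atr_value=12.0, atr_ma=10.0, rsi_value=30.0) == -1
assert atr_rsi_entry_signal(atr_value=8.0, atr_ma=10.0, rsi_value=70.0) == 0
# ----------------------------------------------------------------------------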
import os
import yaml
MONGO_USERNAME = os.getenv('MONGO_USERNAME', None)
MONGO_PASSWORD = os.getenv('MONGO_PASSWORD', None)
MONGODB_HOST = os.getenv('MONGODB_HOST', '127.0.0.1')
MONGODB_PORT = int(os.getenv('MONGODB_PORT', '27017'))
MONGODB_SERVERS = os.getenv('MONGODB_SERVERS') \
or '{}:{}'.format(MONGODB_HOST, MONGODB_PORT)
MONGODB_DEFAULT_URL = 'mongodb://{}'.format(MONGODB_SERVERS)
MONGO_URL = os.getenv('MONGO_URL') or MONGODB_DEFAULT_URL
MONGO_INCLUDES = os.getenv('MONGO_INCLUDES', '')
ES_URL = os.getenv('ES_URL', 'http://localhost:9200')
ES_INDEXES = yaml.load(os.getenv('ES_INDEXES') or '{}')
ES_TIMEOUT_SECONDS = int(os.getenv('ES_TIMEOUT_SECONDS', '100'))
LOG_VERBOSITY = int(os.getenv('LOG_VERBOSITY', 2))
MONGO_CONNECTOR_CONFIG = 'mongo-connector.json'
DEFAULTS = {
'es': {
'url': ES_URL,
'indexes': ES_INDEXES
},
'mongo-connector': {
'mainAddress': MONGO_URL,
'authentication': {
'adminUsername': MONGO_USERNAME,
'password': MONGO_PASSWORD
},
'namespaces': {
'include': MONGO_INCLUDES.split(','),
},
'timezoneAware': True,
'docManagers': [
{
'docManager': 'elastic_doc_manager',
'targetURL': ES_URL,
"args": {
"clientOptions": {
"timeout": ES_TIMEOUT_SECONDS
}
}
}
],
'logging': {
'type': 'stream'
},
'verbosity': LOG_VERBOSITY,
'continueOnError': True
},
}
CONFIG_LOCATION = os.getenv('CONFIG_LOCATION')
| ymind/docker-mongo-es | conf/appconfig.py | Python | mit | 1,677 |
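# ----------------------------------------------------------------------------
# Editor's note: a small illustration (not part of the original file) of the
# fallback order used above: MONGO_URL wins if set, otherwise the URL is built
# from MONGODB_SERVERS, which itself defaults to MONGODB_HOST:MONGODB_PORT.
# The values shown are illustrative only.
def resolve_mongo_url(env):
    host = env.get('MONGODB_HOST', '127.0.0.1')
    port = int(env.get('MONGODB_PORT', '27017'))
    servers = env.get('MONGODB_SERVERS') or '{}:{}'.format(host, port)
    return env.get('MONGO_URL') or 'mongodb://{}'.format(servers)

assert resolve_mongo_url({}) == 'mongodb://127.0.0.1:27017'
assert resolve_mongo_url({'MONGODB_SERVERS': 'db1:27017,db2:27017'}) == 'mongodb://db1:27017,db2:27017'
assert resolve_mongo_url({'MONGO_URL': 'mongodb://custom'}) == 'mongodb://custom'
# ----------------------------------------------------------------------------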
# coding: UTF-8
import unittest
import play_file
class TestAssemblyReader(unittest.TestCase):
def test_version_reader(self):
assembly_reader = play_file.AssemblyReader()
version = assembly_reader.get_assembly_version('AssemblyInfo.cs')
self.assertEqual(version, '7.3.1.0210')
def test_version_writer(self):
new_version = '7.3.1.0228'
assembly_writer = play_file.AssemblyWriter()
version = assembly_writer.update_assembly_version('AssemblyInfo.cs', new_version)
self.assertEqual(version, new_version)
| biztudio/JustPython | syntaxlab/src/test_play_file.py | Python | mit | 570 |
# -*- coding: utf-8 -*-
"""
Created on Tue Mar 14 02:17:11 2017
@author: guida
"""
import json
import requests
def get_url(url):
response = requests.get(url)
content = response.content.decode("utf8")
return content
#Json parser
def get_json_from_url(url):
content = get_url(url)
js = json.loads(content)
    return js
| DiegoGuidaF/telegram-raspy | modules.py | Python | mit | 358 |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2012-2014 Alan Aguiar alanjas@hotmail.com
# Copyright (c) 2012-2014 Butiá Team butia@fing.edu.uy
# Butia is a free and open robotic platform
# www.fing.edu.uy/inco/proyectos/butia
# Facultad de Ingeniería - Universidad de la República - Uruguay
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
import os
import sys
# Make sure that can import all files
abs_path = os.path.abspath(os.path.dirname(__file__))
sys.path.insert(0, abs_path)
# version = 0.turtlebots_version.secondary_number
__version__ = '0.27.0'
| nvazquez/Turtlebots | plugins/butia/pybot/__init__.py | Python | mit | 1,171 |
import json
import os
from pokemongo_bot import inventory
from pokemongo_bot.base_dir import _base_dir
from pokemongo_bot.base_task import BaseTask
from pokemongo_bot.human_behaviour import action_delay
from pokemongo_bot.services.item_recycle_worker import ItemRecycler
from pokemongo_bot.tree_config_builder import ConfigException
from pokemongo_bot.worker_result import WorkerResult
DEFAULT_MIN_EMPTY_SPACE = 6
class RecycleItems(BaseTask):
"""
    Recycle unwanted items when the number of free inventory slots drops below ``min_empty_space`` (6 by default).
    You can use either an item's name or its id. For the full list of items see ../../data/items.json
    It is highly recommended to put this task before the move_to_fort and spin_fort tasks in the config file so you'll most likely be able to loot.
Example config :
{
"type": "RecycleItems",
"config": {
"min_empty_space": 6, # 6 by default
"item_filter": {
"Pokeball": {"keep": 20},
"Greatball": {"keep": 50},
"Ultraball": {"keep": 100},
"Potion": {"keep": 0},
"Super Potion": {"keep": 0},
"Hyper Potion": {"keep": 20},
"Max Potion": {"keep": 50},
"Revive": {"keep": 0},
"Max Revive": {"keep": 20},
"Razz Berry": {"keep": 20}
}
}
}
"""
SUPPORTED_TASK_API_VERSION = 1
def initialize(self):
self.items_filter = self.config.get('item_filter', {})
self.min_empty_space = self.config.get('min_empty_space', None)
self._validate_item_filter()
def _validate_item_filter(self):
"""
Validate user's item filter config
:return: Nothing.
:rtype: None
:raise: ConfigException: When an item doesn't exist in ../../data/items.json
"""
item_list = json.load(open(os.path.join(_base_dir, 'data', 'items.json')))
for config_item_name, bag_count in self.items_filter.iteritems():
if config_item_name not in item_list.viewvalues():
if config_item_name not in item_list:
raise ConfigException(
"item {} does not exist, spelling mistake? (check for valid item names in data/items.json)".format(
config_item_name))
def should_run(self):
"""
Returns a value indicating whether the recycling process should be run.
:return: True if the recycling process should be run; otherwise, False.
:rtype: bool
"""
if inventory.Items.get_space_left() < (DEFAULT_MIN_EMPTY_SPACE if self.min_empty_space is None else self.min_empty_space):
return True
return False
def work(self):
"""
Start the process of recycling items if necessary.
:return: Returns whether or not the task went well
:rtype: WorkerResult
"""
# TODO: Use new inventory everywhere and then remove this inventory update
inventory.refresh_inventory()
worker_result = WorkerResult.SUCCESS
if self.should_run():
for item_in_inventory in inventory.items().all():
if self.item_should_be_recycled(item_in_inventory):
                    # Make the bot appear more human
action_delay(self.bot.config.action_wait_min, self.bot.config.action_wait_max)
# If at any recycling process call we got an error, we consider that the result of this task is error too.
if ItemRecycler(self.bot, item_in_inventory, self.get_amount_to_recycle(item_in_inventory)).work() == WorkerResult.ERROR:
worker_result = WorkerResult.ERROR
return worker_result
def item_should_be_recycled(self, item):
"""
Returns a value indicating whether the item should be recycled.
:param item: The Item to test
        :return: True if the item should be recycled; otherwise, False.
:rtype: bool
"""
return (item.name in self.items_filter or str(item.id) in self.items_filter) and self.get_amount_to_recycle(item) > 0
def get_amount_to_recycle(self, item):
"""
Determine the amount to recycle accordingly to user config
:param item: Item to determine the amount to recycle.
:return: The amount to recycle
:rtype: int
"""
amount_to_keep = self.get_amount_to_keep(item)
return 0 if amount_to_keep is None else item.count - amount_to_keep
def get_amount_to_keep(self, item):
"""
Determine item's amount to keep in inventory.
:param item:
:return: Item's amount to keep in inventory.
:rtype: int
"""
item_filter_config = self.items_filter.get(item.name, 0)
        if item_filter_config != 0:
return item_filter_config.get('keep', 20)
else:
item_filter_config = self.items_filter.get(str(item.id), 0)
if item_filter_config is not 0:
return item_filter_config.get('keep', 20)
| dmateusp/PokemonGo-Bot | pokemongo_bot/cell_workers/recycle_items.py | Python | mit | 5,073 |
class TreeNode:
def __init__(self, x):
self.val = x
self.left = None
self.right = None
class Solution:
node = parent = None
def deleteNode(self, root: TreeNode, key: int) -> TreeNode:
# search for the node and its parent
self.findNodeAndParent(root, key)
if self.node == root and not root.left and not root.right:
return None
if self.node:
self.deleteNodeHelper(self.node, self.parent)
return root
def deleteNodeHelper(self, node, parent):
# if node is a leaf
if not node.left and not node.right:
if parent:
if parent.left == node:
parent.left = None
else:
parent.right = None
return
# if node has only one child
if not node.left or not node.right:
child = node.left if not node.right else node.right
node.val = child.val
node.left = child.left
node.right = child.right
return
# node has two children
successor, succesorParent = self.getNodeSuccessor(node)
node.val = successor.val
self.deleteNodeHelper(successor, succesorParent)
def getNodeSuccessor(self, node):
succesorParent = node
successor = node.right
while successor.left:
succesorParent = successor
successor = successor.left
return successor, succesorParent
def findNodeAndParent(self, root, key):
if not root:
return
if root.val == key:
self.node = root
return
self.parent = root
if key < root.val:
self.findNodeAndParent(root.left, key)
else:
self.findNodeAndParent(root.right, key)
root = TreeNode(10)
root.left = TreeNode(3)
root.left.left = TreeNode(2)
root.left.right = TreeNode(8)
root.left.right.left = TreeNode(7)
root.left.right.right = TreeNode(9)
root.right = TreeNode(15)
root.right.left = TreeNode(13)
root.right.right = TreeNode(17)
root.right.right.right = TreeNode(19)
ob = Solution()
root = TreeNode(50)
root = ob.deleteNode(root, 50)
print(root)
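# ----------------------------------------------------------------------------
# Editor's note: a small verification sketch (not part of the original snippet).
# An in-order traversal of a BST is sorted, so it is an easy way to check that
# deleteNode() kept the BST property after removing a key.
def inorder(node, out=None):
    if out is None:
        out = []
    if node:
        inorder(node.left, out)
        out.append(node.val)
        inorder(node.right, out)
    return out

demo_root = TreeNode(10)
demo_root.left = TreeNode(3)
demo_root.right = TreeNode(15)
demo_root.right.left = TreeNode(13)
demo_root = Solution().deleteNode(demo_root, 15)
print(inorder(demo_root))   # expected: [3, 10, 13], still sorted and without 15
# ----------------------------------------------------------------------------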
| shobhitmishra/CodingProblems | LeetCode/Session3/DeleteNodeBST.py | Python | mit | 2,274 |
# coding=utf-8
"""
Collects all number values from the db.serverStatus() command, other
values are ignored.
#### Dependencies
* pymongo
"""
import diamond.collector
from diamond.collector import str_to_bool
import re
import zlib
try:
import pymongo
pymongo # workaround for pyflakes issue #13
except ImportError:
pymongo = None
try:
from pymongo import ReadPreference
ReadPreference # workaround for pyflakes issue #13
except ImportError:
ReadPreference = None
class MongoDBCollector(diamond.collector.Collector):
MAX_CRC32 = 4294967295
def __init__(self, *args, **kwargs):
self.__totals = {}
super(MongoDBCollector, self).__init__(*args, **kwargs)
def get_default_config_help(self):
config_help = super(MongoDBCollector, self).get_default_config_help()
config_help.update({
'hosts': 'Array of hostname(:port) elements to get metrics from'
'Set an alias by prefixing host:port with alias@',
'host': 'A single hostname(:port) to get metrics from'
' (can be used instead of hosts and overrides it)',
'user': 'Username for authenticated login (optional)',
'passwd': 'Password for authenticated login (optional)',
'databases': 'A regex of which databases to gather metrics for.'
' Defaults to all databases.',
'ignore_collections': 'A regex of which collections to ignore.'
' MapReduce temporary collections (tmp.mr.*)'
' are ignored by default.',
'collection_sample_rate': 'Only send stats for a consistent subset '
'of collections. This is applied after collections are ignored via'
            ' ignore_collections. Sampling uses crc32 so it is consistent across'
' replicas. Value between 0 and 1. Default is 1',
'network_timeout': 'Timeout for mongodb connection (in seconds).'
' There is no timeout by default.',
'simple': 'Only collect the same metrics as mongostat.',
'translate_collections': 'Translate dot (.) to underscores (_)'
' in collection names.',
'ssl': 'True to enable SSL connections to the MongoDB server.'
' Default is False'
})
return config_help
def get_default_config(self):
"""
Returns the default collector settings
"""
config = super(MongoDBCollector, self).get_default_config()
config.update({
'path': 'mongo',
'hosts': ['localhost'],
'user': None,
'passwd': None,
'databases': '.*',
'ignore_collections': '^tmp\.mr\.',
'network_timeout': None,
'simple': 'False',
'translate_collections': 'False',
'collection_sample_rate': 1,
'ssl': False
})
return config
def collect(self):
"""Collect number values from db.serverStatus()"""
if pymongo is None:
self.log.error('Unable to import pymongo')
return
# we need this for backwards compatibility
if 'host' in self.config:
self.config['hosts'] = [self.config['host']]
# convert network_timeout to integer
if self.config['network_timeout']:
self.config['network_timeout'] = int(
self.config['network_timeout'])
# convert collection_sample_rate to float
if self.config['collection_sample_rate']:
self.config['collection_sample_rate'] = float(
self.config['collection_sample_rate'])
# use auth if given
if 'user' in self.config:
user = self.config['user']
else:
user = None
if 'passwd' in self.config:
passwd = self.config['passwd']
else:
passwd = None
for host in self.config['hosts']:
if len(self.config['hosts']) == 1:
# one host only, no need to have a prefix
base_prefix = []
else:
matches = re.search('((.+)\@)?(.+)?', host)
alias = matches.group(2)
host = matches.group(3)
if alias is None:
base_prefix = [re.sub('[:\.]', '_', host)]
else:
base_prefix = [alias]
try:
# Ensure that the SSL option is a boolean.
if type(self.config['ssl']) is str:
self.config['ssl'] = str_to_bool(self.config['ssl'])
if ReadPreference is None:
conn = pymongo.Connection(
host,
network_timeout=self.config['network_timeout'],
ssl=self.config['ssl'],
slave_okay=True
)
else:
conn = pymongo.Connection(
host,
network_timeout=self.config['network_timeout'],
ssl=self.config['ssl'],
read_preference=ReadPreference.SECONDARY,
)
except Exception, e:
                self.log.error('Could not connect to mongodb: %s', e)
continue
# try auth
if user:
try:
conn.admin.authenticate(user, passwd)
except Exception, e:
                    self.log.error('User auth given, but could not authenticate'
+ ' with host: %s, err: %s' % (host, e))
return{}
data = conn.db.command('serverStatus')
self._publish_transformed(data, base_prefix)
if str_to_bool(self.config['simple']):
data = self._extract_simple_data(data)
self._publish_dict_with_prefix(data, base_prefix)
db_name_filter = re.compile(self.config['databases'])
ignored_collections = re.compile(self.config['ignore_collections'])
sample_threshold = self.MAX_CRC32 * self.config[
'collection_sample_rate']
for db_name in conn.database_names():
if not db_name_filter.search(db_name):
continue
db_stats = conn[db_name].command('dbStats')
db_prefix = base_prefix + ['databases', db_name]
self._publish_dict_with_prefix(db_stats, db_prefix)
for collection_name in conn[db_name].collection_names():
if ignored_collections.search(collection_name):
continue
if (self.config['collection_sample_rate'] < 1 and (
zlib.crc32(collection_name) & 0xffffffff
) > sample_threshold):
continue
collection_stats = conn[db_name].command('collstats',
collection_name)
if str_to_bool(self.config['translate_collections']):
collection_name = collection_name.replace('.', '_')
collection_prefix = db_prefix + [collection_name]
self._publish_dict_with_prefix(collection_stats,
collection_prefix)
def _publish_transformed(self, data, base_prefix):
""" Publish values of type: counter or percent """
self._publish_dict_with_prefix(data.get('opcounters', {}),
base_prefix + ['opcounters_per_sec'],
self.publish_counter)
self._publish_dict_with_prefix(data.get('opcountersRepl', {}),
base_prefix + ['opcountersRepl_per_sec'],
self.publish_counter)
self._publish_metrics(base_prefix + ['backgroundFlushing_per_sec'],
'flushes',
data.get('backgroundFlushing', {}),
self.publish_counter)
self._publish_dict_with_prefix(data.get('network', {}),
base_prefix + ['network_per_sec'],
self.publish_counter)
self._publish_metrics(base_prefix + ['extra_info_per_sec'],
'page_faults',
data.get('extra_info', {}),
self.publish_counter)
def get_dotted_value(data, key_name):
key_name = key_name.split('.')
for i in key_name:
data = data.get(i, {})
if not data:
return 0
return data
def compute_interval(data, total_name):
current_total = get_dotted_value(data, total_name)
total_key = '.'.join(base_prefix + [total_name])
last_total = self.__totals.get(total_key, current_total)
interval = current_total - last_total
self.__totals[total_key] = current_total
return interval
def publish_percent(value_name, total_name, data):
value = float(get_dotted_value(data, value_name) * 100)
interval = compute_interval(data, total_name)
key = '.'.join(base_prefix + ['percent', value_name])
self.publish_counter(key, value, time_delta=bool(interval),
interval=interval)
publish_percent('globalLock.lockTime', 'globalLock.totalTime', data)
publish_percent('indexCounters.btree.misses',
'indexCounters.btree.accesses', data)
locks = data.get('locks')
if locks:
if '.' in locks:
locks['_global_'] = locks['.']
del (locks['.'])
key_prefix = '.'.join(base_prefix + ['percent'])
db_name_filter = re.compile(self.config['databases'])
interval = compute_interval(data, 'uptimeMillis')
for db_name in locks:
if not db_name_filter.search(db_name):
continue
r = get_dotted_value(
locks,
'%s.timeLockedMicros.r' % db_name)
R = get_dotted_value(
locks,
'.%s.timeLockedMicros.R' % db_name)
value = float(r + R) / 10
if value:
self.publish_counter(
key_prefix + '.locks.%s.read' % db_name,
value, time_delta=bool(interval),
interval=interval)
w = get_dotted_value(
locks,
'%s.timeLockedMicros.w' % db_name)
W = get_dotted_value(
locks,
'%s.timeLockedMicros.W' % db_name)
value = float(w + W) / 10
if value:
self.publish_counter(
key_prefix + '.locks.%s.write' % db_name,
value, time_delta=bool(interval), interval=interval)
def _publish_dict_with_prefix(self, dict, prefix, publishfn=None):
for key in dict:
self._publish_metrics(prefix, key, dict, publishfn)
def _publish_metrics(self, prev_keys, key, data, publishfn=None):
"""Recursively publish keys"""
        if key not in data:
return
value = data[key]
keys = prev_keys + [key]
if not publishfn:
publishfn = self.publish
if isinstance(value, dict):
for new_key in value:
self._publish_metrics(keys, new_key, value)
elif isinstance(value, int) or isinstance(value, float):
publishfn('.'.join(keys), value)
elif isinstance(value, long):
publishfn('.'.join(keys), float(value))
def _extract_simple_data(self, data):
return {
'connections': data.get('connections'),
'globalLock': data.get('globalLock'),
'indexCounters': data.get('indexCounters')
}
| metamx/Diamond | src/collectors/mongodb/mongodb.py | Python | mit | 12,462 |
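# ----------------------------------------------------------------------------
# Editor's note: a small sketch (not part of the original collector) of the
# "alias@host:port" convention documented in get_default_config_help() and
# parsed with re.search('((.+)\@)?(.+)?', host) in collect() above.
import re

def _split_alias(host):
    matches = re.search('((.+)\@)?(.+)?', host)
    return matches.group(2), matches.group(3)   # (alias or None, host[:port])

assert _split_alias('db1.example.com:27017') == (None, 'db1.example.com:27017')
assert _split_alias('primary@db1.example.com:27017') == ('primary', 'db1.example.com:27017')
# ----------------------------------------------------------------------------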
# -*- coding: utf-8 -*-
# @Author: yancz1989
# @Date: 2017-01-17 23:43:18
# @Last Modified by: yancz1989
# @Last Modified time: 2017-02-22 20:33:29
import utilities as util
from utilities import parse_image_file, filterBoxes, voxel_2_world, mkdir
import numpy as np
import os
import json
import sys
from PIL import Image, ImageDraw
import SimpleITK as sitk
from env import *
def generate_scan_image(subset):
list_dirs = os.walk(TRUNK_DIR + subset)
jsobjs = []
output_dir = SAMPLE_DIR + subset
mkdir(output_dir)
for root, dirs, files in list_dirs:
for f in files:
if f.lower().endswith('mhd'):
key = os.path.splitext(f)[0]
numpyImage, numpyOrigin, numpySpacing = (
util.load_itk_image(
os.path.join(root, f)))
for z in range(numpyImage.shape[0]):
patch = numpyImage[z, 0:512, 0:512]
patch = util.normalizePlanes(patch)
im = Image.fromarray(patch * 255).convert('L')
output_filename = (
subset + "-" + key + "-" + str(z) + "-scan.bmp")
print(subset + '/' + output_filename)
im.save(os.path.join(
output_dir, output_filename))
jsobjs.append({
"image_path": subset + '/' + output_filename,
"rects":[]
}
)
with open(META_DIR + subset + '-scan.json', 'w') as f:
json.dump(jsobjs, f)
def get_image_map(data_root, input_file, threshold):
result_map = {}
with open(input_file) as f:
result_list = json.load(f)
for it in result_list:
key, subset, z = parse_image_file(it['file'])
src_file = os.path.join(
data_root, subset, key + ".mhd")
boxes = filterBoxes(it['box'], threshold)
if not result_map.get(src_file):
result_map[src_file] = []
result_map[src_file].append((key, z, boxes))
return result_map
def generate_result(result_map, output_file):
    with open(output_file, 'w') as fout:
fout.write("seriesuid,coordX,coordY,coordZ,probability\n")
for fkey, val in result_map.items():
itkimage = sitk.ReadImage(fkey)
for it in val:
                key, z, boxes = it
for box in boxes:
world_box = voxel_2_world(
[z, box[1], box[0]], itkimage)
csv_line = key + "," + str(world_box[2]) + "," + str(world_box[1]) + "," + str(world_box[0]) + "," + str(box[4])
fout.write(csv_line + "\n")
if __name__ == '__main__':
if sys.argv[1] == 'gen':
generate_scan_image(sys.argv[2])
else:
result_map = get_image_map(TRUNK_DIR, sys.argv[2], 0.01)
generate_result(result_map, OUTPUT_FILE) | yancz1989/cancer | scan.py | Python | mit | 2,610 |
from django.conf.urls import url
from audiotracks import feeds
from audiotracks import views
urlpatterns = [
url(r"^$", views.index, name="audiotracks"),
url(r"^(?P<page_number>\d+)/?$", views.index, name="audiotracks"),
url(r"^track/(?P<track_slug>.*)$", views.track_detail,
name="track_detail"),
url(r"^upload", views.upload_track, name="upload_track"),
url(r"^edit/(?P<track_id>.+)", views.edit_track, name="edit_track"),
url(r"^confirm_delete/(?P<track_id>\d+)$",
views.confirm_delete_track, name="confirm_delete_track"),
url(r"^delete$", views.delete_track, name="delete_track"),
url(r"^tracks$", views.user_index, name="user_index"),
url(r"^tracks/(?P<page_number>\d)/?$", views.user_index,
name="user_index"),
url(r"^feed/?$", feeds.choose_feed, name="tracks_feed"),
url(r"^player.js$", views.player_script, name="player_script"),
url(r"^m3u/?$", views.m3u, name="m3u"),
]
| amarandon/django-audiotracks | audiotracks/urls.py | Python | mit | 955 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import bottle
import datetime
import time
@bottle.get('/')
def index():
return bottle.static_file('index.html', root='.')
@bottle.get('/stream')
def stream():
bottle.response.content_type = 'text/event-stream'
bottle.response.cache_control = 'no-cache'
while True:
yield 'data: %s\n\n' % str(datetime.datetime.now())
time.sleep(5)
if __name__ == '__main__':
bottle.run(host='0.0.0.0', port=8080, debug=True)
| hustbeta/python-web-recipes | server-sent-events/bottle-sse.py | Python | mit | 517 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
'''
destination.factory
'''
from destination.zeus import ZeusDestination
from destination.aws import AwsDestination
from exceptions import AutocertError
from config import CFG
from app import app
class DestinationFactoryError(AutocertError):
def __init__(self, destination):
msg = f'destination factory error with {destination}'
super(DestinationFactoryError, self).__init__(msg)
def create_destination(destination, ar, cfg, timeout, verbosity):
d = None
if destination == 'aws':
d = AwsDestination(ar, cfg, verbosity)
elif destination == 'zeus':
d = ZeusDestination(ar, cfg, verbosity)
else:
raise DestinationFactoryError(destination)
dests = list(CFG.destinations.zeus.keys())
if d.has_connectivity(timeout, dests):
return d
| mozilla-it/autocert | autocert/api/destination/factory.py | Python | mit | 854 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .entity_health import EntityHealth
class PartitionHealth(EntityHealth):
"""Information about the health of a Service Fabric partition.
:param aggregated_health_state: Possible values include: 'Invalid', 'Ok',
'Warning', 'Error', 'Unknown'
:type aggregated_health_state: str or :class:`enum
<azure.servicefabric.models.enum>`
:param health_events: The list of health events reported on the entity.
:type health_events: list of :class:`HealthEvent
<azure.servicefabric.models.HealthEvent>`
:param unhealthy_evaluations:
:type unhealthy_evaluations: list of :class:`HealthEvaluationWrapper
<azure.servicefabric.models.HealthEvaluationWrapper>`
:param health_statistics:
:type health_statistics: :class:`HealthStatistics
<azure.servicefabric.models.HealthStatistics>`
:param partition_id:
:type partition_id: str
:param replica_health_states: The list of replica health states associated
with the partition.
:type replica_health_states: list of :class:`ReplicaHealthState
<azure.servicefabric.models.ReplicaHealthState>`
"""
_attribute_map = {
'aggregated_health_state': {'key': 'AggregatedHealthState', 'type': 'str'},
'health_events': {'key': 'HealthEvents', 'type': '[HealthEvent]'},
'unhealthy_evaluations': {'key': 'UnhealthyEvaluations', 'type': '[HealthEvaluationWrapper]'},
'health_statistics': {'key': 'HealthStatistics', 'type': 'HealthStatistics'},
'partition_id': {'key': 'PartitionId', 'type': 'str'},
'replica_health_states': {'key': 'ReplicaHealthStates', 'type': '[ReplicaHealthState]'},
}
def __init__(self, aggregated_health_state=None, health_events=None, unhealthy_evaluations=None, health_statistics=None, partition_id=None, replica_health_states=None):
super(PartitionHealth, self).__init__(aggregated_health_state=aggregated_health_state, health_events=health_events, unhealthy_evaluations=unhealthy_evaluations, health_statistics=health_statistics)
self.partition_id = partition_id
self.replica_health_states = replica_health_states
| AutorestCI/azure-sdk-for-python | azure-servicefabric/azure/servicefabric/models/partition_health.py | Python | mit | 2,612 |
import os,sys
from trans_rot_coords import *
from copy import deepcopy   # used by the coordinates class below
import numpy as np
from read_energy_force_new import *
from grids_structures_general import DS,Grid_Quarts
from orient_struct_2 import OrientDS as OrientDS_2
from orient_struct_3 import OrientDS as OrientDS_3
AU2KCAL = 23.0605*27.2116
R2D = 180.0/3.14159265358979
## np.pi/4.0:
pi4 = 0.78539816339744817
tMass = [15.999, 1.008, 1.008]
def get_com(coords):
x = [0,0,0]
totalM = 0
for i in range(len(coords)):
x = [ x[k]+ coords[i][k]*tMass[i] for k in range(3)]
totalM += tMass[i]
x = [x[k]/totalM for k in range(3)]
return x
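# ----------------------------------------------------------------------------
# Editor's note: a small sketch (not part of the original module) of what
# get_com() computes: a center of mass weighted by tMass = [O, H, H] masses.
# The coordinates below are an illustrative, roughly water-like geometry in
# angstroms, not data from the original project.
def _demo_get_com():
    water = [[0.000, 0.000, 0.000],    # O
             [0.957, 0.000, 0.000],    # H
             [-0.240, 0.927, 0.000]]   # H
    return get_com(water)              # -> COM close to the oxygen position
# ----------------------------------------------------------------------------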
def norm_prob(config,ndx,prob='wtr'):
if prob=='wtr':
v1 = np.array(config[ndx[1]]) - np.array(config[ndx[0]])
v2 = np.array(config[ndx[2]]) - np.array(config[ndx[0]])
vec = get_normal_unit(v1,v2)
return vec
class new_atom():
def __init__(self, line, ftype='gjf'):
if ftype=='gjf': self.addgjf(line)
elif ftype=='gms': self.addinp(line)
elif ftype=='pdb': self.addpdb(line)
def addgjf(self, line):
line = line.split()
self.a_nam = line[0]
self.x = [float(line[1]), float(line[2]), float(line[3])]
def addpdb(self, line):
self.line = line
self.i_atm = int(line[6:11])
self.a_nam = line[11:16].strip()
self.a_res = line[16:20].strip()
self.a_chn = line[20:22].strip()
self.i_res = int(line[22:26])
self.x = []
self.x.append(float(line[30:38]))
self.x.append(float(line[38:46]))
self.x.append(float(line[46:54]))
def addinp(self, line):
line = line.split()
self.a_nam = line[0]
self.x = [float(line[2]), float(line[3]), float(line[4])]
class coordinates():
def __init__(self, n1, n2, FragType, name=''):
## n1,n2 is the number of atoms in mole1 and mole2:
self.n1 = n1
self.n2 = n2
## records of operations of translation and rotation:
self.OperateNdx = []
self.Operation = []
## fragment type:
self.FT = FragType
## symmetry faces:
self.symface = DS[self.FT].symface
self.IsOriented = False
self.facendx = {'yx':2, 'xy':2,
'yz':0, 'zy':0,
'zx':1, 'xz':1,
'zarg':5,
'zben':6}
self.symm = [1,1,1]
self.center = 0
self.natoms = 0
self.original_atoms = []
self.name = name
def addatom(self, line, ftype='pdb'):
temp = new_atom(line, ftype)
self.original_atoms.append(temp)
self.natoms += 1
def addpdbatom(self, line):
self.original_atoms.append(new_atom(line, 'pdb'))
self.natoms += 1
def set_atom(self, i, atom):
if i>=len(self.original_atoms):
self.original_atoms.append( deepcopy(atom) )
self.natoms += 1
else: self.original_atoms[i] = deepcopy(atom)
def MirrorAll(self):
"""
According to the coords of the 1st atom in mole2.
"""
self.orignal_com = deepcopy(self.center2)
for face in self.symface:
fndx = self.facendx[face]
if self.center2[fndx] < 0.0:
self.symm[ fndx ] = -1
for i in range(self.n1, self.natoms):
self.atoms[i].x[fndx] *= -1
self._spherical_x()
def MirrorBackProperty(self):
for face in self.symface:
fndx = self.facendx[face]
if self.orignal_com[fndx] < 0.0:
self.symm[ fndx ] = -1
self.force[fndx] *= -1
for i in range(3):
if not i == fndx:
self.torque[i] *= -1
def ReorientToOrigin(self, cut=0.0000001):
self.atoms = deepcopy(self.original_atoms)
coord1 = get_com([self.atoms[0].x, self.atoms[1].x, self.atoms[2].x ])
coord2 = get_com([self.atoms[3].x, self.atoms[4].x, self.atoms[5].x ])
self.origin_center_coord = get_unit([coord2[i] - coord1[i] for i in range(3)])
dvec = DS[self.FT].calt_dvec( self.atoms[0].x, self.atoms[1].x, self.atoms[2].x )
for i in range(self.natoms):
self.atoms[i].x = translate(self.atoms[i].x, dvec)
self.OperateNdx.append(0)
self.Operation.append(np.array(dvec))
vec, ax0 = DS[self.FT].calt_vec1( self.atoms[0].x, self.atoms[1].x, self.atoms[2].x )
ang = angle(vec, ax0)
ax = get_normal(vec, ax0)
if ax[0]==0.0 and ax[1]==0.0 and ax[2]==0.0: pass
else:
for i in range(self.natoms):
self.atoms[i].x = rotate(self.atoms[i].x, ax, ang)
self.OperateNdx.append(1)
self.Operation.append([ax, ang])
vec, ax0 = DS[self.FT].calt_vec2( self.atoms[0].x, self.atoms[1].x, self.atoms[2].x )
ang = angle(vec, ax0)
if abs(ang)<cut: pass
else:
if abs(ang-np.pi)<cut: ax = [1,0,0]
else: ax = get_normal(vec, ax0)
for i in range(self.natoms):
self.atoms[i].x = rotate(self.atoms[i].x, ax, ang)
self.OperateNdx.append(2)
self.Operation.append([ax, ang])
self.IsOriented = True
self._spherical_x()
def ReorientToOldVec(self):
ax, ang = self.Operation[self.OperateNdx.index(2)]
self.force = rotate(self.force, ax, -1*ang)
self.torque = rotate(self.torque, ax, -1*ang)
ax, ang = self.Operation[self.OperateNdx.index(1)]
self.force = rotate(self.force, ax, -1*ang)
self.torque = rotate(self.torque, ax, -1*ang)
def _spherical_x(self):
"""
Calculate the coords in spherical coordination system for molecule 2.
"""
totalM = 0
x = [0,0,0]
for i in range(self.n1,self.natoms):
x = [ x[k]+self.atoms[i].x[k]*tMass[i-self.n1] for k in range(3)]
totalM += tMass[i-self.n1]
x = [x[k]/totalM for k in range(3)]
r = np.sqrt(x[0]*x[0]+x[1]*x[1]+x[2]*x[2])
#print "probe vector:", 4.0*x[0]/r, 4.0*x[1]/r, 4.0*x[2]/r
## phi of principal coords:
ang1 = np.pi*0.5 - np.arccos(x[2]/r)
## theta of principal coords (from -pi to pi):
if abs(x[0])<0.000001:
if x[1]>0: ang2 = np.pi*0.5
else: ang2 = np.pi*1.5
else:
ang2 = np.arctan(x[1]/x[0])
if x[0]<0: ang2 += np.pi
elif x[1]<0: ang2 += np.pi*2
self.r = r
self.ang1 = ang1
self.ang2 = ang2
self.center2 = x
def _spherical_orient(self):
"""
calculate the spherical coordinates for the orientational vector
"""
x = self.orientVec
r = length(x)
# phi, [-pi/2, pi/2]
ang1 = np.pi*0.5 - np.arccos(x[2]/r)
# theta, [0, 2*pi]
if abs(x[0])<0.000001:
if x[1]>0: ang2 = np.pi*0.5
else: ang2 = np.pi*1.5
else:
ang2 = np.arctan(x[1]/x[0])
if x[0]<0: ang2 += np.pi
elif x[1] <0: ang2 += np.pi*2
self.orient_ang1 = ang1
self.orient_ang2 = ang2
def indexing_orient_auto3(self,ri):
"""
find the index automatically for each subsection in which the orientational vector resides
"""
ang1 = self.orient_ang1
ang2 = self.orient_ang2
#print "<<<<<",ang1*R2D,ang2*R2D
OrientDS = self.OrientDS[ri]
#print "attention!!!"
#print OrientDS['wtr'].nGrid
if ang1<OrientDS['wtr'].PHI_angles[0] or ang1>OrientDS['wtr'].PHI_angles[-1]: ih = -1
for i in range(1,OrientDS['wtr'].nPhi):
if ang1 <= OrientDS['wtr'].PHI_angles[i]:
ih = i-1
break
ang1_ndx1 = ih
ang1_ndx2 = ih + 1
if ang1_ndx1 == OrientDS['wtr'].nPhi-2: # near the up vertex
ang1_ndx3 = ih -1
elif ang1_ndx1 == 0: # near the down vertex
ang1_ndx3 = ih + 2
else:
tmp1 = OrientDS['wtr'].PHI_angles[ih+2] - ang1
tmp2 = ang1 - OrientDS['wtr'].PHI_angles[ih-1]
if abs(tmp1) < abs(tmp2):
ang1_ndx3 = ih + 2
else:
ang1_ndx3 = ih - 1
phiList = [ang1_ndx1,ang1_ndx2,ang1_ndx3]
dgrids_sub_ndx = {}
dtheta_ndx = {}
# determine if use linear interpolation or use quadratic interpolation
if len(set(phiList)) == 2:
iflinear = 1
elif len(set(phiList)) == 3:
iflinear = 0
for kk in set(phiList):
dgrids_sub_ndx[kk] = []
dtheta_ndx[kk] = []
ip = -1
for i in range(1, OrientDS['wtr'].NTheta[kk]):
if ang2 <= OrientDS['wtr'].THETA_angles[kk][i]:
ip = i-1
break
if ip == -1: ip = OrientDS['wtr'].NTheta[kk]-1
#print kk, ip
ig = 0
for i in range(kk): ig += OrientDS['wtr'].NTheta[i]
ig += ip
dgrids_sub_ndx[kk].append(ig)
dtheta_ndx[kk].append(ip)
if ip == OrientDS['wtr'].NTheta[kk]-1:
if OrientDS['wtr'].NTheta[kk] == 1: #vertex
dgrids_sub_ndx[kk].append(ig)
dtheta_ndx[kk].append(0)
if iflinear == 0:
dgrids_sub_ndx[kk].append(ig)
dtheta_ndx[kk].append(0)
else:
dgrids_sub_ndx[kk].append(ig-OrientDS['wtr'].NTheta[kk]+1)
dtheta_ndx[kk].append(0+OrientDS['wtr'].NTheta[kk])
if iflinear == 0:
tmp1 = OrientDS['wtr'].THETA_angles[kk][1] - ang2 + 2*np.pi
tmp2 = ang2 - OrientDS['wtr'].THETA_angles[kk][ip-1]
if tmp1 < tmp2:
dgrids_sub_ndx[kk].append(ig-OrientDS['wtr'].NTheta[kk]+1+1)
dtheta_ndx[kk].append(0+OrientDS['wtr'].NTheta[kk]+1)
else:
dgrids_sub_ndx[kk].append(ig-1)
dtheta_ndx[kk].append(ip-1)
else:
dgrids_sub_ndx[kk].append(ig+1)
dtheta_ndx[kk].append(ip+1)
if iflinear == 0:
if ip+2 == OrientDS['wtr'].NTheta[kk]:
tmp1 = 2*np.pi - ang2
else:
tmp1 = OrientDS['wtr'].THETA_angles[kk][ip+2] - ang2
if ip == 0:
tmp2 = ang2 - OrientDS['wtr'].THETA_angles[kk][OrientDS['wtr'].NTheta[kk]-1] + 2*np.pi
else:
tmp2 = ang2 - OrientDS['wtr'].THETA_angles[kk][ip-1]
if tmp1 < tmp2:
if ip+2 == OrientDS['wtr'].NTheta[kk]:
dgrids_sub_ndx[kk].append(ig+1-OrientDS['wtr'].NTheta[kk]+1)
dtheta_ndx[kk].append(0+OrientDS['wtr'].NTheta[kk])
else:
dgrids_sub_ndx[kk].append(ig+2)
dtheta_ndx[kk].append(ip+2)
else:
if ip == 0:
dgrids_sub_ndx[kk].append(ig+OrientDS['wtr'].NTheta[kk]-1)
dtheta_ndx[kk].append(-1)
else:
dgrids_sub_ndx[kk].append(ig-1)
dtheta_ndx[kk].append(ip-1)
self.dgrids_sub_ndx[ri] = dgrids_sub_ndx
self.dtheta_ndx[ri] = dtheta_ndx
def indexing_auto3(self):
        if not self.IsOriented: raise Exception, "Error: indexing before reorientation."
r = self.r
ang1 = self.ang1
ang2 = self.ang2
#print "probe angles", ang1*R2D, ang2*R2D
## ndx of r:
ir = 10001
if r<DS[self.FT].R_NDX[0]: ir = -1
else:
for i in range(1,DS[self.FT].nDist):
if r<=DS[self.FT].R_NDX[i]:
ir = i-1
break
#print 'ir',ir
if ir>10000:
self.r_ndxs = [ir]
self.vbis = [0,0,0]
self.vnrm = [0,0,0]
self.dgrid_ndx_layer = {}
self.dtheta_ndx_layer = {}
return 10000,0,0
elif ir<0:
self.r_ndxs = [ir]
self.vbis = [0,0,0]
self.vnrm = [0,0,0]
self.dgrid_ndx_layer = {}
self.dtheta_ndx_layer = {}
return -1, 0,0
#print "r=%.1f"%r, ir
r_ndxs = [ir,ir+1]
# find 3 layers which are close to the query one
if ir == 0:
r_ndxs.append(ir+2)
elif ir == DS[self.FT].nDist -2:
r_ndxs.append(ir-1)
else:
tmp1 = r - DS[self.FT].R_NDX[ir-1]
tmp2 = DS[self.FT].R_NDX[ir+2] - r
if abs(tmp1) < abs(tmp2):
r_ndxs.append(ir-1)
else:
r_ndxs.append(ir+2)
## ndx of ang1 (Phi):
if ang1<DS[self.FT].PHI_angles[0]: ih = -1
for i in range(1, DS[self.FT].nPhi):
if ang1<=DS[self.FT].PHI_angles[i]:
ih = i-1
break
ang1_ndx1 = ih
ang1_ndx2 = ih + 1
if ang1_ndx1 == DS[self.FT].nPhi -2:
ang1_ndx3 = ih - 1
elif ang1_ndx1 == 0:
ang1_ndx3 = ih + 2
else:
tmp1 = DS[self.FT].PHI_angles[ih+2] - ang1
tmp2 = ang1 - DS[self.FT].PHI_angles[ih-1]
if tmp1 < tmp2:
ang1_ndx3 = ih+2
else:
ang1_ndx3 = ih-1
phiList = [ang1_ndx1,ang1_ndx2,ang1_ndx3]
dgrid_ndx_layer = {}
dtheta_ndx_layer = {}
# determine if use linear interpolation or use quadratic interpolation
if len(set(phiList)) == 2:
iflinear = 1
elif len(set(phiList)) == 3:
iflinear = 0
for kk in set(phiList):
dgrid_ndx_layer[kk] = []
dtheta_ndx_layer[kk] = []
## ndx_of_ang2 (Theta):
ip = -1
for i in range(1,DS[self.FT].NTheta[kk]):
if ang2<=DS[self.FT].THETA_angles[kk][i]:
ip = i-1
break
if ip==-1: ip = DS[self.FT].NTheta[kk]-1
ig = 0
for i in range(kk): ig += DS[self.FT].NTheta[i]
ig += ip
dgrid_ndx_layer[kk].append(ig)
dtheta_ndx_layer[kk].append(ip)
#print "check", kk, ip, ig
if ip == DS[self.FT].NTheta[kk]-1:
if DS[self.FT].NTheta[kk] == 1: #vertex
dgrid_ndx_layer[kk].append(ig)
dtheta_ndx_layer[kk].append(0)
if iflinear == 0:
dgrid_ndx_layer[kk].append(ig)
dtheta_ndx_layer[kk].append(0)
elif self.FT in ['cys','alc','bck','hid','trp','tyr','gln']:
dgrid_ndx_layer[kk].append(ig-DS[self.FT].NTheta[kk]+1)
dtheta_ndx_layer[kk].append(0+DS[self.FT].NTheta[kk])
if iflinear == 0:
tmp1 = DS[self.FT].THETA_angles[kk][1] - ang2 + 2*np.pi
tmp2 = ang2 - DS[self.FT].THETA_angles[kk][ip-1]
if tmp1 < tmp2:
dgrid_ndx_layer[kk].append(ig-DS[self.FT].NTheta[kk]+1+1)
dtheta_ndx_layer[kk].append(0+DS[self.FT].NTheta[kk]+1)
else:
dgrid_ndx_layer[kk].append(ig-1)
dtheta_ndx_layer[kk].append(ip-1)
else:
dgrid_ndx_layer[kk].append(ig-1)
dtheta_ndx_layer[kk].append(ip-1)
if iflinear == 0:
dgrid_ndx_layer[kk].append(ig-2)
dtheta_ndx_layer[kk].append(ip-2)
else:
dgrid_ndx_layer[kk].append(ig+1)
dtheta_ndx_layer[kk].append(ip+1)
if iflinear == 0:
if self.FT in ['cys','alc','bck','hid','trp','tyr','gln']:
if ip+2 == DS[self.FT].NTheta[kk]:
tmp1 = 2*np.pi -ang2
else:
tmp1 = DS[self.FT].THETA_angles[kk][ip+2] - ang2
if ip == 0:
tmp2 = ang2 - DS[self.FT].THETA_angles[kk][DS[self.FT].NTheta[kk]-1] + 2*np.pi
else:
tmp2 = ang2 - DS[self.FT].THETA_angles[kk][ip-1]
if tmp1 < tmp2:
if ip+2 == DS[self.FT].NTheta[kk]:
dgrid_ndx_layer[kk].append(ig+1-DS[self.FT].NTheta[kk]+1)
dtheta_ndx_layer[kk].append(0+DS[self.FT].NTheta[kk])
else:
dgrid_ndx_layer[kk].append(ig+2)
dtheta_ndx_layer[kk].append(ip+2)
else:
if ip == 0:
dgrid_ndx_layer[kk].append(ig+DS[self.FT].NTheta[kk]-1)
dtheta_ndx_layer[kk].append(-1)
else:
dgrid_ndx_layer[kk].append(ig-1)
dtheta_ndx_layer[kk].append(ip-1)
else:
if ip == DS[self.FT].NTheta[kk]-2:
dgrid_ndx_layer[kk].append(ig-1)
dtheta_ndx_layer[kk].append(ip-1)
elif ip == 0:
dgrid_ndx_layer[kk].append(ig+2)
dtheta_ndx_layer[kk].append(ip+2)
else:
tmp1 = DS[self.FT].THETA_angles[kk][ip+2] - ang2
tmp2 = ang2 - DS[self.FT].THETA_angles[kk][ip-1]
if tmp1 < tmp2:
dgrid_ndx_layer[kk].append(ig+2)
dtheta_ndx_layer[kk].append(ip+2)
else:
dgrid_ndx_layer[kk].append(ig-1)
dtheta_ndx_layer[kk].append(ip-1)
self.dgrid_ndx_layer = dgrid_ndx_layer
self.dtheta_ndx_layer = dtheta_ndx_layer
## calculate the vectors of bisector and normal of mole2:
a20 = self.atoms[self.n1].x
a21 = self.atoms[self.n1+1].x
a22 = self.atoms[self.n1+2].x
a20 = np.array(a20)
a21 = np.array(a21)
a22 = np.array(a22)
v0 = a21 - a20
v1 = a22 - a20
## These two vectors must be unit vector:
bisect = get_bisect_unit(v0,v1)
normal = get_normal_unit(v0,v1)
self.r_ndxs = r_ndxs
self.vbis = bisect
self.vnrm = normal
def calt_conf_energy(self, allconfigs, IsForce=False, ehigh=100.0):
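        ## Interpolate the tabulated properties (E, Fx..Fz, Tx..Tz) at the query
        ## geometry: linear/quadratic interpolation over the normal-vector angle,
        ## then over the orientation grid (theta, phi), and finally over the
        ## three radial layers selected by indexing_auto3.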
ri_ndxs = self.r_ndxs
self.exit_before = False
for ri in ri_ndxs:
if ri>100:
self.properties = {'E':0.0}
return
elif ri<0:
                fvec = [self.origin_center_coord[i] * ehigh for i in range(3)]
                self.properties = {'E':ehigh, "Fx": fvec[0], "Fy": fvec[1], "Fz": fvec[2],
                                   "Tx": 0, "Ty": 0, "Tz": 0}
self.exit_before = True
return
bisv = self.vbis
nrmv = self.vnrm
dtheta_ndx_layer = self.dtheta_ndx_layer
grid_ndx_layer = []
for ih in self.dgrid_ndx_layer:
grid_ndx_layer += self.dgrid_ndx_layer[ih]
self.orientVec = bisv
#print "orient vector:%.5f\t%.5f\t%.5f\n"%(bisv[0]*4.0,bisv[1]*4.0,bisv[2]*4.0)
self._spherical_orient()
ang1 = self.orient_ang1
ang2 = self.orient_ang2
ang2 = (ang2*R2D+180)%360 #the original orientational vector of water is located at -x axis
ang2 = ang2/R2D
self.orient_ang2 = ang2
self.OrientDS = {}
self.orient_tr = {}
self.orient_pr = {}
self.dgrids_sub_ndx = {}
self.dtheta_ndx = {}
grids_sub_ndx = {}
dtheta_ndx = {}
wghx1 = {}
wghx2 = {}
wghy = {}
label = {}
for i in ri_ndxs:
dist = DS[self.FT].R_NDX[i] # choose corresponding orientational sampling based on distance
#print "which layer:", dist
if dist > 5.5000001:
cart_ndx, grids_sub_ndx_tmp, wghx_tmp, wghy_tmp = weights_in_subsection( bisv )
grids_sub_ndx[i] = grids_sub_ndx_tmp
wghx1[i] = wghx_tmp/pi4
wghx2[i] = wghx_tmp/pi4
wghy[i] = wghy_tmp/pi4
label[i] = 0
else:
if dist < 2.5000001:
OrientDS = OrientDS_2
elif dist > 2.5000001 and dist < 3.5000001:
OrientDS = OrientDS_3
else:
OrientDS = OrientDS_2
self.OrientDS[i] = OrientDS
self.indexing_orient_auto3(i)
dtheta_ndx[i] = self.dtheta_ndx[i]
if len(dtheta_ndx[i]) == 2: # not in this script
pass
#orient_pr =[]
#for kk in dtheta_ndx[i]:
# ip1=dtheta_ndx[i][kk][0]
# ip2=dtheta_ndx[i][kk][1]
# if ip1 == 0 and ip2 == 0: # vertex
# wtmp = 0
# elif ip1 == OrientDS['wtr'].NTheta[kk]-1:
# wtmp = (ang2-OrientDS['wtr'].THETA_angles[kk][ip1])/(2*np.pi+OrientDS['wtr'].THETA_angles[kk][0]-OrientDS['wtr'].THETA_angles[kk][ip1])
# else:
# wtmp = (ang2-OrientDS['wtr'].THETA_angles[kk][ip1])/(OrientDS['wtr'].THETA_angles[kk][ip2]-OrientDS['wtr'].THETA_angles[kk][ip1])
# orient_pr.append(wtmp)
#wghx1[i] = orient_pr[0]
#wghx2[i] = orient_pr[1]
#ihs = dtheta_ndx[i].keys()
#wghy[i] = (ang1 - OrientDS['wtr'].PHI_angles[ihs[0]])/(OrientDS['wtr'].PHI_angles[ihs[1]]-OrientDS['wtr'].PHI_angles[ihs[0]])
#label[i] = 1
##print "++++++",wghx1[i],wghx2[i],wghy[i]
#grids_sub_ndx[i] = self.dgrids_sub_ndx[i][ihs[0]] + self.dgrids_sub_ndx[i][ihs[1]]
if len(dtheta_ndx[i]) == 3:
ihs = dtheta_ndx[i].keys()
grids_sub_ndx[i] = self.dgrids_sub_ndx[i][ihs[0]] + self.dgrids_sub_ndx[i][ihs[1]] + self.dgrids_sub_ndx[i][ihs[2]]
label[i] = 2
#print "grids_sub_ndx:",grids_sub_ndx[i]
properties = {'E':[], 'Fx':[], 'Fy':[], 'Fz':[], 'Tx':[], 'Ty':[], 'Tz':[]}
propnames = ['E','Fx','Fy','Fz','Tx','Ty','Tz']
tempprop = deepcopy(properties)
for i in ri_ndxs:
for j in grid_ndx_layer:
prop = deepcopy(tempprop)
for ni in grids_sub_ndx[i]:
inpfiles = []
for k in range(DS[self.FT].nNorm[i]):
inpfile = 'r%3.2f/tempconf_d%3.2f_g%03d_c%02d.inp'%(DS[self.FT].R_NDX[i],DS[self.FT].R_NDX[i],j,ni+k*DS[self.FT].nConf[i])
inpfiles.append(inpfile)
xvecs = []
for ff in range(len(inpfiles)):
xconf = allconfigs.allcfg[i][j][ni][ff].xmole2
xvecs.append( norm_prob(xconf,[0,1,2],'wtr') )
nvec = len(xvecs)
if nvec == 2: # linear interpolation for normal vectors
w0, w1, ndx0, ndx1 = weights_for_normal_general( nrmv, xvecs)
#print 'test',i, j, ni, ndx0, ndx1
for pp in propnames:
p0 = allconfigs.get_prop(i,j,ni,ndx0,pp,w0, ehigh=ehigh)
p1 = allconfigs.get_prop(i,j,ni,ndx1,pp,w1, ehigh=ehigh)
p = p1*abs(w1) + p0*abs(w0)
prop[pp].append(p)
#print pp, inpfiles[ndx0],p0,w0,inpfiles[ndx1],p1,w1,p
elif nvec > 2: # quadratic interpolation for normal vectors
angNorm, ndx1, ndx2, ndx3 = get_neighors_for_normal(nrmv, xvecs)
angNorm_1 = ndx1*np.pi/nvec
angNorm_2 = ndx2*np.pi/nvec
angNorm_3 = ndx3*np.pi/nvec
#print "lagrange", i, j, ni, ndx1, ndx2, ndx3, angNorm*R2D, angNorm_1*R2D, angNorm_2*R2D, angNorm_3*R2D
for pp in propnames:
if ndx1 == nvec: ndx1 = 0
if ndx2 == nvec: ndx2 = 0
if ndx3 == nvec: ndx3 = 0
p1 = allconfigs.get_prop(i,j,ni,ndx1,pp,0, ehigh=ehigh)
p2 = allconfigs.get_prop(i,j,ni,ndx2,pp,0, ehigh=ehigh)
p3 = allconfigs.get_prop(i,j,ni,ndx3,pp,0, ehigh=ehigh)
points = [(angNorm_1,p1),(angNorm_2,p2),(angNorm_3,p3)]
p = lagrange_interp(points,angNorm)
prop[pp].append(p)
#print pp, inpfiles[ndx1],p1,inpfiles[ndx2],p2,inpfiles[ndx3],p3,p
for pp in propnames:
# on the level of orientation, theta and phi
if len(prop[pp]) == 4:
psub = bilinear_gen(prop[pp][0], prop[pp][1], prop[pp][2], prop[pp][3], wghx1[i], wghx2[i], wghy[i],label[i])
properties[pp].append(psub)
#print pp, prop[pp][0], prop[pp][1], prop[pp][2], prop[pp][3], grids_sub_ndx[i], wghx1[i], wghx2[i], wghy[i],psub
elif len(prop[pp]) == 9:
cn = 0
points_phi = []
for kk in dtheta_ndx[i]:
#print "here",kk, self.OrientDS[i]['wtr'].nPhi
angPhi = self.OrientDS[i]['wtr'].PHI_angles[kk]
#print "for orientation with phi=",angPhi*R2D
if len(set(dtheta_ndx[i][kk])) == 1: # vertex
p = prop[pp][cn]
points_phi.append((angPhi,p))
cn += 3
continue
points_theta = []
for ip in dtheta_ndx[i][kk]:
if ip >= self.OrientDS[i]['wtr'].NTheta[kk]:
angTheta = 2*np.pi + self.OrientDS[i]['wtr'].THETA_angles[kk][ip-self.OrientDS[i]['wtr'].NTheta[kk]]
elif ip < 0:
angTheta = self.OrientDS[i]['wtr'].THETA_angles[kk][ip] - 2*np.pi
else:
angTheta = self.OrientDS[i]['wtr'].THETA_angles[kk][ip]
points_theta.append((angTheta,prop[pp][cn]))
#print pp, angTheta*R2D, prop[pp][cn]
cn += 1
p = lagrange_interp(points_theta,ang2)
#print 'quadratic interpolation gives',p, 'for property', pp
points_phi.append((angPhi,p))
psub = lagrange_interp(points_phi,ang1)
#print 'interpolated orientational property of %s:'%pp,psub
properties[pp].append(psub)
## on the level of r, theta, phi
self.properties = {}
if len(dtheta_ndx_layer) == 2: # for grids near vertex of each layers, linear interpolation for grids and quadratic interpolation for layers; NOT IN THIS SCRIPT
pass
#Wghx = []
#For kk in dtheta_ndx_layer:
# ip1 = dtheta_ndx_layer[kk][0]
# ip2 = dtheta_ndx_layer[kk][1]
# if ip1 == 0 and ip2 == 0:
# wtmp = 0
# else:
# wtmp = (self.ang2-DS[self.FT].THETA_angles[kk][ip1])/(DS[self.FT].THETA_angles[kk][ip2]-DS[self.FT].THETA_angles[kk][ip1])
# wghx.append(wtmp)
#Ihs = dtheta_ndx_layer.keys()
#Wghy = (self.ang1-DS[self.FT].PHI_angles[ihs[0]])/(DS[self.FT].PHI_angles[ihs[1]]-DS[self.FT].PHI_angles[ihs[0]])
#For pp in propnames:
# psub_r = []
# for m in range(0,len(properties[pp]),4): # for each layer
# #print pp, properties[pp][m], properties[pp][m+1],properties[pp][m+2], properties[pp][m+3], wghx[0], wghx[1], wghy
# psub = bilinear_gen(properties[pp][m], properties[pp][m+1],properties[pp][m+2], properties[pp][m+3], wghx[0], wghx[1], wghy,1)
# psub_r.append(psub)
# if not len(psub_r) == 3:
# #print 'quadratic interpolation needs 3 layers'
# sys.exit()
# points = []
# for t in range(len(ri_ndxs)):
# dist = DS[self.FT].R_NDX[ri_ndxs[t]]
# points.append((dist,psub_r[t]))
# p = lagrange_interp(points,self.r)
# self.properties[pp] = p
elif len(dtheta_ndx_layer) == 3: # quadratic interpolation for layers and grids
for pp in propnames:
psub_r = []
for m in range(0,len(properties[pp]),9): # for each layer
count = 0
points_th = []
for kk in dtheta_ndx_layer:
if len(set(dtheta_ndx_layer[kk])) == 1: # vertex
p = properties[pp][m+count]
points_th.append((DS[self.FT].PHI_angles[kk],p))
count += 3
continue
ip1 = dtheta_ndx_layer[kk][0]
ip2 = dtheta_ndx_layer[kk][1]
ip3 = dtheta_ndx_layer[kk][2]
th1 = DS[self.FT].THETA_angles[kk][ip1]
th2 = DS[self.FT].THETA_angles[kk][ip2]
th3 = DS[self.FT].THETA_angles[kk][ip3]
points = [(th1,properties[pp][m+count]),(th2,properties[pp][m+count+1]),(th3,properties[pp][m+count+2])]
p = lagrange_interp(points,self.ang2)
points_th.append((DS[self.FT].PHI_angles[kk],p))
count += 3
p = lagrange_interp(points_th,self.ang1)
psub_r.append(p)
if not len(psub_r) == 3:
#print 'quadratic interpolation needs 3 layers'
sys.exit()
points = []
for t in range(len(ri_ndxs)):
dist = DS[self.FT].R_NDX[ri_ndxs[t]]
points.append((dist,psub_r[t]))
p = lagrange_interp(points,self.r)
self.properties[pp] = p
def reverse_force_toque(self):
Fx = self.properties['Fx']
Fy = self.properties['Fy']
Fz = self.properties['Fz']
self.force = [Fx, Fy, Fz]
Tx = self.properties['Tx']
Ty = self.properties['Ty']
Tz = self.properties['Tz']
self.torque = [Tx, Ty, Tz]
if self.exit_before:
return
self.MirrorBackProperty()
self.ReorientToOldVec()
def get_interp_energy(self):
return self.properties['E']
def get_interp_force(self):
return self.force
def get_interp_torque(self):
return self.torque
| sethbrin/QM | version2/python/calculate_energy_new_coords_general.py | Python | mit | 32,462 |
# -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
from scrapy.conf import settings
import pymongo
from datetime import datetime
from .models import PQDataModel
class ParliamentSearchPipeline(object):
def __init__(self):
self.connection = None
def process_item(self, items, spider):
if spider.name == "ls_questions":
questions = items['questions']
# self.insert_in_db(questions)
else:
raise ValueError("Invalid collection:", spider.name)
return items
def insert_in_db(self, questions):
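        # Map each scraped question dict onto a PQDataModel record and persist
        # all of them in a single batch write.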
with PQDataModel.batch_write() as batch:
records = []
for q in questions:
record = PQDataModel()
record.question_number = q['question_number']
record.question_origin = q['question_origin']
record.question_type = q['question_type']
record.question_session = q['question_session']
record.question_ministry = q['question_ministry']
record.question_member = q['question_member']
record.question_subject = q['question_subject']
record.question_type = q['question_type']
record.question_annex = q['question_annex']
record.question_url = q['question_url']
record.question_text = q['question_text']
record.question_url = q['question_url']
record.question_date = datetime.strptime(q['question_date'], '%d.%m.%Y')
records.append(record)
for record in records:
batch.save(record)
| mthipparthi/parliament-search | parliamentsearch/pipelines.py | Python | mit | 1,789 |
import arrow
from requests import HTTPError
import settings
from . import misc
from . import voting
from . import comments
from . import exceptions as exc
def merge_pr(api, urn, pr, votes, total, threshold):
""" merge a pull request, if possible, and use a nice detailed merge commit
message """
pr_num = pr["number"]
pr_title = pr['title']
pr_description = pr['body']
path = "/repos/{urn}/pulls/{pr}/merge".format(urn=urn, pr=pr_num)
record = voting.friendly_voting_record(votes)
if record:
record = "Vote record:\n" + record
votes_summary = formatted_votes_summary(votes, total, threshold)
pr_url = "https://github.com/{urn}/pull/{pr}".format(urn=urn, pr=pr_num)
title = "merging PR #{num}: {pr_title}".format(
num=pr_num, pr_title=pr_title)
desc = """
{pr_url}: {pr_title}
Description:
{pr_description}
:ok_woman: PR passed {summary}.
{record}
""".strip().format(
pr_url=pr_url,
pr_title=pr_title,
pr_description=pr_description,
summary=votes_summary,
record=record,
)
data = {
"commit_title": title,
"commit_message": desc,
"merge_method": "merge",
# if some clever person attempts to submit more commits while we're
# aggregating votes, this sha check will fail and no merge will occur
"sha": pr["head"]["sha"],
}
try:
resp = api("PUT", path, json=data)
return resp["sha"]
except HTTPError as e:
resp = e.response
# could not be merged
if resp.status_code == 405:
raise exc.CouldntMerge
# someone trying to be sneaky and change their PR commits during voting
elif resp.status_code == 409:
raise exc.CouldntMerge
else:
raise
def formatted_votes_summary(votes, total, threshold):
vfor = sum(v for v in votes.values() if v > 0)
vagainst = abs(sum(v for v in votes.values() if v < 0))
return "with a vote of {vfor} for and {vagainst} against, with a weighted total of {total:.1f} and a threshold of {threshold:.1f}" \
.strip().format(vfor=vfor, vagainst=vagainst, total=total, threshold=threshold)
def formatted_votes_short_summary(votes, total, threshold):
vfor = sum(v for v in votes.values() if v > 0)
vagainst = abs(sum(v for v in votes.values() if v < 0))
return "vote: {vfor}-{vagainst}, weighted total: {total:.1f}, threshold: {threshold:.1f}" \
.strip().format(vfor=vfor, vagainst=vagainst, total=total, threshold=threshold)
def label_pr(api, urn, pr_num, labels):
""" set a pr's labels (removes old labels) """
if not isinstance(labels, (tuple, list)):
labels = [labels]
path = "/repos/{urn}/issues/{pr}/labels".format(urn=urn, pr=pr_num)
data = labels
resp = api("PUT", path, json=data)
def close_pr(api, urn, pr):
""" https://developer.github.com/v3/pulls/#update-a-pull-request """
path = "/repos/{urn}/pulls/{pr}".format(urn=urn, pr=pr["number"])
data = {
"state": "closed",
}
return api("patch", path, json=data)
def get_pr_last_updated(pr_data):
""" a helper for finding the utc datetime of the last pr branch
modifications """
repo = pr_data["head"]["repo"]
if repo:
dt = repo["pushed_at"]
else:
dt = pr_data["created_at"]
return arrow.get(dt)
def get_pr_comments(api, urn, pr_num):
""" yield all comments on a pr, weirdly excluding the initial pr comment
itself (the one the owner makes) """
params = {
"per_page": settings.DEFAULT_PAGINATION
}
path = "/repos/{urn}/issues/{pr}/comments".format(urn=urn, pr=pr_num)
comments = api("get", path, params=params)
for comment in comments:
yield comment
def get_ready_prs(api, urn, window):
""" yield mergeable, non-WIP prs that have had no modifications for longer
than the voting window. these are prs that are ready to be considered for
merging """
open_prs = get_open_prs(api, urn)
for pr in open_prs:
pr_num = pr["number"]
now = arrow.utcnow()
updated = get_pr_last_updated(pr)
delta = (now - updated).total_seconds()
is_wip = "WIP" in pr["title"]
if not is_wip and delta > window:
            # we check if it's mergeable if it's outside the voting window,
# because there seems to be a race where a freshly-created PR exists
# in the paginated list of PRs, but 404s when trying to fetch it
# directly
mergeable = get_is_mergeable(api, urn, pr_num)
if mergeable is True:
label_pr(api, urn, pr_num, [])
yield pr
elif mergeable is False:
label_pr(api, urn, pr_num, ["conflicts"])
if delta >= 60 * 60 * settings.PR_STALE_HOURS:
comments.leave_stale_comment(
api, urn, pr["number"], round(delta / 60 / 60))
close_pr(api, urn, pr)
# mergeable can also be None, in which case we just skip it for now
def voting_window_remaining_seconds(pr, window):
now = arrow.utcnow()
updated = get_pr_last_updated(pr)
delta = (now - updated).total_seconds()
return window - delta
def is_pr_in_voting_window(pr, window):
return voting_window_remaining_seconds(pr, window) <= 0
def get_pr_reviews(api, urn, pr_num):
""" get all pr reviews on a pr
https://help.github.com/articles/about-pull-request-reviews/ """
params = {
"per_page": settings.DEFAULT_PAGINATION
}
path = "/repos/{urn}/pulls/{pr}/reviews".format(urn=urn, pr=pr_num)
data = api("get", path, params=params)
return data
def get_is_mergeable(api, urn, pr_num):
return get_pr(api, urn, pr_num)["mergeable"]
def get_pr(api, urn, pr_num):
""" helper for fetching a pr. necessary because the "mergeable" field does
not exist on prs that come back from paginated endpoints, so we must fetch
the pr directly """
path = "/repos/{urn}/pulls/{pr}".format(urn=urn, pr=pr_num)
pr = api("get", path)
return pr
def get_open_prs(api, urn):
params = {
"state": "open",
"sort": "updated",
"direction": "asc",
"per_page": settings.DEFAULT_PAGINATION,
}
path = "/repos/{urn}/pulls".format(urn=urn)
data = api("get", path, params=params)
return data
def get_reactions_for_pr(api, urn, pr):
path = "/repos/{urn}/issues/{pr}/reactions".format(urn=urn, pr=pr)
params = {"per_page": settings.DEFAULT_PAGINATION}
reactions = api("get", path, params=params)
for reaction in reactions:
yield reaction
def post_accepted_status(api, urn, pr, voting_window, votes, total, threshold):
sha = pr["head"]["sha"]
remaining_seconds = voting_window_remaining_seconds(pr, voting_window)
remaining_human = misc.seconds_to_human(remaining_seconds)
votes_summary = formatted_votes_short_summary(votes, total, threshold)
post_status(api, urn, sha, "success",
"remaining: {time}, {summary}".format(time=remaining_human, summary=votes_summary))
def post_rejected_status(api, urn, pr, voting_window, votes, total, threshold):
sha = pr["head"]["sha"]
remaining_seconds = voting_window_remaining_seconds(pr, voting_window)
remaining_human = misc.seconds_to_human(remaining_seconds)
votes_summary = formatted_votes_short_summary(votes, total, threshold)
post_status(api, urn, sha, "failure",
"remaining: {time}, {summary}".format(time=remaining_human, summary=votes_summary))
def post_pending_status(api, urn, pr, voting_window, votes, total, threshold):
sha = pr["head"]["sha"]
remaining_seconds = voting_window_remaining_seconds(pr, voting_window)
remaining_human = misc.seconds_to_human(remaining_seconds)
votes_summary = formatted_votes_short_summary(votes, total, threshold)
post_status(api, urn, sha, "pending",
"remaining: {time}, {summary}".format(time=remaining_human, summary=votes_summary))
def post_status(api, urn, sha, state, description):
""" apply an issue label to a pr """
path = "/repos/{urn}/statuses/{sha}".format(urn=urn, sha=sha)
data = {
"state": state,
"description": description,
"context": "chaosbot"
}
api("POST", path, json=data)
| eukaryote31/chaos | github_api/prs.py | Python | mit | 8,393 |
#!/usr/bin/env python
__copyright__ = "Copyright 2013-2014, http://radical.rutgers.edu"
__license__ = "MIT"
import sys
import radical.pilot as rp
# READ: The RADICAL-Pilot documentation:
# http://radicalpilot.readthedocs.org/en/latest
#
# Try running this example with RADICAL_PILOT_VERBOSE=debug set if
# you want to see what happens behind the scenes!
#------------------------------------------------------------------------------
#
def pilot_state_cb (pilot, state):
""" this callback is invoked on all pilot state changes """
print "[Callback]: ComputePilot '%s' state: %s." % (pilot.uid, state)
if state == rp.FAILED:
sys.exit (1)
#------------------------------------------------------------------------------
#
def unit_state_cb (unit, state):
""" this callback is invoked on all unit state changes """
print "[Callback]: ComputeUnit '%s' state: %s." % (unit.uid, state)
if state == rp.FAILED:
sys.exit (1)
# ------------------------------------------------------------------------------
#
if __name__ == "__main__":
# we can optionally pass session name to RP
if len(sys.argv) > 1:
session_name = sys.argv[1]
else:
session_name = None
# Create a new session. No need to try/except this: if session creation
# fails, there is not much we can do anyways...
session = rp.Session(name=session_name)
print "session id: %s" % session.uid
# all other pilot code is now tried/excepted. If an exception is caught, we
# can rely on the session object to exist and be valid, and we can thus tear
# the whole RP stack down via a 'session.close()' call in the 'finally'
# clause...
try:
# Add a Pilot Manager. Pilot managers manage one or more ComputePilots.
pmgr = rp.PilotManager(session=session)
# Register our callback with the PilotManager. This callback will get
# called every time any of the pilots managed by the PilotManager
# change their state.
pmgr.register_callback(pilot_state_cb)
        # Define an X-core pilot on Stampede that runs for N minutes and
        # uses $HOME/radical.pilot.sandbox as sandbox directory.
pdesc = rp.ComputePilotDescription()
pdesc.resource = "xsede.stampede"
pdesc.runtime = 15 # N minutes
pdesc.cores = 16 # X cores
pdesc.project = "TG-MCB090174"
# Launch the pilot.
pilot = pmgr.submit_pilots(pdesc)
cud_list = []
for unit_count in range(0, 4):
cu = rp.ComputeUnitDescription()
cu.pre_exec = ["module load python intel mvapich2 mpi4py"]
cu.executable = "python"
cu.arguments = ["helloworld_mpi.py"]
cu.input_staging = ["helloworld_mpi.py"]
# These two parameters are relevant to MPI execution:
# 'cores' sets the number of cores required by the task
            # 'mpi' identifies the task as an MPI task
cu.cores = 8
cu.mpi = True
cud_list.append(cu)
# Combine the ComputePilot, the ComputeUnits and a scheduler via
# a UnitManager object.
umgr = rp.UnitManager(
session=session,
scheduler=rp.SCHED_DIRECT_SUBMISSION)
# Register our callback with the UnitManager. This callback will get
# called every time any of the units managed by the UnitManager
# change their state.
umgr.register_callback(unit_state_cb)
# Add the previously created ComputePilot to the UnitManager.
umgr.add_pilots(pilot)
# Submit the previously created ComputeUnit descriptions to the
# PilotManager. This will trigger the selected scheduler to start
# assigning ComputeUnits to the ComputePilots.
units = umgr.submit_units(cud_list)
# Wait for all compute units to reach a terminal state (DONE or FAILED).
umgr.wait_units()
if not isinstance(units, list):
units = [units]
for unit in units:
print "* Task %s - state: %s, exit code: %s, started: %s, finished: %s, stdout: %s" \
% (unit.uid, unit.state, unit.exit_code, unit.start_time, unit.stop_time, unit.stdout)
except Exception as e:
# Something unexpected happened in the pilot code above
print "caught Exception: %s" % e
raise
except (KeyboardInterrupt, SystemExit) as e:
# the callback called sys.exit(), and we can here catch the
# corresponding KeyboardInterrupt exception for shutdown. We also catch
# SystemExit (which gets raised if the main threads exits for some other
# reason).
print "need to exit now: %s" % e
finally:
# always clean up the session, no matter if we caught an exception or
# not.
print "closing session"
session.close ()
# the above is equivalent to
#
# session.close (cleanup=True, terminate=True)
#
# it will thus both clean out the session's database record, and kill
# all remaining pilots (none in our example).
#-------------------------------------------------------------------------------
| JensTimmerman/radical.pilot | examples/running_mpi_executables.py | Python | mit | 5,342 |
from riotwatcher import *
from time import sleep
import logging
log = logging.getLogger('log')
def getTeamOfSummoner( summonerId, game ):
for p in game['participants']:
if p['summonerId'] == summonerId:
return p['teamId']
def getSummonerIdsOfOpponentTeam( summonerId, game ):
teamId = getTeamOfSummoner(summonerId, game)
summoners = []
for p in game['participants']:
if p['teamId'] != teamId:
summoners.append(p['summonerId'])
return summoners
def queryPastGameIdSets( w, summonerIds, past10 ):
sets = {}
rqs = 0
for id in summonerIds:
response = w.get_match_list(id);
matchlist = []
if 'matches' in response:
matchlist = response['matches']
gamelist = []
if past10:
gamelist = w.get_recent_games(id)['games']
rqs += 2
if rqs >= 8:
sleep(10)
rqs = 0
log.debug('matches of summoner '+str(id)+': '+str(len(matchlist)))
s = set()
for match in matchlist:
s.add(match['matchId'])
for game in gamelist:
s.add(game['gameId'])
sets[id] = s
return sets
def computeFriendship( IdSets ):
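    # For every pair of summoners, record (symmetrically) how their sets of
    # past game ids overlap; only pairs with shared games matter for the
    # premade grouping done in computePremades().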
searchedSets = set()
friendships = {}
for id in IdSets:
friendships[id] = {}
for id in IdSets:
searchedSets.add(id)
for gameId in IdSets[id]:
for id2 in IdSets:
if not id2 in searchedSets:
if gameId in IdSets[id2]:
if not id2 in friendships[id]:
friendships[id][id2] = 1
if not id in friendships[id2]:
friendships[id2][id] = 1
friendships[id][id2] += 1
friendships[id2][id] += 1
return friendships
def computePremades( friendshipRelations ):
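    # Start from each summoner plus their direct "friends", then merge groups
    # that overlap so each premade group is reported only once.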
premades = []
for id in friendshipRelations:
group = set(friendshipRelations[id].keys())
group.add(id)
if group not in premades:
premades.append(group)
finPremades = []
for group1 in premades:
finGroup = group1
for group2 in premades:
if group1 != group2 and len(group1 & group2) > 0:
finGroup = finGroup | group2
if finGroup not in finPremades:
finPremades.append(finGroup)
return finPremades
def getPremades( summonerName, lolAPIKey, past10 ):
w = riotwatcher.RiotWatcher(lolAPIKey, default_region=riotwatcher.EUROPE_WEST)
id = w.get_summoner(name=summonerName)['id']
game = w.get_current_game(id)
participants = game['participants']
idToParticipantsMap = {}
for p in participants:
log.info(p['summonerName'].encode('utf8')+' '+str(p['summonerId'])+' '+str(p['teamId']))
idToParticipantsMap[p['summonerId']] = p
log.debug(getSummonerIdsOfOpponentTeam(id,game))
gameIdSets = queryPastGameIdSets( w, getSummonerIdsOfOpponentTeam(id,game), past10 )
friendshipRelations = computeFriendship(gameIdSets)
log.debug(friendshipRelations)
premades = computePremades(friendshipRelations)
premadesNames = []
for group in premades:
groupNames = []
if len(group) > 1:
for summonerId in group:
groupNames.append(idToParticipantsMap[summonerId]['summonerName'])
premadesNames.append(groupNames)
return premadesNames | DenBaum/lolm8guesser | friendship.py | Python | mit | 3,046 |
import unittest
from datetime import datetime
import numpy as np
import pandas as pd
from excel_helper.helper import DataSeriesLoader
class TestDataFrameWithCAGRCalculation(unittest.TestCase):
def test_simple_CAGR(self):
"""
Basic test case, applying CAGR to a Pandas Dataframe.
:return:
"""
# the time axis of our dataset
times = pd.date_range('2009-01-01', '2009-04-01', freq='MS')
# the sample axis our dataset
samples = 2
dfl = DataSeriesLoader.from_excel('test.xlsx', times, size=samples, sheet_index=0)
res = dfl['static_one']
print (res)
assert res.loc[[datetime(2009, 1, 1)]][0] == 1
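        # After three months the value should have grown by the pro-rated
        # annual factor of 1.1, i.e. 1.1 ** (3/12).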
assert np.abs(res.loc[[datetime(2009, 4, 1)]][0] - pow(1.1, 3. / 12)) < 0.00001
def test_CAGR_ref_date_within_bounds(self):
"""
Basic test case, applying CAGR to a Pandas Dataframe.
:return:
"""
# the time axis of our dataset
times = pd.date_range('2009-01-01', '2009-04-01', freq='MS')
# the sample axis our dataset
samples = 2
dfl = DataSeriesLoader.from_excel('test.xlsx', times, size=samples, sheet_index=0)
res = dfl['static_one']
assert res.loc[[datetime(2009, 1, 1)]][0] == 1
assert np.abs(res.loc[[datetime(2009, 4, 1)]][0] - pow(1.1, 3. / 12)) < 0.00001
def test_CAGR_ref_date_before_start(self):
"""
Basic test case, applying CAGR to a Pandas Dataframe.
:return:
"""
# the time axis of our dataset
times = pd.date_range('2009-01-01', '2009-04-01', freq='MS')
# the sample axis our dataset
samples = 2
dfl = DataSeriesLoader.from_excel('test.xlsx', times, size=samples, sheet_index=0)
# equivalent to dfl['test_ref_date_before_start']
self.assertRaises(AssertionError, dfl.__getitem__, 'test_ref_date_before_start')
def test_CAGR_ref_date_after_end(self):
"""
Basic test case, applying CAGR to a Pandas Dataframe.
:return:
"""
# the time axis of our dataset
times = pd.date_range('2009-01-01', '2009-04-01', freq='MS')
# the sample axis our dataset
samples = 2
dfl = DataSeriesLoader.from_excel('test.xlsx', times, size=samples, sheet_index=0)
# equivalent to dfl['test_ref_date_before_start']
self.assertRaises(AssertionError, dfl.__getitem__, 'test_ref_date_after_end')
def test_simple_CAGR_from_pandas(self):
times = pd.date_range('2009-01-01', '2009-04-01', freq='MS')
xls = pd.ExcelFile('test.xlsx')
df = xls.parse('Sheet1')
ldr = DataSeriesLoader.from_dataframe(df, times, size=2)
res = ldr['static_one']
assert res.loc[[datetime(2009, 1, 1)]][0] == 1
assert np.abs(res.loc[[datetime(2009, 4, 1)]][0] - pow(1.1, 3. / 12)) < 0.00001
def test_simple_CAGR_mm(self):
"""
Basic test case, applying CAGR to a Pandas Dataframe.
:return:
"""
# the time axis of our dataset
times = pd.date_range('2015-01-01', '2016-01-01', freq='MS')
# the sample axis our dataset
samples = 2
dfl = DataSeriesLoader.from_excel('test.xlsx', times, size=samples, sheet_index=0)
res = dfl['mm']
print(res)
# assert res.loc[[datetime(2009, 1, 1)]][0] == 1
# assert np.abs(res.loc[[datetime(2009, 4, 1)]][0] - pow(1.1, 3. / 12)) < 0.00001
if __name__ == '__main__':
unittest.main()
| dschien/PyExcelModelingHelper | tests/test_DataSeriesLoader.py | Python | mit | 3,547 |
from django.dispatch import Signal
pre_save = Signal(providing_args=['instance', 'action', ])
post_save = Signal(providing_args=['instance', 'action', ])
pre_delete = Signal(providing_args=['instance', 'action', ])
post_delete = Signal(providing_args=['instance', 'action', ])
| thoas/django-sequere | sequere/contrib/timeline/signals.py | Python | mit | 279 |
#!/usr/bin/env python
'''
Import this module to have access to a global redis cache named GLOBAL_CACHE.
USAGE:
from caching import GLOBAL_CACHE
GLOBAL_CACHE.store('foo', 'bar')
GLOBAL_CACHE.get('foo')
>> bar
'''
from redis_cache import SimpleCache
try:
GLOBAL_CACHE
except NameError:
GLOBAL_CACHE = SimpleCache(limit=1000, expire=60*60*24, namespace="GLOBAL_CACHE")
else:
# Already defined...
pass
| miketwo/pylacuna | pylacuna/caching.py | Python | mit | 434 |
# https://graphics.stanford.edu/~seander/bithacks.html#NextBitPermutation
def selector(values, setBits):
maxBits = len(values)
def select(v):
out = []
for i in range(maxBits):
if (v & (1 << i)):
out.append(values[i])
return out
v = (2 ** setBits) - 1
endState = v << (maxBits - setBits)
yield select(v)
while v != endState:
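        # Gosper's hack (see the bithacks link above): advance v to the next
        # larger integer with the same number of set bits, keeping all
        # arithmetic within maxBits bits.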
t = (v | (v - 1)) + 1
v = t | ((((t & (-t % (1 << maxBits))) // (v & (-v % (1 << maxBits)))) >> 1) - 1)
yield select(v)
def normalize(perm):
ref = sorted(perm)
return [ref.index(x) for x in perm]
def contains_pattern(perm, patt):
if len(patt) > len(perm):
return False
for p in selector(perm, len(patt)):
if normalize(p) == patt:
return True
return False
if __name__ == '__main__':
print(contains_pattern(
[14, 12, 6, 10, 0, 9, 1, 11, 13, 16, 17, 3, 7, 5, 15, 2, 4, 8],
[3, 0, 1, 2]))
print(True)
| asgeir/old-school-projects | python/verkefni2/cpattern.py | Python | mit | 1,006 |
# testStr = "Hello {name}, How long have you bean?. I'm {myName}"
#
# testStr = testStr.format(name="Leo", myName="Serim")
#
# print(testStr)
limit = None
hello = str(limit, "")
print(hello)
# print( "4" in "3.5")
| SELO77/seloPython | 3.X/ex/strFormat.py | Python | mit | 217 |
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'App.created_at'
db.add_column('mobile_apps_app', 'created_at', self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True), keep_default=False)
def backwards(self, orm):
# Deleting field 'App.created_at'
db.delete_column('mobile_apps_app', 'created_at')
models = {
'core.level': {
'Meta': {'ordering': "['order']", 'object_name': 'Level'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'level': ('django.db.models.fields.CharField', [], {'max_length': '45'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'order': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
},
'mobile_apps.app': {
'Meta': {'object_name': 'App'},
'content_areas': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'content_areas'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['core.Level']"}),
'cost': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '8', 'decimal_places': '2', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'levels': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'levels'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['core.Level']"}),
'link': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['mobile_apps.Type']"})
},
'mobile_apps.type': {
'Meta': {'object_name': 'Type'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'})
}
}
complete_apps = ['mobile_apps']
| katemsu/kate_website | kate3/mobile_apps/migrations/0002_auto__add_field_app_created_at.py | Python | mit | 2,545 |
import math
import urwid
from mitmproxy.tools.console import common
from mitmproxy.tools.console import signals
from mitmproxy.tools.console import grideditor
class SimpleOverlay(urwid.Overlay):
def __init__(self, master, widget, parent, width, valign="middle"):
self.widget = widget
self.master = master
super().__init__(
widget,
parent,
align="center",
width=width,
valign=valign,
height="pack"
)
def keypress(self, size, key):
key = super().keypress(size, key)
if key == "esc":
signals.pop_view_state.send(self)
if key == "?":
self.master.view_help(self.widget.make_help())
else:
return key
class Choice(urwid.WidgetWrap):
def __init__(self, txt, focus, current):
if current:
s = "option_active_selected" if focus else "option_active"
else:
s = "option_selected" if focus else "text"
return super().__init__(
urwid.AttrWrap(
urwid.Padding(urwid.Text(txt)),
s,
)
)
def selectable(self):
return True
def keypress(self, size, key):
return key
class ChooserListWalker(urwid.ListWalker):
def __init__(self, choices, current):
self.index = 0
self.choices = choices
self.current = current
def _get(self, idx, focus):
c = self.choices[idx]
return Choice(c, focus, c == self.current)
def set_focus(self, index):
self.index = index
def get_focus(self):
return self._get(self.index, True), self.index
def get_next(self, pos):
if pos >= len(self.choices) - 1:
return None, None
pos = pos + 1
return self._get(pos, False), pos
def get_prev(self, pos):
pos = pos - 1
if pos < 0:
return None, None
return self._get(pos, False), pos
class Chooser(urwid.WidgetWrap):
def __init__(self, title, choices, current, callback):
self.choices = choices
self.callback = callback
choicewidth = max([len(i) for i in choices])
self.width = max(choicewidth, len(title) + 5)
self.walker = ChooserListWalker(choices, current)
super().__init__(
urwid.AttrWrap(
urwid.LineBox(
urwid.BoxAdapter(
urwid.ListBox(self.walker),
len(choices)
),
                    title=title
),
"background"
)
)
def selectable(self):
return True
def keypress(self, size, key):
key = common.shortcuts(key)
if key == "enter":
self.callback(self.choices[self.walker.index])
signals.pop_view_state.send(self)
return super().keypress(size, key)
def make_help(self):
text = []
keys = [
("enter", "choose option"),
("esc", "exit chooser"),
]
text.extend(common.format_keyvals(keys, key="key", val="text", indent=4))
return text
class OptionsOverlay(urwid.WidgetWrap):
def __init__(self, master, name, vals, vspace):
"""
vspace: how much vertical space to keep clear
"""
cols, rows = master.ui.get_cols_rows()
self.ge = grideditor.OptionsEditor(master, name, vals)
super().__init__(
urwid.AttrWrap(
urwid.LineBox(
urwid.BoxAdapter(self.ge, rows - vspace),
title=name
),
"background"
)
)
self.width = math.ceil(cols * 0.8)
def make_help(self):
return self.ge.make_help()
| xaxa89/mitmproxy | mitmproxy/tools/console/overlay.py | Python | mit | 3,855 |
import os
ADMINS = (
# ('Eduardo Lopez', 'eduardo.biagi@gmail.com'),
)
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
'NAME': os.path.join(os.path.dirname(__file__), 'highways.db'), # Or path to database file if using sqlite3.
'USER': '', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/Mexico_City'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'es-MX'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale
USE_L10N = True
# Absolute path to the directory that holds media.
# Example: "/home/media/media.lawrence.com/"
MEDIA_ROOT = os.path.join(os.path.dirname(__file__), 'media')
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
# Examples: "http://media.lawrence.com", "http://example.com/media/"
MEDIA_URL = '/static/'
# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
# trailing slash.
# Examples: "http://foo.com/media/", "/media/".
ADMIN_MEDIA_PREFIX = '/media/'
# Make this unique, and don't share it with anybody.
SECRET_KEY = 'bre7b$*6!iagzqyi1%q@%_ofbb)e!rawcnm9apx^%kf@b%)le!'
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
ROOT_URLCONF = 'project.urls'
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.admin',
'django.contrib.admindocs',
'carreteras',
)
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
TEMPLATE_DIRS = (
os.path.join(os.path.dirname(__file__), "templates"),
)
| tapichu/highway-maps | project/settings.py | Python | mit | 3,183 |
import MySQLdb as _mysql
from collections import namedtuple
import re
# Only needs to compile one time so we put it here
float_match = re.compile(r'[-+]?\d*\.?\d+(?:[eE][-+]?\d+)?$').match
def is_number(string):
return bool(float_match(string))
class MySQLDatabase(object):
"""
This is the driver class that we will use
for connecting to our database. In here we'll
create a constructor (__init__) that will connect
to the database once the driver class is instantiated
and a destructor method that will close the database
connection once the driver object is destroyed.
"""
def __init__(self, database_name, username,
password, host='localhost'):
"""
Here we'll try to connect to the database
using the variables that we passed through
and if the connection fails we'll print out the error
"""
try:
self.db = _mysql.connect(db=database_name, host=host, user=username, passwd=password)
self.database_name = database_name
print "Connected to MySQL!"
except _mysql.Error, e:
print e
def __del__(self):
"""
Here we'll do a check to see if `self.db` is present.
This will only be the case if the connection was
successfully made in the initialiser.
Inside that condition we'll close the connection
"""
if hasattr(self, 'db'):
self.db.close()
print "MySQL Connection Closed"
def get_available_tables(self):
"""
This method will allow us to see what
tables are available to us when we're
running our queries
"""
cursor = self.db.cursor()
cursor.execute("SHOW TABLES;")
self.tables = cursor.fetchall()
cursor.close()
return self.tables
def convert_to_named_tuples(self, cursor):
results = None
names = " ".join(d[0] for d in cursor.description)
klass = namedtuple('Results', names)
try:
results = map(klass._make, cursor.fetchall())
except _mysql.ProgrammingError, e:
print e
return results
def get_columns_for_table(self, table_name):
"""
This method will enable us to interact
with our database to find what columns
are currently in a specific table
"""
cursor = self.db.cursor()
cursor.execute("SHOW COLUMNS FROM `%s`" % table_name)
self.columns = cursor.fetchall()
cursor.close()
return self.columns
def select(self, table, columns=None, named_tuples=False, **kwargs):
"""
We'll create our `select` method in order
to make it simpler for extracting data from
the database.
select(table_name, [list_of_column_names])
"""
sql_str = "SELECT "
# add columns or just use the wildcard
if not columns:
sql_str += " * "
else:
for column in columns:
sql_str += "%s, " % column
sql_str = sql_str[:-2] # remove the last comma!
# add the to the SELECT query
sql_str += " FROM `%s`.`%s`" % (self.database_name, table)
# if there's a JOIN clause attached
if kwargs.has_key('join'):
sql_str += " JOIN %s " % kwargs.get('join')
# if there's a WHERE clause attached
if kwargs.has_key('where'):
sql_str += " WHERE %s " % kwargs.get('where')
# if there's a LIMIT clause attached
if kwargs.has_key('limit'):
sql_str += " LIMIT %s " % kwargs.get('limit')
# Finalise out SQL string
sql_str += ";"
cursor = self.db.cursor()
cursor.execute(sql_str)
if named_tuples:
results = self.convert_to_named_tuples(cursor)
else:
results = cursor.fetchall()
cursor.close()
return results
def delete(self, table, **wheres):
"""
This function will allow us to delete data from a given tables
based on wether or not a WHERE clause is present or not
"""
sql_str = "DELETE FROM `%s`.`%s`" % (self.database_name, table)
if wheres is not None:
first_where_clause = True
for where, term in wheres.iteritems():
if first_where_clause:
# This is the first WHERE clause
sql_str += " WHERE `%s`.`%s` %s" % (table, where, term)
first_where_clause = False
else:
# this is the second (additional) WHERE clause so we use AND
sql_str += " AND `%s`.`%s` %s" % (table, where, term)
sql_str += ";"
cursor = self.db.cursor()
cursor.execute(sql_str)
self.db.commit()
cursor.close()
# Only needs to compile one time so we put it here
float_match = re.compile(r'[-+]?\d*\.?\d+(?:[eE][-+]?\d+)?$').match
def is_number(string):
return bool(float_match(string))
def insert(self, table, **column_names):
"""
Insert function
Example usages:-
db.insert('people', first_name='Ringo',
second_name='Starr', DOB=STR_TO_DATE('01-01-1999', '%d-%m-%Y'))
"""
sql_str = "INSERT INTO `%s`.`%s` " % (self.database_name, table)
if column_names is not None:
columns = "("
values = "("
for arg, value in column_names.iteritems():
columns += "`%s`, " % arg
# Check how we should add this to the columns string
if is_number(value) or arg == 'DOB':
# It's a number or date so we don't add the ''
values += "%s, " % value
else:
# It's a string so we add the ''
values += "5S, " % value
columns = columns[:-2] # Strip off the spare ',' from the end
values = values[:-2] # Same here too
columns += ") VALUES" # Add the connecting keyword and brace
values += ");" # Add the brace and like terminator
sql_str += "%s %s" % (columns, values)
cursor = self.db.cursor()
cursor.execute(sql_str)
self.db.commit()
cursor.close()
def update(self, table, where=None, **column_values):
sql_str = "UPDATE `%s`.`%s` SET " % (self.database_name, table)
if column_values is not None:
            for column_name, value in column_values.iteritems():
sql_str += "`%s`=" % column_name
# check how we should add this to the column string
if is_number(value):
# it's a number so we don't add ''
sql_str += "%s, " % value
else:
# it's a date or string so add the ''
sql_str += "'%s', " % value
sql_str = sql_str[:-2] # strip off the last , and space character
if where:
sql_str += " WHERE %s" % where
        cursor = self.db.cursor()
cursor.execute(sql_str)
self.db.commit()
cursor.close()
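# --- Illustrative usage sketch (assumption: not part of the original file) ---
# A minimal, hedged example of how this driver class might be used. The
# database name and credentials below are placeholders, and `people` is the
# hypothetical table from the insert() docstring above.
if __name__ == '__main__':
    db = MySQLDatabase('example_db', 'example_user', 'example_password')
    # List the tables available on the placeholder database.
    print db.get_available_tables()
    # Fetch up to five rows from the hypothetical `people` table.
    for row in db.select('people', columns=['first_name', 'second_name'], limit=5):
        print row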
| GunnerJnr/_CodeInstitute | Stream-2/Back-End-Development/18.Using-Python-with-MySQL-Part-Three-Intro/3.How-to-Build-an-Update-SQL-String/database/mysql.py | Python | mit | 7,289 |
"""
https://codility.com/programmers/task/equi_leader/
"""
from collections import Counter, defaultdict
def solution(A):
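    # `top` tracks the most frequent value in the prefix A[0..i]. Index i is
    # counted iff `top` holds a strict majority in both the prefix and the
    # remaining suffix (checked by _is_equi_leader).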
def _is_equi_leader(i):
prefix_count_top = running_counts[top]
suffix_count_top = total_counts[top] - prefix_count_top
return (prefix_count_top * 2 > i + 1) and (suffix_count_top * 2 > len(A) - i - 1)
total_counts = Counter(A)
running_counts = defaultdict(int)
top = A[0]
result = 0
for i in xrange(len(A) - 1):
n = A[i]
running_counts[n] += 1
top = top if running_counts[top] >= running_counts[n] else n
if _is_equi_leader(i):
result += 1
return result
| py-in-the-sky/challenges | codility/equi_leader.py | Python | mit | 707 |
import os
import shutil
from glob import glob
print 'Content-type:text/html\r\n\r\n'
print '<html>'
found_pages = glob('archive/*.py')
if found_pages:
path = "/cgi-bin/archive/"
moveto = "/cgi-bin/pages/"
files = os.listdir(path)
files.sort()
for f in files:
src = path+f
dst = moveto+f
shutil.move(src, dst)
print 'All pages restored'
print '<meta http-equiv="refresh" content="1";>'
if not found_pages:
print 'Nothing to restore'
print '</html>'
# EOF
| neva-nevan/ConfigPy | ConfigPy-Portable/ConfigPy/cgi-bin/restore.py | Python | mit | 516 |
"""
@brief test log(time=0s)
"""
import os
import unittest
from pyquickhelper.loghelper import fLOG
from pyquickhelper.filehelper import explore_folder_iterfile
from pyquickhelper.ipythonhelper import upgrade_notebook, remove_execution_number
class TestConvertNotebooks(unittest.TestCase):
"""Converts notebooks from v3 to v4. Should not be needed anymore."""
def test_convert_notebooks(self):
fLOG(
__file__,
self._testMethodName,
OutputPrint=__name__ == "__main__")
fold = os.path.abspath(os.path.dirname(__file__))
fold2 = os.path.normpath(
os.path.join(fold, "..", "..", "_doc", "notebooks"))
for nbf in explore_folder_iterfile(fold2, pattern=".*[.]ipynb"):
t = upgrade_notebook(nbf)
if t:
fLOG("modified", nbf)
# remove numbers
remove_execution_number(nbf, nbf)
fold2 = os.path.normpath(os.path.join(fold, "..", "..", "_unittests"))
for nbf in explore_folder_iterfile(fold2, pattern=".*[.]ipynb"):
t = upgrade_notebook(nbf)
if t:
fLOG("modified", nbf)
if __name__ == "__main__":
unittest.main()
| sdpython/python3_module_template | _unittests/ut_module/test_convert_notebooks.py | Python | mit | 1,224 |
from itertools import permutations
import re
def create_formula(combination,numbers):
formula = ""
index = 0
for op in combination:
formula += str(numbers[index]) + op
index += 1
formula += numbers[index]
return formula
'''
Unnecessary Function
'''
def evaluate(form):
result = 0
for index in range(len(form)):
if form[index] == "+":
result += int(form[index+1])
index += 1
elif form[index] == "-":
result -= int(form[index+1])
index += 1
elif form[index] == "*":
result *= int(form[index+1])
index += 1
elif form[index] == "/":
result //= int(form[index+1])
index += 1
else:
result += int(form[index])
return result
def countdown(numbers):
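    # The last entry of `numbers` is the target; brute-force every operator
    # combination and every permutation of the remaining numbers, keeping the
    # formulas that evaluate to the target.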
rightCombinations = []
finalScore = numbers.pop()
combinations = returnAllCombinations(len(numbers) - 1)
perms = list(permutations(numbers))
for combination in combinations:
for permut in perms:
formula = create_formula(combination,permut)
#form = re.split("([*+-/])",formula)
#if int(evaluate(form)) == int(finalScore):
if int(eval(formula)) == int(finalScore):
rightCombinations.append(formula)
return rightCombinations
def returnAllCombinations(size):
listFinal = []
for x in range(0,size):
if len(listFinal) == 0:
for y in range(0,4):
if y == 0:
listFinal.append("+")
elif y == 1:
listFinal.append("-")
elif y == 2:
listFinal.append("*")
else:
listFinal.append("/")
else:
newList = []
for l in listFinal:
for y in range(0,4):
newLine = list(l)
if y == 0:
newLine.append("+")
elif y == 1:
newLine.append("-")
elif y == 2:
newLine.append("*")
else:
newLine.append("/")
newList.append(newLine)
listFinal = list(newList)
return listFinal
out = open("output.txt",'w')
for line in open("input.txt",'r'):
for formula in countdown(line.split(" ")):
out.write(formula)
out.write("\n")
out.write("\n\n")
| F0lha/UJunior-Projects | DailyProgrammer/Challenge#318/src.py | Python | mit | 2,546 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class Destination(Model):
"""Capture storage details for capture description.
:param name: Name for capture destination
:type name: str
:param storage_account_resource_id: Resource id of the storage account to
be used to create the blobs
:type storage_account_resource_id: str
:param blob_container: Blob container Name
:type blob_container: str
:param archive_name_format: Blob naming convention for archive, e.g.
{Namespace}/{EventHub}/{PartitionId}/{Year}/{Month}/{Day}/{Hour}/{Minute}/{Second}.
Here all the parameters (Namespace,EventHub .. etc) are mandatory
irrespective of order
:type archive_name_format: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'storage_account_resource_id': {'key': 'properties.storageAccountResourceId', 'type': 'str'},
'blob_container': {'key': 'properties.blobContainer', 'type': 'str'},
'archive_name_format': {'key': 'properties.archiveNameFormat', 'type': 'str'},
}
def __init__(self, name=None, storage_account_resource_id=None, blob_container=None, archive_name_format=None):
self.name = name
self.storage_account_resource_id = storage_account_resource_id
self.blob_container = blob_container
self.archive_name_format = archive_name_format
| AutorestCI/azure-sdk-for-python | azure-mgmt-servicebus/azure/mgmt/servicebus/models/destination.py | Python | mit | 1,856 |
"""94. Binary Tree Inorder Traversal
https://leetcode.com/problems/binary-tree-inorder-traversal/
Given a binary tree, return the in-order traversal of its nodes' values.
Example:
Input: [1,null,2,3]
1
\
2
/
3
Output: [1,3,2]
Follow up: Recursive solution is trivial, could you do it iteratively?
"""
from typing import List
from common.tree_node import TreeNode
class Solution:
def iterative_inorder_traversal(self, root: TreeNode) -> List[int]:
"""
iterative traversal
"""
ans = []
stack = []
while root or stack:
if root:
stack.append(root)
root = root.left
else:
root = stack.pop()
ans.append(root.val)
root = root.right
return ans
def recursive_inorder_traversal(self, root: TreeNode) -> List[int]:
"""
recursive traversal, process left if needed, then val, at last right
"""
if not root:
return []
ans = []
ans += self.recursive_inorder_traversal(root.left)
ans.append(root.val)
ans += self.recursive_inorder_traversal(root.right)
return ans
| isudox/leetcode-solution | python-algorithm/leetcode/problem_94.py | Python | mit | 1,229 |
# -*- coding: utf-8 -*-
from django.core.cache import cache
from django.shortcuts import render
from django.http import Http404
from styleguide.utils import (Styleguide, STYLEGUIDE_DIR_NAME,
STYLEGUIDE_DEBUG, STYLEGUIDE_CACHE_NAME,
STYLEGUIDE_ACCESS)
def index(request, module_name=None, component_name=None):
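    # Serve the styleguide index. The parsed Styleguide is cached between
    # requests unless STYLEGUIDE_DEBUG is set; users failing the
    # STYLEGUIDE_ACCESS check get a 404.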
if not STYLEGUIDE_ACCESS(request.user):
raise Http404()
styleguide = None
if not STYLEGUIDE_DEBUG:
styleguide = cache.get(STYLEGUIDE_CACHE_NAME)
if styleguide is None:
styleguide = Styleguide()
cache.set(STYLEGUIDE_CACHE_NAME, styleguide, None)
if module_name is not None:
styleguide.set_current_module(module_name)
context = {'styleguide': styleguide}
index_path = "%s/index.html" % STYLEGUIDE_DIR_NAME
return render(request, index_path, context)
| andrefarzat/django-styleguide | styleguide/views.py | Python | mit | 897 |
from quotes.models import Quote
from django.contrib import admin
class QuoteAdmin(admin.ModelAdmin):
list_display = ('message', 'name', 'program', 'class_of',
'submission_time')
admin.site.register(Quote, QuoteAdmin)
| k4rtik/alpo | quotes/admin.py | Python | mit | 243 |
from tkinter import *
import tkinter
import HoursParser
class UserInterface(tkinter.Frame):
def __init__(self, master):
self.master = master
self.events_list = []
# Set window size
master.minsize(width=800, height=600)
master.maxsize(width=800, height=600)
# File Parser
self.parser = HoursParser.FileParser()
# Filename Label
self.file_select_text = tkinter.StringVar()
self.file_select_text.set(" ")
# Initialize Widgets
super().__init__(master)
self.pack()
# Label for Application
self.title_label = LabelFrame(master, text="Technical Services - Scheduler (Alpha)")
self.title_label.pack(fill="both", expand="yes")
self.inner_label = Label(self.title_label, text="Choose Hours File")
self.inner_label.pack()
# Button for File Selection
self.file_select_button = Button(self.title_label, text="Select File", command=lambda: self.file_button_press())
self.file_select_button.pack()
# Label for File Selection Button
self.file_select_label = Label(self.title_label, textvariable=self.file_select_text)
self.file_select_label.pack()
# Button for Parsing File
self.file_parse_button = Button(self.title_label, state=DISABLED, text="Read File", command=lambda: self.parse_button_pressed())
self.file_parse_button.pack()
# List of Events
self.events_list_box = Listbox(self.title_label)
self.events_list_box.pack()
# Show Info Button
self.show_info_button = Button(self.title_label, state="disabled", command=lambda: self.show_event_info())
self.show_info_button.pack()
# Shows information about event
self.text_area = Text(self.title_label)
self.text_area.pack()
# Called when Select File button is pressed.
def file_button_press(self):
self.parser.choose_file()
self.file_select_text.set(self.parser.file_name)
if self.parser.file_name is not None:
self.file_parse_button['state'] = 'normal'
def parse_button_pressed(self):
self.events_list = self.parser.parse_file()
self.populate_list_box(self.events_list_box)
# Puts names of events in a list from parsed file.
def populate_list_box(self, list_box):
i = 0
for event in self.events_list:
list_box.insert(i, event.get_event_name())
i += 1
self.show_info_button['state'] = 'normal'
def show_event_info(self):
# Store Active Time Index
event_list_index = int(self.events_list_box.index(ACTIVE))
# Clear text box from previous event details
# if self.text_area.get(END) is not "\n":
# self.text_area.delete(0, 'end')
# Display Formatted Information about Event.
self.text_area.insert(END, "Event: " + self.events_list[event_list_index].get_event_name())
self.text_area.insert(END, '\n')
self.text_area.insert(END, "Location: " + self.events_list[event_list_index].get_event_location())
self.text_area.insert(END, '\n')
self.text_area.insert(END, "Start Time: " + self.events_list[event_list_index].get_event_start_time())
self.text_area.insert(END, '\n')
self.text_area.insert(END, "End Time: " + self.events_list[event_list_index].get_event_end_time())
self.text_area.insert(END, '\n')
self.text_area.insert(END, "# of Staff: " + self.events_list[event_list_index].get_event_number_employees())
self.text_area.insert(END, '\n')
root = Tk()
root.wm_title("Scheduler (Alpha)")
main_app = UserInterface(master=root)
main_app.mainloop()
| itsknob/TechnicalServicesScheduler-Python | interface.py | Python | mit | 3,856 |
required_states = ['accept', 'reject', 'init']
class TuringMachine(object):
def __init__(self, sigma, gamma, delta):
self.sigma = sigma
self.gamma = gamma
self.delta = delta
self.state = None
self.tape = None
self.head_position = None
return
def initialize(self, input_string):
for char in input_string:
assert char in self.sigma
self.tape = list(input_string)
self.state = 'init'
self.head_position = 0
return
def simulate_one_step(self, verbose=False):
        if self.state in ['accept', 'reject']:
            # The machine has already halted; report the state and do nothing.
            print("# %s" % self.state)
            return
cur_symbol = self.tape[self.head_position]
transition = self.delta[(self.state, cur_symbol)]
if verbose:
self.print_tape_contents()
template = "delta({q_old}, {s_old}) = ({q}, {s}, {arr})"
print(template.format(q_old=self.state,
s_old=cur_symbol,
q=transition[0],
s=transition[1],
arr=transition[2])
)
self.state = transition[0]
self.tape[self.head_position] = transition[1]
        if transition[2] == 'left':
            self.head_position = max(0, self.head_position - 1)
        else:
            assert transition[2] == 'right'
            if self.head_position == len(self.tape) - 1:
                self.tape.append('#')
            self.head_position += 1
if verbose:
self.print_tape_contents()
return
def print_tape_contents(self):
formatted = ''.join(char if i != self.head_position else '[%s]' % char
for i, char in enumerate(self.tape))
print(formatted)
def run(self, input_string, verbose=False):
self.initialize(input_string)
while self.state not in ['reject', 'accept']:
self.simulate_one_step(verbose)
return str(self.tape)
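
# Example usage (an illustrative sketch, not part of the original module). The
# delta table maps (state, symbol) pairs to (new_state, new_symbol, direction)
# triples, and '#' acts as the blank symbol appended when the head walks off the
# right end of the tape. The toy machine below merely scans right over its input
# and accepts at the first blank; it exists only to exercise the simulator.
if __name__ == '__main__':
    scan_right = TuringMachine(
        sigma=['0', '1'],
        gamma=['0', '1', '#'],
        delta={
            ('init', '0'): ('init', '0', 'right'),
            ('init', '1'): ('init', '1', 'right'),
            ('init', '#'): ('accept', '#', 'right'),
        },
    )
    print(scan_right.run('0110', verbose=True))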
| ssanderson/turing.py | turing.py | Python | mit | 2,182 |
#!/usr/bin/python
import argparse
from os import path as os_path
import demo_project as demo
import traceback
def set_host_url_arg():
parser.add_argument('--host', required=True,
help='the url for the Materials Commons server')
def set_datapath_arg():
parser.add_argument('--datapath', required=True,
help='the path to the directory containing the files used by the build')
def set_apikey_arg():
    parser.add_argument('--apikey', required=True, help='apikey for the user building the demo project')
parser = argparse.ArgumentParser(description='Build Demo Project.')
set_host_url_arg()
set_datapath_arg()
set_apikey_arg()
args = parser.parse_args()
host = args.host
path = os_path.abspath(args.datapath)
key = args.apikey
# log_messages
# print "Running script to build demo project: "
# print " host = " + host + ", "
# print " key = " + key + ", "
# print " path = " + path
try:
builder = demo.DemoProject(host, path, key)
# a basic get request that makes no changes; will fail if there is a problem with the host or key
flag = builder.does_project_exist()
project = builder.build_project()
    if flag:
        print("Refreshed project with name = " + project.name)
    else:
        print("Built project with name = " + project.name)
except Exception as err:
traceback.print_exc()
    print("Error: {}".format(err))
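
# Example invocation (illustrative; the host URL, data path and key below are
# placeholders, not real values):
#
#     python build_project.py --host https://mc.example.org \
#         --datapath /path/to/demo-files --apikey YOUR_API_KEY
#
# All three flags are required by the argparse setup above, and --datapath is
# normalised to an absolute path before the DemoProject builder is created.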
| materials-commons/materialscommons.org | backend/scripts/demo-project/build_project.py | Python | mit | 1,400 |
# -*- coding: utf-8 -*-
# ProjectEuler/src/python/problem404.py
#
# Crisscross Ellipses
# ===================
# Published on Sunday, 2nd December 2012, 01:00 am
#
# Ea is an ellipse with an equation of the form x^2 + 4y^2 = 4a^2. Ea' is the
# rotated image of Ea by θ degrees counterclockwise around the origin O(0, 0)
# for 0° < θ < 90°. b is the distance to the origin of the two intersection
# points closest to the origin and c is the distance of the two other
# intersection points. We call an ordered triplet (a, b, c) a canonical
# ellipsoidal triplet if a, b and c are positive integers. For example, (209,
# 247, 286) is a canonical ellipsoidal triplet. Let C(N) be the number of
# distinct canonical ellipsoidal triplets (a, b, c) for a <= N. It can be
# verified that C(10^3) = 7, C(10^4) = 106 and C(10^6) = 11845. Find C(10^17).
import projecteuler as pe
def main():
pass
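
# A worked check of the example triplet (an illustrative sketch, not part of the
# original stub). Assuming the intersection points of Ea and its rotated copy lie
# on the bisector directions θ/2 and θ/2 + 90° (a derivation made here, not given
# in the problem text), a point of Ea at polar angle φ has r = 2a/sqrt(1 + 3*sin(φ)^2),
# so b = 2a/sqrt(1 + 3*cos(θ/2)^2) and c = 2a/sqrt(1 + 3*sin(θ/2)^2), which gives
# the necessary relation (2a/b)^2 + (2a/c)^2 = 5 for every valid triplet.
def _satisfies_triplet_identity(a, b, c):
    # Integer form of the relation above: 4*a^2*(b^2 + c^2) == 5*b^2*c^2.
    return 4 * a * a * (b * b + c * c) == 5 * b * b * c * c
# Under that assumption, _satisfies_triplet_identity(209, 247, 286) evaluates to True.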
if __name__ == "__main__":
main()
| olduvaihand/ProjectEuler | src/python/problem404.py | Python | mit | 944 |
from pwn.internal.shellcode_helper import *
@shellcode_reqs(arch=['i386', 'amd64'], os=['linux', 'freebsd'])
def fork(parent, child = None, os = None, arch = None):
"""Fork this shit."""
if arch == 'i386':
if os in ['linux', 'freebsd']:
return _fork_i386(parent, child)
elif arch == 'amd64':
if os in ['linux', 'freebsd']:
return _fork_amd64(parent, child)
bug('OS/arch combination (%s, %s) was not supported for fork' % (os, arch))
def _fork_amd64(parent, child):
code = """
push SYS_fork
pop rax
syscall
test rax, rax
jne %s
""" % parent
if child is not None:
code += 'jmp %s\n' % child
return code
def _fork_i386(parent, child):
code = """
push SYS_fork
pop eax
int 0x80
test eax, eax
jne %s
""" % parent
if child is not None:
code += 'jmp %s\n' % child
return code
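
# Example (illustrative only; not part of the original module): the architecture-
# specific helpers can be inspected directly to see the assembly this template
# produces. The label names are placeholders that a caller's shellcode would define.
if __name__ == '__main__':
    print(_fork_i386('parent_label', 'child_label'))
    print(_fork_amd64('parent_label', None))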
| Haabb/pwnfork | pwn/shellcode/misc/fork.py | Python | mit | 910 |