import argparse
from torch.utils.data import DataLoader
from model import BERT
from trainer import BERTTrainer
from dataset import BERTDataset, WordVocab
parser = argparse.ArgumentParser()
parser.add_argument("-d", "--train_dataset", required=True, type=str)
parser.add_argument("-t", "--test_dataset", type=str, default=None)
parser.add_argument("-v", "--vocab_path", required=True, type=str)
parser.add_argument("-o", "--output_dir", required=True, type=str)
parser.add_argument("-hs", "--hidden", type=int, default=256)
parser.add_argument("-n", "--layers", type=int, default=8)
parser.add_argument("-a", "--attn_heads", type=int, default=8)
parser.add_argument("-s", "--seq_len", type=int, default=20)
parser.add_argument("-b", "--batch_size", type=int, default=64)
parser.add_argument("-e", "--epochs", type=int, default=10)
parser.add_argument("-w", "--num_workers", type=int, default=5)
parser.add_argument("-cl", "--corpus_lines", type=int, default=None)
args = parser.parse_args()
print("Loading Vocab", args.vocab_path)
vocab = WordVocab.load_vocab(args.vocab_path)
print("Loading Train Dataset", args.train_dataset)
train_dataset = BERTDataset(args.train_dataset, vocab, seq_len=args.seq_len, corpus_lines=args.corpus_lines)
print("Loading Test Dataset", args.test_dataset)
test_dataset = BERTDataset(args.test_dataset, vocab, seq_len=args.seq_len) if args.test_dataset is not None else None
train_data_loader = DataLoader(train_dataset, batch_size=args.batch_size, num_workers=args.num_workers)
test_data_loader = DataLoader(test_dataset, batch_size=args.batch_size, num_workers=args.num_workers) \
if test_dataset is not None else None
bert = BERT(len(vocab), hidden=args.hidden, n_layers=args.layers, attn_heads=args.attn_heads)
trainer = BERTTrainer(bert, len(vocab), train_dataloader=train_data_loader, test_dataloader=test_data_loader)
for epoch in range(args.epochs):
trainer.train(epoch)
trainer.save(args.output_dir, epoch)
if test_data_loader is not None:
trainer.test(epoch)
|
# Creates an HTML file containing an interactive plot from the Ontario COVID-19 database.
import pandas as pd
import numpy as np
import ssl
import bokeh.plotting as plt
from bokeh.models import LinearAxis, Range1d, HoverTool, SingleIntervalTicker
from scipy.signal import savgol_filter as sf
data_url = 'https://data.ontario.ca/dataset/f4f86e54-872d-43f8-8a86-3892fd3cb5e6/resource/ed270bb8-340b-41f9-a7c6-e8ef587e6d11/download/covidtesting.csv'
school_data_url = 'https://data.ontario.ca/dataset/b1fef838-8784-4338-8ef9-ae7cfd405b41/resource/7fbdbb48-d074-45d9-93cb-f7de58950418/download/schoolcovidsummary.csv'
ssl._create_default_https_context = ssl._create_unverified_context
data = pd.read_csv(data_url)
sch_data = pd.read_csv(school_data_url)
columns = list(data)
sc_columns = list(sch_data)
tot_cases = np.nan_to_num(np.array(data['Total Cases'])).astype(np.int64)
new_cases = [tot_cases[x] - tot_cases[x - 1] for x in range(2, len(tot_cases))]
new_sch_cases = np.array(sch_data[sc_columns[5]])
tot_tests = np.nan_to_num(np.array(data[columns[9]])).astype(np.int64)
dates = pd.to_datetime(data[columns[0]])[2:]
dates_num = np.arange(1, len(dates) - 1)
tot_deaths = np.nan_to_num(np.array(data['Deaths']).astype(np.int64))
new_deaths = [tot_deaths[x] - tot_deaths[x - 1] for x in range(2, len(tot_deaths))]
axis2 = np.nan_to_num(np.array(new_deaths)) # Change column selection here
axis3 = np.nan_to_num(np.array(data[columns[9]][2:]))
smoothened_y1 = sf(new_cases, window_length=31, polyorder=3)
# Creating first figure and setting parameters
fig = plt.figure(x_axis_type="datetime", sizing_mode='stretch_both')
ticker = SingleIntervalTicker(interval=5, num_minor_ticks=10)
fig.xaxis.axis_label = 'Date'
fig.y_range = Range1d(start=0, end=max(new_cases) * 1.1)
fig.yaxis.axis_label = 'New Daily Cases'
# Create second axis and add it to plot
fig.extra_y_ranges = {"axis2": Range1d(start=0, end=max(axis2) * 1.1)}
fig.add_layout(LinearAxis(y_range_name="axis2", axis_label='Total Deaths'), 'right')
source = plt.ColumnDataSource(data={
'x': dates,
'y1': new_cases,
'y2': axis2,
'y3': smoothened_y1
})
plot1 = fig.line(
x='x',
y='y1',
legend_label='New daily cases',
color='green',
source=source
)
fig.add_tools(HoverTool(renderers=[plot1], tooltips=[('Value', '@y1'),
('Date', '@x{%F}')], formatters={'@x': 'datetime'}, mode='vline'))
plot1_1 = fig.line(
x='x',
y='y3',
color='green',
source=source,
line_width=6,
line_alpha=0.5,
legend_label='Savitzky-Golay Filter Smoothened'
)
plot2 = fig.line(
x='x',
y='y2',
legend_label='New Deaths',
color='purple',
y_range_name='axis2',
source=source
)
fig.add_tools(HoverTool(renderers=[plot2], tooltips=[('Value', '@y2'),
('Date', '@x{%F}')], formatters={'@x': 'datetime'}, mode='vline'))
fig.toolbar.logo = None
fig.toolbar_location = 'above'
fig.legend.location = 'top_left'
fig.ygrid.minor_grid_line_color = 'grey'
fig.ygrid.minor_grid_line_alpha = 0.1
fig.xgrid.minor_grid_line_color = 'grey'
fig.xgrid.minor_grid_line_alpha = 0.1
plt.output_file('covid_ontario_visual.html')
plt.show(fig)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
class AmpeDeviceVO(object):
def __init__(self):
self._add_time = None
self._device_id = None
self._model_id = None
self._model_name = None
self._model_no = None
self._product_id = None
self._product_name = None
@property
def add_time(self):
return self._add_time
@add_time.setter
def add_time(self, value):
self._add_time = value
@property
def device_id(self):
return self._device_id
@device_id.setter
def device_id(self, value):
self._device_id = value
@property
def model_id(self):
return self._model_id
@model_id.setter
def model_id(self, value):
self._model_id = value
@property
def model_name(self):
return self._model_name
@model_name.setter
def model_name(self, value):
self._model_name = value
@property
def model_no(self):
return self._model_no
@model_no.setter
def model_no(self, value):
self._model_no = value
@property
def product_id(self):
return self._product_id
@product_id.setter
def product_id(self, value):
self._product_id = value
@property
def product_name(self):
return self._product_name
@product_name.setter
def product_name(self, value):
self._product_name = value
def to_alipay_dict(self):
params = dict()
if self.add_time:
if hasattr(self.add_time, 'to_alipay_dict'):
params['add_time'] = self.add_time.to_alipay_dict()
else:
params['add_time'] = self.add_time
if self.device_id:
if hasattr(self.device_id, 'to_alipay_dict'):
params['device_id'] = self.device_id.to_alipay_dict()
else:
params['device_id'] = self.device_id
if self.model_id:
if hasattr(self.model_id, 'to_alipay_dict'):
params['model_id'] = self.model_id.to_alipay_dict()
else:
params['model_id'] = self.model_id
if self.model_name:
if hasattr(self.model_name, 'to_alipay_dict'):
params['model_name'] = self.model_name.to_alipay_dict()
else:
params['model_name'] = self.model_name
if self.model_no:
if hasattr(self.model_no, 'to_alipay_dict'):
params['model_no'] = self.model_no.to_alipay_dict()
else:
params['model_no'] = self.model_no
if self.product_id:
if hasattr(self.product_id, 'to_alipay_dict'):
params['product_id'] = self.product_id.to_alipay_dict()
else:
params['product_id'] = self.product_id
if self.product_name:
if hasattr(self.product_name, 'to_alipay_dict'):
params['product_name'] = self.product_name.to_alipay_dict()
else:
params['product_name'] = self.product_name
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = AmpeDeviceVO()
if 'add_time' in d:
o.add_time = d['add_time']
if 'device_id' in d:
o.device_id = d['device_id']
if 'model_id' in d:
o.model_id = d['model_id']
if 'model_name' in d:
o.model_name = d['model_name']
if 'model_no' in d:
o.model_no = d['model_no']
if 'product_id' in d:
o.product_id = d['product_id']
if 'product_name' in d:
o.product_name = d['product_name']
return o
|
from browser import document, bind
# Note: avoiding unnecessary imports saves several seconds of script loading time.
# For example, use `"message".title()` instead of `import string; string.capwords("message")`.
import datetime
from collections import Counter
from dateutil.easter import easter  # from the python-dateutil package
from charts.css import bar, column, line  # from the charts.css.py package
def easter_stat(starting_year):
easters = list(easter(year) for year in range(starting_year, starting_year + 100))
months = Counter(e.month for e in easters)
easters_per_month = sorted(months.items())
document["easters_per_month"].html = bar(
easters_per_month,
headers_in_first_column=True,
heading="How many Easters happen per month during the sampling period",
)
dates = Counter("{}-{:>2}".format(e.month, e.day) for e in easters)
easters_per_date = sorted(dates.items())
document["easters_per_date"].html = column(
easters_per_date,
headers_in_first_column=True,
heading="How many Easters happen per date during the sampling period",
)
year_by_year = [(e, (e - datetime.date(e.year, 1, 1)).days) for e in easters]
document["year_by_year"].html = line(
year_by_year,
heading="""How Easter day swings back and forth, year after year.
(Y-axis is the number of days from January 1st to the Easter date of that year)""",
headers_in_first_column=True,
hide_label=lambda row_number, header: bool(row_number % 10), # Hide most labels
hide_data=True, # Otherwise it would be messy
tooltip_builder="{label}".format,
)
@bind("#trigger", "click")
def trigger(event):
easter_stat(int(document["year_starts_at"].value))
|
from typing import Union
class UInt160(bytes):
"""
Represents a 160-bit unsigned integer.
"""
def __init__(self, arg: Union[bytes, int] = 0):
super().__init__()
pass
class UInt256(bytes):
"""
Represents a 256-bit unsigned integer.
"""
def __init__(self, arg: Union[bytes, int] = 0):
super().__init__()
pass
class ECPoint(bytes):
"""
Represents a coordinate pair for elliptic curve cryptography (ECC) structures.
"""
def __init__(self, arg: bytes):
super().__init__()
pass
|
import json
import asyncio
import pytest
from model_mommy import mommy
from aiohttp import ws_connect, WSServerHandshakeError
from aiohttp.web import Application, MsgType
from rest_framework.authtoken.models import Token
from redis_pubsub.contrib.websockets import websocket, websocket_pubsub
from redis_pubsub.contrib.websockets.util import _clean_route
from testapp.models import Message
@pytest.mark.parametrize("route, expect", [
("/hello", "/hello/"),
("hello", "/hello/"),
("hello/world", "/hello/world/"),
("/hello/world/", "/hello/world/"),
])
def test_clean_route(route, expect):
route = _clean_route(route)
assert route == expect
def test_websocket_wrapper():
loop = asyncio.get_event_loop()
@websocket("/")
def handler(ws, params, **kwargs):
ws.send_str("hello, world!")
@asyncio.coroutine
def start_server(loop):
app = Application()
app.router.add_route(*handler.route)
srv = yield from loop.create_server(app.make_handler(), "localhost", 9000)
return srv
@asyncio.coroutine
def go(loop):
srv = yield from start_server(loop)
client = yield from ws_connect("http://localhost:9000")
message = yield from client.receive()
assert message.data == "hello, world!"
yield from client.close()
srv.close()
yield from srv.wait_closed()
loop.run_until_complete(go(loop))
@pytest.mark.django_db
def test_websocket_pubsub_wrapper(subscription):
loop = asyncio.get_event_loop()
@websocket_pubsub("/")
def handler(ws, params, **kwargs):
reader = subscription.get_reader(kwargs["manager"])
@reader.callback
def send_message(channel_name, model):
ws.send_str(model.name)
return False
listener = yield from reader.listen()
yield from asyncio.gather(listener)
@asyncio.coroutine
def pub():
yield from asyncio.sleep(1) # wait a second for the listener to start
return subscription.channel.publish(subscription.channel)
@asyncio.coroutine
def start_server(loop):
app = Application()
app.router.add_route(*handler.route)
srv = yield from loop.create_server(app.make_handler(), "localhost", 9000)
return srv
@asyncio.coroutine
def go(loop):
srv = yield from start_server(loop)
client = yield from ws_connect("http://localhost:9000")
yield from pub()
message = yield from client.receive()
assert message.data == subscription.channel.name
yield from client.close()
srv.close()
yield from srv.wait_closed()
loop.run_until_complete(go(loop))
def test_websocket_wrapper_authentication_error():
loop = asyncio.get_event_loop()
@websocket("/", authenticate=True)
def handler(ws, params, **kwargs):
ws.send_str("hello, world!")
@asyncio.coroutine
def start_server(loop):
app = Application()
app.router.add_route(*handler.route)
srv = yield from loop.create_server(app.make_handler(), "localhost", 9000)
return srv
@asyncio.coroutine
def go(loop):
srv = yield from start_server(loop)
with pytest.raises(WSServerHandshakeError):
client = yield from ws_connect("http://localhost:9000")
yield from client.close()
srv.close()
yield from srv.wait_closed()
loop.run_until_complete(go(loop))
@pytest.mark.django_db
def test_websocket_wrapper_invalid_token_error():
loop = asyncio.get_event_loop()
@websocket("/", authenticate=True)
def handler(ws, params, **kwargs):
ws.send_str("hello, world!")
@asyncio.coroutine
def start_server(loop):
app = Application()
app.router.add_route(*handler.route)
srv = yield from loop.create_server(app.make_handler(), "localhost", 9000)
return srv
@asyncio.coroutine
def go(loop):
srv = yield from start_server(loop)
with pytest.raises(WSServerHandshakeError):
client = yield from ws_connect("http://localhost:9000?token=ooo")
yield from client.close()
srv.close()
yield from srv.wait_closed()
loop.run_until_complete(go(loop))
@pytest.mark.django_db
def test_websocket_wrapper_valid_token(subscription):
loop = asyncio.get_event_loop()
token, _ = Token.objects.get_or_create(user=subscription.subscriber)
token = token.key
@websocket("/", authenticate=True)
def handler(ws, params, **kwargs):
assert kwargs["user"].id == subscription.subscriber.id
ws.send_str("hello, world!")
@asyncio.coroutine
def start_server(loop):
app = Application()
app.router.add_route(*handler.route)
srv = yield from loop.create_server(app.make_handler(), "localhost", 9000)
return srv
@asyncio.coroutine
def go(loop):
srv = yield from start_server(loop)
uri = "http://localhost:9000?token=" + token
client = yield from ws_connect(uri)
message = yield from client.receive()
assert message.data == "hello, world!"
yield from client.close()
srv.close()
yield from srv.wait_closed()
loop.run_until_complete(go(loop))
def test_websocket_pubsub_wrapper_authentication_error():
loop = asyncio.get_event_loop()
@websocket_pubsub("/", authenticate=True)
def handler(ws, params, **kwargs):
ws.send_str("hello, world!")
@asyncio.coroutine
def start_server(loop):
app = Application()
app.router.add_route(*handler.route)
srv = yield from loop.create_server(app.make_handler(), "localhost", 9000)
return srv
@asyncio.coroutine
def go(loop):
srv = yield from start_server(loop)
with pytest.raises(WSServerHandshakeError):
client = yield from ws_connect("http://localhost:9000")
yield from client.close()
srv.close()
yield from srv.wait_closed()
loop.run_until_complete(go(loop))
@pytest.mark.django_db
def test_websocket_pubsub_wrapper_invalid_token_error():
loop = asyncio.get_event_loop()
@websocket_pubsub("/", authenticate=True)
def handler(ws, params, **kwargs):
ws.send_str("hello, world!")
@asyncio.coroutine
def start_server(loop):
app = Application()
app.router.add_route(*handler.route)
srv = yield from loop.create_server(app.make_handler(), "localhost", 9000)
return srv
@asyncio.coroutine
def go(loop):
srv = yield from start_server(loop)
with pytest.raises(WSServerHandshakeError):
client = yield from ws_connect("http://localhost:9000?token=ooo")
yield from client.close()
srv.close()
yield from srv.wait_closed()
loop.run_until_complete(go(loop))
@pytest.mark.django_db
def test_websocket_pubsub_wrapper_valid_token(subscription):
loop = asyncio.get_event_loop()
token, _ = Token.objects.get_or_create(user=subscription.subscriber)
token = token.key
@websocket_pubsub("/", authenticate=True)
def handler(ws, params, **kwargs):
assert kwargs["user"].id == subscription.subscriber.id
reader = subscription.get_reader(kwargs["manager"])
@reader.callback
def send_message(channel_name, model):
ws.send_str(model.name)
return False
listener = yield from reader.listen()
yield from asyncio.gather(listener)
@asyncio.coroutine
def start_server(loop):
app = Application()
app.router.add_route(*handler.route)
srv = yield from loop.create_server(app.make_handler(), "localhost", 9000)
return srv
@asyncio.coroutine
def pub():
yield from asyncio.sleep(1) # wait a second for the listener to start
return subscription.channel.publish(subscription.channel)
@asyncio.coroutine
def go(loop):
srv = yield from start_server(loop)
uri = "http://localhost:9000?token=" + token
client = yield from ws_connect(uri)
yield from pub()
message = yield from client.receive()
assert message.data == subscription.channel.name
yield from client.close()
srv.close()
yield from srv.wait_closed()
loop.run_until_complete(go(loop))
@pytest.mark.django_db
def test_all_subscriptions(subscription):
loop = asyncio.get_event_loop()
token, _ = Token.objects.get_or_create(user=subscription.subscriber)
token = token.key
message = mommy.make(Message,
channel=subscription.channel,
to_user=subscription.subscriber)
@websocket_pubsub("/", authenticate=True)
def subscriptions(ws, params, user, manager):
def callback(channel_name, model):
ws.send_str(model.serialize())
return False
yield from manager.listen_to_all_subscriptions(user, callback)
while True:
message = yield from ws.receive()
if message.tp in (MsgType.error, MsgType.close):
break
@asyncio.coroutine
def start_server(loop):
app = Application()
app.router.add_route(*subscriptions.route)
srv = yield from loop.create_server(app.make_handler(), "localhost", 9000)
return srv
@asyncio.coroutine
def go(loop):
srv = yield from start_server(loop)
uri = "http://localhost:9000?token=" + token
client = yield from ws_connect(uri)
yield from asyncio.sleep(1)
message.save()
message_ = yield from client.receive()
data = json.loads(message_.data)
message.refresh_from_db()
assert data[0]["pk"] == message.pk
yield from client.close()
srv.close()
yield from srv.wait_closed()
loop.run_until_complete(go(loop))
|
from typing import Any
from pyspark.ml.clustering import KMeans
from pyspark.ml.evaluation import MulticlassClassificationEvaluator
from pyspark.sql import DataFrame
from sparksampling.core.job.base_job import BaseJob
import numpy as np
from pyspark.sql.types import DoubleType
from sparksampling.core.mlsamplinglib.func import vectorized_feature
import random
class KmeansEvaluationJob(BaseJob):
type_map = {
'source_path': str,
'selected_features_list': list,
'K': Any,
'round': int,
'key': str,
}
def __init__(self, source_path=None, selected_features_list=None, key=None, K=None, round=10, *args, **kwargs):
super(KmeansEvaluationJob, self).__init__(*args, **kwargs)
self.source_path = source_path
self.selected_features_list = selected_features_list
self.K = K
self.round = round
self.key = key
self.check_type()
    def KM(self, dataset=None, K=2, predict=None, seed=None):
        # Trains a k-means model. The random seed is drawn per call rather than once at import time.
        if seed is None:
            seed = np.random.randint(1, 65535)
kmeans = KMeans(seed=seed).setK(K).setFeaturesCol('features').setPredictionCol(
'prediction')
model = kmeans.fit(dataset)
# Make predictions
predictions = model.transform(predict)
# Shows the result.
centers = model.clusterCenters()
return centers, predictions
def evaluation_centers(self, centers, sample_centers):
def metric_corrcoef_matrix(corrcoef_matrix):
corrcoef = abs(corrcoef_matrix[0][1])
score = int(corrcoef * 100)
return score
result = []
for i in range(len(centers)):
mix = np.array([centers[i], sample_centers[i]])
corrcoef_matrix = np.corrcoef(mix)
result.append(metric_corrcoef_matrix(corrcoef_matrix))
return int(np.mean(result))
def evaluation_prediction(self, prediction, sample_prediction):
prediction = prediction.withColumnRenamed('prediction', 'label')
df = sample_prediction.join(prediction, ['features'])
df = df.select(['prediction', 'label']).withColumn("label", df.label.cast(DoubleType())).withColumn(
"prediction", df.prediction.cast(DoubleType()))
evaluator_accuracy = MulticlassClassificationEvaluator(predictionCol="prediction", metricName="accuracy",
labelCol='label')
return evaluator_accuracy.evaluate(df)
def _statistics(self, df: DataFrame, *args, **kwargs) -> dict:
if not self.K:
self.K = self.get_K_num_by_label(df)
source_df = self._get_df_from_source(self.source_path, dataio=kwargs.get('data_io')).select(
*self.selected_features_list)
df = df.select(*self.selected_features_list)
source_df = vectorized_feature(source_df)
df = vectorized_feature(df)
acc, cent, score = 0, 0, 0
for _ in range(self.round):
t_acc, t_cent = self.__kmeans_acc(source_df, df, *args, **kwargs)
t_acc_score = int((t_acc * 100) ** (1 / 2) * 10)
t_score = np.mean([t_acc_score, t_cent])
if t_score > score:
score, acc, cent = t_score, t_acc, t_cent
print(f"acc: {t_acc_score}, cent: {t_cent}, score:{t_score}")
return {
"score": score,
"accuracy": acc,
"centers_result": cent,
}
def __kmeans_acc(self, source_df: DataFrame, df: DataFrame, *args, **kwargs):
seed = np.random.randint(1, 65535)
centers, predictions = self.KM(source_df, self.K, df, seed=seed)
sample_centers, sample_predictions = self.KM(df, self.K, df, seed=seed)
        # Compute the correlation coefficient between the full-data and sample cluster centers
centers_result = self.evaluation_centers(centers, sample_centers)
        # Compute the accuracy/recall of the sample clustering against the full-data clustering
accuracy = self.evaluation_prediction(predictions, sample_predictions)
return accuracy, centers_result
def get_K_num_by_label(self, df: DataFrame):
if not self.key:
return 2
print(f"Get K by label col:{self.key}")
return df.select(self.key).distinct().count()
|
#!python
import click
# from prettytable import PrettyTable
import json
import requests
import os
from string import Template
import re
METAURL = 'https://api.softlayer.com/metadata/v3.1'
SERVICEMARKDOWN = """---
title: "$service"
description: "$documentation"
date: "2018-02-12"
tags:
- "$layoutType"
- "sldn"
- "$serviceType"
classes:
- "$mainService"
type: "reference"
layout: "$layoutType"
mainService : "$mainService"
---
"""
LISTMARKDOWN = """---
title: "$type"
description: "List of $type"
date: "2018-02-12"
type: reference
layout: $listType
url: /reference/$type/list.html
---
"""
def wikiToMarkdownFilter(text):
# the r'(\|[0-9A-Za-z_\'\(\) ]*)?' Regex is required (over r'(\|.*)?' ) because for some reason the smaller regex
# was causing the whole JSON string to be truncated.
# [[SoftLayer_Account]] -> reference/datatypes/SoftLayer_Account
text = re.sub(r'\[\[(?P<one>\w+)( \(type\))?(\|[0-9A-Za-z_\'\(\) ]*)?\]\]', '[\g<one>](/reference/datatypes/\g<one>)', text)
#text1 = re.sub(r'\[\[(?P<one>\w+)\]\]', '[\g<one>](reference/datatypes/\g<one>)', text)
# [[SoftLayer_Account/getObject]] -> reference/services/SoftLayer_Account/getObject
text = re.sub(r'\[\[(?P<one>\w+)\/(?P<two>\w+)(\|[0-9A-Za-z_\'\(\) ]*)?\]\]', "[\g<one>::\g<two>](/reference/services/\g<one>/\g<two>)", text)
    # [[SoftLayer_Account::id]] -> reference/datatypes/SoftLayer_Account/#id
    text = re.sub(r'\[\[(?P<one>\w+)::(?P<two>\w+)(\|[0-9A-Za-z_\'\(\) ]*)?\]\]', r"[\g<one>::\g<two>](/reference/datatypes/\g<one>/#\g<two>)", text)
return text
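# A quick illustration of the conversions above (hypothetical inputs, shown as comments so the
# CLI behaviour is unchanged):
#   wikiToMarkdownFilter('See [[SoftLayer_Account]]')
#     -> 'See [SoftLayer_Account](/reference/datatypes/SoftLayer_Account)'
#   wikiToMarkdownFilter('See [[SoftLayer_Account/getObject]]')
#     -> 'See [SoftLayer_Account::getObject](/reference/services/SoftLayer_Account/getObject)'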
def cleanupYaml(text):
# Remove double quotes, because hugo will complain about that.
text = text.replace('"', "'")
# Even if they are escaped.
text = text.replace('\\"', "'")
# Newlines need to end with 2 spaces and have another newline for the markdown to respect them.
# text = text.replace('\n\n', ' \n\n')
return text
class SLDNgenerator():
def __init__(self):
cwd = os.getcwd()
if not cwd.endswith('githubio_source'):
raise Exception(f"Working Directory should be githubio_source, is currently {cwd}")
# Make sure required directories exist
if not os.path.isdir(f'{cwd}/content/reference/datatypes'):
os.mkdir(f'{cwd}/content/reference/datatypes')
substitions = {
"type": "datatypes",
"listType": "datatypelist"
}
template = Template(LISTMARKDOWN)
with open(f"{cwd}/content/reference/datatypes/list.md", "w", encoding="utf-8") as f:
f.write(template.substitute(substitions))
if not os.path.isdir(f'{cwd}/content/reference/services'):
os.mkdir(f'{cwd}/content/reference/services')
substitions = {
"type": "services",
"listType": "servicelist"
}
template = Template(LISTMARKDOWN)
with open(f"{cwd}/content/reference/services/list.md", "w", encoding="utf-8") as f:
f.write(template.substitute(substitions))
self.metajson = None
def getMetadata(self, url):
response = requests.get(url)
if response.status_code != 200:
raise Exception(f"{url} returned \n{response.text}\nHTTP CODE: {response.status_code}")
self.metajson = response.json()
return self.metajson
def getLocalMetadata(self, filename='data/sldn_metadata.json'):
with open(filename, "r", encoding="utf-8") as f:
metadata = f.read()
self.metajson = json.loads(metadata)
return self.metajson
def saveMetadata(self, filename='data/sldn_metadata.json'):
print(f"Writing SLDN Metadata to {filename}")
with open(filename, 'w') as f:
json.dump(self.metajson, f, indent=4)
def generateMarkdown(self):
for serviceName, service in self.metajson.items():
print(f"Working on: {serviceName}")
# noservice means datatype only.
if service.get('noservice', False) == False:
self.writeServiceMarkdown(service)
for methodName, method in service.get('methods', {}).items():
self.writeMethodMarkdown(method, serviceName=serviceName)
self.writeDatatypeMarkdown(service)
def addInORMMethods(self):
for serviceName, service in self.metajson.items():
# noservice means datatype only.
if service.get('noservice', False) == False:
for propName, prop in service.get('properties', {}).items():
if prop.get('form', '') == 'relational':
                        # capitalize() sadly lowercases the other letters in the string
ormName = f"get{propName[0].upper()}{propName[1:]}"
ormMethod = {
'doc': prop.get('doc', ''),
'docOverview': "",
'name': ormName,
'type': prop.get('type'),
'typeArray': prop.get('typeArray', None),
'ormMethod': True,
'maskable': True,
'filterable': True,
'deprecated': prop.get('deprecated', False)
}
if ormMethod['typeArray']:
ormMethod['limitable'] = True
self.metajson[serviceName]['methods'][ormName] = ormMethod
return self.metajson
def addInChildMethods(self):
for serviceName, service in self.metajson.items():
self.metajson[serviceName]['methods'] = self.getBaseMethods(serviceName, 'methods')
self.metajson[serviceName]['properties'] = self.getBaseMethods(serviceName, 'properties')
def getBaseMethods(self, serviceName, objectType):
"""Responsible for pulling in properties or methods from the base class of the service requested"""
service = self.metajson[serviceName]
methods = service.get(objectType, {})
if service.get('base', "SoftLayer_Entity") != "SoftLayer_Entity":
baseMethods = self.getBaseMethods(service.get('base'), objectType)
for bName, bMethod in baseMethods.items():
if not methods.get(bName, False):
methods[bName] = bMethod
return methods
def writeServiceMarkdown(self, serviceJson):
service_dir = f"./content/reference/services/{serviceJson.get('name')}/"
if not (os.path.isdir(service_dir)):
os.mkdir(service_dir)
template = Template(SERVICEMARKDOWN)
# Needed to get the category of the service.
service_parts = serviceJson.get('name').split('_')
documentation = serviceJson.get('serviceDoc', '')
substitions = {
'service': serviceJson.get('name'),
'documentation': cleanupYaml(documentation),
'serviceType': service_parts[1],
'layoutType' : 'service',
'mainService': serviceJson.get('name')
}
with open(f"{service_dir}/_index.md", "w", encoding="utf-8") as f:
f.write(template.substitute(substitions))
def writeDatatypeMarkdown(self, serviceJson):
service_dir = f"./content/reference/datatypes/{serviceJson.get('name')}.md"
# if not (os.path.isdir(service_dir)):
# os.mkdir(service_dir)
template = Template(SERVICEMARKDOWN)
# Needed to get the category of the service.
service_parts = serviceJson.get('name').split('_')
# For datatypes, docs will either be in one of these 2 fields.
documentation = serviceJson.get('typeDoc', '')
if documentation == '':
documentation = serviceJson.get('serviceDoc', '')
substitions = {
'service': serviceJson.get('name'),
'documentation': cleanupYaml(documentation),
'serviceType': service_parts[1],
'layoutType' : 'datatype',
'mainService': serviceJson.get('name')
}
with open(f"{service_dir}", "w", encoding="utf-8") as f:
f.write(template.substitute(substitions))
def writeMethodMarkdown(self, serviceJson, serviceName):
service_dir = f"./content/reference/services/{serviceName}/"
if not (os.path.isdir(service_dir)):
os.mkdir(service_dir)
template = Template(SERVICEMARKDOWN)
# Needed to get the category of the service.
service_parts = serviceName.split('_')
documentation = serviceJson.get('doc', '')
if documentation == '':
documentation = serviceJson.get('docOverview', '')
substitions = {
'service': serviceJson.get('name'),
'documentation': cleanupYaml(documentation),
'serviceType': service_parts[1],
'layoutType' : 'method',
'mainService': serviceName
}
with open(f"{service_dir}/{serviceJson.get('name','')}.md", "w", encoding="utf-8") as f:
f.write(template.substitute(substitions))
@click.command()
@click.option('--download', default=False, is_flag=True)
def main(download):
generator = SLDNgenerator()
if download:
try:
metajson = generator.getMetadata(url = METAURL)
generator.saveMetadata()
except Exception as e:
print("========== ERROR ==========")
print(f"{e}")
print("========== ERROR ==========")
else:
metajson = generator.getLocalMetadata()
    # Fix MediaWiki links. So far it's easiest just to regex the whole JSON string.
jsonString = json.dumps(metajson)
jsonString = wikiToMarkdownFilter(jsonString)
# print(jsonString)
generator.metajson = json.loads(jsonString)
generator.addInChildMethods()
generator.addInORMMethods()
generator.saveMetadata()
print("Generating Markdown....")
# print(metajson)
generator.generateMarkdown()
if __name__ == "__main__":
    main()
|
import gzip
import re  # used by minify() to strip console statements
from io import BytesIO
try:
    from jsmin import jsmin
    from cssmin import *
except ImportError:
    jsmin = None
    cssmin = None
def gzip_content(headers, stream):
"Gzips a file"
headers['Content-Encoding'] = 'gzip'
buff = BytesIO()
gz = gzip.GzipFile(filename="tmp", fileobj=buff, mode='w')
gz.write(stream.read())
gz.close()
stream.close()
stream = BytesIO(buff.getvalue())
return headers, stream
def minify(filename, filetype, stream):
if ".min" in filename or ".pack" in filename:
return stream, 'already minified'
if filetype == 'application/javascript':
# Remove console statements
js = re.sub("console.\\w+\\(.*?\\);?", "", stream.read())
js = jsmin(js)
stream = BytesIO(js)
elif filetype == 'text/css':
css = cssmin(stream.read())
stream = BytesIO(css)
return stream, 'minified'
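# A minimal, illustrative sanity check for gzip_content (run only as a script):
if __name__ == "__main__":
    demo_headers, demo_stream = gzip_content({}, BytesIO(b"hello world"))
    assert demo_headers["Content-Encoding"] == "gzip"
    # Decompressing the returned stream should give back the original payload.
    assert gzip.decompress(demo_stream.read()) == b"hello world"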
|
import django_filters
from django_filters import rest_framework as filters
from rest_framework import viewsets
from .models import Ilm, Jaam
from .serializers import IlmSerializer, JaamSerializer
class IlmFilter(filters.FilterSet):
    # Enables API queries such as: http://18.196.203.237:8000/api/i/?m=2&y=2013&d=24&h=12
    # or http://18.196.203.237:8000/api/i/?y=2013
m = django_filters.NumberFilter(field_name='timestamp', lookup_expr='month')
y = django_filters.NumberFilter(field_name='timestamp', lookup_expr='year')
d = django_filters.NumberFilter(field_name='timestamp', lookup_expr='day')
h = django_filters.NumberFilter(field_name='timestamp', lookup_expr='hour')
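# For reference, a request like /api/i/?y=2013&m=2 maps through the filters above to
# roughly: Ilm.objects.filter(timestamp__year=2013, timestamp__month=2)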
class IlmViewSet(viewsets.ModelViewSet):
queryset = Ilm.objects.all().order_by('-timestamp')
serializer_class = IlmSerializer
    # The following is needed so that filtered API queries can be made
filter_backends = (filters.DjangoFilterBackend,)
filter_class = IlmFilter
class JaamViewSet(viewsets.ModelViewSet):
queryset = Jaam.objects.all()
serializer_class = JaamSerializer
|
import streamlit as st
import pandas as pd
import numpy as np
import functools
import fetch
import data
import error
def main():
if st.sidebar.button("Refresh Data"):
st.caching.clear_cache()
league_id = st.sidebar.text_input("League ID", value="309333")
try:
league_id = int(league_id)
except ValueError as exc:
st.sidebar.error(
f"Invalid League ID: {league_id}. Must be an integer."
)
raise st.StopException from exc
try:
league = data.H2HLeague.create(
functools.partial(fetch.get_league_json, league_id)
)
except Exception as exc:
st.error(f"Error obtaining league data: {exc}")
raise st.StopException from exc
st.title(f"FPL H2H Tool: {league.name}")
st.header("Current Standings")
st.dataframe(league.display_df)
st.dataframe(league.standings_df)
if __name__ == "__main__":
main()
|
import math
import os
from payton.scene import Scene
from payton.scene.geometry import Wavefront
scene = Scene()
scene.background.top_color = [0, 0, 0, 1]
scene.background.bottom_color = [0, 0, 0, 1]
amount = 0.5
total_angles = -30
def swing(period, total):
global total_angles, amount
scene.objects["lamp"].rotate_around_x(math.radians(amount))
total_angles += amount
if total_angles >= 30:
amount = -0.5
if total_angles <= -60:
amount = 0.5
light_pos = scene.objects["lamp"].to_absolute([0, 0, -3.4])
scene.lights[0].position = light_pos
table_file = os.path.join(os.path.dirname(__file__), "scene", "table.obj")
lamp_file = os.path.join(os.path.dirname(__file__), "scene", "lamp.obj")
table = Wavefront(table_file)
lamp = Wavefront(lamp_file)
lamp.fix_normals(reverse=True)
lamp.position = [0, 0, 12]
scene.create_clock("swing", 0.01, swing)
scene.active_camera.position = [
8.261520800759284,
8.259030103723475,
17.54799562339614,
]
scene.lights[0].position = [0, 0, 8.6]
scene.add_object("table", table)
scene.add_object("lamp", lamp)
scene.run(start_clocks=True)
|
import numpy as np
# Fuel is infinite, so an agent can learn to fly and then land on its first attempt.
# Action is two real values vector from -1 to +1. First controls main engine, -1..0 off, 0..+1 throttle from 50% to 100% power.
# Engine can't work with less than 50% power.
# Second value -1.0..-0.5 fire left engine, +0.5..+1.0 fire right engine, -0.5..0.5 off.
class Shield:
def __init__(self, thresholds_main_engine=0.9):
self.thresholds_main_engine = thresholds_main_engine
def shield_action(self, action):
action_main_engine = np.clip(action[0], -self.thresholds_main_engine,
self.thresholds_main_engine)
action = [action_main_engine, action[1]]
return action
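# For example (illustrative), with the default threshold of 0.9:
#   Shield().shield_action([1.0, 0.2]) -> [0.9, 0.2]
# The main-engine command is clipped; the side-engine command passes through unchanged.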
if __name__ == '__main__':
shield = Shield(thresholds_main_engine=0.9)
a = np.array([0, 1])
    action = shield.shield_action(a)
    print(action)
|
from six.moves.urllib.parse import (
parse_qsl,
quote_plus,
unquote_plus,
urlencode,
urlparse,
urlunparse,
)
WEB2DBND_MAP = {"http": "dbnd", "https": "dbnd+s"}
DBND2WEB_MAP = {
"dbnd": "http",
"databand": "http",
"dbnd+s": "https",
"databand+s": "https",
}
def is_composite_uri(uri):
"""Check if the passed uri has a dbnd store uri schema
Parameters:
uri (str): uri to be checked
Returns:
bool: is the uri has a dbnd store uri schema
>>> is_composite_uri('dbnd://localhost:8080')
True
>>> is_composite_uri('dbnd+s://localhost:8080')
True
>>> is_composite_uri('databand://localhost:8080')
True
>>> is_composite_uri('databand+s://localhost:8080')
True
>>> is_composite_uri('http://localhost:8080')
False
"""
parsed_url = urlparse(uri)
return parsed_url.scheme in DBND2WEB_MAP
def build_composite_uri(dbnd_store_url, duplicate_tracking_to):
"""Returns dbnd store uri that contain uri to duplicate tracking data to.
E.g. dbnd://localhost:8080?duplicate_tracking_to=http%3A%2F%2Fmlflow-store%3A80%2F
Parameters:
dbnd_store_url (str): dbnd store url to send tracking data to
duplicate_tracking_to (str): mlflow store uri to duplicate tracking data to
Returns:
str: dbnd store composite uri to be used by MLFlow
>>> build_composite_uri('http://localhost:8080', 'http://mlflow-store:80/')
'dbnd://localhost:8080?duplicate_tracking_to=http%253A%252F%252Fmlflow-store%253A80%252F'
>>> build_composite_uri('https://localhost:8080', 'http://mlflow-store:80/')
'dbnd+s://localhost:8080?duplicate_tracking_to=http%253A%252F%252Fmlflow-store%253A80%252F'
>>> build_composite_uri('http://localhost:8080', '')
'dbnd://localhost:8080?duplicate_tracking_to='
>>> build_composite_uri('http://localhost:8080', None)
'dbnd://localhost:8080'
"""
parsed_url = urlparse(dbnd_store_url)
assert parsed_url.scheme in WEB2DBND_MAP
parsed_query_dict = dict(parse_qsl(parsed_url.query))
if duplicate_tracking_to is not None:
parsed_query_dict["duplicate_tracking_to"] = quote_plus(duplicate_tracking_to)
parsed_url = parsed_url._replace(scheme=WEB2DBND_MAP[parsed_url.scheme])
parsed_url = parsed_url._replace(query=urlencode(parsed_query_dict))
return urlunparse(parsed_url)
def parse_composite_uri(composite_uri):
"""Returns a tuple with a parsed dbnd store url and mlflow uri to duplicate tracking data to.
E.g. dbnd://localhost:8080?duplicate_tracking_to=http%3A%2F%2Fmlflow-store%3A80%2F
Parameters:
composite_uri (str): dbnd store uri with dbnd schema
Returns:
tuple: dbnd_store_url and duplicate_tracking_to url
>>> parse_composite_uri('dbnd://localhost:8080?duplicate_tracking_to=http%253A%252F%252Fmlflow-store%253A80%252F')
('http://localhost:8080', 'http://mlflow-store:80/')
>>> parse_composite_uri('dbnd+s://localhost:8080?duplicate_tracking_to=http%253A%252F%252Fmlflow-store%253A80%252F')
('https://localhost:8080', 'http://mlflow-store:80/')
>>> parse_composite_uri('dbnd://localhost:8080?duplicate_tracking_to=')
('http://localhost:8080', None)
>>> parse_composite_uri('dbnd+s://localhost:8080?duplicate_tracking_to=')
('https://localhost:8080', None)
>>> parse_composite_uri('databand://localhost:8080?duplicate_tracking_to=')
('http://localhost:8080', None)
>>> parse_composite_uri('databand+s://localhost:8080?duplicate_tracking_to=')
('https://localhost:8080', None)
>>> parse_composite_uri('dbnd://localhost:8080')
('http://localhost:8080', None)
"""
parsed_url = urlparse(composite_uri)
assert parsed_url.scheme in DBND2WEB_MAP
parsed_query_dict = dict(parse_qsl(parsed_url.query))
duplicate_tracking_to = parsed_query_dict.pop("duplicate_tracking_to", None)
if duplicate_tracking_to:
duplicate_tracking_to = unquote_plus(duplicate_tracking_to)
parsed_url = parsed_url._replace(scheme=DBND2WEB_MAP[parsed_url.scheme])
parsed_url = parsed_url._replace(query=urlencode(parsed_query_dict))
return urlunparse(parsed_url), duplicate_tracking_to
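# The docstrings above double as doctests; a minimal, illustrative way to run them:
if __name__ == "__main__":
    import doctest
    doctest.testmod()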
|
# Copyright 2022 ConvolutedDog (https://github.com/ConvolutedDog/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#!/usr/bin/python3
import torch
import torchvision
from utils.utils_v2 import gradient_backward_v2
# __all__ is all the models we now support.
__all__ = ['resnet18', 'resnet34', 'resnet50', 'resnet101',
'resnet152', 'resnext50_32x4d', 'resnext101_32x8d',
'wide_resnet50_2', 'wide_resnet101_2',
'vgg11', 'vgg11_bn', 'vgg13', 'vgg13_bn', 'vgg16',
'vgg16_bn', 'vgg19', 'vgg19_bn', 'alexnet']
# You can choose the model from __all__.
model = torchvision.models.alexnet()
print(model, end='\n')
# AlexNet can take (224, 224) or (228, 228) inputs;
# VGG-series models can take (224, 224) or (228, 228) inputs;
# ResNet and ResNeXt-series models can take (228, 228) inputs.
# Choose the input shape (BacthSize x InputChannel x Height x Width).
img = torch.rand(2,3,228,228)
# You also can edit the label.
label = torch.Tensor([1 for i in range(2)]).long()
# g_view = True means that the structure diagram of the neural network model will be drawn.
# For details on the implementation of back propagation on each layer, refer to the source code
# in utils_v2.py.
dz_list, dw, db = gradient_backward_v2(model, img, label, num_class=1000, g_view=True)
|
# generated from genmsg/cmake/pkg-genmsg.context.in
messages_str = "/home/casch/Dropbox/humanoid_robot/catkin_ws/src/pr2_simulator/pr2_gazebo_plugins/msg/PlugCommand.msg;/home/casch/Dropbox/humanoid_robot/catkin_ws/src/pr2_simulator/pr2_gazebo_plugins/msg/ModelJointsState.msg"
services_str = "/home/casch/Dropbox/humanoid_robot/catkin_ws/src/pr2_simulator/pr2_gazebo_plugins/srv/SetModelsJointsStates.srv"
pkg_name = "pr2_gazebo_plugins"
dependencies_str = "std_msgs;nav_msgs;sensor_msgs;pr2_msgs;geometry_msgs;diagnostic_msgs"
langs = "gencpp;geneus;genlisp;gennodejs;genpy"
dep_include_paths_str = "pr2_gazebo_plugins;/home/casch/Dropbox/humanoid_robot/catkin_ws/src/pr2_simulator/pr2_gazebo_plugins/msg;std_msgs;/opt/ros/kinetic/share/std_msgs/cmake/../msg;nav_msgs;/opt/ros/kinetic/share/nav_msgs/cmake/../msg;sensor_msgs;/opt/ros/kinetic/share/sensor_msgs/cmake/../msg;pr2_msgs;/opt/ros/kinetic/share/pr2_msgs/cmake/../msg;geometry_msgs;/opt/ros/kinetic/share/geometry_msgs/cmake/../msg;diagnostic_msgs;/opt/ros/kinetic/share/diagnostic_msgs/cmake/../msg;actionlib_msgs;/opt/ros/kinetic/share/actionlib_msgs/cmake/../msg"
PYTHON_EXECUTABLE = "/home/casch/anaconda2/bin/python"
package_has_static_sources = '' == 'TRUE'
genmsg_check_deps_script = "/opt/ros/kinetic/share/genmsg/cmake/../../../lib/genmsg/genmsg_check_deps.py"
|
source_domain = None # set by user
target_domain = None # set by user
init_cfg = dict(type='studio')
# model settings
model = dict(
type='Pix2Pix',
generator=dict(
type='SAGANGenerator',
output_scale=128,
base_channels=64,
attention_cfg=dict(type='SelfAttentionBlock'),
attention_after_nth_block=4,
with_spectral_norm=True,
use_cbn=False,
# num_classes=1000,
init_cfg=init_cfg),
discriminator=dict(
type='ProjDiscriminator',
input_scale=128,
base_channels=64,
attention_cfg=dict(type='SelfAttentionBlock'),
attention_after_nth_block=1,
with_spectral_norm=True,
# use_cbn=False,
# num_classes=1000,
init_cfg=init_cfg),
gan_loss=dict(
type='GANLoss',
gan_type='hinge',
real_label_val=1.0,
fake_label_val=0.0,
loss_weight=1.0),
default_domain=target_domain,
reachable_domains=[target_domain],
related_domains=[target_domain, source_domain],
gen_auxiliary_loss=dict(
type='L1Loss',
loss_weight=100.0,
loss_name='pixel_loss',
data_info=dict(
pred=f'fake_{target_domain}', target=f'real_{target_domain}'),
reduction='mean'))
# model training and testing settings
train_cfg = dict(disc_steps=1)
test_cfg = None
|
"""
Created on 13/nov/2019 with PyCharm
INPI - Instituto Nacional da Propriedade Industrial
@author: Rafael Nunes - rafael.nunes@inpi.gov.br
@title: hackerrank - test_averybigsum.py
--------------------------------------------------------------------------------
*** Description of the module function ***
--------------------------------------------------------------------------------
"""
import unittest
import averybigsum
class MyTestCase(unittest.TestCase):
def setUp(self) -> None:
pass
def test_0(self):
self.assertEqual(5000000015, averybigsum.aVeryBigSum([1000000001, 1000000002, 1000000003, 1000000004, 1000000005]))
def tearDown(self) -> None:
pass
if __name__ == '__main__':
unittest.main()
|
from django.apps import AppConfig
class CalendarioConfig(AppConfig):
name = 'frequencia.calendario'
|
import cv2 as cv
def gray_to_ascii(img):
    '''Convert a grayscale image into an ASCII-formatted image.'''
ascii_format = ['.', ',', '*', '+', '^', ';', '?', '%', '$', "#", '@']
ascii_draw = ''
i=0
for x in img.flatten():
i+=1
ascii_draw+= ascii_format[x//25]
if i%img.shape[1]==0:
ascii_draw+='\n'
print(ascii_draw)
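# Note: with the 11-character palette above, a pixel value of 0 maps to '.' and 255 maps
# to '@' (255 // 25 == 10), so dark pixels get sparse glyphs and bright pixels dense ones.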
def video_display(filepath):
"""converts a video to ascii and display both cmd text and original video"""
cap = cv.VideoCapture(filepath)
height = cap.get(cv.CAP_PROP_FRAME_HEIGHT)
width = cap.get(cv.CAP_PROP_FRAME_WIDTH)
    aspect_ratio = 3 if width // height <= 1 else width // height
    new_height = 50
    new_width = int(new_height * aspect_ratio)  # cv.resize expects integer dimensions
    while cap.isOpened():
ret, frame = cap.read()
if not ret:
print("thanks for watching")
break
cv.imshow('display',frame)
frame = cv.resize(cv.cvtColor(frame, cv.COLOR_BGR2GRAY),(new_width, new_height))
gray_to_ascii(frame)
if cv.waitKey(1) == ord('q'):
break
cap.release()
cv.destroyAllWindows()
def main():
video_display('assets//badapple.mp4')
if __name__ == "__main__":
    main()
|
"""
Utilities
---------
Utility functions for general operations and plotting.
Contents:
normalize,
gen_list_of_lists,
gen_faction_groups,
gen_parl_points,
swap_parl_allocations,
hex_to_rgb,
rgb_to_hex,
scale_saturation
"""
import colorsys
import numpy as np
import pandas as pd
from colormath.color_objects import sRGBColor
def normalize(vals):
"""Returns respective normalized values."""
total_vals = sum(vals)
return [1.0 * v / total_vals for v in vals]
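# For example (illustrative): normalize([1, 1, 2]) -> [0.25, 0.25, 0.5]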
def gen_list_of_lists(original_list, new_structure):
"""Generates a list of lists with a given structure from a given list."""
assert len(original_list) == sum(
new_structure
), "The number of elements in the original list and desired structure don't match."
return [
[original_list[i + sum(new_structure[:j])] for i in range(new_structure[j])]
for j in range(len(new_structure))
]
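# For example (illustrative): gen_list_of_lists([1, 2, 3, 4, 5], [2, 3]) -> [[1, 2], [3, 4, 5]]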
def gen_faction_groups(original_list, factions_indexes):
"""
Reorders a list into a list of lists where sublists are faction amounts.
Parameters
----------
original_list : list
The data to be reorganized.
factions_indexes : list of lists (contains ints)
The structure of original_list indexes to output.
Returns
-------
factioned_list : list of lists
The values of original_list ordered as the indexes of factions_indexes.
"""
factions_structure = [len(sublist) for sublist in factions_indexes]
flat_indexes = [item for sublist in factions_indexes for item in sublist]
ordered_original_list = [original_list[i] for i in flat_indexes]
return gen_list_of_lists(ordered_original_list, factions_structure)
def gen_parl_points(
allocations, labels=None, style="semicircle", num_rows=2, speaker=False
):
"""
Produces a df with coordinates for a parliament plot.
Parameters
----------
allocations : list
The share of seats given to the regions or parties.
labels : list : optional (default=None)
The names of the groups.
style : str (default=semicircle)
Whether to plot the parliament as a semicircle or a rectangle.
num_rows : int (default=2)
The number of rows in the plot.
speaker : bool : optional (default=False)
Whether to include a point for the speaker of the house colored by their group.
Note: 'True' colors the point based on the largest group, but passing a name from 'labels' is also possible.
Returns
-------
df_seat_lctns : pd.DataFrame
A dataframe with points to be converted to a parliament plot via seaborn's scatterplot.
"""
assert style in [
"semicircle",
"rectangle",
], "Please choose one of semicircle or rectangle for the plotting style."
total_seats = sum(allocations)
if not labels:
# For dataframe assignment.
labels = [f"group_{i}" for i in range(len(allocations))]
if speaker:
assert (speaker == True) or (
speaker in labels
), "Either the 'speaker' argument must be true, or must match an element from the provided 'labels' argument."
total_seats -= 1
allocations = list(allocations)
if speaker == True:
assert (
len([c for c in allocations if c == max(allocations)]) == 1
), "Two parties got the highest number of seats in the allocation. Please assign the speaker via passing one of their names."
largest_group_index = allocations.index(max(allocations))
allocations[largest_group_index] -= 1
# Reassign 'speaker' to the largest group's name so it can be assigned later.
speaker = labels[largest_group_index]
elif speaker in labels:
largest_group_index = labels.index(speaker)
allocations[largest_group_index] -= 1
# Make an empty dataframe and fill it with coordinates for the structure.
# Then assign group values for allocation based on the rows.
df_seat_lctns = pd.DataFrame(
columns=["group", "row", "row_position", "x_loc", "y_loc"]
)
if style == "semicircle":
def arc_coordinates(r, seats):
"""
Generates an arc of the parliament plot given a radius and the number of seats.
"""
angles = np.linspace(start=np.pi, stop=0, num=seats)
x_coordinates, y_coordinates = [], []
# Broadcast angles to their corresponding coordinates.
x_coordinates = list(r * np.cos(angles))
y_coordinates = list(r * np.sin(angles))
return x_coordinates, y_coordinates
xs, ys = [], []
radii = range(2, 2 + num_rows)
row_seats = [int(total_seats / num_rows)] * num_rows
extra_seat = total_seats - sum(
row_seats
) # 0 or 1 based on whether the seats divide evenly into the rows
row_seats[-1] += extra_seat
# Shift the seats per row such that it's always increasing.
if num_rows % 2 != 0:
seats_shift = list(range(-int(num_rows / 2), int(num_rows / 2) + 1, 1))
else:
positive_shift = list(range(1, int(num_rows / 2) + 1, 1))
negative_shift = [-1 * i for i in positive_shift[::-1]]
seats_shift = negative_shift + positive_shift
seats_shift = [
i * int(num_rows / 2) for i in seats_shift
] # greater shift for higher rows for more equal spacing
seats_per_row = [rs + seats_shift[i] for i, rs in enumerate(row_seats)]
row_indexes = []
row_position_indexes = []
for i, spr in enumerate(seats_per_row):
arc_xs, arc_ys = arc_coordinates(radii[i], spr)
xs += arc_xs
ys += arc_ys
row_indexes += [i] * spr
# List of lists for position indexes such that they can be accessed by row and position.
row_position_indexes += [list(range(spr))]
for i in range(total_seats):
df_seat_lctns.loc[i, "x_loc"] = xs[i]
df_seat_lctns.loc[i, "y_loc"] = ys[i]
df_seat_lctns["row"] = row_indexes
df_seat_lctns["row_position"] = [
item for sublist in row_position_indexes for item in sublist
]
# Index the group and deplete a copy of allocations at its location.
group_index = 0
seats_to_allocate = allocations.copy()
row_index = 0
while total_seats > 0:
# Assign based on row and the current index within that row.
if row_position_indexes[row_index] != []:
index_to_assign = [
i
for i in df_seat_lctns.index
if df_seat_lctns.loc[i, "row"] == row_index
and df_seat_lctns.loc[i, "row_position"]
== row_position_indexes[row_index][0]
][0]
df_seat_lctns.loc[index_to_assign, "group"] = labels[group_index]
total_seats -= 1
seats_to_allocate[group_index] -= 1
if seats_to_allocate[group_index] == 0:
group_index += 1
row_position_indexes[row_index].pop(0)
row_index += 1
if row_index == num_rows:
row_index = 0
# Make sure that radii are filled before rows are completed.
for i in range(num_rows):
if len(row_position_indexes[i]) < i + 2:
if i != 0:
row_index -= 1
else:
row_index = num_rows - 1
else:
while row_position_indexes[row_index] == []:
row_index += 1
elif style == "rectangle":
x_coordinate = 0
# y_coordinates are split by baseline of 2 units, with double that for
# the middle aisle.
equa_distant_indexes = list(range(0, num_rows * 2, 2))
y_coordinates = [
i
if (
equa_distant_indexes.index(i) < int(len(equa_distant_indexes) / 2)
and len(equa_distant_indexes) % 2 == 0
)
or (
equa_distant_indexes.index(i) < int(len(equa_distant_indexes) / 2) + 1
and len(equa_distant_indexes) % 2 != 0
)
else i + 2
for i in equa_distant_indexes
]
if num_rows == 1:
for i in range(total_seats):
df_seat_lctns.loc[i, "x_loc"] = x_coordinate
df_seat_lctns.loc[i, "y_loc"] = 0
x_coordinate += 2
df_seat_lctns["row"] = [0] * len(df_seat_lctns)
list_of_name_lists = [[labels[i]] * a for i, a in enumerate(allocations)]
df_seat_lctns["group"] = [
item for sublist in list_of_name_lists for item in sublist
]
else:
row_index = 0
position_index = 0
row_seats_no_remainder = int(total_seats / num_rows) * num_rows
for i in range(row_seats_no_remainder):
y_coordinate = y_coordinates[row_index]
df_seat_lctns.loc[i, "row"] = row_index
df_seat_lctns.loc[i, "row_position"] = position_index
df_seat_lctns.loc[i, "x_loc"] = x_coordinate
df_seat_lctns.loc[i, "y_loc"] = y_coordinate
x_coordinate += 2
position_index += 1
# Reset to the start of the next row.
if (i + 1) % int(total_seats / num_rows) == 0:
row_index += 1
x_coordinate = 0
position_index = 0
# Add last seats that were rounded off.
max_x = max(df_seat_lctns["x_loc"])
            max_pos = max(df_seat_lctns["row_position"])
row_index = 0 # reset to first row
for i in list(range(total_seats))[row_seats_no_remainder:]:
y_coordinate = y_coordinates[row_index]
df_seat_lctns.loc[i, "row"] = row_index
df_seat_lctns.loc[i, "row_position"] = max_pos + 1
df_seat_lctns.loc[i, "x_loc"] = max_x + 2
df_seat_lctns.loc[i, "y_loc"] = y_coordinate
row_index += 1
# Sort df for index based assignment.
df_seat_lctns.sort_values(
["row", "x_loc", "y_loc"], ascending=[True, True, True], inplace=True
)
df_seat_lctns.reset_index(inplace=True, drop=True)
# Define the top and bottom rows so they can be filled in order.
top_rows = y_coordinates[int((len(y_coordinates) + 1) / 2) :]
bottom_rows = y_coordinates[: int((len(y_coordinates) + 1) / 2)]
# Find the total seats in each section to be depleated.
total_top_seats = 0
for row in top_rows:
total_top_seats += len(df_seat_lctns[df_seat_lctns["y_loc"] == row])
total_bottom_seats = 0
for row in bottom_rows:
total_bottom_seats += len(df_seat_lctns[df_seat_lctns["y_loc"] == row])
# Index the group and deplete a copy of allocations at its location.
group_index = 0
seats_to_allocate = allocations.copy()
# Top assignment from low to high and left to right.
top_x = 0
top_y = top_rows[0]
while total_top_seats > 0:
index_to_assign = [
i
for i in df_seat_lctns.index
if df_seat_lctns.loc[i, "x_loc"] == top_x
and df_seat_lctns.loc[i, "y_loc"] == top_y
][0]
df_seat_lctns.loc[index_to_assign, "group"] = labels[group_index]
seats_to_allocate[group_index] -= 1
if seats_to_allocate[group_index] == 0:
group_index += 1
if top_y == top_rows[-1]:
# Move right and reset vertical.
top_x += 2
top_y = top_rows[0]
else:
# Move up.
top_y += 2
total_top_seats -= 1
# Bottom assignment from high to low and right to left.
bottom_x = max(df_seat_lctns["x_loc"])
bottom_y = bottom_rows[-1]
# Fix initial position in case of unequal seats per row.
while (
len(
[
i
for i in df_seat_lctns.index
if df_seat_lctns.loc[i, "x_loc"] == bottom_x
and df_seat_lctns.loc[i, "y_loc"] == bottom_y
]
)
== 0
):
# Move down.
bottom_y -= 2
while total_bottom_seats > 0:
index_to_assign = [
i
for i in df_seat_lctns.index
if df_seat_lctns.loc[i, "x_loc"] == bottom_x
and df_seat_lctns.loc[i, "y_loc"] == bottom_y
][0]
df_seat_lctns.loc[index_to_assign, "group"] = labels[group_index]
seats_to_allocate[group_index] -= 1
if seats_to_allocate[group_index] == 0:
group_index += 1
if bottom_y == bottom_rows[0]:
# Move left and reset vertical
bottom_x -= 2
bottom_y = bottom_rows[-1]
else:
# Move down.
bottom_y -= 2
total_bottom_seats -= 1
else:
        raise ValueError("The 'style' argument must be either 'semicircle' or 'rectangle'")
if speaker:
index_to_assign = len(df_seat_lctns)
if style == "semicircle":
df_seat_lctns.loc[index_to_assign, "x_loc"] = 0
df_seat_lctns.loc[index_to_assign, "y_loc"] = 0
df_seat_lctns.loc[index_to_assign, "group"] = speaker
elif style == "rectangle":
if len(y_coordinates) % 2 == 0:
middle_index_1 = len(y_coordinates) / 2 - 1
middle_index_2 = len(y_coordinates) / 2
y_coordinate = (
y_coordinates[int(middle_index_1)]
+ y_coordinates[int(middle_index_2)]
) / 2
else:
middle_index = int(len(y_coordinates) / 2)
y_coordinate = float(y_coordinates[middle_index] + 2)
df_seat_lctns.loc[index_to_assign, "x_loc"] = 0
df_seat_lctns.loc[index_to_assign, "y_loc"] = y_coordinate
df_seat_lctns.loc[index_to_assign, "group"] = speaker
return df_seat_lctns
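# A hypothetical usage sketch: gen_parl_points([5, 3, 2], labels=["A", "B", "C"], num_rows=2)
# yields one row per seat with "x_loc"/"y_loc" coordinates and a "group" column, ready to be
# passed to a seaborn scatterplot (hue="group") to draw the parliament.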
def swap_parl_allocations(df, row_0, pos_0, row_1, pos_1):
"""
Replaces two allocations of the parliament plot df to clean up coloration.
Parameters
----------
row_0 : int
The row of one seat to swap.
pos_0 : int
The position in the row of one seat to swap.
row_1 : int
The row of the other seat to swap.
pos_1 : int
The position in the row of the other seat to swap.
Returns
-------
df_seat_lctns : pd.DataFrame
A parliament plot allocations data frame with two allocations swapped
"""
allocation_0 = df[(df["row"] == row_0) & (df["row_position"] == pos_0)][
"group"
].values[0]
index_1 = df[(df["row"] == row_0) & (df["row_position"] == pos_0)].index
allocation_1 = df[(df["row"] == row_1) & (df["row_position"] == pos_1)][
"group"
].values[0]
index_2 = df[(df["row"] == row_1) & (df["row_position"] == pos_1)].index
df.loc[index_1, "group"] = allocation_1
df.loc[index_2, "group"] = allocation_0
def hex_to_rgb(hex_rep):
"""
Converts a hexadecimal representation to its RGB ratios.
Parameters
----------
hex_rep : str
The hex representation of the color.
Returns
-------
rgb_trip : tuple
An RGB tuple color representation.
"""
return sRGBColor(
*[int(hex_rep[i + 1 : i + 3], 16) for i in (0, 2, 4)], is_upscaled=True
)
def rgb_to_hex(rgb_trip):
"""
Converts rgb ratios to their hexadecimal representation.
Parameters
----------
rgb_trip : tuple
An RGB tuple color representation.
Returns
-------
hex_rep : str
The hex representation of the color.
"""
trip_0, trip_1, trip_2 = rgb_trip[0], rgb_trip[1], rgb_trip[2]
if isinstance(trip_0, (float, np.float64)):
trip_0 *= 255
trip_1 *= 255
trip_2 *= 255
return "#%02x%02x%02x" % (int(trip_0), int(trip_1), int(trip_2))
def scale_saturation(rgb_trip, sat):
"""
Changes the saturation of an rgb color.
Parameters
----------
rgb_trip : tuple
An RGB tuple color representation.
sat : float
        The saturation factor by which rgb_trip should be modified.
Returns
-------
saturated_rgb : tuple
colorsys.hls_to_rgb saturation of the given color.
"""
if (isinstance(rgb_trip, str)) and (len(rgb_trip) == 9) and (rgb_trip[-2:] == "00"):
# An RGBA has been provided and its alpha is 00, so return it for
# a transparent marker.
return rgb_trip
if (isinstance(rgb_trip, str)) and (len(rgb_trip) == 7):
rgb_trip = hex_to_rgb(rgb_trip)
if isinstance(rgb_trip, sRGBColor):
rgb_trip = rgb_trip.get_value_tuple()
h, l, s = colorsys.rgb_to_hls(*rgb_trip)
return colorsys.hls_to_rgb(h, min(1, l * sat), s=s)
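# Usage sketch: the factor is applied to the HLS lightness channel of the color,
# and the result is an RGB tuple with components in [0, 1]:
# dimmed = scale_saturation("#3366cc", 0.5)        # darker blue
# dimmed_hex = rgb_to_hex(dimmed)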
|
from hopex.client.trade import TradeClient
from hopex.constant.test import t_api_key, t_secret_key
from hopex.utils.log_info import LogInfo
trade_client = TradeClient(api_key=t_api_key, secret_key=t_secret_key)
"""
The condition orders information
:params
contract_code_list: Contract List, Being blank to search all contracts
task_type_list: 1:Buy Long, 2:Sell Short, 3:Buy to Close Short, 4:Sell to Close Long, Being blank to search all
trig_type_list: 1:Market Price, 2:Fair Price, Being blank to search all
task_status_list: 1: Untriggered 2.Canceled 3.Order Submitted 4.Trigger failed, Being blank to search all
direct: 1 LONG,2 SHORT,0:search all
side: 1:Sell, 2:Buy, 0:search all
start_time: 0:search all,Start Time Stamp(Unit microsecond)
end_time: 0:search all,End Time Stamp(Unit microsecond)
"""
# Example: query all condition orders
contract_code_list = ['BTCUSDT', 'ETHUSDT']
task_type_list = []
trig_type_list = []
task_status_list = []
direct = 0
side = 0
start_time = 0
end_time = 0
list_obj = trade_client.req_condition_orders(contract_code_list=contract_code_list, task_type_list=task_type_list,
trig_type_list=trig_type_list,
task_status_list=task_status_list, direct=direct, side=side,
start_time=start_time,
end_time=end_time)
LogInfo.output_list(list_obj, "==query condition orders all==")
|
# Prints a centered, hyphen-padded alphabet diamond of size n.
n=int(input())
a='abcdefghijklmnopqrstuvwxyz'
pad=n+n-1+n+1
res=[]
for i in range(n):
p1=a[i:n]
p2=a[i+1:n][::-1]
a1='-'.join(map(str,list(p1)))
a2='-'.join(map(str,list(p2)))
    # print(a1, a2)  # debug output of the two half-rows; left commented so only the pattern is printed
if a2=='':
req=a1
else:
req=a2+'-'+a1
res.append(req.center(pad,'-'))
res=res[1:][::-1]+res[:]
print(*res,sep='\n')
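# For example, with n = 3 the final print(*res, sep='\n') emits the following diamond:
# ----c----
# --c-b-c--
# c-b-a-b-c
# --c-b-c--
# ----c----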
|
# Copyright (c) 2012-2021, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from .aws import Action as BaseAction
from .aws import BaseARN
service_name = "AWS Firewall Manager"
prefix = "fms"
class Action(BaseAction):
def __init__(self, action: str = None) -> None:
super().__init__(prefix, action)
class ARN(BaseARN):
def __init__(self, resource: str = "", region: str = "", account: str = "") -> None:
super().__init__(
service=prefix, resource=resource, region=region, account=account
)
AssociateAdminAccount = Action("AssociateAdminAccount")
DeleteAppsList = Action("DeleteAppsList")
DeleteNotificationChannel = Action("DeleteNotificationChannel")
DeletePolicy = Action("DeletePolicy")
DeleteProtocolsList = Action("DeleteProtocolsList")
DisassociateAdminAccount = Action("DisassociateAdminAccount")
GetAdminAccount = Action("GetAdminAccount")
GetAppsList = Action("GetAppsList")
GetComplianceDetail = Action("GetComplianceDetail")
GetNotificationChannel = Action("GetNotificationChannel")
GetPolicy = Action("GetPolicy")
GetProtectionStatus = Action("GetProtectionStatus")
GetProtocolsList = Action("GetProtocolsList")
GetViolationDetails = Action("GetViolationDetails")
ListAppsLists = Action("ListAppsLists")
ListComplianceStatus = Action("ListComplianceStatus")
ListMemberAccounts = Action("ListMemberAccounts")
ListPolicies = Action("ListPolicies")
ListProtocolsLists = Action("ListProtocolsLists")
ListTagsForResource = Action("ListTagsForResource")
PutAppsList = Action("PutAppsList")
PutNotificationChannel = Action("PutNotificationChannel")
PutPolicy = Action("PutPolicy")
PutProtocolsList = Action("PutProtocolsList")
TagResource = Action("TagResource")
UntagResource = Action("UntagResource")
|
import re
from collections import OrderedDict
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.utils.model_zoo as model_zoo
from torch.nn import init
# NOTE: the `pretrained=True` branches below expect a `model_urls` dict (as in
# torchvision); it is not defined in this file.
__all__ = ['DPN', 'dpn92', 'dpn98', 'dpn131', 'dpn107', 'dpns', "SimpleNet",
'se_resnet18', 'se_resnet34', 'se_resnet50', 'se_resnet101', 'se_resnet152',
"densenet121", "densenet169", "densenet201", "densenet161"]
class SimpleNet(nn.Module):
def __init__(self):
super(SimpleNet, self).__init__()
# Conv2d
# CLASS torch.nn.Conv2d(in_channels, out_channels, kernel_size, stride=1, padding=0, dilation=1, groups=1, bias=True)
self.conv1 = nn.Conv2d(3, 32, kernel_size=10, stride=3)
self.bn1 = nn.BatchNorm2d(32)
self.conv2 = nn.Conv2d(32, 64, kernel_size=5, stride=2)
self.bn2 = nn.BatchNorm2d(64)
# self.conv3 = nn.Conv2d(64, 128, kernel_size=3, stride=1)
# self.bn3 = nn.BatchNorm2d(128)
self.pool = nn.MaxPool2d(2, stride=1)
self.fc1 = nn.Linear(135424, 1000)
self.fc2 = nn.Linear(1000, 2)
self.relu = nn.ReLU(inplace=True)
def forward(self, x):
x = self.relu(self.bn1(self.conv1(x)))
x = self.pool(self.relu(self.bn2(self.conv2(x))))
# x = self.pool(self.relu(self.bn3(self.conv3(x))))
x = x.view(x.size(0), -1)
x = self.relu(self.fc1(x))
# x = self.relu(self.fc2(x))
x = self.fc2(x)
return x
# Shake-shake implementation from https://github.com/owruby/shake-shake_pytorch/blob/master/models/shakeshake.py
class ShakeShake(torch.autograd.Function):
@staticmethod
def forward(ctx, x1, x2, training=True):
if training:
alpha = torch.FloatTensor(x1.size(0)).uniform_().to("cuda:1")
alpha = alpha.view(alpha.size(0), 1, 1, 1).expand_as(x1)
else:
alpha = 0.5
return alpha * x1 + (1 - alpha) * x2
@staticmethod
def backward(ctx, grad_output):
beta = torch.FloatTensor(grad_output.size(0)).uniform_().to("cuda:1")
beta = beta.view(beta.size(0), 1, 1, 1).expand_as(grad_output)
# beta = Variable(beta)
return beta * grad_output, (1 - beta) * grad_output, None
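# Inference-path sketch (avoids the hard-coded "cuda:1" used during training):
# x1, x2 = torch.randn(4, 8, 16, 16), torch.randn(4, 8, 16, 16)
# mixed = ShakeShake.apply(x1, x2, False)   # exactly 0.5 * x1 + 0.5 * x2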
# SENet
# https://github.com/moskomule/senet.pytorch/blob/master/senet/se_resnet.py
# from torchvision.models import ResNet
class SELayer(nn.Module):
def __init__(self, channel, reduction=16):
super(SELayer, self).__init__()
self.avg_pool = nn.AdaptiveAvgPool2d(1)
self.fc = nn.Sequential(
nn.Linear(channel, int(channel // reduction), bias=False),
nn.ReLU(inplace=True),
nn.Linear(int(channel // reduction), channel, bias=False),
nn.Sigmoid()
)
def forward(self, x):
b, c, _, _ = x.size()
y = self.avg_pool(x).view(b, c)
y = self.fc(y)
y = y.view(b, c, 1, 1)
return x * y.expand_as(x)
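# Sketch: squeeze-and-excitation keeps the input shape and rescales each channel
# by a learned factor in (0, 1):
# se = SELayer(channel=64, reduction=16)
# out = se(torch.randn(2, 64, 32, 32))      # shape (2, 64, 32, 32)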
def conv3x3(in_planes, out_planes, stride=1):
return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride, padding=1, bias=False)
class SEBasicBlock(nn.Module):
expansion = 1
def __init__(self, inplanes, planes, stride=1, downsample=None, reduction=16, shake_shake=False, device="cuda:0"):
super(SEBasicBlock, self).__init__()
self.relu = nn.ReLU(inplace=True)
self.se = SELayer(planes, reduction)
self.downsample = downsample
self.stride = stride
self.reduction = reduction
self.device=device
self.shake_shake = shake_shake
# bn - 3*3 conv - bn - relu - dropout - 3*3 conv - bn - add
# https://arxiv.org/pdf/1610.02915.pdf
self.bn1 = nn.BatchNorm2d(inplanes)
self.conv1 = conv3x3(inplanes, planes, stride)
self.bn2 = nn.BatchNorm2d(planes)
self.drop = nn.Dropout2d(p=0.3)
self.conv2 = conv3x3(planes, planes, 1)
self.bn3 = nn.BatchNorm2d(planes)
if shake_shake:
self.branch1 = self._make_branch(inplanes, planes, stride)
self.branch2 = self._make_branch(inplanes, planes, stride)
def _make_branch(self, inplanes, planes, stride=1):
# bn - 3*3 conv - bn - relu - dropout - 3*3 conv - bn - add
return nn.Sequential(
nn.BatchNorm2d(inplanes),
conv3x3(inplanes, planes, stride),
nn.BatchNorm2d(planes),
nn.ReLU(inplace=False),
nn.Dropout2d(p=0.3),
conv3x3(planes, planes, stride),
nn.BatchNorm2d(planes),
SELayer(planes, self.reduction))
def forward(self, x):
residual = x
if not self.shake_shake:
# bn - 3*3 conv - bn - relu - dropout - 3*3 conv - bn - add
out = self.bn1(x)
out = self.conv1(out)
out = self.bn2(out)
out = self.relu(out)
out = self.drop(out)
out = self.conv2(out)
out = self.bn3(out)
out = self.se(out)
#######
if self.downsample is not None:
residual = self.downsample(x)
out += residual
out = self.relu(out)
elif self.shake_shake:
h1 = self.branch1(x)
h2 = self.branch2(x)
out = ShakeShake.apply(h1, h2, self.training)
assert h1.size() == out.size()
if self.downsample is not None:
residual = self.downsample(x)
out += residual
out = self.relu(out)
return out
class SEBottleneck(nn.Module):
expansion = 4
def __init__(self, inplanes, planes, stride=1, downsample=None, reduction=16, shake_shake=False):
super(SEBottleneck, self).__init__()
self.relu = nn.ReLU(inplace=True)
self.se = SELayer(planes * 4, reduction)
self.downsample = downsample
self.stride = stride
# bn - 1*1conv - bn - relu - 3*3conv - bn - relu - 1*1conv - bn
self.bn1 = nn.BatchNorm2d(inplanes)
self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False)
self.bn2 = nn.BatchNorm2d(planes)
self.conv2 = conv3x3(planes, planes, stride=stride)
self.bn3 = nn.BatchNorm2d(planes)
self.conv3 = nn.Conv2d(planes, planes * 4, kernel_size=1, bias=False)
self.bn4 = nn.BatchNorm2d(planes * 4)
def forward(self, x):
residual = x
# bn - 1*1conv - bn - relu - 3*3conv - bn - relu - 1*1conv - bn
# This architecture is proposed in Deep Pyramidal Residual Networks.
out = self.bn1(x)
out = self.conv1(out)
out = self.bn2(out)
out = self.relu(out)
out = self.conv2(out)
out = self.bn3(out)
out = self.relu(out)
out = self.conv3(out)
out = self.bn4(out)
out = self.se(out)
if self.downsample is not None:
residual = self.downsample(x)
out += residual
out = self.relu(out)
return out
def se_resnet18(num_classes, if_mixup=False, if_shake_shake=False, first_conv_stride=2, first_pool=True):
"""Constructs a ResNet-18 model.
Args:
pretrained (bool): If True, returns a model pre-trained on ImageNet
"""
model = ResNet(SEBasicBlock, [2, 2, 2, 2], num_classes=num_classes, mixup_hidden=if_mixup, shake_shake=if_shake_shake,
first_conv_stride=first_conv_stride, first_pool=first_pool)
model.avgpool = nn.AdaptiveAvgPool2d(1)
return model
def se_resnet34(num_classes, if_mixup=False, if_shake_shake=False, first_conv_stride=2, first_pool=True):
"""Constructs a ResNet-34 model.
Args:
pretrained (bool): If True, returns a model pre-trained on ImageNet
"""
model = ResNet(SEBasicBlock, [3, 4, 6, 3], num_classes=num_classes, mixup_hidden=if_mixup, shake_shake=if_shake_shake,
first_conv_stride=first_conv_stride, first_pool=first_pool)
model.avgpool = nn.AdaptiveAvgPool2d(1)
return model
def se_resnet50(num_classes, if_mixup=False, if_shake_shake=False, first_conv_stride=2, first_pool=True):
"""Constructs a ResNet-50 model.
Args:
pretrained (bool): If True, returns a model pre-trained on ImageNet
"""
model = ResNet(SEBottleneck, [3, 4, 6, 3], num_classes=num_classes, mixup_hidden=if_mixup, shake_shake=if_shake_shake,
first_conv_stride=first_conv_stride, first_pool=first_pool)
model.avgpool = nn.AdaptiveAvgPool2d(1)
return model
def se_resnet101(num_classes, if_mixup=False, if_shake_shake=False, first_conv_stride=2, first_pool=True):
"""Constructs a ResNet-101 model.
Args:
pretrained (bool): If True, returns a model pre-trained on ImageNet
"""
model = ResNet(SEBottleneck, [3, 4, 23, 3],num_classes=num_classes, mixup_hidden=if_mixup, shake_shake=if_shake_shake,
first_conv_stride=first_conv_stride, first_pool=first_pool)
model.avgpool = nn.AdaptiveAvgPool2d(1)
return model
def se_resnet152(num_classes, if_mixup=False, if_shake_shake=False, first_conv_stride=2, first_pool=True):
"""Constructs a ResNet-152 model.
Args:
pretrained (bool): If True, returns a model pre-trained on ImageNet
"""
model = ResNet(SEBottleneck, [3, 8, 36, 3], num_classes=num_classes, mixup_hidden=if_mixup, shake_shake=if_shake_shake)
model.avgpool = nn.AdaptiveAvgPool2d(1)
return model
class ResNet(nn.Module):
# This ResNet does Manifold-Mixup.
# https://arxiv.org/pdf/1806.05236.pdf
def __init__(self, block, layers, num_classes=2, zero_init_residual=True, mixup_hidden=True, shake_shake=False,
first_conv_stride=2, first_pool=True, device="cuda:0"):
super(ResNet, self).__init__()
self.mixup_hidden = mixup_hidden
self.shake_shake = shake_shake
self.inplanes = 64
self.num_classes = num_classes
self.first_pool = first_pool
self.device=device
widen_factor = 1
self.conv1 = nn.Conv2d(3, 64, kernel_size=7, stride=first_conv_stride, padding=3,
bias=False)
self.bn1 = nn.BatchNorm2d(64)
self.relu = nn.ReLU(inplace=True)
self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
self.conv2 = nn.Conv2d(64, 64, kernel_size=3, stride=2, padding=1, bias=False)
self.bn2 = nn.BatchNorm2d(64)
self.layer1 = self._make_layer(block, 64*widen_factor, layers[0])
self.layer2 = self._make_layer(block, 128*widen_factor, layers[1], stride=2)
self.layer3 = self._make_layer(block, 256*widen_factor, layers[2], stride=2)
self.layer4 = self._make_layer(block, 512*widen_factor, layers[3], stride=2)
self.avgpool = nn.AdaptiveAvgPool2d((1, 1))
self.fc = nn.Linear(512 * block.expansion * widen_factor, num_classes)
for m in self.modules():
if isinstance(m, nn.Conv2d):
                # He (Kaiming) initialization
nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
elif isinstance(m, nn.BatchNorm2d):
nn.init.constant_(m.weight, 1)
nn.init.constant_(m.bias, 0)
# Zero-initialize the last BN in each residual branch,
# so that the residual branch starts with zeros, and each residual block behaves like an identity.
# This improves the model by 0.2~0.3% according to https://arxiv.org/abs/1706.02677
if zero_init_residual and (not shake_shake):
for m in self.modules():
if isinstance(m, SEBottleneck):
nn.init.constant_(m.bn4.weight, 0)
elif isinstance(m, SEBasicBlock):
nn.init.constant_(m.bn3.weight, 0)
def _make_layer(self, block, planes, blocks, stride=1):
downsample = None
if stride != 1 or self.inplanes != planes * block.expansion:
downsample = nn.Sequential(
conv1x1(self.inplanes, planes * block.expansion, stride),
nn.BatchNorm2d(planes * block.expansion),
)
layers = []
layers.append(block(self.inplanes, planes, stride, downsample))
self.inplanes = planes * block.expansion
for _ in range(1, blocks):
if self.shake_shake:
layers.append(block(self.inplanes, planes, shake_shake=True))
else:
layers.append(block(self.inplanes, planes, shake_shake=False))
return nn.Sequential(*layers)
def forward(self, x, lam=None, target=None, device=None):
def mixup_process(out, target_reweighted, lam):
            # target_reweighted is a one-hot vector
            # target is the target class.
# shuffle indices of mini-batch
indices = np.random.permutation(out.size(0))
out = out*lam.expand_as(out) + out[indices]*(1-lam.expand_as(out))
target_shuffled_onehot = target_reweighted[indices]
target_reweighted = target_reweighted * lam.expand_as(target_reweighted) + target_shuffled_onehot * (1 - lam.expand_as(target_reweighted))
return out, target_reweighted
def to_one_hot(inp, num_classes):
y_onehot = torch.FloatTensor(inp.size(0), num_classes)
y_onehot.zero_()
y_onehot.scatter_(1, inp.unsqueeze(1).cpu(), 1)
return y_onehot.to(device)
if self.mixup_hidden:
layer_mix = np.random.randint(0,3)
else:
layer_mix = 0
out = x
if lam is not None:
target_reweighted = to_one_hot(target, self.num_classes)
if lam is not None and self.mixup_hidden and layer_mix == 0:
out, target_reweighted = mixup_process(out, target_reweighted, lam)
out = self.conv1(out)
out = self.bn1(out)
out = self.relu(out)
if self.first_pool:
out = self.maxpool(out)
else:
out = self.conv2(out)
out = self.bn2(out)
out = self.relu(out)
out = self.layer1(out)
if lam is not None and self.mixup_hidden and layer_mix == 1:
out, target_reweighted = mixup_process(out, target_reweighted, lam)
out = self.layer2(out)
if lam is not None and self.mixup_hidden and layer_mix == 2:
out, target_reweighted = mixup_process(out, target_reweighted, lam)
out = self.layer3(out)
out = self.layer4(out)
out = self.avgpool(out)
out = out.view(out.size(0), -1)
out = self.fc(out)
if lam is None:
return out
else:
return out, target_reweighted
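# Manifold-mixup training sketch (hypothetical sizes; lam drawn from a Beta
# distribution as in the linked paper, soft targets consumed by a soft cross-entropy):
# model = se_resnet18(num_classes=2, if_mixup=True)
# images, targets = torch.randn(8, 3, 224, 224), torch.randint(0, 2, (8,))
# lam = torch.tensor(np.random.beta(2.0, 2.0), dtype=torch.float32)
# logits, soft_targets = model(images, lam=lam, target=targets, device="cpu")
# loss = -(F.log_softmax(logits, dim=1) * soft_targets).sum(dim=1).mean()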
def resnet18(pretrained=False, **kwargs):
"""Constructs a ResNet-18 model.
Args:
pretrained (bool): If True, returns a model pre-trained on ImageNet
"""
model = ResNet(BasicBlock, [2, 2, 2, 2], **kwargs)
if pretrained:
model.load_state_dict(model_zoo.load_url(model_urls['resnet18']))
return model
def resnet34(pretrained=False, **kwargs):
"""Constructs a ResNet-34 model.
Args:
pretrained (bool): If True, returns a model pre-trained on ImageNet
"""
model = ResNet(BasicBlock, [3, 4, 6, 3], **kwargs)
if pretrained:
model.load_state_dict(model_zoo.load_url(model_urls['resnet34']))
return model
def resnet50(pretrained=False, **kwargs):
"""Constructs a ResNet-50 model.
Args:
pretrained (bool): If True, returns a model pre-trained on ImageNet
"""
model = ResNet(Bottleneck, [3, 4, 6, 3], **kwargs)
if pretrained:
model.load_state_dict(model_zoo.load_url(model_urls['resnet50']))
return model
def resnet101(pretrained=False, **kwargs):
"""Constructs a ResNet-101 model.
Args:
pretrained (bool): If True, returns a model pre-trained on ImageNet
"""
model = ResNet(Bottleneck, [3, 4, 23, 3], **kwargs)
if pretrained:
model.load_state_dict(model_zoo.load_url(model_urls['resnet101']))
return model
def resnet152(pretrained=False, **kwargs):
"""Constructs a ResNet-152 model.
Args:
pretrained (bool): If True, returns a model pre-trained on ImageNet
"""
model = ResNet(Bottleneck, [3, 8, 36, 3], **kwargs)
if pretrained:
model.load_state_dict(model_zoo.load_url(model_urls['resnet152']))
return model
class _DenseLayer(nn.Sequential):
def __init__(self, num_input_features, growth_rate, bn_size, drop_rate):
super(_DenseLayer, self).__init__()
self.add_module('norm1', nn.BatchNorm2d(num_input_features)),
self.add_module('relu1', nn.ReLU(inplace=True)),
self.add_module('conv1', nn.Conv2d(num_input_features, bn_size *
growth_rate, kernel_size=1, stride=1, bias=False)),
self.add_module('norm2', nn.BatchNorm2d(bn_size * growth_rate)),
self.add_module('relu2', nn.ReLU(inplace=True)),
self.add_module('conv2', nn.Conv2d(bn_size * growth_rate, growth_rate,
kernel_size=3, stride=1, padding=1, bias=False)),
self.drop_rate = drop_rate
def forward(self, x):
        # Inherit nn.Sequential's forward to run the stacked layers
new_features = super(_DenseLayer, self).forward(x)
if self.drop_rate > 0:
new_features = F.dropout(new_features, p=self.drop_rate, training=self.training)
return torch.cat([x, new_features], 1)
class _DenseBlock(nn.Sequential):
def __init__(self, num_layers, num_input_features, bn_size, growth_rate, drop_rate, if_selayer=False):
super(_DenseBlock, self).__init__()
for i in range(num_layers):
layer = _DenseLayer(num_input_features + i * growth_rate, growth_rate, bn_size, drop_rate)
self.add_module('denselayer%d' % (i + 1), layer)
# if if_selayer:
# self.add_module("selayer", SELayer(num_input_features + growth_rate*num_layers, reduction=16))
class _Transition(nn.Sequential):
def __init__(self, num_input_features, num_output_features, if_selayer=False):
super(_Transition, self).__init__()
self.add_module('norm', nn.BatchNorm2d(num_input_features))
self.add_module('relu', nn.ReLU(inplace=True))
self.add_module('conv', nn.Conv2d(num_input_features, num_output_features,
kernel_size=1, stride=1, bias=False))
self.add_module('pool', nn.AvgPool2d(kernel_size=2, stride=2))
if if_selayer:
# Squeeze-and-Excitation
self.add_module("selayer", SELayer(num_output_features))
class DenseNet(nn.Module):
r"""Densenet-BC model class, based on
`"Densely Connected Convolutional Networks" <https://arxiv.org/pdf/1608.06993.pdf>`_
Args:
growth_rate (int) - how many filters to add each layer (`k` in paper)
block_config (list of 4 ints) - how many layers in each pooling block
num_init_features (int) - the number of filters to learn in the first convolution layer
bn_size (int) - multiplicative factor for number of bottle neck layers
(i.e. bn_size * k features in the bottleneck layer)
drop_rate (float) - dropout rate after each dense layer
num_classes (int) - number of classification classes
"""
def __init__(self, growth_rate=32, block_config=(6, 12, 24, 16),
num_init_features=64, bn_size=4, drop_rate=0, num_classes=2,
mixup_hidden=False, if_selayer=False,
first_conv_stride=2, first_pool=True):
super(DenseNet, self).__init__()
self.mixup_hidden = mixup_hidden
self.num_classes = num_classes
self.se = if_selayer
self.first_pool = first_pool
# First convolution
if first_pool:
self.features = nn.Sequential(OrderedDict([
('conv0', nn.Conv2d(3, num_init_features, kernel_size=7, stride=first_conv_stride, padding=3, bias=False)),
('norm0', nn.BatchNorm2d(num_init_features)),
('relu0', nn.ReLU(inplace=True)),
('pool0', nn.MaxPool2d(kernel_size=3, stride=2, padding=1)),
]))
else:
self.features = nn.Sequential(OrderedDict([
('conv0', nn.Conv2d(3, num_init_features, kernel_size=7, stride=2, padding=3, bias=False)),
('norm0', nn.BatchNorm2d(num_init_features)),
('relu0', nn.ReLU(inplace=True)),
('conv1', nn.Conv2d(num_init_features, num_init_features, kernel_size=3, stride=2, padding=1, bias=False)),
('norm1', nn.BatchNorm2d(num_init_features)),
('relu1', nn.ReLU(inplace=True)),
]))
# Each denseblock
num_features = num_init_features
for i, num_layers in enumerate(block_config):
block = _DenseBlock(num_layers=num_layers, num_input_features=num_features,
bn_size=bn_size, growth_rate=growth_rate, drop_rate=drop_rate, if_selayer=self.se)
self.features.add_module('denseblock%d' % (i + 1), block)
num_features = num_features + num_layers * growth_rate
if i != len(block_config) - 1:
trans = _Transition(num_input_features=num_features, num_output_features=num_features // 2, if_selayer=self.se)
self.features.add_module('transition%d' % (i + 1), trans)
num_features = num_features // 2
# Final batch norm
self.features.add_module('norm5', nn.BatchNorm2d(num_features))
# Linear layer
self.classifier = nn.Linear(num_features, num_classes)
# Official init from torch repo.
# for m in self.modules():
# if isinstance(m, nn.Conv2d):
# nn.init.kaiming_normal_(m.weight)
# elif isinstance(m, nn.BatchNorm2d):
# nn.init.constant_(m.weight, 1)
# nn.init.constant_(m.bias, 0)
# elif isinstance(m, nn.Linear):
# nn.init.constant_(m.bias, 0)
for m in self.modules():
if isinstance(m, nn.Conv2d):
                # He (Kaiming) initialization
nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
elif isinstance(m, nn.BatchNorm2d):
nn.init.constant_(m.weight, 1)
nn.init.constant_(m.bias, 0)
def forward(self, x, lam=None, target=None, device=None):
def mixup_process(out, target_reweighted, lam):
            # target_reweighted is a one-hot vector of the original target class
            # target is the target class.
# shuffle indices of mini-batch
indices = np.random.permutation(out.size(0))
out = out*lam.expand_as(out) + out[indices]*(1-lam.expand_as(out))
target_shuffled_onehot = target_reweighted[indices]
target_reweighted = target_reweighted * lam.expand_as(target_reweighted) + target_shuffled_onehot * (1 - lam.expand_as(target_reweighted))
return out, target_reweighted
def to_one_hot(inp, num_classes):
y_onehot = torch.FloatTensor(inp.size(0), num_classes)
y_onehot.zero_()
y_onehot.scatter_(1, inp.unsqueeze(1).cpu(), 1)
return y_onehot.to(device)
# features = self.features(x)
if self.mixup_hidden:
layer_mix = np.random.randint(0,4)
else:
layer_mix = 0
if lam is not None:
target_reweighted = to_one_hot(target, self.num_classes)
out = x
if lam is not None and self.mixup_hidden and layer_mix == 0:
out, target_reweighted = mixup_process(out, target_reweighted, lam)
if self.first_pool:
out = self.features.pool0(self.features.relu0(self.features.norm0(self.features.conv0(out))))
else:
out = self.features.relu0(self.features.norm0(self.features.conv0(out)))
out = self.features.relu1(self.features.norm1(self.features.conv1(out)))
out = self.features.denseblock1(out)
out = self.features.transition1(out)
if lam is not None and self.mixup_hidden and layer_mix == 1:
out, target_reweighted = mixup_process(out, target_reweighted, lam)
out = self.features.denseblock2(out)
out = self.features.transition2(out)
if lam is not None and self.mixup_hidden and layer_mix == 2:
out, target_reweighted = mixup_process(out, target_reweighted, lam)
out = self.features.denseblock3(out)
out = self.features.transition3(out)
if lam is not None and self.mixup_hidden and layer_mix == 3:
out, target_reweighted = mixup_process(out, target_reweighted, lam)
out = self.features.denseblock4(out)
out = self.features.norm5(out)
out = F.relu(out, inplace=True)
# out = F.relu(features, inplace=True)
# out = F.adaptive_avg_pool2d(out, (1, 1)).view(features.size(0), -1)
out = F.adaptive_avg_pool2d(out, (1, 1)).view(out.size(0), -1)
out = self.classifier(out)
if lam is not None:
return out, target_reweighted
else:
return out
def densenet121(pretrained=False, if_mixup=False, if_selayer=False, first_conv_stride=2, first_pool=True, drop_rate=0.2):
r"""Densenet-121 model from
`"Densely Connected Convolutional Networks" <https://arxiv.org/pdf/1608.06993.pdf>`_
Args:
pretrained (bool): If True, returns a model pre-trained on ImageNet
"""
model = DenseNet(num_init_features=64, growth_rate=32, block_config=(6, 12, 24, 16), mixup_hidden=if_mixup, if_selayer=if_selayer,
drop_rate=drop_rate, first_conv_stride=first_conv_stride, first_pool=first_pool)
if pretrained:
        # '.'s are no longer allowed in module names, but previous _DenseLayer
# has keys 'norm.1', 'relu.1', 'conv.1', 'norm.2', 'relu.2', 'conv.2'.
# They are also in the checkpoints in model_urls. This pattern is used
# to find such keys.
pattern = re.compile(
r'^(.*denselayer\d+\.(?:norm|relu|conv))\.((?:[12])\.(?:weight|bias|running_mean|running_var))$')
state_dict = model_zoo.load_url(model_urls['densenet121'])
for key in list(state_dict.keys()):
res = pattern.match(key)
if res:
new_key = res.group(1) + res.group(2)
state_dict[new_key] = state_dict[key]
del state_dict[key]
model.load_state_dict(state_dict)
return model
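# Sketch: an un-pretrained DenseNet-121 with SE-augmented transitions and mixup
# disabled (hypothetical input size; num_classes defaults to 2 here):
# net = densenet121(pretrained=False, if_mixup=False, if_selayer=True)
# logits = net(torch.randn(4, 3, 224, 224))   # shape (4, 2)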
def densenet169(pretrained=False, if_mixup=False, if_selayer=False, first_conv_stride=2, first_pool=True):
r"""Densenet-169 model from
`"Densely Connected Convolutional Networks" <https://arxiv.org/pdf/1608.06993.pdf>`_
Args:
pretrained (bool): If True, returns a model pre-trained on ImageNet
"""
model = DenseNet(num_init_features=64, growth_rate=32, block_config=(6, 12, 32, 32), mixup_hidden=if_mixup, if_selayer=if_selayer,
drop_rate=0.2, first_conv_stride=first_conv_stride, first_pool=first_pool)
if pretrained:
        # '.'s are no longer allowed in module names, but previous _DenseLayer
# has keys 'norm.1', 'relu.1', 'conv.1', 'norm.2', 'relu.2', 'conv.2'.
# They are also in the checkpoints in model_urls. This pattern is used
# to find such keys.
pattern = re.compile(
r'^(.*denselayer\d+\.(?:norm|relu|conv))\.((?:[12])\.(?:weight|bias|running_mean|running_var))$')
state_dict = model_zoo.load_url(model_urls['densenet169'])
for key in list(state_dict.keys()):
res = pattern.match(key)
if res:
new_key = res.group(1) + res.group(2)
state_dict[new_key] = state_dict[key]
del state_dict[key]
model.load_state_dict(state_dict)
return model
def densenet201(pretrained=False, if_mixup=False, if_selayer=False, first_conv_stride=2, first_pool=True):
r"""Densenet-201 model from
`"Densely Connected Convolutional Networks" <https://arxiv.org/pdf/1608.06993.pdf>`_
Args:
pretrained (bool): If True, returns a model pre-trained on ImageNet
"""
model = DenseNet(num_init_features=64, growth_rate=32, block_config=(6, 12, 48, 32), mixup_hidden=if_mixup,
if_selayer=if_selayer, drop_rate=0.2, first_conv_stride=first_conv_stride, first_pool=first_pool)
if pretrained:
        # '.'s are no longer allowed in module names, but previous _DenseLayer
# has keys 'norm.1', 'relu.1', 'conv.1', 'norm.2', 'relu.2', 'conv.2'.
# They are also in the checkpoints in model_urls. This pattern is used
# to find such keys.
pattern = re.compile(
r'^(.*denselayer\d+\.(?:norm|relu|conv))\.((?:[12])\.(?:weight|bias|running_mean|running_var))$')
state_dict = model_zoo.load_url(model_urls['densenet201'])
for key in list(state_dict.keys()):
res = pattern.match(key)
if res:
new_key = res.group(1) + res.group(2)
state_dict[new_key] = state_dict[key]
del state_dict[key]
model.load_state_dict(state_dict)
return model
def densenet161(pretrained=False, mixup_hidden=False, **kwargs):
r"""Densenet-161 model from
`"Densely Connected Convolutional Networks" <https://arxiv.org/pdf/1608.06993.pdf>`_
Args:
pretrained (bool): If True, returns a model pre-trained on ImageNet
"""
model = DenseNet(num_init_features=96, growth_rate=48, block_config=(6, 12, 36, 24), mixup_hidden=mixup_hidden,
**kwargs)
if pretrained:
        # '.'s are no longer allowed in module names, but previous _DenseLayer
# has keys 'norm.1', 'relu.1', 'conv.1', 'norm.2', 'relu.2', 'conv.2'.
# They are also in the checkpoints in model_urls. This pattern is used
# to find such keys.
pattern = re.compile(
r'^(.*denselayer\d+\.(?:norm|relu|conv))\.((?:[12])\.(?:weight|bias|running_mean|running_var))$')
state_dict = model_zoo.load_url(model_urls['densenet161'])
for key in list(state_dict.keys()):
res = pattern.match(key)
if res:
new_key = res.group(1) + res.group(2)
state_dict[new_key] = state_dict[key]
del state_dict[key]
model.load_state_dict(state_dict)
return model
def dpn92(num_classes=2, if_selayer=False, if_mixup=False, first_conv_stride=2, first_pool=True):
return DPN(num_init_features=64, k_R=96, G=32, k_sec=(3,4,20,3), inc_sec=(16,32,24,128), num_classes=num_classes,
if_selayer=if_selayer, if_mixup=if_mixup, first_conv_stride=first_conv_stride, first_pool=first_pool)
def dpn98(num_classes=2, if_selayer=False, if_mixup=False, first_conv_stride=2, first_pool=True):
return DPN(num_init_features=96, k_R=160, G=40, k_sec=(3,6,20,3), inc_sec=(16,32,32,128), num_classes=num_classes,
if_selayer=if_selayer, if_mixup=if_mixup, first_conv_stride=first_conv_stride, first_pool=first_pool)
def dpn131(num_classes=2, if_selayer=False, if_mixup=False, first_conv_stride=2, first_pool=True):
return DPN(num_init_features=128, k_R=160, G=40, k_sec=(4,8,28,3), inc_sec=(16,32,32,128), num_classes=num_classes,
if_selayer=if_selayer, if_mixup=if_mixup, first_conv_stride=first_conv_stride, first_pool=first_pool)
def dpn107(num_classes=2, if_selayer=False, if_mixup=False, first_conv_stride=2, first_pool=True):
return DPN(num_init_features=128, k_R=200, G=50, k_sec=(4,8,20,3), inc_sec=(20,64,64,128), num_classes=num_classes,
if_selayer=if_selayer, if_mixup=if_mixup, first_conv_stride=first_conv_stride, first_pool=first_pool)
class DualPathBlock(nn.Module):
def __init__(self, in_chs, num_1x1_a, num_3x3_b, num_1x1_c, increase, Groups, _type='normal', if_selayer=False):
super(DualPathBlock, self).__init__()
self.num_1x1_c = num_1x1_c
self.increase = increase
        if _type == 'proj':
            key_stride = 1
            self.has_proj = True
        if _type == 'down':
            key_stride = 2
            self.has_proj = True
        if _type == 'normal':
            key_stride = 1
            self.has_proj = False
if self.has_proj:
self.c1x1_w = self.BN_ReLU_Conv(in_chs=in_chs, out_chs=num_1x1_c+2*increase, kernel_size=1, stride=key_stride)
if not if_selayer:
self.layers = nn.Sequential(OrderedDict([
('c1x1_a', self.BN_ReLU_Conv(in_chs=in_chs, out_chs=num_1x1_a, kernel_size=1, stride=1)),
('c3x3_b', self.BN_ReLU_Conv(in_chs=num_1x1_a, out_chs=num_3x3_b, kernel_size=3, stride=key_stride, padding=1, groups=Groups)),
('c1x1_c', self.BN_ReLU_Conv(in_chs=num_3x3_b, out_chs=num_1x1_c+increase, kernel_size=1, stride=1))
]))
else:
self.layers = nn.Sequential(OrderedDict([
('c1x1_a', self.BN_ReLU_Conv(in_chs=in_chs, out_chs=num_1x1_a, kernel_size=1, stride=1)),
('c3x3_b', self.BN_ReLU_Conv(in_chs=num_1x1_a, out_chs=num_3x3_b, kernel_size=3, stride=key_stride, padding=1, groups=Groups)),
('c1x1_c', self.BN_ReLU_Conv(in_chs=num_3x3_b, out_chs=num_1x1_c+increase, kernel_size=1, stride=1)),
('se_layer', SELayer(num_1x1_c+increase))
]))
def BN_ReLU_Conv(self, in_chs, out_chs, kernel_size, stride, padding=0, groups=1):
return nn.Sequential(OrderedDict([
('norm', nn.BatchNorm2d(in_chs)),
('relu', nn.ReLU(inplace=True)),
('conv', nn.Conv2d(in_chs, out_chs, kernel_size, stride, padding, groups=groups, bias=False)),
]))
def forward(self, x):
data_in = torch.cat(x, dim=1) if isinstance(x, list) else x
if self.has_proj:
data_o = self.c1x1_w(data_in)
data_o1 = data_o[:, :self.num_1x1_c, :, :]
data_o2 = data_o[:, self.num_1x1_c:, :, :]
else:
data_o1 = x[0]
data_o2 = x[1]
out = self.layers(data_in)
summ = data_o1 + out[:, :self.num_1x1_c, :, :]
dense = torch.cat([data_o2, out[:, self.num_1x1_c:, :, :]], dim=1)
return [summ, dense]
class DPN(nn.Module):
def __init__(self, num_init_features=64, k_R=96, G=32,
k_sec=(3, 4, 20, 3), inc_sec=(16,32,24,128) #DPN-92
, num_classes=2, if_selayer=False, if_mixup=False,
first_conv_stride=2, first_pool=True):
super(DPN, self).__init__()
self.mixup_hidden=if_mixup
self.num_classes = num_classes
self.first_pool = first_pool
blocks = OrderedDict()
# conv1
if first_pool:
blocks['conv1'] = nn.Sequential(
nn.Conv2d(3, num_init_features, kernel_size=7, stride=first_conv_stride, padding=3, bias=False),
nn.BatchNorm2d(num_init_features),
nn.ReLU(inplace=True),
nn.MaxPool2d(kernel_size=3, stride=2, padding=1),
)
else:
blocks['conv1'] = nn.Sequential(
nn.Conv2d(3, num_init_features, kernel_size=7, stride=first_conv_stride, padding=3, bias=False),
nn.BatchNorm2d(num_init_features),
nn.ReLU(inplace=True),
nn.Conv2d(num_init_features, num_init_features, kernel_size=3, stride=2, padding=1, bias=False),
nn.BatchNorm2d(num_init_features),
nn.ReLU(inplace=True),
)
# conv2
bw = 256
inc = inc_sec[0]
R = int((k_R*bw)/256)
blocks['conv2_1'] = DualPathBlock(num_init_features, R, R, bw, inc, G, 'proj', if_selayer=False)
in_chs = bw + 3 * inc
for i in range(2, k_sec[0]+1):
if i == k_sec[0]:
blocks['conv2_{}'.format(i)] = DualPathBlock(in_chs, R, R, bw, inc, G, 'normal', if_selayer=if_selayer)
else:
blocks['conv2_{}'.format(i)] = DualPathBlock(in_chs, R, R, bw, inc, G, 'normal', if_selayer=False)
in_chs += inc
# conv3
bw = 512
inc = inc_sec[1]
R = int((k_R*bw)/256)
blocks['conv3_1'] = DualPathBlock(in_chs, R, R, bw, inc, G, 'down', if_selayer=False)
in_chs = bw + 3 * inc
for i in range(2, k_sec[1]+1):
if i == k_sec[1]:
blocks['conv3_{}'.format(i)] = DualPathBlock(in_chs, R, R, bw, inc, G, 'normal', if_selayer=if_selayer)
else:
blocks['conv3_{}'.format(i)] = DualPathBlock(in_chs, R, R, bw, inc, G, 'normal', if_selayer=False)
in_chs += inc
# conv4
bw = 1024
inc = inc_sec[2]
R = int((k_R*bw)/256)
blocks['conv4_1'] = DualPathBlock(in_chs, R, R, bw, inc, G, 'down', if_selayer=False)
in_chs = bw + 3 * inc
for i in range(2, k_sec[2]+1):
if i == k_sec[2]:
blocks['conv4_{}'.format(i)] = DualPathBlock(in_chs, R, R, bw, inc, G, 'normal', if_selayer=if_selayer)
else:
blocks['conv4_{}'.format(i)] = DualPathBlock(in_chs, R, R, bw, inc, G, 'normal', if_selayer=False)
in_chs += inc
# conv5
bw = 2048
inc = inc_sec[3]
R = int((k_R*bw)/256)
blocks['conv5_1'] = DualPathBlock(in_chs, R, R, bw, inc, G, 'down', if_selayer=False)
in_chs = bw + 3 * inc
for i in range(2, k_sec[3]+1):
if i == k_sec[3]:
blocks['conv5_{}'.format(i)] = DualPathBlock(in_chs, R, R, bw, inc, G, 'normal', if_selayer=if_selayer)
else:
blocks['conv5_{}'.format(i)] = DualPathBlock(in_chs, R, R, bw, inc, G, 'normal', if_selayer=False)
in_chs += inc
self.conv2_block = nn.Sequential()
for i in range(1, k_sec[0]+1):
self.conv2_block.add_module("conv2_{}".format(i), blocks['conv2_{}'.format(i)])
self.conv3_block = nn.Sequential()
for i in range(1, k_sec[1]+1):
self.conv3_block.add_module("conv3_{}".format(i), blocks['conv3_{}'.format(i)])
self.conv4_block = nn.Sequential()
for i in range(1, k_sec[2]+1):
self.conv4_block.add_module("conv4_{}".format(i), blocks['conv4_{}'.format(i)])
self.conv5_block = nn.Sequential()
for i in range(1, k_sec[3]+1):
self.conv5_block.add_module("conv5_{}".format(i), blocks['conv5_{}'.format(i)])
self.features = nn.Sequential(blocks)
self.classifier = nn.Linear(in_chs, num_classes)
def forward(self, x, lam=None, target=None):
def mixup_process(out, target_reweighted, lam):
            # target_reweighted is a one-hot vector
            # target is the target class.
if isinstance(out, list):
threshold = out[0].size(1)
out = torch.cat(out, dim=1)
# shuffle indices of mini-batch
indices = np.random.permutation(out.size(0))
out = out*lam.expand_as(out) + out[indices]*(1-lam.expand_as(out))
target_shuffled_onehot = target_reweighted[indices]
target_reweighted = target_reweighted * lam.expand_as(target_reweighted) + target_shuffled_onehot * (1 - lam.expand_as(target_reweighted))
if isinstance(out, list):
out = [out[:, :threshold, :, :], out[:, threshold:, :, :]]
return out, target_reweighted
def to_one_hot(inp, num_classes):
y_onehot = torch.FloatTensor(inp.size(0), num_classes)
y_onehot.zero_()
y_onehot.scatter_(1, inp.unsqueeze(1).cpu(), 1)
return y_onehot.to("cuda:0")
if lam is None:
features = torch.cat(self.features(x), dim=1)
out = F.avg_pool2d(features, kernel_size=7).view(features.size(0), -1)
out = self.classifier(out)
return out
else:
layer_mix = np.random.randint(0,4)
if lam is not None:
target_reweighted = to_one_hot(target, self.num_classes)
out = x
if lam is not None and layer_mix == 0:
out, target_reweighted = mixup_process(out, target_reweighted, lam)
out = self.features.conv1(out)
out = self.conv2_block(out)
if lam is not None and layer_mix == 1:
out, target_reweighted = mixup_process(out, target_reweighted, lam)
out = self.conv3_block(out)
if lam is not None and layer_mix == 2:
out, target_reweighted = mixup_process(out, target_reweighted, lam)
out = self.conv4_block(out)
if lam is not None and layer_mix == 3:
out, target_reweighted = mixup_process(out, target_reweighted, lam)
out = self.conv5_block(out)
features = torch.cat(out, dim=1)
out = F.avg_pool2d(features, kernel_size=7).view(features.size(0), -1)
out = self.classifier(out)
return out, target_reweighted
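# Sketch: the DPN head uses a fixed avg_pool2d(kernel_size=7), so inputs should
# reduce to a 7x7 feature map (e.g. 3x224x224 with the default strides):
# net = dpn92(num_classes=2)
# logits = net(torch.randn(2, 3, 224, 224))   # shape (2, 2)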
## WideResNet
# https://github.com/xternalz/WideResNet-pytorch/blob/master/wideresnet.py
def conv3x3(in_planes, out_planes, stride=1):
return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride, padding=1, bias=True)
def conv_init(m):
    classname = m.__class__.__name__
    if classname.find('Conv') != -1:
        init.xavier_uniform_(m.weight, gain=np.sqrt(2))
        init.constant_(m.bias, 0)
    elif classname.find('BatchNorm') != -1:
        init.constant_(m.weight, 1)
        init.constant_(m.bias, 0)
def conv3x3(in_planes, out_planes, stride=1):
"""3x3 convolution with padding"""
return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride,
padding=1, bias=False)
def conv1x1(in_planes, out_planes, stride=1):
"""1x1 convolution"""
return nn.Conv2d(in_planes, out_planes, kernel_size=1, stride=stride, bias=False)
|
from dps.hyper import run_experiment
from dps.utils import copy_update
from silot.run import basic_config, alg_configs, env_configs
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('--max-digits', type=int, choices=[6, 12], required=True)
args, _ = parser.parse_known_args()
distributions = dict(
final_count_prior_log_odds=[0.0125, 0.025, 0.05, 0.1],
stage_steps=[5000, 10000, 20000, 40000]
)
readme = "Running SILOT experiment on moving_mnist."
run_kwargs = dict(
max_hosts=2, ppn=8, cpp=2, gpu_set="0,1,2,3", pmem=10000, project="rpp-bengioy",
wall_time="71hours", cleanup_time="5mins", slack_time="5mins", n_repeats=1,
copy_locally=True, config=dict(max_steps=200000, render_step=1000000)
)
durations = dict(
long=copy_update(run_kwargs),
medium=copy_update(
run_kwargs,
wall_time="6hours", config=dict(stage_steps=3000, max_steps=12000),
),
short=dict(
wall_time="180mins", gpu_set="0", ppn=4, n_repeats=4, distributions=None,
config=dict(max_steps=3000, render_step=500, eval_step=100, display_step=100, stage_steps=600, curriculum=[dict()]),
),
build=dict(
ppn=1, cpp=1, gpu_set="0", wall_time="60mins", n_repeats=1, distributions=None,
config=dict(do_train=False, render_first=False, render_final=False),
),
)
config = basic_config.copy()
config.update(env_configs['moving_mnist'])
config.update(alg_configs['silot'], max_digits=args.max_digits)
run_experiment(
"moving_mnist_silot",
config, "silot on moving_mnist.",
name_variables="max_digits",
distributions=distributions,
durations=durations
)
|
from django.db import models
from db.base_model import BaseModel
from tinymce.models import HTMLField
# Create your models here.
class GoodsType(BaseModel):
name = models.CharField(max_length=20)
logo = models.CharField(max_length=20)
image = models.ImageField(upload_to='type')
def __str__(self):
return self.name
class Meta:
db_table = 'fs_goods_type'
class GoodsSKU(BaseModel):
STATUS_CHOICES = (
(0, 'Unavailable'),
(1, 'Available'),
)
    # on_delete must be a Django deletion handler (booleans are not valid).
    type = models.ForeignKey('GoodsType', on_delete=models.DO_NOTHING)
    goods = models.ForeignKey('Goods', on_delete=models.CASCADE)
name = models.CharField(max_length=20)
desc = models.CharField(max_length=256)
price = models.DecimalField(max_digits=10, decimal_places=2)
unit = models.CharField(max_length=20)
image = models.ImageField(upload_to='goods')
stock = models.IntegerField(default=1)
sales_volume = models.IntegerField(default=0)
status = models.SmallIntegerField(default=1, choices=STATUS_CHOICES)
class Meta:
db_table = 'fs_goods_sku'
class Goods(BaseModel):
name = models.CharField(max_length=20)
detail = HTMLField(blank=True)
class Meta:
db_table = 'fs_goods'
class GoodsImage(BaseModel):
    sku = models.ForeignKey('GoodsSKU', on_delete=models.CASCADE)
image = models.ImageField(upload_to='goods')
class Meta:
db_table = 'fs_goods_image'
class IndexGoodsBanner(BaseModel):
    sku = models.ForeignKey('GoodsSKU', on_delete=models.CASCADE)
image = models.ImageField(upload_to='banner')
index = models.SmallIntegerField(default=0)
class Meta:
db_table = 'fs_index_banner'
class IndexTypeGoodsBanner(BaseModel):
DISPLAY_TYPE_CHOICES = (
(0, "title"),
(1, "image")
)
    type = models.ForeignKey('GoodsType', on_delete=models.CASCADE)
    sku = models.ForeignKey('GoodsSKU', on_delete=models.CASCADE)
display_type = models.SmallIntegerField(default=1, choices=DISPLAY_TYPE_CHOICES)
index = models.SmallIntegerField(default=0)
class Meta:
db_table = 'fs_index_type_goods'
class IndexPromotionBanner(BaseModel):
name = models.CharField(max_length=20)
url = models.CharField(max_length=256)
image = models.ImageField(upload_to='banner')
index = models.SmallIntegerField(default=0)
class Meta:
db_table = 'fs_index_promotion'
|
import torch
import torch.nn as nn
import numpy as np
import torchvision
import torch.nn.functional as F
import math
import copy
import collections
from pytorchcv.model_provider import get_model as ptcv_get_model
from pytorchcv.models.common import conv3x3_block
import pretrainedmodels
class Flatten(nn.Module):
def forward(self, input):
return input.view(input.size(0), -1)
def l2_norm(input,axis=1):
norm = torch.norm(input,2,axis,True)
output = torch.div(input, norm)
return output
class Window(nn.Module):
def forward(self, x):
return torch.clamp(x,0,1)
class ArcMarginProduct(nn.Module):
r"""Implement of large margin arc distance: :
Args:
in_features: size of each input sample
out_features: size of each output sample
s: norm of input feature
m: margin
cos(theta + m)
"""
def __init__(self, in_features, out_features,weights=None):
super(ArcMarginProduct, self).__init__()
if weights is None:
self.weight = nn.Parameter(torch.FloatTensor(out_features, in_features))
self.reset_parameters()
else:
self.weight = nn.Parameter(weights)
def reset_parameters(self):
stdv = 1. / math.sqrt(self.weight.size(1))
self.weight.data.uniform_(-stdv, stdv)
# self.k.data=torch.ones(1,dtype=torch.float)
def forward(self, features):
cosine = F.linear(l2_norm(features), l2_norm(self.weight))
return cosine
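# Sketch: the layer returns cosine similarities between L2-normalized features and
# class weights (logits in [-1, 1]); the angular margin m and scale s from the
# docstring are applied outside this module, typically in the loss:
# head = ArcMarginProduct(in_features=512, out_features=10)
# cosines = head(torch.randn(4, 512))          # shape (4, 10), values in [-1, 1]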
class ArcClassifier(nn.Module):
def __init__(self,in_features, out_features,weights=None):
super(ArcClassifier, self).__init__()
self.classifier = ArcMarginProduct(in_features, out_features,weights=weights)
self.dropout1=nn.Dropout(p=0.5, inplace=True)
def forward(self, x,eq):
out = self.dropout1(x-eq)
out = self.classifier(out)
return out
def no_grad(self):
for param in self.parameters():
param.requires_grad=False
def do_grad(self):
for param in self.parameters():
param.requires_grad=True
class MyDenseNet(nn.Module):
def __init__(self,model,
num_classes,
num_channels=1,
strategy='copy',
add_noise=0.,
drop_out=0.5,
arcface=False,
return_features=False,
norm=False,
intermediate=0,
extra_pool=1,
pool_type='avg',
wso=None,
dont_do_grad=['wso'],
do_bn=False):
super(MyDenseNet, self).__init__()
self.features= torch.nn.Sequential()
self.num_channels=num_channels
self.dont_do_grad=dont_do_grad
self.pool_type=pool_type
self.norm=norm
self.return_features=return_features
self.num_classes=num_classes
self.extra_pool=extra_pool
if wso is not None:
conv_ = nn.Conv2d(1,self.num_channels, kernel_size=(1, 1))
if hasattr(wso, '__iter__'):
conv_.weight.data.copy_(torch.tensor([[[[1./wso[0][1]]]],[[[1./wso[1][1]]]],[[[1./wso[2][1]]]]]))
conv_.bias.data.copy_(torch.tensor([0.5 - wso[0][0]/wso[0][1],
0.5 - wso[1][0]/wso[1][1],
0.5 -wso[2][0]/wso[2][1]]))
self.features.add_module('wso_conv',conv_)
self.features.add_module('wso_window',nn.Sigmoid())
if do_bn:
self.features.add_module('wso_norm',nn.BatchNorm2d(self.num_channels))
else:
self.features.add_module('wso_norm',nn.InstanceNorm2d(self.num_channels))
if (strategy == 'copy') or (num_channels!=3):
base = list(list(model.children())[0].named_children())[1:]
conv0 = model.state_dict()['features.conv0.weight']
new_conv=nn.Conv2d(self.num_channels, conv0.shape[0], kernel_size=(7, 7), stride=(2, 2), padding=(3, 3), bias=False)
            a=(np.arange(3*(self.num_channels//3+1),dtype=int)%3)
np.random.shuffle(a)
for i in range(self.num_channels):
new_conv.state_dict()['weight'][:,i,:,:]=conv0.clone()[:,a[i],:,:]*(1.0+torch.randn_like(conv0[:,a[i],:,:])*add_noise)
self.features.add_module('conv0',new_conv)
else:
base = list(list(model.children())[0].named_children())
for (n,l) in base:
self.features.add_module(n,l)
if intermediate==0:
self.num_features=list(model.children())[-1].in_features
self.intermediate=None
else:
self.num_features=intermediate
self.intermediate=nn.Linear(list(model.children())[-1].in_features, self.num_features)
self.dropout1=nn.Dropout(p=drop_out, inplace=True)
if arcface:
self.classifier=ArcMarginProduct(self.num_features, num_classes)
else:
self.classifier = nn.Linear(self.num_features//self.extra_pool, self.num_classes)
def forward(self, x):
x = self.features(x)
x = F.relu(x, inplace=True)
if self.pool_type=='avg':
x = F.avg_pool3d(x.unsqueeze(1), kernel_size=(self.extra_pool,)+x.size()[2:]).view(x.size(0), -1)
else:
x = F.max_pool3d(x.unsqueeze(1), kernel_size=(self.extra_pool,)+x.size()[2:]).view(x.size(0), -1)
# x = F.max_pool1d(x.view(x.unsqueeze(1),self.extra_pool).squeeze()
x = self.dropout1(x)
if self.intermediate is not None:
x = self.intermediate(x)
x = F.relu(x)
features = x
if self.norm:
features = l2_norm(features,axis=1)
out = self.classifier(features)
return out if not self.return_features else (out,features)
def parameter_scheduler(self,epoch):
do_first=['classifier','wso']
if epoch>0:
for n,p in self.named_parameters():
p.requires_grad=True
else:
for n,p in self.named_parameters():
p.requires_grad= any(nd in n for nd in do_first)
def no_grad(self):
for param in self.parameters():
param.requires_grad=False
def do_grad(self):
for n,p in self.named_parameters():
p.requires_grad= not any(nd in n for nd in self.dont_do_grad)
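# Sketch of the "wso" (window settings optimization) front end used above: each
# (center, width) pair becomes a 1x1 conv with weight 1/width and bias
# 0.5 - center/width followed by a sigmoid, i.e. x -> sigmoid((x - center)/width + 0.5).
# Hypothetical CT window values (assumptions, not taken from this file):
# wso_windows = [(40, 80), (80, 200), (600, 2800)]
# net = MyDenseNet(torchvision.models.densenet121(), num_classes=6,
#                  num_channels=3, wso=wso_windows)
# logits = net(torch.randn(2, 1, 256, 256))    # single-channel input, windowed to 3 channels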
class MySENet(nn.Module):
def __init__(self,model,
num_classes,
num_channels=3,
dropout=0.2,
return_features=False,
wso=None,
full_copy=False,
dont_do_grad=['wso'],
extra_pool=1,
do_bn=False):
super(MySENet, self).__init__()
self.num_classes=num_classes
self.return_features=return_features
self.num_channels = num_channels
self.features= torch.nn.Sequential()
self.extra_pool=extra_pool
self.dont_do_grad=dont_do_grad
if full_copy:
for (n,l) in list(list(model.children())[0].named_children()):
self.features.add_module(n,l)
if wso is not None:
self.dont_do_grad=model.dont_do_grad
else:
if wso is not None:
conv_ = nn.Conv2d(1,self.num_channels, kernel_size=(1, 1))
if hasattr(wso, '__iter__'):
conv_.weight.data.copy_(torch.tensor([[[[1./wso[0][1]]]],[[[1./wso[1][1]]]],[[[1./wso[2][1]]]]]))
conv_.bias.data.copy_(torch.tensor([0.5 - wso[0][0]/wso[0][1],
0.5 - wso[1][0]/wso[1][1],
0.5 -wso[2][0]/wso[2][1]]))
self.features.add_module('wso_conv',conv_)
self.features.add_module('wso_relu',nn.Sigmoid())
if do_bn:
self.features.add_module('wso_norm',nn.BatchNorm2d(self.num_channels))
else:
self.features.add_module('wso_norm',nn.InstanceNorm2d(self.num_channels))
# layer0= torch.nn.Sequential()
# layer0.add_module('conv1',model.conv1)
# layer0.add_module('bn1',model.bn1)
se_layers={'layer0':model.layer0,
'layer1':model.layer1,
'layer2':model.layer2,
'layer3':model.layer3,
'layer4':model.layer4}
for key in se_layers:
self.features.add_module(key,se_layers[key])
self.dropout = dropout if dropout is None else nn.Dropout(p=dropout, inplace=True)
self.classifier=nn.Linear(model.last_linear.in_features//self.extra_pool, self.num_classes)
def forward(self, x):
x = self.features(x)
x = F.max_pool3d(x.unsqueeze(1), kernel_size=(self.extra_pool,)+x.size()[2:]).view(x.size(0), -1)
if self.dropout is not None:
x = self.dropout(x)
features = x
out = self.classifier(features)
return out if not self.return_features else (out,features)
def parameter_scheduler(self,epoch):
do_first=['classifier']
if epoch>0:
for n,p in self.named_parameters():
p.requires_grad=True
else:
for n,p in self.named_parameters():
p.requires_grad= any(nd in n for nd in do_first)
def no_grad(self):
for param in self.parameters():
param.requires_grad=False
def do_grad(self):
for n,p in self.named_parameters():
p.requires_grad= not any(nd in n for nd in self.dont_do_grad)
class MyEfficientNet(nn.Module):
def __init__(self,model,num_classes,num_channels=3,dropout=0.5,return_features=False,wso=True,
full_copy=False,
dont_do_grad=['wso'],
extra_pool=1,
num_features=None):
super(MyEfficientNet, self).__init__()
self.num_classes=num_classes
self.return_features=return_features
self.num_channels = num_channels
self.features= torch.nn.Sequential()
self.extra_pool=extra_pool
self.dont_do_grad=dont_do_grad
if full_copy:
for (n,l) in list(list(model.children())[0].named_children()):
self.features.add_module(n,l)
if wso is not None:
self.dont_do_grad=model.dont_do_grad
else:
if wso is not None:
conv_ = nn.Conv2d(1,self.num_channels, kernel_size=(1, 1))
if hasattr(wso, '__iter__'):
conv_.weight.data.copy_(torch.tensor([[[[1./wso[0][1]]]],[[[1./wso[1][1]]]],[[[1./wso[2][1]]]]]))
conv_.bias.data.copy_(torch.tensor([0.5 - wso[0][0]/wso[0][1],
0.5 - wso[1][0]/wso[1][1],
0.5 -wso[2][0]/wso[2][1]]))
self.features.add_module('wso_conv',conv_)
self.features.add_module('wso_relu',nn.Sigmoid())
self.features.add_module('wso_norm',nn.InstanceNorm2d(self.num_channels))
for (n,l) in list(list(model.children())[0].named_children()):
self.features.add_module(n,l)
self.dropout = dropout if dropout is None else nn.Dropout(p=dropout, inplace=True)
if num_features is None:
self.classifier=nn.Linear(model.output.fc.in_features//self.extra_pool, self.num_classes)
else:
self.classifier=nn.Linear(num_features, self.num_classes)
def forward(self, x):
x = self.features(x)
x = F.avg_pool2d(x, kernel_size=x.size(-1)).view(x.size(0), -1)
if self.extra_pool>1:
x = x.view(x.shape[0],x.shape[1]//self.extra_pool,self.extra_pool).mean(-1)
if self.dropout is not None:
x = self.dropout(x)
features = x
out = self.classifier(features)
return out if not self.return_features else (out,features)
def parameter_scheduler(self,epoch):
do_first=['classifier']
if epoch>0:
for n,p in self.named_parameters():
p.requires_grad=True
else:
for n,p in self.named_parameters():
p.requires_grad= any(nd in n for nd in do_first)
def no_grad(self):
for param in self.parameters():
param.requires_grad=False
def do_grad(self):
for n,p in self.named_parameters():
p.requires_grad= not any(nd in n for nd in self.dont_do_grad)
class NeighborsNet(nn.Module):
def __init__(self,num_classes,num_features=1024,num_neighbors=1,classifier_layer=None,intermidiate=None,dropout=0.2):
super(NeighborsNet, self).__init__()
self.num_classes=num_classes
if classifier_layer is not None:
self.num_features = classifier_layer.in_features
else:
self.num_features=num_features
self.num_neighbors=num_neighbors
layers=collections.OrderedDict()
if dropout>0:
layers['dropout']=nn.Dropout(p=dropout)
if intermidiate is not None:
layers['intermidiate']=nn.Linear(self.num_features*(2*self.num_neighbors+1), intermidiate)
layers['relu']=nn.ReLU()
layers['classifier']=nn.Linear(intermidiate, self.num_classes)
else:
layers['classifier']=nn.Linear(self.num_features*(2*self.num_neighbors+1), self.num_classes)
if (classifier_layer is not None) and (intermidiate is None):
_=layers['classifier'].bias.data.copy_((1.0+0.2*self.num_neighbors)*classifier_layer.bias.data)
d = torch.cat([0.1*classifier_layer.weight.data for i in range(self.num_neighbors)]+\
[classifier_layer.weight.data]+\
[0.1*classifier_layer.weight.data for i in range(self.num_neighbors)],dim=1)
_=layers['classifier'].weight.data.copy_(d)
self.network= torch.nn.Sequential(layers)
def forward(self, x):
x = x.view((x.shape[0],-1))
return self.network(x)
def parameter_scheduler(self,epoch):
do_first=['classifier']
if epoch>0:
for n,p in self.named_parameters():
p.requires_grad=True
else:
for n,p in self.named_parameters():
p.requires_grad= any(nd in n for nd in do_first)
def no_grad(self):
for param in self.parameters():
param.requires_grad=False
def do_grad(self):
for param in self.parameters():
param.requires_grad=True
def mean_model(models):
model = copy.deepcopy(models[0])
params=[]
for model_ in models:
params.append(dict(model_.named_parameters()))
param_dict=dict(model.named_parameters())
for name in param_dict.keys():
_=param_dict[name].data.copy_(torch.cat([param[name].data[...,None] for param in params],-1).mean(-1))
return model |
"""
``goless`` introduces go-like channels and select to Python,
built on top of Stackless Python (and maybe one day gevent).
Use :func:`goless.chan` to create a synchronous or buffered channel.
Use :func:`goless.select` like you would the ``Select`` function in Go's reflect package
(since Python lacks a switch/case statement, replicating Go's select statement syntax
wasn't very effective).
"""
import logging
import sys
import traceback
from .backends import current as _be
# noinspection PyUnresolvedReferences
from .channels import chan, ChannelClosed
# noinspection PyUnresolvedReferences
from .selecting import dcase, rcase, scase, select
version_info = 0, 0, 1
version = '.'.join([str(v) for v in version_info])
def on_panic(etype, value, tb):
"""
Called when there is an unhandled error in a goroutine.
By default, logs and exits the process.
"""
logging.critical(traceback.format_exception(etype, value, tb))
_be.propagate_exc(SystemExit, 1)
def go(func, *args, **kwargs):
"""
Run a function in a new tasklet, like a goroutine.
If the goroutine raises an unhandled exception (*panics*),
the :func:`goless.on_panic` will be called,
which by default logs the error and exits the process.
:param args: Positional arguments to ``func``.
:param kwargs: Keyword arguments to ``func``.
"""
def safe_wrapped(f):
# noinspection PyBroadException
try:
f(*args, **kwargs)
except:
on_panic(*sys.exc_info())
_be.start(safe_wrapped, func)
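# Minimal usage sketch (requires a working goless backend, i.e. Stackless Python
# or gevent; channel/select usage as described in the module docstring):
# def worker(results):
#     results.send(6 * 7)
# results = chan()        # synchronous channel
# go(worker, results)     # run worker like a goroutine
# print(results.recv())   # -> 42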
|
COLUMNS=['ts', 'curr_pid', 'pid', 'src_cpu', 'dst_cpu', 'imbalance',
'src_len', 'src_numa_len', 'src_preferred_len',
'delta', 'cpu_idle', 'cpu_not_idle', 'cpu_newly_idle',
'same_node', 'prefer_src', 'prefer_dst',
'delta_faults', 'total_faults',
'dst_len', 'src_load', 'dst_load',
'nr_fails', 'cache_nice_tries', 'buddy_hot',
'throttled', 'p_running',
'test_aggressive',
'can_migrate',
'pc_0', 'pc_1']
|
# The MIT License (MIT)
# Copyright (c) 2017 Massachusetts Institute of Technology
#
# Authors: Victor Pankratius, Justin Li, Cody Rude
# This software has been created in projects supported by the US National
# Science Foundation and NASA (PI: Pankratius)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# Skdaccess imports
from skdaccess.framework.data_class import DataFetcherCache
from skdaccess.framework.param_class import *
from skdaccess.geo.mahali.rinex.data_wrapper import DataWrapper
from pkg_resources import resource_filename
from skdaccess.utilities.mahali_util import convert_date
# Standard library imports
from glob import glob
import shutil
import os
from six.moves.urllib.request import urlopen
import sys
# 3rd party imports
from tqdm import tqdm
import pandas as pd
class DataFetcher(DataFetcherCache):
'''
Data Fetcher for Mahali Data
'''
def __init__(self, ap_paramList=[], start_date=None, end_date=None, generate_links = False):
'''
Initialize Mahali Data Fetcher
@param ap_paramList[stations]: Autolist of stations (Defaults to all stations)
        @param start_date: Starting date for selecting data (Defaults to beginning of available data)
@param end_date: Ending date for selecting data (Defaults to end of available data)
@param generate_links: Generate links to data instead of downloading data
'''
if start_date == None:
self.start_date = pd.to_datetime('2015232', format='%Y%j')
else:
self.start_date = convert_date(start_date)
if end_date == None:
self.end_date = pd.to_datetime('2015314', format='%Y%j')
else:
self.end_date = convert_date(end_date)
self.date_range = pd.date_range(self.start_date, self.end_date)
if len(ap_paramList) == 0:
station_list = [
'mh02',
'mh03',
'mh04',
'mh05',
'mh06',
'mh07',
'mh08',
'mh09',
'mh13',
]
ap_paramList = [ AutoList(station_list) ]
self.generate_links = generate_links
super(DataFetcher, self).__init__(ap_paramList)
def cacheData(self):
'''
Downloads all needed data. Called by output().
'''
station_list = self.ap_paramList[0]()
remote_location = '/data/mahali_UAF_data/cloud/rinex/obs'
day_list = []
start_year = self.start_date.strftime('%Y')
start_day = self.start_date.strftime('%j')
end_year = self.end_date.strftime('%Y')
end_date = self.end_date.strftime('%j')
data_list = pd.DataFrame(columns=['Site','Date'])
# Get a list of all data that needs to be loaded
mahali_data_info_location = resource_filename('skdaccess',os.path.join('support','mahali_data_info.hdf'))
for station in station_list:
try:
available_dates = pd.read_hdf(mahali_data_info_location, station)
except KeyError:
                print('Unknown station:', station)
                continue
common_dates = list(set(self.date_range).intersection(set(available_dates)))
common_dates.sort()
data_list = pd.concat([data_list, pd.DataFrame({'Site':station,'Date':common_dates})])
# Get a list of all needed filenames
data_list_obs = data_list.Site + data_list.Date.apply(lambda x: x.strftime('%j0.%yo'))
data_list_nav = data_list.Site + data_list.Date.apply(lambda x: x.strftime('%j0.%yn'))
data_set_filenames = set(pd.concat([data_list_obs, data_list_nav]))
# Get locations of all files to download
def getFileLocation(in_file):
day = in_file[4:7]
if in_file[-1] == 'n':
data_folder = 'nav'
elif in_file[-1] == 'o':
data_folder = 'obs'
else:
raise ValueError('Could not parse in_file')
return 'rinex/' + data_folder + '/2015/' + day + '/' + in_file
# Key function to sort rinex files by date, then
# station, then type (NAV or OBS)
key_func = lambda x: x[-3:-1] + x[-8:-5] + x[-12:-8] + x[-1]
# Base url of data
base_url = 'http://apollo.haystack.mit.edu/mahali-data/'
# Download files to disk
if not self.generate_links:
data_location = DataFetcher.getDataLocation('mahali_rinex')
if data_location == None:
data_location = os.path.join(os.path.expanduser('~'), '.skdaccess','mahali_rinex')
os.makedirs(data_location, exist_ok=True)
# Get currently downloaded files
file_list = glob(os.path.join(data_location,'*.*n',)) + glob(os.path.join(data_location,'*.*o',))
file_list = set(file.split(os.sep)[-1] for file in file_list)
# Select files that are wanted but not yet downloaded
missing_files = data_set_filenames.difference(file_list)
missing_files = list(missing_files)
missing_files.sort()
file_location_list = [getFileLocation(filename) for filename in missing_files]
if len(file_location_list) > 0:
print('Downloading mahali data')
sys.stdout.flush()
for url_path, filename in tqdm(zip(file_location_list, missing_files), total=len(missing_files)):
with open(os.path.join(data_location, filename), 'wb') as data_file:
shutil.copyfileobj(urlopen(base_url+ url_path), data_file)
# return the appropriate list of files to load
obs_file_list = [os.path.join(data_location, file) for file in data_list_obs]
nav_file_list = [os.path.join(data_location, file) for file in data_list_nav]
# Not downloading data, just generating links to where data is located
else:
obs_file_list = [base_url + getFileLocation(location) for location in data_list_obs]
nav_file_list = [base_url + getFileLocation(location) for location in data_list_nav]
obs_file_list.sort(key=key_func)
nav_file_list.sort(key=key_func)
return nav_file_list, obs_file_list
def output(self):
'''
Generate data wrapper for Mahali data
@return Mahali data wrapper
'''
nav_files, obs_files = self.cacheData()
def getSiteAndDate(in_filename):
date = pd.to_datetime('2015' + in_filename[-8:-5], format='%Y%j')
return in_filename[-12:-8], date
data_list = []
for nav, obs in zip(nav_files, obs_files):
site, date = getSiteAndDate(nav)
if (site,date) != getSiteAndDate(obs):
raise RuntimeError('Data mismatch')
# data_list.append([site,date,readRinexNav(nav), rinexobs(obs)])
data_list.append([site,date,nav, obs])
return DataWrapper(data_list)
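

# Illustrative usage sketch (not part of the original module). With generate_links=True
# the fetcher only builds URLs instead of downloading files; it still assumes the
# skdaccess package data (support/mahali_data_info.hdf) is installed locally.
if __name__ == '__main__':
    fetcher = DataFetcher(generate_links=True)
    wrapper = fetcher.output()
    print(wrapper)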
|
from app import app, mongo
from flask_restful import Resource, Api
from flask import jsonify, request
from bson.objectid import ObjectId
import bcrypt, datetime
from flask_jwt_extended import (
create_access_token,
create_refresh_token,
jwt_refresh_token_required,
get_jwt_identity,
get_raw_jwt,
jwt_required,
decode_token
)
api = Api(app)
class GetAllUsers(Resource):
def get(self):
users = mongo.db.users
data = []
for field in users.find():
data.append(
{
'_id': str(field['_id']),
'username': field['name'],
'email': field['email'],
'password': field['password'],
'followers': field['followers'],
'following': field['following']
}
)
return jsonify(data)
class Login(Resource):
def post(self):
users = mongo.db.users
email = request.get_json()['email']
password = request.get_json()['password']
user = users.find_one({
'email': email
})
        if user and bcrypt.checkpw(password.encode('utf8'), user['password'].encode('utf8')):
            user['_id'] = str(user['_id'])
token = create_access_token(identity=user['_id'], fresh=True, expires_delta=datetime.timedelta(days=1), )
user['token'] = token
user.pop('password', 0)
return jsonify(user)
class Token(Resource):
def post(self):
users = mongo.db.users
token = request.get_json()['token']
userToken = decode_token(token)
print(userToken['identity'])
user = users.find_one({
'_id': ObjectId(userToken['identity'])
})
print(user)
if user is None:
return jsonify({
'error': 'token invalid'
})
user['_id'] = str(user['_id'])
user.pop('password', 0)
return jsonify(user)
class AddUser(Resource):
def post(self):
users = mongo.db.users
name = request.get_json()['name']
email = request.get_json()['email']
password = request.get_json()['password']
hashPassword = bcrypt.hashpw(password.encode('utf8'), bcrypt.gensalt(10))
name_id = users.insert(
{
'name': name,
'email': email,
'password': hashPassword.decode('utf8'),
'followers': [],
'following': [],
'posts': []
}
)
new_user = users.find_one({'_id': name_id})
result = {'username': new_user['name'], 'message': 'user was created successfully'}
return jsonify({'data': result})
class UpdateUser(Resource):
def put(self, id):
users = mongo.db.users
name = request.get_json()['name']
users.find_one_and_update({'_id': ObjectId(id)}, {'$set': {'name': name}}, upsert=False)
new_user = users.find_one({'_id': ObjectId(id)})
result = {'username': new_user['name'], 'message': 'user was updated successfully'}
return jsonify({'data': result})
class DeleteUser(Resource):
def delete(self, id):
users = mongo.db.users
response = users.delete_one({'_id': ObjectId(id)})
if response.deleted_count == 1:
result = {'message': 'user deleted successfully'}
else:
result = {'message': 'failed to delete user'}
return jsonify({'data': result})
class Follow(Resource):
def put(self, id):
users = mongo.db.users
userId = request.get_json()['userId']
user = users.find_one_and_update({'_id': ObjectId(id)}, {'$addToSet': {'followers': userId}}, upsert=False)
user2 = users.find_one_and_update({'_id': ObjectId(userId)}, {'$addToSet': {'following': id}}, upsert=False)
user['_id'] = str(user['_id'])
user2['_id'] = str(user2['_id'])
return jsonify({'data': [{'follower': user2}, {'following': user}]})
class Unfollow(Resource):
def put(self, id):
users = mongo.db.users
userId = request.get_json()['userId']
        user = users.find_one_and_update({'_id': ObjectId(id)}, {'$pull': {'followers': userId}}, upsert=False)
        user2 = users.find_one_and_update({'_id': ObjectId(userId)}, {'$pull': {'following': id}}, upsert=False)
user['_id'] = str(user['_id'])
user2['_id'] = str(user2['_id'])
return jsonify({'data': [{'follower': user2}, {'following': user}]})
api.add_resource(GetAllUsers, '/users')
api.add_resource(AddUser, '/users')
api.add_resource(UpdateUser, '/users/<id>')
api.add_resource(DeleteUser, '/users/<id>')
api.add_resource(Login, '/users/login')
api.add_resource(Follow, '/users/<id>/follow')
api.add_resource(Unfollow, '/users/<id>/unfollow')
api.add_resource(Token, '/users/token') |
inputs = [1.2, 5.1, 2.1]
weights = [3.1, 2.1, 8.7]
bias = 3
output = inputs[0]*weights[0] + inputs[1]*weights[1] + inputs[2]*weights[2] + bias
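
# The same weighted sum expressed with NumPy (illustrative sketch; numpy is an extra
# dependency not used by the original lines above).
import numpy as np
output_np = float(np.dot(inputs, weights) + bias)
assert abs(output_np - output) < 1e-9  # both forms compute the identical value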
print(output) |
from pylatex import Section, Command, NoEscape
def add_competence(doc, cv_data):
with doc.create(Section("Competences")):
doc.append(Command("textbf", arguments=NoEscape("Techniques" + " ")))
techniques = ""
for technique in cv_data["technique"]:
techniques += (technique + ", ")
doc.append(NoEscape(techniques))
|
# ini file
|
from transpose_dict import TD
import json
def test_transpose_dict():
with open("tests/start.json", "r") as f:
start = json.load(f)
for i in range(3):
with open("tests/test_{i}.json".format(i=i), "r") as f:
print(TD(start, i))
assert json.load(f) == TD(start, i) |
from django.urls import path
from . import views
from .decorators import org_admin_only, unauthenticated_user
from django.contrib.auth.views import LoginView, LogoutView
from .views import (
PolicyListView,
PolicyCreateView,
PolicyDeleteView,
PolicyUpdateView,
OrganizationCreateView,
)
urlpatterns = [
path('', views.home, name='home'),
path('about', views.about, name='about'),
path('view_policies', views.view_policies, name='view_policies'),
path('violations', views.violations, name='violations'),
path('scan', views.scan, name='scan'),
path('new_policy', org_admin_only(PolicyCreateView.as_view()), name='new_policy'),
path('policies', PolicyListView.as_view(), name='policies'),
path('policies/<int:pk>/delete/', org_admin_only(PolicyDeleteView.as_view()), name='delete_policy'),
path('policies/<int:pk>/update/', org_admin_only(PolicyUpdateView.as_view()), name='update_policy'),
path('new_organization', OrganizationCreateView.as_view(), name='new_organization'),
path('login/', unauthenticated_user(LoginView.as_view()), name="login"),
path('register/', views.register, name="register"),
path('register_org/', unauthenticated_user(OrganizationCreateView.as_view()), name="register_org"),
path('logout/', LogoutView.as_view(next_page="login"), name="logout"),
path('update_profile/', views.update_profile, name="update_profile"),
path('profile/', views.profile, name="profile"),
path('change_password/', views.change_password, name="change_password"),
]
|
import sys
class Matrix:
def __init__(self, array):
self.rows = len(array)
self.cols = len(array[0])
self.array = array
def calculate_min_top_down_path(self):
return self.calculate_min_top_down_path_impl(0, 0)
def calculate_min_top_down_path_impl(self, row, col):
if row >= self.rows or row < 0:
return 0
if col >= self.cols or col < 0:
return 0
if row+1 == self.rows and col+1 == self.cols:
return self.array[row][col]
result1 = self.calculate_min_top_down_path_impl(row+1, col)
result2 = self.calculate_min_top_down_path_impl(row, col+1)
if row+1 == self.rows:
return result2 + self.array[row][col]
if col+1 == self.cols:
return result1 + self.array[row][col]
return min(result1, result2) + self.array[row][col]
def parse_matrix(n, input_stream):
matrix = []
for i in range(n):
line = input_stream.readline().split(',')
row = []
for r in line:
row.append(int(r))
matrix.append(row)
return Matrix(matrix)
test_cases = open(sys.argv[1], 'r')
while True:
line = test_cases.readline()
if len(line) == 0:
break
n = int(line)
matrix = parse_matrix(n, test_cases)
print(matrix.calculate_min_top_down_path())
test_cases.close()
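
# Illustrative alternative (not used by the script above): the plain recursion revisits
# the same cells many times, so memoizing it keeps the cost proportional to the number
# of cells. Assumes Python 3 for functools.lru_cache.
from functools import lru_cache

def min_top_down_path(array):
    rows, cols = len(array), len(array[0])

    @lru_cache(maxsize=None)
    def best(row, col):
        if row == rows - 1 and col == cols - 1:
            return array[row][col]
        candidates = []
        if row + 1 < rows:
            candidates.append(best(row + 1, col))
        if col + 1 < cols:
            candidates.append(best(row, col + 1))
        return min(candidates) + array[row][col]

    return best(0, 0)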
|
import datetime
import math
import itertools
import pytest
import calendar
from homeplotter.timeseries import TimeSeries
sample_data = {
"both-broken":[[datetime.date(2020, 10, 12), 200.0],[datetime.date(2020, 11, 24), 50.0],[datetime.date(2020, 12, 5), 200.0], [datetime.date(2020, 12, 30), 400.0], [datetime.date(2020, 12, 31), -300], [datetime.date(2021, 2, 2), 100],[datetime.date(2021,3,11),60]],
"last-broken":[[datetime.date(2020, 10, 1), 200.0],[datetime.date(2020, 11, 24), 50.0],[datetime.date(2020, 12, 5), 200.0], [datetime.date(2020, 12, 30), 400.0], [datetime.date(2020, 12, 31), -300], [datetime.date(2021, 2, 2), 100],[datetime.date(2021,3,11),60]],
"last-broken-year":[[datetime.date(2020, 10, 1), 200.0],[datetime.date(2020, 11, 24), 50.0],[datetime.date(2020, 12, 5), 200.0], [datetime.date(2020, 12, 30), 400.0], [datetime.date(2020, 12, 31), -300], [datetime.date(2021, 1, 2), 100]],
"first-broken":[[datetime.date(2020, 10, 12), 200.0],[datetime.date(2020, 11, 24), 50.0],[datetime.date(2020, 12, 5), 200.0], [datetime.date(2020, 12, 30), 400.0], [datetime.date(2020, 12, 31), -300], [datetime.date(2021, 2, 2), 100],[datetime.date(2021,3,31),60]],
"first-broken-year":[[datetime.date(2020, 12, 5), 200.0], [datetime.date(2020, 12, 30), 400.0], [datetime.date(2020, 12, 31), -300], [datetime.date(2021, 2, 2), 100],[datetime.date(2021,3,31),60]],
"both-even":[[datetime.date(2020, 10, 1), 200.0],[datetime.date(2020, 11, 24), 50.0],[datetime.date(2020, 12, 5), 200.0], [datetime.date(2020, 12, 30), 400.0], [datetime.date(2020, 12, 31), -300], [datetime.date(2021, 2, 2), 100],[datetime.date(2021,3,31),60]],
"one-month":[[datetime.date(2020, 12, 1), 200.0],[datetime.date(2020, 12, 5), 50.0],[datetime.date(2020, 12, 31), 300.0]],
}
def expected_start_date(date, padding):
year = date.year
month = date.month
if padding or date.day == 1:
return datetime.date(year,month,1)
else:
if month < 12:
return datetime.date(year,month+1,1)
else:
return datetime.date(year+1,1,1)
def expected_end_date(date, padding):
year = date.year
month = date.month
if padding or date.day==calendar.monthrange(year,month)[1]:
return datetime.date(year,month,1)
else:
if month > 1:
return datetime.date(year,month-1,1)
else:
return datetime.date(year-1,12,1)
@pytest.mark.parametrize("padding,sample_key", itertools.product([False,True],sample_data.keys()))
def test_accumulate__len(padding,sample_key):
ts = TimeSeries(sample_data[sample_key])
original_len = len(ts.data)
start_date = expected_start_date(ts.data[0][0],padding)
end_date = expected_end_date(ts.data[-1][0], padding)
expected_len = (end_date.year-start_date.year)*12+(end_date.month-start_date.month)+1
ts.accumulate(1,"Month",padding=padding)
assert(len(ts.data) == expected_len)
@pytest.mark.parametrize("padding,sample_key", itertools.product([False,True],sample_data.keys()))
def test_accumulate__start_date(padding,sample_key):
sd = sample_data[sample_key]
ts = TimeSeries(sd)
ts.accumulate(1,"Month",padding=padding)
assert(ts.get_x()[0]==expected_start_date(sd[0][0],padding))
assert(ts.get_x()[0].day==1)
@pytest.mark.parametrize("padding,sample_key", itertools.product([False,True],sample_data.keys()))
def test_accumulate__steps(padding,sample_key):
ts = TimeSeries(sample_data[sample_key])
ts.accumulate(1,"Month",padding=padding)
for i in range(1,len(ts.data)):
assert((ts.data[i][0]-ts.data[i-1][0]).days == calendar.monthrange(ts.data[i-1][0].year,ts.data[i-1][0].month)[1])
@pytest.mark.parametrize("padding,sample_key", itertools.product([False,True],sample_data.keys()))
def test_accumulate__end_date(padding,sample_key):
sd = sample_data[sample_key]
ts = TimeSeries(sd)
ts.accumulate(1,"Month",padding=padding)
assert(ts.get_x()[-1]==expected_end_date(sd[-1][0],padding))
assert(ts.get_x()[-1].day==1)
@pytest.mark.parametrize("padding,sample_key", itertools.product([False,True],sample_data.keys()))
def test_accumulate__sum(padding,sample_key):
sd = sample_data[sample_key]
ts = TimeSeries(sd)
ts.accumulate(1,"Month",padding=padding)
for i in range(len(ts.data)):
cum_sum = 0
for data in sd:
if ts.data[i][0]<=data[0]<ts.data[i][0]+datetime.timedelta(calendar.monthrange(ts.data[i][0].year,ts.data[i][0].month)[1]):
cum_sum+=data[1]
assert(ts.data[i][1]==cum_sum) |
# Copyright (C) 2020 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""Project model."""
from ggrc import db
from ggrc.access_control.roleable import Roleable
from ggrc.fulltext.mixin import Indexed
from ggrc.models import mixins
from ggrc.models.mixins import synchronizable
from ggrc.models.comment import ScopedCommentable
from ggrc.models.object_document import PublicDocumentable
from ggrc.models.object_person import Personable
from ggrc.models.relationship import Relatable
class Project(Roleable,
synchronizable.Synchronizable,
mixins.CustomAttributable,
Personable,
Relatable,
mixins.LastDeprecatedTimeboxed,
PublicDocumentable,
ScopedCommentable,
mixins.TestPlanned,
mixins.base.ContextRBAC,
mixins.ScopeObject,
mixins.Folderable,
Indexed,
db.Model):
"""Representation for Project model."""
__tablename__ = 'projects'
_aliases = {
"documents_file": None,
}
|
"""
Exceptions declaration.
"""
__all__ = [
"PyCozmoException",
"PyCozmoConnectionError",
"ConnectionTimeout",
"Timeout",
]
class PyCozmoException(Exception):
""" Base class for all PyCozmo exceptions. """
class PyCozmoConnectionError(PyCozmoException):
""" Base class for all PyCozmo connection exceptions. """
class ConnectionTimeout(PyCozmoConnectionError):
""" Connection timeout. """
class Timeout(PyCozmoException):
""" Timeout. """
|
# -*- coding: utf-8 -*-
from openprocurement.archivarius.core.utils import Root
def factory(request):
request.validated['contract_src'] = {}
root = Root(request)
if not request.matchdict or not request.matchdict.get('contract_id'):
return root
request.validated['contract_id'] = request.matchdict['contract_id']
contract = request.contract
contract.__parent__ = root
request.validated['contract'] = request.validated['db_doc'] = contract
return contract
|
from nbconvert.exporters.exporter import ResourcesDict
from nbdocs.core import read_nb
from nbdocs.process import (
CorrectMdImageLinkPreprocessor,
copy_images,
correct_markdown_image_link,
md_correct_image_link,
get_image_link_re,
md_find_image_names,
)
from nbdocs.tests.base import create_tmp_image_file
new_link_expected = ""
wrong_link = ""
external_link = ""
text = """
Its a dog image.
here -  ---
=== cat
---  ---
just line,
ext link![mkd link] (https://images/some.jpg) dsf
one more line
output link  dsf
second output ![jpg] (output2.jpg) dsf
output image, link with whitespaces ![asdf] ( output.jpg ) dsf
"""
text_with_output_image_link = "some text\n\nmore text"
def test_get_image_link_re():
"""get_image_link_re"""
re_link = get_image_link_re()
all_links = re_link.findall(text)
assert len(all_links) == 6
fn = "output.jpg"
re_link = get_image_link_re(fn)
res = re_link.finditer(text)
assert len(list(res)) == 2
res = re_link.finditer(text)
match = next(res)
assert match.group("path") == fn
def test_md_find_image_names():
"""test md_find_image_names"""
image_names = md_find_image_names(text)
assert len(image_names) == 5
def test_copy_images(tmp_path):
"""test_copy_images"""
test_names = ["t_1.png", "t_2.jpg"]
for fn in test_names:
fn = tmp_path / fn
create_tmp_image_file(fn)
assert fn.exists()
dest = tmp_path / "dest"
done, left = copy_images(set(test_names), tmp_path, dest)
assert len(done) == 2
assert len(left) == 0
for fn in test_names:
assert (dest / fn).exists()
def test_md_correct_image_link():
"""test md_correct_image_link"""
corrected_text = md_correct_image_link(
md=text_with_output_image_link, image_name="output.jpg", image_path="images"
)
assert "" in corrected_text
assert "some text" in corrected_text
assert "more text" in corrected_text
# wrong name, nothing changed
corrected_text = md_correct_image_link(
md=text_with_output_image_link, image_name="output2.jpg", image_path="images"
)
assert corrected_text == text_with_output_image_link
# def test_cell_md_correct_image_link():
# pass
def test_correct_markdown_image_link(tmp_path):
"""Correct image link"""
nb_fn = "tests/test_nbs/markdown_image.ipynb"
nb = read_nb(nb_fn)
dest_path = "test_docs"
image_path = "images"
correct_markdown_image_link(nb, nb_fn, tmp_path / dest_path, image_path)
assert (tmp_path / dest_path / image_path).exists()
assert (
tmp_path / dest_path / image_path / "markdown_image_files" / "dog.jpg"
).exists()
assert nb.cells[1].source.splitlines()[1] == new_link_expected
nb.cells[1].source = external_link
correct_markdown_image_link(nb, nb_fn, tmp_path / dest_path, image_path)
assert nb.cells[1].source == external_link
nb.cells[1].source = wrong_link
correct_markdown_image_link(nb, nb_fn, tmp_path / dest_path, image_path)
assert nb.cells[1].source == wrong_link
nb_fn = "tests/test_nbs/code_image.ipynb"
nb = read_nb(nb_fn)
nb_copy = nb.copy()
correct_markdown_image_link(nb, nb_fn, tmp_path / dest_path, image_path)
assert nb == nb_copy
def test_CorrectMdImageLinkPreprocessor(tmp_path):
"""test CorrectMdImageLinkPreprocessor"""
nb_fn = "tests/test_nbs/markdown_image.ipynb"
nb = read_nb(nb_fn)
dest_path = "test_docs"
image_path = "images"
resources = ResourcesDict()
processor = CorrectMdImageLinkPreprocessor(tmp_path / dest_path, image_path)
processor.enabled = True
nb, _ = processor(nb, resources)
assert (tmp_path / dest_path / image_path).exists()
assert (
tmp_path / dest_path / image_path / "markdown_image_files" / "dog.jpg"
).exists()
assert nb.cells[1].source.splitlines()[1] == new_link_expected
nb.cells[1].source = external_link
nb, _ = processor(nb, resources)
assert nb.cells[1].source == external_link
nb.cells[1].source = wrong_link
nb, _ = processor(nb, resources)
assert nb.cells[1].source == wrong_link
nb_fn = "tests/test_nbs/code_image.ipynb"
nb = read_nb(nb_fn)
nb_copy = nb.copy()
nb, _ = processor(nb, resources)
assert nb == nb_copy
|
""" dblist - Detect accessible databases and display properties
(architecture, platform and version),
also return the Field Definition Table if a file number is specified
Usage:
python dblist.py --dbids <dbid> [--fnr <fnr> --xopt <LF-option> <other> ]
Options:
-a, --auth dbid,userid,password;... authentication for user for database
(open systems only)
-d, --dbids <dbid> is a valid dbid or a list of dbids (i,j,...)
may have to be quoted "(i,j..)"
or a range of dbids i-j
-e --env set adalnk parameter
-f --fnr display FDT of Adabas file number <fnr>
-n, --noclose leave session open (use for testing only)
-r, --replytimeout <sec> Adalink max. wait time on reply
-s, --silent don't print rsp-148 (use for large ranges)
-x, --xopt <n> =1 use LF/X, =2 use LF/F, =3 use LF/I
=0 use LF/S (default)
(MF: from V8.2, OS: from V6.2)
use acbx for 1 and 2
-p --password <pwd> Adabas security password
-u, --usr <userid> userid for ADASAF database
-w, --pwd <pwd> password for ADASAF database
-y, --newpass <npw> new password for ADASAF database
-v --verbose <level> dump adabas buffers
1 = after call 2 = before and after
-h, --help display this help
Examples:
python dblist.py -d 241
python dblist.py -s -d 1-10000
python dblist.py --dbids (241,10007,65535)
python dblist.py -d 241 -f 10 display FDT of db 241 file 10
$Date: 2019-11-08 15:01:35 +0100 (Fri, 08 Nov 2019) $
$Rev: 947 $
"""
from __future__ import print_function # PY3
from adapya.adabas.api import Adabas, Adabasx, archit2str, adaSetParameter
from adapya.adabas.api import DatabaseError, InterfaceError, adaSetTimeout
from adapya.adabas.api import setsaf, setuidpw
from adapya.adabas.fields import readfdt
from adapya.base.defs import log,LOGBEFORE,LOGCMD,LOGCB,LOGRB,LOGRSP,LOGFB
from adapya.base.conv import str2ebc
# log(LOGCMD+LOGCB+LOGRB+LOGRSP)
import getopt
import sys
def usage():
print(__doc__)
dbids=[]
fnr=0
dbidstr='(8,12,49,240,241,10006,10007,65534)' # check dbids
newpass=''
noclose=0
pwd=''
ph=0
replytimeout=0
silent=0
safid=''
safpw=''
verbose=0
xopt=0
try:
opts, args = getopt.getopt(sys.argv[1:],
'a:hd:e:f:nPp:r:su:v:w:x:y:',
        ['auth=','help','dbids=','env=','fnr=','newpass=','noclose','password=','ph',
'replytimeout=','silent','usr=','verbose=','pwd=','xopt='])
except getopt.GetoptError:
usage()
sys.exit(2)
for opt, arg in opts:
if opt in ('-h', '--help'):
usage()
sys.exit()
elif opt in ('-a', '--auth'):
ss = arg.split(';')
for s in ss:
adbid,auser,apwd = s.split(',')
adbid=int(adbid)
i=setuidpw(adbid,auser,apwd)
print('setuidpw(%d,%s,%s) returned %d' % (adbid,auser,apwd,i))
elif opt in ('-d', '--dbids'):
dbidstr=arg
elif opt in ('-e', '--env'):
i=adaSetParameter(arg)
if i:
if i == -3:
t='Invalid parameter'
elif i == -4:
t='Parameter already set'
else: t = '%d' % i
print('adaSetParameter(%s) returned "%s"' % (arg,t))
sys.exit(-1)
elif opt in ('-f', '--fnr'):
fnr=int(arg)
elif opt in ('-n', '--noclose'):
noclose=1
elif opt in ('-p', '--password'):
pwd=arg
elif opt in ('-P', '--ph'):
ph=int(arg)
elif opt in ('-r', '--replytimeout'):
replytimeout=int(arg)
elif opt in ('-s', '--silent'):
silent=1
elif opt in ('-u', '--usr'):
safid=arg
elif opt in ('-v', '--verbose'):
verbose=int(arg)
elif opt in ('-w', '--pwd'):
safpw=arg
elif opt in ('-x', '--xopt'):
xopt=int(arg)
if not( 0<=xopt<=3):
print('invalid xopt parameter', xopt)
usage()
sys.exit(2)
elif opt in ('-y', '--newpass'):
newpass=arg
print('\nCheck if the following databases are active:', dbidstr, '\n')
if safid and safpw:
i = setsaf(safid, safpw, newpass)
if i:
print('Setting adasaf parameter returned %d' % i)
if dbidstr[0] in '([':
dbids+=eval(dbidstr)
else:
fromto=dbidstr.split('-')
if len(fromto) == 2: # range given?
for i in range(int(fromto[0]),int(fromto[1])+1):
dbids.append(i)
else:
dbids.append(int(dbidstr))
opsysDict={0: 'Mainframe (IBM/Siemens/Fujitsu)', 1: 'VMS', 2:
"Unix, Windows", 4: 'Entire System Server'}
if 0 < xopt < 3:
c1=Adabasx(rbl=80,fbl=10) # acbx needs fb/rb pair: fbl=0 gives error
else:
c1=Adabas(rbl=80)
c1.cb.cid='list'
if ph:
c1.cb.typ=0x04
if replytimeout:
rsp=adaSetTimeout(replytimeout)
if verbose > 1:
log(LOGCMD|LOGCB|LOGFB|LOGRB|LOGBEFORE)
elif verbose == 1:
log(LOGCMD|LOGCB|LOGRB)
else:
log(0) # we handle response codes here
for i in dbids: # loop through list of databases
if i < 1 or i > 65535: # halt on invalid dbid
print("Invalid DBID %d" % i)
break
try:
c1.dbid=i
c1.cb.dbid=i
c1.nucid=ph
if 1<=xopt<=2:
c1.cb.nid=ph
#c1.open(wcharset='UTF-8',acode=819,tz='Europe/Berlin',arc=9)
c1.open()
# Evaluate architecture and version information given back
# from the open call
if c1.opsys in opsysDict:
s = opsysDict[c1.opsys]
else:
s = '%d' % c1.opsys
if c1.opsys != 4:
print(('Database %5d is active, V%d.%d.%d.%d, arc=%d,'\
' opsys=%s,\n'+26*' '+'cluster nucid %d, %s') %\
(c1.cb.dbid or c1.dbid,c1.version, c1.release, c1.smlevel, c1.ptlevel,\
c1.dbarchit, s, c1.nucid, archit2str(c1.dbarchit))
)
if c1.cb.typ==0x04 and ph: # physical call with nucid
assert c1.nucid == ph, \
'Error: Response from wrong NUCID %d, expected %d'%(
c1.nucid, ph) # nucid is refreshed from isl+2(2) in open() call
else:
print( 'Entire System %d is active, V%d.%d.%d.%d, arc=%d' %\
(c1.cb.dbid or c1.dbid,c1.version, c1.release, c1.smlevel, c1.ptlevel,\
c1.dbarchit) )
if fnr:
print( '\nField Definition Table for file %d' % fnr)
readfdt(i, fnr,printfdt=True,xopt=xopt,pwd=pwd)
if not noclose:
c1.close()
except DatabaseError as e:
ax=e.apa
if not silent: # or or ax.sub1 or ax.sub2:
print( 'Database %5d --'%i, e.value)
if not ax.cb.rsp==148:
sys.exit(8)
except InterfaceError as e:
print( 'Database %5d -- %s' % (e.apa.dbid,e.value))
sys.exit(16)
except Exception as e:
print(e)
raise
sys.exit(12)
# Copyright 2004-ThisYear Software AG
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
|
"""
A simple script listing serial ports on the computer.
This requires to install the adafruit-board-toolkit module.
pip3 install adafruit-board-toolkit
"""
import adafruit_board_toolkit.circuitpython_serial
ports = (
adafruit_board_toolkit.circuitpython_serial.repl_comports()
+adafruit_board_toolkit.circuitpython_serial.data_comports()
)
if ports:
print(f"{len(ports)} serial Circuitpython ports found connected to the computer.")
col1 = max([len(port.device) for port in ports]) + 1
col1 = max(13, col1)
col3 = max([len(port.product) for port in ports]) + 1
col3 = max(7, col3)
col4 = max([len(port.product+port.manufacturer) for port in ports]) + 3
col4 = max(10, col4)
print("")
print(" Port Location".ljust(col1), "Type ", " Device")
print("-" * col1, "-" * 5, "-" * col4)
for port in adafruit_board_toolkit.circuitpython_serial.repl_comports():
print(f"{port.device:{col1}s} REPL {port.product} ({port.manufacturer})")
print("")
for port in adafruit_board_toolkit.circuitpython_serial.data_comports():
print(f"{port.device:{col1}s} DATA {port.product} ({port.manufacturer})")
else:
print("No serial port matching a Circuitpython board was found.")
|
from django.shortcuts import render, redirect, render_to_response, get_object_or_404
from django.http import HttpResponse, Http404, HttpResponseRedirect
from .models import *
from django.contrib.auth.models import User
from django.contrib.auth.decorators import login_required
from django.core.urlresolvers import reverse
from django.views.generic import RedirectView
from .forms import *
from django.http import JsonResponse
from rest_framework.response import Response
from rest_framework.views import APIView
from .serializers import *
from rest_framework import status
from .permissions import IsAdminOrReadOnly
# Create your views here.
def index(request):
title = 'AudRate'
projects = Project.objects.all()
return render(request, 'index.html', {"projects": projects, "title": title})
def search_results(request):
if 'project' in request.GET and request.GET["project"]:
search_term = request.GET.get("project")
searched_projects = Project.search_by_title(search_term)
message = f"{search_term}"
return render(request, 'search.html', {"message": message, "projects": searched_projects})
else:
message = "You haven't searched for any project"
return render(request, 'search.html', {"message": message})
@login_required(login_url = '/accounts/login/')
def myprofile(request):
user = request.user
# author = user.profile
projects = Project.objects.filter(author = user.profile)
title = f'{user.first_name} {user.last_name}'
return render(request, 'myprofile.html', {'projects': projects, 'title': title})
@login_required(login_url = '/accounts/login/')
def update_profile(request):
user = request.user
title = f'Edit {user.first_name} {user.last_name}\'s Profile'
if request.method == 'POST':
profile_form = ProfileUpdateForm(request.POST, request.FILES,instance = user.profile)
contact_form = ContactUpdateForm(request.POST)
if profile_form.is_valid() and contact_form.is_valid():
profile_form.save()
contact = contact_form.save(commit = False)
contact.profile = user.profile
contact.save()
return redirect('myprofile')
else:
profile_form = ProfileUpdateForm(instance = user.profile)
contact_form = ContactUpdateForm()
return render(request, 'update_profile.html', {'title': title, 'profile_form': profile_form, 'contact_form': contact_form})
@login_required(login_url = '/accounts/login/')
def new_project(request):
user = request.user
title = 'New Project'
if request.method == 'POST':
project_form = ProjectForm(request.POST, request.FILES)
if project_form.is_valid():
project = project_form.save(commit = False)
project.author = user.profile
project.save()
return redirect('index')
else:
project_form = ProjectForm()
return render(request, 'new_project.html', {'title': title, 'project_form': project_form})
def project_view(request,project_id):
project = Project.objects.filter(pk = project_id)
try:
project = Project.objects.get(pk = project_id)
except Project.DoesNotExist:
raise Http404("Sorry. The project does not exist.")
return render(request, 'project_view.html', { 'project': project})
class ProfileList(APIView):
permission_classes = (IsAdminOrReadOnly,)
def get(self, request, format = None):
all_profiles = Profile.objects.all()
serializers = ProfileSerializer(all_profiles, many = True)
return Response(serializers.data)
def post(self, request, format = None):
serializers = ProfileSerializer(data = request.data)
if serializers.is_valid():
serializers.save()
return Response(serializers.data, status = status.HTTP_201_CREATED)
return Response(serializers.errors, status = status.HTTP_400_BAD_REQUEST)
class ProjectList(APIView):
permission_classes = (IsAdminOrReadOnly,)
def get(self, request, format = None):
all_projects = Project.objects.all()
serializers = ProjectSerializer(all_projects, many = True)
return Response(serializers.data)
def post(self, request, format = None):
serializers = ProjectSerializer(data = request.data)
if serializers.is_valid():
serializers.save()
return Response(serializers.data, status = status.HTTP_201_CREATED)
return Response(serializers.errors, status = status.HTTP_400_BAD_REQUEST) |
import numpy as np
from controller import Dumbbell
from kinematics import attitude
angle = 2 * np.pi * np.random.rand(1)  # random angle in [0, 2*pi)
# generate a random initial state that is outside of the asteroid
pos = np.random.rand(3)+np.array([2,2,2])
R = attitude.rot1(angle).reshape(9)
vel = np.random.rand(3)
ang_vel = np.random.rand(3)
t = np.random.rand()*100
state = np.hstack((pos,vel, R, ang_vel))
class TestDumbbellInertialDesiredAttitude():
dum = Dumbbell()
alpha = np.random.rand()
axis = np.array([1, 0, 0])
Rd, Rd_dot, ang_vel_d, ang_vel_d_dot = dum.desired_attitude(1, alpha, axis)
def test_desired_rotation_matrix_determinant(self):
np.testing.assert_almost_equal(np.linalg.det(self.Rd), 1)
def test_desired_rotation_matrix_orthogonal(self):
np.testing.assert_array_almost_equal(self.Rd.T.dot(self.Rd),
np.eye(3,3))
def test_desired_attitude_satifies_kinematics(self):
np.testing.assert_array_almost_equal(self.Rd_dot,
self.Rd.dot(attitude.hat_map(self.ang_vel_d)))
def test_moment_of_inertia(self):
np.testing.assert_allclose(self.dum.J, np.trace(self.dum.Jd)*np.eye(3,3) - self.dum.Jd)
class TestDumbbellInertialAttitudeController():
"""Test the attitude controller for the inertial eoms
"""
dum = Dumbbell()
u_m = dum.attitude_controller(t, state, np.zeros(3))
def test_control_moment_size(self):
np.testing.assert_equal(self.u_m.shape, (3,))
class TestDumbbellInertialTranslationalController():
dum = Dumbbell()
u_f = dum.translation_controller(t, state, np.zeros(3))
def test_control_force_size(self):
np.testing.assert_equal(self.u_f.shape, (3,))
|
"""Functions related to the main event handling."""
import traceback
from extutils.emailutils import MailSender
from msghandle import handle_message_main
from msghandle.models import (
Event, TextMessageEventObject, ImageMessageEventObject,
HandledMessageEventsHolder
)
from .logger import DISCORD
__all__ = ("handle_discord_main", "handle_error",)
def handle_discord_main(event: Event) -> HandledMessageEventsHolder:
"""Main function to handle the message received from the discord bot."""
try:
# Early return on not-handled object
if not isinstance(event, (TextMessageEventObject, ImageMessageEventObject)):
            DISCORD.logger.warning("Discord event object not handled. Raw: %s"
                                   % (event.raw if hasattr(event, "raw") else event))
return HandledMessageEventsHolder(event.channel_model)
return handle_message_main(event)
except Exception as ex: # pylint: disable=broad-except
handle_error(ex)
return HandledMessageEventsHolder(event.channel_model)
def handle_error(ex: Exception):
"""Function to be called when any error occurred during the discord message handling."""
subject = "Error on Discord Message Processing"
html = f"<h4>{ex}</h4>\n" \
f"<hr>\n" \
f"<pre>Traceback:\n" \
f"{traceback.format_exc()}</pre>"
MailSender.send_email_async(html, subject=subject)
DISCORD.logger.error(subject, exc_info=True)
|
from django.shortcuts import render
from django.http import HttpResponse
from basic_app.models import AccessRecord, Topic, Webpage
# Create your views here.
def index(request):
webpages_list = AccessRecord.objects.order_by("date")
date_dict = {"access_records": webpages_list}
return render(request, "basic_app/index.html", context=date_dict)
|
from __future__ import division, print_function
import copy
import fnmatch
import os
import subprocess
import wave
import struct
import hashlib
import h5py
from copy import deepcopy
from math import ceil
from numpy.fft import fftshift, ifftshift
import numpy as np
from scipy.io.wavfile import read as read_wavfile
from scipy.fftpack import fft, ifft, fftfreq, fft2, ifft2, dct
from scipy.signal import resample, firwin, filtfilt
from scipy.linalg import inv, toeplitz
from scipy.optimize import leastsq
import matplotlib.pyplot as plt
import matplotlib.cm as cmap
import matplotlib.colors as pltcolors
import matplotlib.mlab as mlab
import colorsys
from soundsig.signal import lowpass_filter, gaussian_window, correlation_function
from soundsig.timefreq import gaussian_stft
from soundsig.detect_peaks import detect_peaks
class WavFile():
""" Class for representing a sound and writing it to a .wav file """
def __init__(self, file_name=None, log_spectrogram=True):
self.log_spectrogram = log_spectrogram
if file_name is None:
self.sample_depth = 2 # in bytes
self.sample_rate = 44100.0 # in Hz
self.data = None
self.num_channels = 1
else:
wr = wave.open(file_name, 'r')
self.num_channels = wr.getnchannels()
self.sample_depth = wr.getsampwidth()
wr.close()
self.sample_rate, data = read_wavfile(file_name)
# If stereo make mono
if self.num_channels == 1:
self.data = data
else:
self.data = data.mean(axis=1)
self.analyzed = False
def to_wav(self, output_file, normalize=False, max_amplitude=32767.0):
wf = wave.open(output_file, 'w')
wf.setparams( (self.num_channels, self.sample_depth, self.sample_rate, len(self.data), 'NONE', 'not compressed') )
# normalize the sample
if normalize:
nsound = ((self.data / np.abs(self.data).max())*max_amplitude).astype('int')
else:
nsound = self.data
#print 'nsound.min=%d, max=%d' % (nsound.min(), nsound.max())
hex_sound = [struct.pack('h', x) for x in nsound]
wf.writeframes(b''.join(hex_sound))
wf.close()
def analyze(self, min_freq=0, max_freq=None, spec_sample_rate=1000.0, freq_spacing=125.0, envelope_cutoff_freq=200.0, noise_level_db=80, rectify=True, cmplx=False):
if self.analyzed:
return
self.data_t = np.arange(0.0, len(self.data), 1.0) / self.sample_rate
#compute the temporal envelope
self.envelope = temporal_envelope(self.data, self.sample_rate, envelope_cutoff_freq)
#compute log power spectrum
fftx = fft(self.data)
ps_f = fftfreq(len(self.data), d=(1.0 / self.sample_rate))
if max_freq == None:
findx = (ps_f > min_freq) & (ps_f < np.inf)
else:
findx = (ps_f > min_freq) & (ps_f < max_freq)
self.power_spectrum = np.log10(np.abs(fftx[findx]))
self.power_spectrum_f = ps_f[findx]
#estimate fundamental frequency from log power spectrum in the simplest way possible
ps = np.abs(fftx[findx])
peak_index = ps.argmax()
try:
self.fundamental_freq = self.power_spectrum_f[peak_index]
except IndexError:
print('Could not identify fundamental frequency!')
self.fundamental_freq = 0.0
#compute log spectrogram
t,f,spec,spec_rms = spectrogram(self.data, self.sample_rate, spec_sample_rate=spec_sample_rate,
freq_spacing=freq_spacing, min_freq=min_freq, max_freq=max_freq)
self.spectrogram_t = t
self.spectrogram_f = f
self.spectrogram = spec
self.spectrogram_rms = spec_rms
self.analyzed = True
def reanalyze(self, min_freq=0, max_freq=None, spec_sample_rate=1000.0, freq_spacing=25.0, envelope_cutoff_freq=200.0, noise_level_db=80, rectify=True, cmplx=False):
self.analyzed = False
return self.analyze(min_freq, max_freq, spec_sample_rate, freq_spacing, envelope_cutoff_freq, noise_level_db, rectify, cmplx)
def plot(self, fig=None, show_envelope=True, min_freq=0.0, max_freq=10000.0, colormap=cmap.gist_yarg, noise_level_db=80,
start_time=0, end_time=np.inf):
self.analyze(min_freq=min_freq, max_freq=max_freq, noise_level_db=noise_level_db)
if show_envelope:
spw_size = 15
spec_size = 35
else:
spw_size = 25
spec_size = 75
raw_ti = (self.data_t > start_time) & (self.data_t < end_time)
if fig is None:
fig = plt.figure()
gs = plt.GridSpec(100, 1)
ax = fig.add_subplot(gs[:spw_size])
plt.plot(self.data_t[raw_ti], self.data[raw_ti], 'k-')
plt.axis('tight')
plt.ylabel('Sound Pressure')
s = (spw_size+5)
e = s + spec_size
ax = fig.add_subplot(gs[s:e])
spec_ti = (self.spectrogram_t > start_time) & (self.spectrogram_t < end_time)
plot_spectrogram(self.spectrogram_t[spec_ti], self.spectrogram_f, self.spectrogram[:, spec_ti], ax=ax, ticks=True, colormap=colormap, colorbar=False)
if show_envelope:
ax = fig.add_subplot(gs[(e+5):95])
plt.plot(self.spectrogram_t, self.spectrogram_rms, 'g-')
plt.xlabel('Time (s)')
plt.ylabel('Envelope')
plt.axis('tight')
class BioSound(object):
""" Class for representing a communication sound using multiple feature spaces"""
def __init__(self, soundWave=np.array(0.0), fs=np.array(0.0), emitter='Unknown', calltype = 'U' ):
# Note that all the fields are numpy arrays for saving to h5 files.
self.sound = soundWave # sound pressure waveform
self.hashid = np.string_(hashlib.md5(np.array_str(soundWave).encode('utf-8')).hexdigest())
self.samprate = float(fs) if isinstance(fs,int) else fs # sampling rate
self.emitter = np.string_(emitter) # string for id of emitter
self.type = np.string_(calltype) # string for call type
self.spectro = np.asarray([]) # Log spectrogram
self.to = np.asarray([]) # Time scale for spectrogram
self.fo = np.asarray([]) # Frequency scale for spectrogram
self.mps = np.asarray([]) # Modulation Power Spectrum
self.wf = np.asarray([]) # Spectral modulations
self.wt = np.asarray([]) # Temporal modulations
self.f0 = np.asarray([]) # time varying fundamental
self.f0_2 = np.asarray([]) # time varying fundamental of second voice
self.F1 = np.asarray([]) # time varying formant 1
self.F2 = np.asarray([]) # time varying formant 2
self.F3 = np.asarray([]) # time varying formant 3
self.fund = np.asarray([]) # Average fundamental
self.sal = np.asarray([]) # time varying saliency
self.meansal = np.asarray([]) # mean saliency
self.fund2 = np.asarray([]) # Average fundamental of 2nd peak
self.voice2percent = np.asarray([]) # Average percent of presence of second peak
self.maxfund = np.asarray([])
self.minfund = np.asarray([])
self.cvfund = np.asarray([])
self.meanspect = np.asarray([])
self.stdspect = np.asarray([])
self.skewspect = np.asarray([])
self.kurtosisspect = np.asarray([])
self.entropyspect = np.asarray([])
self.q1 = np.asarray([])
self.q2 = np.asarray([])
self.q3 = np.asarray([])
self.meantime = np.asarray([])
self.stdtime = np.asarray([])
self.skewtime = np.asarray([])
self.kurtosistime = np.asarray([])
self.entropytime = np.asarray([])
self.fpsd = np.asarray([])
self.psd = np.asarray([])
self.tAmp = np.asarray([])
self.amp = np.asarray([])
self.rms = np.asarray([])
self.maxAmp = np.asarray([])
def saveh5(self, fileName=None):
# Save as an h5 file. Uses the hashid if fileName is not given
# Not using attributes
if fileName is None:
fileName = '%s.h5' % self.hashid
fid = h5py.File(fileName,'w')
selfDict = vars(self)
for varnames in selfDict:
fid.create_dataset(varnames, data=selfDict[varnames])
fid.close()
def readh5(self, fileName):
fid = h5py.File(fileName, 'r')
for varnames in fid.keys():
setattr(self, varnames, np.array(fid[varnames]).squeeze())
fid.close()
def spectrum(self, f_high=10000):
# Calculates power spectrum and features from power spectrum
# Need to add argument for window size
# f_high is the upper bound of the frequency for saving power spectrum
# nwindow = (1000.0*np.size(soundIn)/samprate)/window_len
#
Pxx, Freqs = mlab.psd(self.sound, Fs=self.samprate, NFFT=1024, noverlap=512)
# Find quartile power
cum_power = np.cumsum(Pxx)
tot_power = np.sum(Pxx)
quartile_freq = np.zeros(3, dtype = 'int')
quartile_values = [0.25, 0.5, 0.75]
nfreqs = np.size(cum_power)
iq = 0
for ifreq in range(nfreqs):
if (cum_power[ifreq] > quartile_values[iq]*tot_power):
quartile_freq[iq] = ifreq
iq = iq+1
if (iq > 2):
break
# Find skewness, kurtosis and entropy for power spectrum below f_high
ind_fmax = np.where(Freqs > f_high)[0][0]
# Description of spectral shape
spectdata = Pxx[0:ind_fmax]
freqdata = Freqs[0:ind_fmax]
spectdata = spectdata/np.sum(spectdata)
meanspect = np.sum(freqdata*spectdata)
stdspect = np.sqrt(np.sum(spectdata*((freqdata-meanspect)**2)))
skewspect = np.sum(spectdata*(freqdata-meanspect)**3)
skewspect = skewspect/(stdspect**3)
kurtosisspect = np.sum(spectdata*(freqdata-meanspect)**4)
kurtosisspect = kurtosisspect/(stdspect**4)
entropyspect = -np.sum(spectdata*np.log2(spectdata))/np.log2(ind_fmax)
# Storing the values
self.meanspect = meanspect
self.stdspect = stdspect
self.skewspect = skewspect
self.kurtosisspect = kurtosisspect
self.entropyspect = entropyspect
self.q1 = Freqs[quartile_freq[0]]
self.q2 = Freqs[quartile_freq[1]]
self.q3 = Freqs[quartile_freq[2]]
self.fpsd = freqdata
self.psd = spectdata
def spectroCalc(self, spec_sample_rate=1000, freq_spacing = 50, min_freq=0, max_freq=10000):
# Calculates the spectrogram in dB
t,f,spec,spec_rms = spectrogram(self.sound, self.samprate, spec_sample_rate=spec_sample_rate,
freq_spacing=freq_spacing, min_freq=min_freq, max_freq=max_freq,
cmplx=True)
self.to = t
self.fo = f
self.spectro = 20*np.log10(np.abs(spec))
def mpsCalc(self, window=None, Norm=True):
if self.spectro.size == 0:
self.spectroCalc()
wf, wt, mps_powAvg = mps(self.spectro, self.fo, self.to, window=window, Norm=Norm)
self.mps = mps_powAvg # Modulation Power Spectrum
self.wf = wf # Spectral modulations
self.wt = wt
def ampenv(self, cutoff_freq = 20, amp_sample_rate = 1000):
        # Calculates the amplitude envelope and related parameters
(amp, tdata) = temporal_envelope(self.sound, self.samprate, cutoff_freq=cutoff_freq, resample_rate=amp_sample_rate)
# Here are the parameters
ampdata = amp/np.sum(amp)
meantime = np.sum(tdata*ampdata)
stdtime = np.sqrt(np.sum(ampdata*((tdata-meantime)**2)))
skewtime = np.sum(ampdata*(tdata-meantime)**3)
skewtime = skewtime/(stdtime**3)
kurtosistime = np.sum(ampdata*(tdata-meantime)**4)
kurtosistime = kurtosistime/(stdtime**4)
indpos = np.where(ampdata>0)[0]
entropytime = -np.sum(ampdata[indpos]*np.log2(ampdata[indpos]))/np.log2(np.size(indpos))
self.meantime = meantime
self.stdtime = stdtime
self.skewtime = skewtime
self.kurtosistime = kurtosistime
self.entropytime = entropytime
self.tAmp = tdata
self.amp = amp
self.maxAmp = max(amp)
def fundest(self, maxFund = 1500, minFund = 300, lowFc = 200, highFc = 6000, minSaliency = 0.5, debugFig = 0, minFormantFreq = 500, maxFormantBW = 500, windowFormant = 0.1, method='Stack'):
# Calculate the fundamental, the formants and parameters related to these
sal, fund, fund2, form1, form2, form3, lenfund = fundEstimator(self.sound, self.samprate, self.to, debugFig = debugFig, maxFund = maxFund, minFund = minFund, lowFc = lowFc, highFc = highFc, minSaliency = minSaliency, minFormantFreq = minFormantFreq, maxFormantBW = maxFormantBW, windowFormant = windowFormant, method = method)
goodFund = fund[~np.isnan(fund)]
goodSal = sal[~np.isnan(sal)]
goodFund2 = fund2[~np.isnan(fund2)]
if np.size(goodFund) > 0 :
meanfund = np.mean(goodFund)
else:
meanfund = np.asarray([])
meansal = np.mean(goodSal)
if np.size(goodFund2)> 0:
meanfund2 = np.mean(goodFund2)
else:
meanfund2 = np.asarray([])
if np.size(goodFund) == 0 or np.size(goodFund2) == 0:
fund2prop = 0.0
else:
fund2prop = np.float(np.size(goodFund2))/np.float(np.size(goodFund))
self.f0 = fund # time varying fundamental
self.f0_2 = fund2 # time varying fundamental of second voice
self.F1 = form1 # time varying formant 1
self.F2 = form2 # time varying formant 2
self.F3 = form3 # time varying formant 3
self.fund = meanfund # Average fundamental
self.sal = sal # Time varying saliency
self.meansal = meansal # Average saliency
self.fund2 = meanfund2 # Average fundamental of 2nd peak
self.voice2percent = fund2prop*100 # Average percent of presence of second peak
if np.size(goodFund) > 0 :
self.maxfund = np.max(goodFund)
self.minfund = np.min(goodFund)
self.cvfund = np.std(goodFund)/meanfund
def play(self):
# Plays the sound
play_sound_array(self.sound*(2**15), self.samprate)
def plot(self, DBNOISE=50, f_low=250, f_high=10000):
# Plots a biosound in figures 1, 2, 3
# Ploting Variables
soundlen = np.size(self.sound)
t = np.array(range(soundlen))
t = t*(1000.0/self.samprate)
# Plot the oscillogram + spectrogram
plt.figure(1)
plt.clf()
# mngr = plt.get_current_fig_manager()
# mngr.window.setGeometry(0, 260, 640, 545)
# The oscillogram
plt.axes([0.1, 0.75, 0.85, 0.20])
plt.plot(t,self.sound, 'k')
# plt.xlabel('Time (ms)')
plt.xlim(0, t[-1])
        # Plot the amplitude envelope
if self.tAmp.size != 0 :
# rescale amp envelope to max for better display
plt.plot(self.tAmp*1000.0, self.amp*np.max(self.sound)/np.max(self.amp), 'r', linewidth=2)
# Plot the spectrogram
plt.axes([0.1, 0.1, 0.85, 0.6])
spec_colormap() # defined in sound.py
cmap = plt.get_cmap('SpectroColorMap')
if self.spectro.size != 0 :
soundSpect = self.spectro
if soundSpect.shape[0] == self.to.size:
soundSpect = np.transpose(soundSpect)
maxB = soundSpect.max()
minB = maxB-DBNOISE
soundSpect[soundSpect < minB] = minB
minSpect = soundSpect.min()
plt.imshow(soundSpect, extent = (self.to[0]*1000, self.to[-1]*1000, self.fo[0], self.fo[-1]), aspect='auto', interpolation='nearest', origin='lower', cmap=cmap, vmin=minSpect, vmax=maxB)
plt.ylim(f_low, f_high)
plt.xlim(0, t[-1])
plt.ylabel('Frequency (Hz)')
plt.xlabel('Time (ms)')
# Plot the fundamental on the same figure
if self.f0.size != 0 :
fundplot = self.f0
diffFund = np.diff(fundplot)
diffFundInd = np.concatenate(([False], abs(diffFund)>1000))
fundplot[diffFundInd] = float('nan')
plt.plot(self.to*1000.0, self.f0, 'k', linewidth=3)
plt.plot(self.to*1000.0, self.f0_2, 'm', linewidth=3)
plt.plot(self.to*1000.0, self.F1, 'r--', linewidth=3)
plt.plot(self.to*1000.0, self.F2, 'w--', linewidth=3)
plt.plot(self.to*1000.0, self.F3, 'b--', linewidth=3)
plt.show()
# Plot Power Spectrum
plt.figure(2)
plt.clf()
# mngr = plt.get_current_fig_manager()
# mngr.window.setGeometry(650, 260, 640, 545)
if self.psd.size != 0 :
plt.plot(self.fpsd, self.psd, 'k-')
plt.xlabel('Frequency Hz')
plt.ylabel('Power Linear')
xl, xh, yl, yh = plt.axis()
xl = f_low
xh = f_high
plt.axis((xl, xh, yl, yh))
if self.q1.size != 0:
plt.plot([self.q1, self.q1], [yl, yh], 'k--')
plt.plot([self.q2, self.q2], [yl, yh], 'k--')
plt.plot([self.q3, self.q3], [yl, yh], 'k--')
if self.F1.size != 0:
F1Mean = self.F1[~np.isnan(self.F1)].mean()
F2Mean = self.F2[~np.isnan(self.F2)].mean()
F3Mean = self.F3[~np.isnan(self.F3)].mean()
plt.plot([F1Mean, F1Mean], [yl, yh], 'r--', linewidth=2.0)
plt.plot([F2Mean, F2Mean], [yl, yh], 'c--', linewidth=2.0)
plt.plot([F3Mean, F3Mean], [yl, yh], 'b--', linewidth=2.0)
plt.show()
# Table of results
plt.figure(3)
plt.clf()
# mngr = plt.get_current_fig_manager()
# mngr.window.setGeometry(320, 10, 640, 250)
textstr = '%s %s' % (self.emitter, self.type)
plt.text(0.4, 1.0, textstr)
if self.fund.size != 0:
if self.fund2.size != 0:
textstr = 'Mean Fund = %.2f Hz Mean Saliency = %.2f Mean Fund2 = %.2f PF2 = %.2f%%' % (self.fund, self.meansal, self.fund2, self.voice2percent)
else:
textstr = 'Mean Fund = %.2f Hz Mean Saliency = %.2f No 2nd Voice Detected' % (self.fund, self.meansal)
plt.text(-0.1, 0.8, textstr)
if self.fund.size != 0:
textstr = ' Max Fund = %.2f Hz, Min Fund = %.2f Hz, CV = %.2f' % (self.maxfund, self.minfund, self.cvfund)
plt.text(-0.1, 0.7, textstr)
textstr = 'Mean Spect = %.2f Hz, Std Spect= %.2f Hz' % (self.meanspect, self.stdspect)
plt.text(-0.1, 0.6, textstr)
textstr = ' Skew = %.2f, Kurtosis = %.2f Entropy=%.2f' % (self.skewspect, self.kurtosisspect, self.entropyspect)
plt.text(-0.1, 0.5, textstr)
textstr = ' Q1 F = %.2f Hz, Q2 F= %.2f Hz, Q3 F= %.2f Hz' % (self.q1, self.q2, self.q3 )
plt.text(-0.1, 0.4, textstr)
if self.F1.size != 0:
textstr = ' For1 = %.2f Hz, For2 = %.2f Hz, For3= %.2f Hz' % (F1Mean, F2Mean, F3Mean )
plt.text(-0.1, 0.3, textstr)
textstr = 'Mean Time = %.2f s, Std Time= %.2f s' % (self.meantime, self.stdtime)
plt.text(-0.1, 0.2, textstr)
textstr = ' Skew = %.2f, Kurtosis = %.2f Entropy=%.2f' % (self.skewtime, self.kurtosistime, self.entropytime)
plt.text(-0.1, 0.1, textstr)
if self.rms.size != 0 and self.maxAmp.size != 0 :
textstr = 'RMS = %.2f, Max Amp = %.2f' % (self.rms, self.maxAmp)
plt.text(-0.1, 0.0, textstr)
plt.axis('off')
plt.show()
# Plot Modulation Power spectrum if it exists
#ex = (spectral_freq.min(), spectral_freq.max(), temporal_freq.min(), temporal_freq.max())
if self.mps.size != 0 :
plt.figure(4)
plt.clf()
cmap = plt.get_cmap('jet')
ex = (self.wt.min(), self.wt.max(), self.wf.min()*1e3, self.wf.max()*1e3)
logMPS = 10.0*np.log10(self.mps)
maxMPS = logMPS.max()
minMPS = maxMPS-DBNOISE
logMPS[logMPS < minMPS] = minMPS
plt.imshow(logMPS, interpolation='nearest', aspect='auto', origin='lower', cmap=cmap, extent=ex)
plt.ylabel('Spectral Frequency (Cycles/KHz)')
plt.xlabel('Temporal Frequency (Hz)')
plt.colorbar()
plt.ylim((0,self.wf.max()*1e3))
plt.title('Modulation Power Spectrum')
plt.show()
plt.pause(1) # To flush the plots?
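
# Illustrative helper (a sketch, not part of the original module): one way to compute
# the standard BioSound features for a mono waveform. The parameter values and file
# name are arbitrary examples; nothing below runs at import time.
def biosound_feature_demo(sound_wave, fs, out_file='demo_biosound.h5'):
    bs = BioSound(soundWave=sound_wave, fs=fs, emitter='demo', calltype='U')
    bs.spectrum(f_high=10000)   # power-spectrum shape features (quartiles, entropy, ...)
    bs.ampenv()                 # temporal-envelope features
    bs.spectroCalc()            # dB spectrogram (fills bs.to, bs.fo, bs.spectro)
    bs.fundest()                # fundamental / formant estimates
    bs.saveh5(out_file)         # persist every field to an HDF5 file
    return bs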
def spec_colormap():
# Makes the colormap that we like for spectrograms
cmap = np.zeros((64,3))
cmap[0,2] = 1.0
for ib in range(21):
cmap[ib+1,0] = (31.0+ib*(12.0/20.0))/60.0
cmap[ib+1,1] = (ib+1.0)/21.0
cmap[ib+1,2] = 1.0
for ig in range(21):
cmap[ig+ib+1,0] = (21.0-(ig)*(12.0/20.0))/60.0
cmap[ig+ib+1,1] = 1.0
cmap[ig+ib+1,2] = 0.5+(ig)*(0.3/20.0)
for ir in range(21):
cmap[ir+ig+ib+1,0] = (8.0-(ir)*(7.0/20.0))/60.0
cmap[ir+ig+ib+1,1] = 0.5 + (ir)*(0.5/20.0)
cmap[ir+ig+ib+1,2] = 1
for ic in range(64):
(cmap[ic,0], cmap[ic,1], cmap[ic,2]) = colorsys.hsv_to_rgb(cmap[ic,0], cmap[ic,1], cmap[ic,2])
spec_cmap = pltcolors.ListedColormap(cmap, name=u'SpectroColorMap', N=64)
plt.register_cmap(cmap=spec_cmap)
def plot_spectrogram(t, freq, spec, ax=None, ticks=True, fmin=None, fmax=None, colormap=None, colorbar=True, log = True, dBNoise = 50):
if colormap == None:
spec_colormap()
colormap = plt.get_cmap('SpectroColorMap')
if ax is None:
ax = plt.gca()
if fmin is None:
fmin = freq.min()
if fmax is None:
fmax = freq.max()
ex = (t.min(), t.max(), freq.min(), freq.max())
plotSpect = np.abs(spec)
if log == True and dBNoise is not None:
plotSpect = 20*np.log10(plotSpect)
maxB = plotSpect.max()
minB = maxB-dBNoise
else:
if dBNoise is not None:
maxB = 20*np.log10(plotSpect.max())
minB = ((maxB-dBNoise)/20.0)**10
else:
maxB = plotSpect.max()
minB = plotSpect.min()
plotSpect[plotSpect < minB] = minB
iax = ax.imshow(plotSpect, aspect='auto', interpolation='nearest', origin='lower', extent=ex, cmap=colormap, vmin=minB, vmax=maxB)
ax.set_ylim(fmin, fmax)
if not ticks:
ax.set_xticks([])
ax.set_yticks([])
else:
ax.set_ylabel('Frequency (Hz)')
ax.set_xlabel('Time (s)')
if colorbar:
plt.colorbar(iax)
def play_sound(file_name):
""" Install sox to get this to work: http://sox.sourceforge.net/ """
subprocess.call(['play', file_name])
def play_wavfile(filename):
import pyaudio
chunk_size = 1024
wf = wave.open(filename, "r")
p = pyaudio.PyAudio()
stream = p.open(format=p.get_format_from_width(wf.getsampwidth()),
channels=wf.getnchannels(),
rate=wf.getframerate(),
output=True)
data = wf.readframes(chunk_size)
while len(data) > 0:
stream.write(data)
data = wf.readframes(chunk_size)
wf.close()
stream.stop_stream()
stream.close()
p.terminate()
def play_sound_array(data, sample_rate):
''' Requires pyaudio package. Can be downloaded here
http://people.csail.mit.edu/hubert/pyaudio/
'''
import pyaudio
# Only play one channel
if len(data.shape) > 1:
data = np.mean(data, axis=np.argmin(data.shape))
data = data.astype('int16')
p = pyaudio.PyAudio()
stream = p.open(format=p.get_format_from_width(2),
channels=1,
rate=int(sample_rate),
output=True)
stream.write(data.tobytes())
stream.stop_stream()
stream.close()
p.terminate()
def spectrogram(s, sample_rate, spec_sample_rate, freq_spacing, min_freq=0, max_freq=None, nstd=6, cmplx = True):
"""
Given a sound pressure waveform, s, compute the complex spectrogram.
See documentation on gaussian_stft for additional details.
Returns:
t, freq, timefreq, rms
t: array of time values to use as x axis
freq: array of frequencies to use as y axis
timefreq: the spectrogram (a time-frequency representation)
rms : the time varying average
Arguments:
REQUIRED:
s: sound pressure waveform
sample_rate: sampling rate for s in Hz
spec_sample_rate: sampling rate for the output spectrogram in Hz. This variable sets the overlap for the windows in the STFT.
freq_spacing: the time-frequency scale for the spectrogram in Hz. This variable determines the width of the gaussian window.
OPTIONAL
cmplx = False: returns the absolute value (the default, True, returns the complex spectrogram)
use min_freq and max_freq to save space
nstd = number of standard deviations of the gaussian in one window.
"""
# We need units here!!
increment = 1.0 / spec_sample_rate
window_length = nstd / (2.0*np.pi*freq_spacing)
t,freq,timefreq,rms = gaussian_stft(s, sample_rate, window_length, increment, nstd=nstd, min_freq=min_freq, max_freq=max_freq)
# rms = spec.std(axis=0, ddof=1)
if cmplx == False:
timefreq = np.abs(timefreq)
return t, freq, timefreq, rms
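# Usage sketch (illustrative values, not recommendations): compute a non-complex
# spectrogram of a synthetic tone and display it with plot_spectrogram above.
#
#   samprate = 44100
#   s = generate_sine_wave(duration=1.0, freq=1000.0, samprate=samprate)
#   t, freq, tf, rms = spectrogram(s, samprate, spec_sample_rate=1000.0,
#                                  freq_spacing=50.0, cmplx=False)
#   plot_spectrogram(t, freq, tf, dBNoise=80)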
def temporal_envelope(s, sample_rate, cutoff_freq=200.0, resample_rate=None):
"""
Get the temporal envelope from the sound pressure waveform.
s: the signal
sample_rate: the sample rate of the signal
cutoff_freq: the cutoff frequency of the low pass filter used to create the envelope
Returns the temporal envelope of the signal, with same sample rate or downsampled.
"""
#rectify a zeroed version
srect = np.abs(s - np.mean(s))
#low pass filter
if cutoff_freq is not None:
srect = lowpass_filter(srect, sample_rate, cutoff_freq, filter_order=4)
srect[srect < 0] = 0
if resample_rate is not None:
lensound = len(srect)
t=(np.array(range(lensound),dtype=float))/sample_rate
lenresampled = int(round(float(lensound)*resample_rate/sample_rate))
(srectresampled, tresampled) = resample(srect, lenresampled, t=t, axis=0, window=None)
return (srectresampled, tresampled)
else:
return srect
def recursive_ls(root_dir, file_pattern):
"""
Walks through all the files in root_dir and returns every file whose name matches
the pattern specified by file_pattern.
"""
matches = list()
for root, dirnames, filenames in os.walk(root_dir):
for filename in fnmatch.filter(filenames, file_pattern):
matches.append(os.path.join(root, filename))
return matches
def sox_convert_to_mono(file_path):
"""
Uses Sox (sox.sourceforge.net) to convert a stereo .wav file to mono.
"""
root_dir,file_name = os.path.split(file_path)
base_file_name = file_name[:-4]
output_file_path = os.path.join(root_dir, '%s_mono.wav' % base_file_name)
cmd = 'sox \"%s\" -c 1 \"%s\"' % (file_path, output_file_path)
print(cmd)
subprocess.call(cmd, shell=True)
def generate_sine_wave(duration, freq, samprate):
"""
Generate a pure tone at a given frequency and sample rate for a specified duration.
"""
t = np.arange(0.0, duration, 1.0 / samprate)
return np.sin(2*np.pi*freq*t)
def generate_simple_stack(duration, fundamental_freq, samprate, num_harmonics=10):
nsamps = int(duration*samprate)
s = np.zeros(nsamps, dtype='float')
ffreq = 0.0
for n in range(num_harmonics):
ffreq += fundamental_freq
s += generate_sine_wave(duration, ffreq, samprate)
return s
def generate_harmonic_stack(duration, fundamental_freq, samprate, num_harmonics=10, base=2):
nsamps = int(duration*samprate)
s = np.zeros(nsamps, dtype='float')
for n in range(num_harmonics):
freq = fundamental_freq * base**n
s += generate_sine_wave(duration, freq, samprate)
return s
def modulate_wave(s, samprate, freq):
t = np.arange(len(s), dtype='float') / samprate
c = np.sin(2*np.pi*t*freq)
return c*s
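# Usage sketch: build a 10-harmonic stack at a 200 Hz fundamental and apply 5 Hz
# amplitude modulation (all parameter values are illustrative assumptions).
#
#   samprate = 44100
#   stack = generate_simple_stack(duration=0.5, fundamental_freq=200.0, samprate=samprate)
#   modulated = modulate_wave(stack, samprate, freq=5.0)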
def mtfft(spectrogram, df, dt, Log=False):
"""
Compute the 2d modulation power and phase for a given time frequency slice.
return temporal_freq,spectral_freq,mps_pow,mps_phase
"""
#take the 2D FFT and center it
smps = fft2(spectrogram)
smps = fftshift(smps)
#compute the log amplitude
mps_pow = np.abs(smps)**2
if Log:
mps_pow = 10*np.log10(mps_pow)
#compute the phase
mps_phase = np.angle(smps)
#compute the axes
nf = mps_pow.shape[0]
nt = mps_pow.shape[1]
spectral_freq = fftshift(fftfreq(nf, d=df[1]-df[0]))
temporal_freq = fftshift(fftfreq(nt, d=dt[1]-dt[0]))
"""
nb = sdata.shape[1]
dwf = np.zeros(nb)
for ib in range(int(np.ceil((nb+1)/2.0))+1):
posindx = ib
negindx = nb-ib+2
print 'ib=%d, posindx=%d, negindx=%d'% (ib, posindx, negindx )
dwf[ib]= (ib-1)*(1.0/(df*nb))
if ib > 1:
dwf[negindx] =- dwf[ib]
nt = sdata.shape[0]
dwt = np.zeros(nt)
for it in range(0, int(np.ceil((nt+1)/2.0))+1):
posindx = it
negindx = nt-it+2
print 'it=%d, posindx=%d, negindx=%d' % (it, posindx, negindx)
dwt[it] = (it-1)*(1.0/(nt*dt))
if it > 1 :
dwt[negindx] = -dwt[it]
spectral_freq = dwf
temporal_freq = dwt
"""
return spectral_freq, temporal_freq, mps_pow, mps_phase
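# Usage sketch: modulation power and phase of an amplitude spectrogram. Here spec,
# spec_freq and spec_t are assumed to come from spectrogram() above; they are not
# defined in this module.
#
#   spectral_freq, temporal_freq, mps_pow, mps_phase = mtfft(np.abs(spec), spec_freq, spec_t)
#   plot_mps(spectral_freq, temporal_freq, mps_pow, mps_phase)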
def mps(spectrogram, df, dt, window=None, Norm=True):
"""
Calculates the modulation power spectrum using an overlap-and-add method with a gaussian window of length window (in s).
Assumes that spectrogram is in dB. df and dt are the axes of the spectrogram.
"""
# Debugging parameter
debugPlot = False
# Resolution of spectrogram in DB
dbRES = 50
# Check the size of the spectrogram vs dt
nt = dt.size
nf = df.size
if spectrogram.shape[1] != nt and spectrogram.shape[0] != nf:
print('Error in mps. Expected %d bands in frequency and %d points in time' % (nf, nt))
print('Spectrogram had shape %d, %d' % spectrogram.shape)
return 0, 0, 0
# Z-score the flattened spectrogram if Norm is True
sdata = deepcopy(spectrogram)
if Norm:
maxdata = sdata.max()
mindata = maxdata - dbRES
sdata[sdata< mindata] = mindata
sdata -= sdata.mean()
sdata /= sdata.std()
if window is None:
window = dt[-1]/10.0
# Find the number of spectrogram points in the gaussian window
if dt[-1] < window:
print('Warning in mps: Requested MPS window size is greater than spectrogram temporal extent.')
print('mps will be calculated with windows of %d points or %s s' % (nt-1, dt[-1]) )
nWindow = nt - 1
else:
nWindow = np.where(dt>= window)[0][0]
if nWindow%2 == 0:
nWindow += 1 # Make it odd size so that we have a symmetric window
#if nWindow < 64:
# print('Error in mps: window size %d pts (%.3f s) is two small for reasonable estimates' % (nWindow, window))
# return np.asarray([]), np.asarray([]), np.asarray([])
# Generate the Gaussian window
gt, wg = gaussian_window(nWindow, 6)
tShift = int(gt[-1]/3)
nchunks = 0
# Pad the spectrogram with zeros.
minSdata = sdata.min()
sdataZeros = np.ones((sdata.shape[0], int((nWindow-1)/2))) * minSdata
sdata = np.concatenate((sdataZeros, sdata, sdataZeros), axis = 1)
if debugPlot:
plt.figure(1)
plt.clf()
plt.subplot()
plt.imshow(sdata, origin='lower')
plt.title('Scaled and Padded Spectrogram')
plt.show()
plt.pause(1)
for tmid in range(tShift, nt, tShift):
# t mid is in the original coordinates while tstart and tend
# are shifted to deal with the zero padding.
tstart = tmid-(nWindow-1)//2-1
tstart += (nWindow-1)//2
if tstart < 0:
print('Error in mps. tstart negative')
break;
tend = tmid+(nWindow-1)//2
tend += (nWindow-1)//2
if tend > sdata.shape[1]:
print('Error in mps. tend too large')
break
nchunks += 1
# Multiply the spectrogram by the window
wSpect = deepcopy(sdata[:,tstart:tend])
# Debugging code
if debugPlot:
plt.figure(nchunks+1)
plt.clf()
plt.subplot(121)
plt.imshow(wSpect, origin='lower')
plt.title('%d Middle (%d %d)' % (tmid, tstart, tend) )
for fInd in range(nf):
wSpect[fInd,:] = wSpect[fInd,:]*wg
# Debugging code
if debugPlot:
plt.figure(nchunks+1)
plt.subplot(122)
plt.imshow(wSpect, origin='lower')
plt.title('After')
plt.show()
plt.pause(1)
input("Press Enter to continue...")
# Get the 2d FFT
wf, wt, mps_pow, mps_phase = mtfft(wSpect, df, dt[0:tend-tstart])
if nchunks == 1:
mps_powAvg = mps_pow
else:
mps_powAvg += mps_pow
mps_powAvg /= nchunks
return wf, wt, mps_powAvg
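# Usage sketch: averaged modulation power spectrum via the overlap-and-add estimate
# above. log_spec, spec_freq and spec_t are assumed inputs (a dB spectrogram and its
# axes); the 0.2 s window is an illustrative choice.
#
#   wf, wt, mps_pow_avg = mps(log_spec, spec_freq, spec_t, window=0.2)
#   plot_mps(wf, wt, mps_pow_avg)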
def plot_mps(spectral_freq, temporal_freq, amp, phase=None):
plt.figure()
#plot the amplitude
if phase is not None:
plt.subplot(2, 1, 1)
#ex = (spectral_freq.min(), spectral_freq.max(), temporal_freq.min(), temporal_freq.max())
ex = (temporal_freq.min(), temporal_freq.max(), spectral_freq.min()*1e3, spectral_freq.max()*1e3)
plt.imshow(amp, interpolation='nearest', aspect='auto', origin='lower', cmap=cmap.jet, extent=ex)
plt.ylabel('Spectral Frequency (Cycles/KHz)')
plt.xlabel('Temporal Frequency (Hz)')
plt.colorbar()
plt.ylim((0,spectral_freq.max()*1e3))
plt.title('Power')
#plot the phase
if phase is not None:
plt.subplot(2, 1, 2)
plt.imshow(phase, interpolation='nearest', aspect='auto', origin='lower', cmap=cmap.jet, extent=ex)
plt.ylabel('Spectral Frequency (Cycles/KHz)')
plt.xlabel('Temporal Frequency (Hz)')
plt.ylim((0,spectral_freq.max()*1e3))
plt.title('Phase')
plt.colorbar()
def synSpect(b, x):
# Generates a model spectrum made out of gaussian peaks
# fund, sigma, pkmax, dbfloor
# global fundGlobal maxFund minFund
npeaks = np.size(b)-1 # First element of b is the sampling rate
# amp = 25 # Force 25 dB peaks
sdpk = 60 # Force a fixed 60 Hz peak width
synS = np.zeros(len(x))
for i in range(npeaks):
a = b[i+1] # To enforce positive peaks only
synS = synS + a*np.exp(-(x-b[0]*(i+1))**2/(2*sdpk**2))
#if (sum(isinf(synS)) + sum(isnan(synS))):
# for i in range(npeaks):
# fprintf(1,'%f ', exp(b(i+1)))
return synS
def residualSyn(vars, x, realS):
b = vars
synS = synSpect(b, x)
return realS-synS
#if (sum(isinf(synS)) + sum(isnan(synS)))
# for i=1:npeaks
# fprintf(1,'%f ', exp(b(i+1)))
def lpc(signal, order):
"""Compute the Linear Prediction Coefficients.
Return the order + 1 LPC coefficients for the signal. c = lpc(x, k) will
find the k+1 coefficients of a k order linear filter:
xp[n] = -c[1] * x[n-1] - ... - c[k] * x[n-k]
Such that the sum of the squared error e[i] = xp[i] - x[i] is minimized.
Parameters
----------
signal: array_like
input signal
order : int
LPC order (the output will have order + 1 items)"""
order = int(order)
if signal.ndim > 1:
raise ValueError("Array of rank > 1 not supported yet")
if order > signal.size:
raise ValueError("Input signal must have a lenght >= lpc order")
if order > 0:
p = order + 1
r = np.zeros(p, signal.dtype)
# Number of non zero values in autocorrelation one needs for p LPC
# coefficients
nx = np.min([p, signal.size])
x = np.correlate(signal, signal, 'full')
r[:nx] = x[signal.size-1:signal.size+order]
phi = np.dot(inv(toeplitz(r[:-1])), -r[1:])
return np.concatenate(([1.], phi)), None, None
else:
return np.ones(1, dtype = signal.dtype), None, None
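# Usage sketch: 8th-order LPC on a short frame, then formant candidates from the
# filter roots (this mirrors how lpc is used in fundEstimator below). The frame and
# sampling rate are illustrative assumptions.
#
#   fs = 44100
#   frame = generate_sine_wave(0.02, 800.0, fs)
#   A, _, _ = lpc(frame, 8)
#   rts = np.roots(A)
#   rts = rts[np.imag(rts) >= 0]
#   formant_freqs = np.arctan2(np.imag(rts), np.real(rts)) * (fs / (2 * np.pi))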
def fundEstimator(soundIn, fs, t=None, debugFig = 0, maxFund = 1500, minFund = 300, lowFc = 200, highFc = 6000, minSaliency = 0.5, minFormantFreq = 500, maxFormantBW = 500, windowFormant = 0.1, method='Stack'):
"""
Estimates the fundamental frequency of a complex sound.
soundIn is the sound pressure waveform.
fs is the sampling rate
t is a vector of time values in s at which the fundamental will be estimated.
The sound must include at least 1024 sample points
The optional parameters with defaults are:
Some user parameters
debugFig = 0 Set to zero to eliminate figures.
maxFund = 1500 Maximum fundamental frequency
minFund = 300 Minimum fundamental frequency
lowFc = 200 Low frequency cut-off for band-passing the signal prior to auto-correlation.
highFc = 6000 High frequency cut-off
minSaliency = 0.5 Threshold in the auto-correlation for minimum saliency - returns NaN for pitch values if saliency is below this number
minFormantFreq = 500 Minimum value of the first formant
maxFormantBW = 500 Maximum value of the formant bandwidth.
windowFormant = 0.1 Time window for formant calculation. Includes 5 std of the normal window.
Four methods are available:
'AC' - Peak of the auto-correlation function
'ACA' - Peak of envelope of auto-correlation function
'Cep' - First peak in cepstrum
'Stack' - Fitting of harmonic stacks (default - works well for zebra finches)
Returns
sal - the time varying pitch saliency - a number between 0 and 1 corresponding to relative size of the first auto-correlation peak
fund - the time-varying fundamental in Hz at the same resolution as the spectrogram.
fund2 - a second peak in the spectrum that is not a multiple of the fundamental - a sign of a second voice
form1 - the first formant, if it exists
form2 - the second formant, if it exists
form3 - the third formant, if it exists
soundLen - length of sal, fund, fund2, form1, form2, form3
"""
# Band-pass filtering signal prior to auto-correlation
soundLen = len(soundIn)
nfilt = 1024
if soundLen < 1024:
print('Warning in fundEstimator: sound too short for bandpass filtering, len(soundIn)=%d' % soundLen)
print('Signal will not be filtered - you might want to filter before making the BioSound object')
# return (np.asarray([]), np.asarray([]), np.asarray([]), np.asarray([]), np.asarray([]), np.asarray([]), soundLen)
else:
# high pass filter the signal
highpassFilter = firwin(nfilt-1, 2.0*lowFc/fs, pass_zero=False)
padlen = min(soundLen-10, 3*len(highpassFilter))
soundIn = filtfilt(highpassFilter, [1.0], soundIn, padlen=padlen)
# low pass filter the signal
lowpassFilter = firwin(nfilt, 2.0*highFc/fs)
padlen = min(soundLen-10, 3*len(lowpassFilter))
soundIn = filtfilt(lowpassFilter, [1.0], soundIn, padlen=padlen)
# Plot a spectrogram?
#if debugFig:
# plt.figure(9)
# (tDebug ,freqDebug ,specDebug , rms) = spectrogram(soundIn, fs, 1000.0, 50, min_freq=0, max_freq=10000, nstd=6, log=True, noise_level_db=50, rectify=True)
# plot_spectrogram(tDebug, freqDebug, specDebug)
# Initializations and useful variables
soundLen = len(soundIn)
sound_dur = soundLen / fs
if t is None:
# initialize t to be spaced by 1 ms increments if not specified
_si = 1e-3
npts = int(sound_dur / _si)
t = np.arange(npts) * _si
nt=len(t)
soundRMS = np.zeros(nt)
fund = np.zeros(nt)
fund2 = np.zeros(nt)
sal = np.zeros(nt)
form1 = np.zeros(nt)
form2 = np.zeros(nt)
form3 = np.zeros(nt)
# Calculate the size of the window for the auto-correlation
alpha = 5 # Number of sd in the Gaussian window
winLen = int(np.fix((2.0*alpha/minFund)*fs)) # Length of Gaussian window based on minFund
if (winLen%2 == 0): # Make a symmetric window
winLen += 1
# Use 200 ms for LPC Window - make this a parameter at some point
winLen2 = int(np.fix(windowFormant*fs))
if (winLen2%2 == 0): # Make a symmetric window
winLen2 += 1
gt, w = gaussian_window(winLen, alpha)
gt2, w2 = gaussian_window(winLen2, alpha)
maxlags = int(2*ceil((float(fs)/minFund)))
# First calculate the rms in each window
for it in range(nt):
tval = t[it] # Center of window in time
if tval >= sound_dur:
continue
tind = int(np.fix(tval*fs)) # Center of window in ind
tstart = tind - (winLen-1)//2
tend = tind + (winLen-1)//2
if tstart < 0:
winstart = - tstart
tstart = 0
else:
winstart = 0
if tend >= soundLen:
windend = winLen - (tend-soundLen+1) - 1
tend = soundLen-1
else:
windend = winLen-1
soundWin = soundIn[tstart:tend]*w[winstart:windend]
soundRMS[it] = np.std(soundWin)
soundRMSMax = max(soundRMS)
# Calculate the auto-correlation in windowed segments and obtain 4 guess values of the fundamental
# fundCorrGuess - guess from the auto-correlation function
# fundCorrAmpGuess - guess form the amplitude of the auto-correlation function
# fundCepGuess - guess from the cepstrum
# fundStackGuess - guess taken from a fit of the power spectrum with a harmonic stack, using the fundCepGuess as a starting point
# Current version use fundStackGuess as the best estimate...
soundlen = 0
for it in range(nt):
fund[it] = float('nan')
sal[it] = float('nan')
fund2[it] = float('nan')
form1[it] = float('nan')
form2[it] = float('nan')
form3[it] = float('nan')
if (soundRMS[it] < soundRMSMax*0.1):
continue
soundlen += 1
tval = t[it] # Center of window in time
if tval >= sound_dur: # This should not happen here because the RMS should be zero
continue
tind = int(np.fix(tval*fs)) # Center of window in ind
tstart = tind - (winLen-1)//2
tend = tind + (winLen-1)//2
if tstart < 0:
winstart = - tstart
tstart = 0
else:
winstart = 0
if tend >= soundLen:
windend = winLen - (tend-soundLen+1) - 1
tend = soundLen-1
else:
windend = winLen-1
tstart2 = tind - (winLen2-1)//2
tend2 = tind + (winLen2-1)//2
if tstart2 < 0:
winstart2 = - tstart2
tstart2 = 0
else:
winstart2 = 0
if tend2 >= soundLen:
windend2 = winLen2 - (tend2-soundLen+1) - 1
tend2 = soundLen-1
else:
windend2 = winLen2-1
soundWin = soundIn[tstart:tend]*w[winstart:windend]
soundWin2 = soundIn[tstart2:tend2]*w2[winstart2:windend2]
# Apply LPC to get time-varying formants and one additional guess for the fundamental frequency
# TODO (kevin): replace this with librosa
A, E, K = lpc(soundWin2, 8) # 8 degree polynomial
rts = np.roots(A) # Find the roots of A
rts = rts[np.imag(rts)>=0] # Keep only half of them
angz = np.arctan2(np.imag(rts),np.real(rts))
# Calculate the frequencies and the bandwidth of the formants
frqsFormants = angz*(fs/(2*np.pi))
indices = np.argsort(frqsFormants)
bw = -0.5*(fs/(2*np.pi))*np.log(np.abs(rts)) # FIXME (kevin): I think this line was broken before... it was using 1/2
# Keep formants above 500 Hz and with bandwidth < 500 # This was 1000 for bird calls
formants = []
for kk in indices:
if ( frqsFormants[kk]>minFormantFreq and bw[kk] < maxFormantBW):
formants.append(frqsFormants[kk])
formants = np.array(formants)
if len(formants) > 0 :
form1[it] = formants[0]
if len(formants) > 1 :
form2[it] = formants[1]
if len(formants) > 2 :
form3[it] = formants[2]
# Calculate the auto-correlation
lags = np.arange(-maxlags, maxlags+1, 1)
autoCorr = correlation_function(soundWin, soundWin, lags)
ind0 = int(np.where(lags == 0)[0][0]) # need to find lag zero index
# find peaks
indPeaksCorr = detect_peaks(autoCorr, mph=autoCorr.max()/10.0)
# Eliminate center peak and all peaks too close to middle
indPeaksCorr = np.delete(indPeaksCorr,np.where( (indPeaksCorr-ind0) < fs/maxFund)[0])
pksCorr = autoCorr[indPeaksCorr]
# Find max peak
if len(pksCorr)==0:
pitchSaliency = 0.1 # 0.1 goes with the detection of peaks greater than max/10
else:
indIndMax = np.where(pksCorr == max(pksCorr))[0][0]
indMax = indPeaksCorr[indIndMax]
fundCorrGuess = fs/abs(lags[indMax])
pitchSaliency = autoCorr[indMax]/autoCorr[ind0]
sal[it] = pitchSaliency
if sal[it] < minSaliency:
continue
# Calculate the envelope of the auto-correlation after rectification
envCorr = temporal_envelope(autoCorr, fs, cutoff_freq=maxFund, resample_rate=None)
locsEnvCorr = detect_peaks(envCorr, mph=envCorr.max()/10.0)
pksEnvCorr = envCorr[locsEnvCorr]
# Find the peak closest to zero
if locsEnvCorr.size > 1:
lagdiff = np.abs(locsEnvCorr[0]-ind0)
indIndEnvMax = 0
for indtest in range(1,locsEnvCorr.size):
lagtest = np.abs(locsEnvCorr[indtest]-ind0)
if lagtest < lagdiff:
lagdiff = lagtest
indIndEnvMax = indtest
# Take the first peak after the one closest to zero
if indIndEnvMax+2 > len(locsEnvCorr): # No such peak - use data for correlation function
fundCorrAmpGuess = fundCorrGuess
indEnvMax = indMax
else:
indEnvMax = locsEnvCorr[indIndEnvMax+1]
if lags[indEnvMax] == 0 : # This should not happen
print('Error: Max Peak in envelope auto-correlation found at zero delay')
fundCorrAmpGuess = fundCorrGuess
indEnvMax = indMax
else:
fundCorrAmpGuess = fs/lags[indEnvMax]
else:
fundCorrAmpGuess = fundCorrGuess
indEnvMax = indMax
# Calculate power spectrum and cepstrum
Y = fft(soundWin, n=winLen+1)
f = (fs/2.0)*(np.array(range(int((winLen+1)/2+1)), dtype=float)/float((winLen+1)//2))
fhigh = np.where(f >= highFc)[0][0]
powSound = 20.0*np.log10(np.abs(Y[0:(winLen+1)//2+1])) # This is the power spectrum
powSoundGood = powSound[0:fhigh]
maxPow = max(powSoundGood)
powSoundGood = powSoundGood - maxPow # Set zero as the peak amplitude
powSoundGood[powSoundGood < - 60] = -60
# Calculate coarse spectral envelope
p = np.polyfit(f[0:fhigh], powSoundGood, 3)
powAmp = np.polyval(p, f[0:fhigh])
# Cepstrum
CY = dct(powSoundGood-powAmp, norm = 'ortho')
tCY = 1000.0*np.array(range(len(CY)))/fs # Units of Cepstrum in ms
fCY = np.zeros(tCY.size)
fCY[1:] = 1000.0/tCY[1:] # Corresponding fundamental frequency in Hz.
fCY[0] = fs*2.0 # Nyquist limit not infinity
lowInd = np.where(fCY<lowFc)[0]
if lowInd.size > 0:
flowCY = np.where(fCY < lowFc)[0][0]
else:
flowCY = fCY.size
fhighCY = np.where(fCY < highFc)[0][0]
# Find peak of Cepstrum
indPk = np.where(CY[fhighCY:flowCY] == max(CY[fhighCY:flowCY]))[0][-1]
indPk = fhighCY + indPk
fmass = 0
mass = 0
indTry = indPk
while (CY[indTry] > 0):
fmass = fmass + fCY[indTry]*CY[indTry]
mass = mass + CY[indTry]
indTry = indTry + 1
if indTry >= len(CY):
break
indTry = indPk - 1
if (indTry >= 0 ):
while (CY[indTry] > 0):
fmass = fmass + fCY[indTry]*CY[indTry]
mass = mass + CY[indTry]
indTry = indTry - 1
if indTry < 0:
break
fGuess = fmass/mass
if (fGuess == 0 or np.isnan(fGuess) or np.isinf(fGuess) ): # Failure of cepstral method
fGuess = fundCorrGuess
fundCepGuess = fGuess
# Force fundamental to be bounded
if (fundCepGuess > maxFund ):
i = 2
while(fundCepGuess > maxFund):
fundCepGuess = fGuess/i
i += 1
elif (fundCepGuess < minFund):
i = 2
while(fundCepGuess < minFund):
fundCepGuess = fGuess*i
i += 1
# Fit Gaussian harmonic stack
maxPow = max(powSoundGood-powAmp)
# This is the matlab code...
# fundFitCep = NonLinearModel.fit(f(1:fhigh)', powSoundGood'-powAmp, @synSpect, [fundCepGuess ones(1,9).*log(maxPow)])
# modelPowCep = synSpect(double(fundFitCep.Coefficients(:,1)), f(1:fhigh))
vars = np.concatenate(([fundCorrGuess], np.ones(9)*np.log(maxPow)))
bout = leastsq(residualSyn, vars, args = (f[0:fhigh], powSoundGood-powAmp))
modelPowCep = synSpect(bout[0], f[0:fhigh])
errCep = sum((powSoundGood - powAmp - modelPowCep)**2)
vars = np.concatenate(([fundCorrGuess*2], np.ones(9)*np.log(maxPow)))
bout2 = leastsq(residualSyn, vars, args = (f[0:fhigh], powSoundGood-powAmp))
modelPowCep2 = synSpect(bout2[0], f[0:fhigh])
errCep2 = sum((powSoundGood - powAmp - modelPowCep2)**2)
if errCep2 < errCep:
bout = bout2
modelPowCep = modelPowCep2
fundStackGuess = bout[0][0]
if (fundStackGuess > maxFund) or (fundStackGuess < minFund ):
fundStackGuess = float('nan')
# Store the result depending on the method chosen
if method == 'AC':
fund[it] = fundCorrGuess
elif method == 'ACA':
fund[it] = fundCorrAmpGuess
elif method == 'Cep':
fund[it] = fundCepGuess
elif method == 'Stack':
fund[it] = fundStackGuess
# A second cepstrum for the second voice
# CY2 = dct(powSoundGood-powAmp'- modelPowCep)
if not np.isnan(fundStackGuess):
powLeft = powSoundGood- powAmp - modelPowCep
maxPow2 = max(powLeft)
f2 = 0
if ( maxPow2 > maxPow*0.5): # Possible second peak in central area as indicator of second voice.
f2 = f[np.where(powLeft == maxPow2)[0][0]]
if ( f2 > 1000 and f2 < 4000):
if (pitchSaliency > minSaliency):
fund2[it] = f2
#% modelPowCorrAmp = synSpect(double(fundFitCorrAmp.Coefficients(:,1)), f(1:fhigh))
#%
#% errCorr = sum((powSoundGood - powAmp' - modelPowCorr).^2)
#% errCorrAmp = sum((powSoundGood - powAmp' - modelPowCorrAmp).^2)
#% errCorrSum = sum((powSoundGood - powAmp' - (modelPowCorr+modelPowCorrAmp) ).^2)
#%
#% f1 = double(fundFitCorr.Coefficients(1,1))
#% f2 = double(fundFitCorrAmp.Coefficients(1,1))
#%
#% if (pitchSaliency > minSaliency)
#% if (errCorr < errCorrAmp)
#% fund(it) = f1
#% if errCorrSum < errCorr
#% fund2(it) = f2
#% end
#% else
#% fund(it) = f2
#% if errCorrSum < errCorrAmp
#% fund2(it) = f1
#% end
#% end
#%
#% end
if (debugFig ):
plt.figure(10)
plt.subplot(4,1,1)
plt.cla()
plt.plot(soundWin)
# f1 = double(fundFitCorr.Coefficients(1,1))
# f2 = double(fundFitCorrAmp.Coefficients(1,1))
titleStr = 'Saliency = %.2f F0 AC = %.2f ACA = %.2f Cep = %.2f St = %.2f(Hz)' % (pitchSaliency, fundCorrGuess, fundCorrAmpGuess, fundCepGuess, fundStackGuess)
plt.title(titleStr)
plt.subplot(4,1,2)
plt.cla()
plt.plot(1000*(lags/fs), autoCorr)
plt.plot([1000.*lags[indMax]/fs, 1000.*lags[indMax]/fs], [0, autoCorr[ind0]], 'k')
plt.plot(1000.*lags/fs, envCorr, 'r', linewidth= 2)
plt.plot([1000*lags[indEnvMax]/fs, 1000.*lags[indEnvMax]/fs], [0, autoCorr[ind0]], 'g', linewidth=2)
plt.xlabel('Time (ms)')
plt.subplot(4,1,3)
plt.cla()
plt.plot(f[0:fhigh],powSoundGood)
plt.axis([0, highFc, -60, 0])
plt.plot(f[0:fhigh], powAmp, 'b--')
plt.plot(f[0:fhigh], modelPowCep + powAmp, 'k')
# plt.plot(f(1:fhigh), modelPowCorrAmp + powAmp', 'g')
for ih in range(1,6):
plt.plot([fundCorrGuess*ih, fundCorrGuess*ih], [-60, 0], 'k')
plt.plot([fundCorrAmpGuess*ih, fundCorrAmpGuess*ih], [-60, 0], 'g')
plt.plot([fundStackGuess*ih, fundStackGuess*ih], [-60, 0], 'r')
plt.plot([fundCepGuess*ih, fundCepGuess*ih], [-60, 0], 'y')
if f2 != 0:
plt.plot([f2, f2], [-60, 0], 'b')
plt.xlabel('Frequency (Hz)')
# title(sprintf('Err1 = %.1f Err2 = %.1f', errCorr, errCorrAmp))
plt.subplot(4,1,4)
plt.cla()
plt.plot(tCY, CY)
# plot(tCY, CY2, 'k--')
plt.plot([1000./fundCorrGuess, 1000./fundCorrGuess], [0, max(CY)], 'k')
plt.plot([1000./fundCorrAmpGuess, 1000./fundCorrAmpGuess], [0, max(CY)], 'g')
plt.plot([1000./fundStackGuess, 1000./fundStackGuess], [0, max(CY)], 'r')
plt.plot([1000./fundCepGuess, 1000./fundCepGuess], [0, max(CY)], 'y')
#% plot([(pkClosest-1)/fs (pkClosest-1)/fs], [0 max(CY)], 'g')
#% if ~isempty(ipk2)
#% plot([(pk2-1)/fs (pk2-1)/fs], [0 max(CY)], 'b')
#% end
#% for ip=1:length(pks)
#% plot([(locs(ip)-1)/fs (locs(ip)-1)/fs], [0 pks(ip)/4], 'r')
#% end
plt.axis([0, 1000.*np.size(CY)/(2.*fs), 0, max(CY)])
plt.xlabel('Time (ms)')
plt.pause(1)
# Fix formants.
# Find means in regions where there are two formants
# Decide whether there is formant 3
n3 = np.sum(~np.isnan(form3))
if (n3 < 0.1*nt): # There are only two formants - fix formant 3 by merging...
meanf1 = np.mean(form1[~np.isnan(form2)])
meanf2 = np.mean(form2[~np.isnan(form2)])
for it in range(nt):
if ~np.isnan(form3[it]):
df12 = np.abs(form2[it]-meanf1)
df23 = np.abs(form3[it]-meanf2)
if df12 < df23 :
form1[it] = (form1[it] + form2[it])/2.0
form2[it] = form3[it]
form3[it] = np.nan
else:
form2[it] = (form2[it] + form3[it])/2.0
form3[it] = np.nan
else: # if there is only one formant found, figure out whether it is the first or the second
if np.isnan(form2[it]):
if ~np.isnan(form1[it]):
df11 = np.abs(form1[it]-meanf1)
df12 = np.abs(form1[it]-meanf2)
if (df12 < df11):
form2[it] = form1[it]
form1[it] = np.nan
else:
meanf1 = np.mean(form1[~np.isnan(form3)])
meanf2 = np.mean(form2[~np.isnan(form3)])
meanf3 = np.mean(form3[~np.isnan(form3)])
for it in range(nt):
if np.isnan(form3[it]):
if np.isnan(form2[it]): # there is only one formant found
if ~np.isnan(form1[it]):
df11 = np.abs(form1[it]-meanf1)
df12 = np.abs(form1[it]-meanf2)
df13 = np.abs(form1[it]-meanf3)
if (df13 < np.minimum(df11,df12)):
form3[it] = form1[it]
form1[it] = np.nan
elif (df12 < np.minimum(df11, df13)):
form2[it] = form1[it]
form1[it] = np.nan
else: # two formants are found
df22 = np.abs(form2[it]-meanf2)
df23 = np.abs(form2[it]-meanf3)
if (df23 < df22):
form3[it] = form2[it]
df11 = np.abs(form1[it]-meanf1)
df12 = np.abs(form1[it]-meanf2)
if (df12 < df11):
form2[it] = form1[it]
form1[it] = np.nan
else:
form2[it] = np.nan
# for it in range(nt):
# if ~np.isnan(form1[it]):
# df11 = np.abs(form1[it]-meanf1)
# df12 = np.abs(form1[it]-meanf2)
# df13 = np.abs(form1[it]-meanf3)
# if df12 < df11:
# if df13 < df12:
# if ~np.isnan(form3[it]):
# df33 = np.abs(form3[it]-meanf3)
# if df13 < df33:
# form3[it] = form1[it]
# else:
# form3[it] = form1[it]
# else:
# if ~np.isnan(form2[it]):
# df22 = np.abs(form2[it]-meanf2)
# if df12 < df22:
# form2[it] = form1[it]
# else:
# form2[it] = form1[it]
# form1[it] = float('nan')
# if ~np.isnan(form2[it]):
# df21 = np.abs(form2[it]-meanf1)
# df22 = np.abs(form2[it]-meanf2)
# df23 = np.abs(form2[it]-meanf3)
# if df21 < df22 :
# if ~np.isnan(form1[it]):
# df11 = np.abs(form1[it]-meanf1)
# if df21 < df11:
# form1[it] = form2[it]
# else:
# form1[it] = form2[it]
# form2[it] = float('nan')
# elif df23 < df22:
# if ~np.isnan(form3[it]):
# df33 = np.abs(form3[it]-meanf3)
# if df23 < df33:
# form3[it] = form2[it]
# else:
# form3[it] = form2[it]
# form2[it] = float('nan')
# if ~np.isnan(form3[it]):
# df31 = np.abs(form3[it]-meanf1)
# df32 = np.abs(form3[it]-meanf2)
# df33 = np.abs(form3[it]-meanf3)
# if df32 < df33:
# if df31 < df32:
# if ~np.isnan(form1[it]):
# df11 = np.abs(form1[it]-meanf1)
# if df31 < df11:
# form1[it] = form3[it]
# else:
# form1[it] = form3[it]
# else:
# if ~np.isnan(form2[it]):
# df22 = np.abs(form2[it]-meanf2)
# if df32 < df22:
# form2[it] = form3[it]
# else:
# form2[it] = form3[it]
# form3[it] = float('nan')
return (sal, fund, fund2, form1, form2, form3, soundlen)
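# Usage sketch: pitch saliency, fundamental and formant tracks for a synthetic
# harmonic stack (parameter values below are illustrative assumptions).
#
#   fs = 44100
#   s = generate_simple_stack(1.0, 400.0, fs, num_harmonics=8)
#   sal, fund, fund2, form1, form2, form3, n = fundEstimator(s, fs, maxFund=1500, minFund=300)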
def get_mps(t, freq, spec):
"Computes the MPS of a spectrogram (idealy a log-spectrogram) or other REAL time-freq representation"
mps = fftshift(fft2(spec))
amps = np.real(mps * np.conj(mps))
nf = mps.shape[0]
nt = mps.shape[1]
wfreq = fftshift(fftfreq(nf, d=freq[1] - freq[0]))
wt = fftshift(fftfreq(nt, d=t[1] - t[0]))
return wt, wfreq, mps, amps
def inverse_mps(mps):
"Inverts a MPS back to a spectrogram"
spec = ifft2(ifftshift(mps))
return spec
def play_signal(s, normalize = False):
"quick and easy temporary play"
wf = WavFile()
wf.sample_rate = 44100 #standard samp rate
wf.data = s
wf.to_wav("/tmp/README.wav", normalize)
play_sound("/tmp/README.wav")
def inverse_spectrogram(spec, s_len,
sample_rate, spec_sample_rate, freq_spacing, min_freq=0, max_freq=None, nstd=6, log=True, noise_level_db=80, rectify=True):
"""turns the complex spectrogram into a signal
inverts by repeating the process on a string-of-ones
"""
spec_copy = spec.copy()
if log:
spec_copy = 10**(spec_copy)
spec_tranpose = spec_copy.transpose() # spec_tranpose[time][frequency]
hnwinlen = len(spec) - 1
nincrement = int(np.round(float(sample_rate)/spec_sample_rate))
gauss_t = np.arange(-hnwinlen, hnwinlen+1, 1.0)
gauss_std = float(2*hnwinlen) / float(nstd)
gauss_window = np.exp(-gauss_t**2 / (2.0*gauss_std**2)) / (gauss_std*np.sqrt(2*np.pi))
s = np.zeros(s_len + 2*hnwinlen+1)
w = np.zeros(s_len + 2*hnwinlen+1)
for i in range(len(spec_tranpose)):
sample = i * nincrement
spec_slice = np.concatenate((spec_tranpose[i][:0:-1].conj(), spec_tranpose[i]))
s[sample:sample+2*hnwinlen+1] += gauss_window * ifft(ifftshift(spec_slice))
w[sample:sample+2*hnwinlen+1] += gauss_window ** 2
s /= w
return s[hnwinlen:hnwinlen+s_len]
def inverse_real_spectrogram(spec, s_len,
sample_rate, spec_sample_rate, freq_spacing, min_freq=0, max_freq=None, nstd=6, log=True, noise_level_db=80, rectify=True, iterations = 10):
"inverts a real spectrogram into a signal using the griffith/lim algorithm"
spec_magnitude = spec.copy()
if log:
spec_magnitude = 10**spec_magnitude
estimated = inverse_spectrogram(spec_magnitude, s_len, sample_rate, spec_sample_rate, freq_spacing, min_freq, max_freq, nstd, log=False)
for i in range(iterations):
phase_spec = spectrogram(estimated, sample_rate, spec_sample_rate, freq_spacing, min_freq, max_freq, nstd)[2]
error = ((abs(spec_magnitude) - abs(phase_spec))**2).sum() / (abs(spec_magnitude)**2).sum()
print('the error after iteration %d is %f' % (i+1, error))
spec_angle = np.angle(phase_spec)
estimated_spec = spec_magnitude * np.exp(1j*spec_angle)
estimated = inverse_spectrogram(estimated_spec, s_len, sample_rate, spec_sample_rate, freq_spacing, min_freq, max_freq, nstd, log=False)
return estimated
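# Usage sketch: round-trip a real magnitude spectrogram through the Griffin-Lim style
# inversion above. spec_mag, s, fs and the analysis parameters are assumed to match
# the forward spectrogram() call.
#
#   s_hat = inverse_real_spectrogram(spec_mag, s_len=len(s), sample_rate=fs,
#                                    spec_sample_rate=1000.0, freq_spacing=50.0,
#                                    log=False, iterations=10)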
def log_transform(x, dbnoise=100, normalize=False):
""" Takes the log of a power spectrum or spectrogram to convert into decibels.
:param x: The power spectrum or spectrogram. The contents of x will be replaced with the log version.
:param dbnoise: The dynamic range in decibels; values more than dbnoise dB below the maximum are set to zero.
"""
x /= x.max()
zi = x > 0
x[zi] = 20*np.log10(x[zi]) + dbnoise
x[x < 0] = 0
if normalize:
x /= x.max()
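# Usage sketch: convert a power spectrogram to a floored dB scale in place.
# spec is an assumed complex spectrogram from spectrogram() above.
#
#   power_spec = np.abs(spec) ** 2
#   log_transform(power_spec, dbnoise=80)  # power_spec now holds dB values floored at 0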
def spec_stats(spec_t, spec_freq, spec):
""" Compute time-varying statistics on a spectrogram (or log spectrogram).
:param spec_t: Spectrogram times with shape (num_time_points)
:param spec_freq: Spectrogram frequencies with shape (num_freq)
:param spec: Spectrogram of shape (num_freq, num_time_points)
:return:
"""
# normalize each time point by its sum to create a probability distribution
nfreq,nt = spec.shape
spec_p = deepcopy(spec)
spec_p -= spec_p.min()
spec_p_sum = spec_p.sum(axis=0)
spec_p /= spec_p_sum
# compute mean frequency
freq_mean = np.dot(spec_p.T, spec_freq)
# compute quantiles
spec_p_csum = np.cumsum(spec_p, axis=0)
qvals = [0.25, 0.5, 0.75]
Q = np.zeros([len(qvals), nt])
for t in range(nt):
for k,q in enumerate(qvals):
i = spec_p_csum[:, t] <= q
if i.sum() > 0:
fi = np.max(np.where(i)[0])
Q[k, t] = spec_freq[fi]
stats = dict()
stats['Q'] = Q
stats['qvals'] = qvals
stats['freq_mean'] = freq_mean
return stats
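# Usage sketch: time-varying spectral quartiles and mean frequency from a spectrogram.
# spec_t, spec_freq and spec are assumed outputs of spectrogram() above.
#
#   stats = spec_stats(spec_t, spec_freq, np.abs(spec))
#   q1, median, q3 = stats['Q']      # one row per quantile in stats['qvals']
#   mean_freq = stats['freq_mean']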
def addramp(sound_in, samp_rate=44100, ramp_duration=5):
# Adds a cosine onset and offset ramp to the sound of length ramp_duration
# in ms
samp_ms = samp_rate/1000
sound_out = np.copy(sound_in)  # copy so the input array is not modified in place
lensound = len(sound_in)
nend = int(ramp_duration*samp_ms)
if (nend >= lensound):
nend = lensound - 1
# Onset ramp
for t in range(nend):
mult1=0.5*(1.0-np.cos(np.pi*t/nend))
sound_out[t] = sound_out[t]*mult1
# Offset ramp
nbeg = lensound - nend
for t in range(nbeg, lensound):
mult1=0.5*(1.0-np.cos(np.pi*(lensound-t-1)/nend))
sound_out[t] = sound_out[t]*mult1
return sound_out
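# Usage sketch: apply 5 ms cosine onset/offset ramps to a tone before playback
# (values are illustrative assumptions).
#
#   fs = 44100
#   tone = generate_sine_wave(0.5, 1000.0, fs)
#   ramped = addramp(tone, samp_rate=fs, ramp_duration=5)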
|
from django.db import models
from cached_fields.fields import CachedIntegerField
from cached_fields.mixins import CachedFieldsMixin
from reverseapp.handlers import InvoiceSignalHandler
class Item(models.Model):
name = models.CharField(max_length=12)
price = models.IntegerField()
class Invoice(models.Model):
item = models.ForeignKey(Item, on_delete=models.CASCADE, related_name="invoices")
quantity = models.IntegerField()
total = CachedIntegerField(InvoiceSignalHandler) |
import os
SITE_ID = 1
BASE_DIR = os.path.dirname(__file__)
ROOT_URLCONF = 'urls'
SECRET_KEY = 'secretkey'
SITE_ROOT = os.path.dirname(os.path.abspath(__file__))
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.sessions',
'django.contrib.contenttypes',
'django.contrib.admin',
'django.contrib.messages',
'django.contrib.sites',
'cached_httpbl',
)
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'test.db', # Or path to database file if using sqlite3.
'USER': '', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.contrib.messages.middleware.MessageMiddleware'
)
TEMPLATE_CONTEXT_PROCESSORS = (
'django.contrib.messages.context_processors.messages',
)
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'cached-httpbl',
}
}
# this httpBL API key is legal only for testing purposes
CACHED_HTTPBL_API_KEY = 'abcdefghijkl'
CACHED_HTTPBL_USE_CACHE = False
|
#!/usr/bin/env python3
class Node(object):
"""Node class for binary tree"""
def __init__(self, data=None):
self.left = None
self.right = None
self.data = data
class Tree(object):
"""Tree class for binary search"""
def __init__(self, data=None):
self.root = Node(data)
def insert(self, data):
self._add(data, self.root)
def _add(self, data, node):
if data < node.data:
if node.left:
self._add(data, node.left)
else:
node.left = Node(data)
else:
if node.right:
self._add(data, node.right)
else:
node.right = Node(data)
def traverseBFS(self, node):
queue = [node]
out = [] # output buffer
while len(queue) > 0:
currentNode = queue.pop(0)
out.append(currentNode.data)
if currentNode.left:
queue.append(currentNode.left)
if currentNode.right:
queue.append(currentNode.right)
return out
def inorder(self, node, buf=None):
# use None as the default to avoid the shared mutable default argument pitfall
if buf is None:
buf = []
if node is not None:
self.inorder(node.left, buf)
buf.append(node.data)
self.inorder(node.right, buf)
return buf
def preorder(self, node, buf=None):
if buf is None:
buf = []
if node is not None:
buf.append(node.data)
self.preorder(node.left, buf)
self.preorder(node.right, buf)
return buf
def postorder(self, node, buf=None):
if buf is None:
buf = []
if node is not None:
self.postorder(node.left, buf)
self.postorder(node.right, buf)
buf.append(node.data)
return buf
def test(self):
d = self.traverseBFS(self.root)
assert(d == [1,0,5,7,10])
f = []
self.inorder(self.root, f)
assert(f == [0,1,5,7,10])
j = []
self.preorder(self.root, j)
assert(j == [1,0,5,7,10])
l = []
self.postorder(self.root, l)
assert(l == [0,10,7,5,1])
def main():
tree = Tree(1)
data = [5,7,10,0]
for i in data:
tree.insert(i)
tree.test()
if __name__ == '__main__':
main()
|
from abc import abstractmethod
import argparse
import copy
import numpy as np
import torch
from tqdm import tqdm
from cogdl.data import Dataset
from cogdl.data.sampler import (
SAINTSampler,
NeighborSampler,
ClusteredLoader,
)
from cogdl.models.supervised_model import SupervisedModel
from cogdl.trainers.base_trainer import BaseTrainer
from . import register_trainer
class SampledTrainer(BaseTrainer):
@staticmethod
def add_args(parser):
# fmt: off
parser.add_argument("--num-workers", type=int, default=4)
parser.add_argument("--eval-step", type=int, default=3)
parser.add_argument("--batch-size", type=int, default=128)
parser.add_argument("--no-self-loop", action="store_true")
# fmt: on
@abstractmethod
def fit(self, model: SupervisedModel, dataset: Dataset):
raise NotImplementedError
@abstractmethod
def _train_step(self):
raise NotImplementedError
@abstractmethod
def _test_step(self, split="val"):
raise NotImplementedError
def __init__(self, args):
super(SampledTrainer, self).__init__(args)
self.device = "cpu" if not torch.cuda.is_available() or args.cpu else args.device_id[0]
self.patience = args.patience
self.max_epoch = args.max_epoch
self.lr = args.lr
self.weight_decay = args.weight_decay
self.loss_fn, self.evaluator = None, None
self.data, self.train_loader, self.optimizer = None, None, None
self.eval_step = args.eval_step if hasattr(args, "eval_step") else 1
self.num_workers = args.num_workers if hasattr(args, "num_workers") else 0
self.batch_size = args.batch_size
self.self_loop = not (hasattr(args, "no_self_loop") and args.no_self_loop)
@classmethod
def build_trainer_from_args(cls, args):
return cls(args)
def train(self):
epoch_iter = tqdm(range(self.max_epoch))
patience = 0
max_score = 0
min_loss = np.inf
best_model = copy.deepcopy(self.model)
for epoch in epoch_iter:
self._train_step()
if (epoch + 1) % self.eval_step == 0:
acc, loss = self._test_step()
train_acc = acc["train"]
val_acc = acc["val"]
val_loss = loss["val"]
epoch_iter.set_description(
f"Epoch: {epoch:03d}, Train Acc/F1: {train_acc:.4f}, Val Acc/F1: {val_acc:.4f}"
)
self.model = self.model.to(self.device)
if val_loss <= min_loss or val_acc >= max_score:
if val_loss <= min_loss:
best_model = copy.deepcopy(self.model)
min_loss = np.min((min_loss, val_loss.cpu()))
max_score = np.max((max_score, val_acc))
patience = 0
else:
patience += 1
if patience == self.patience:
epoch_iter.close()
break
return best_model
@register_trainer("graphsaint")
class SAINTTrainer(SampledTrainer):
@staticmethod
def add_args(parser: argparse.ArgumentParser):
"""Add trainer-specific arguments to the parser."""
# fmt: off
SampledTrainer.add_args(parser)
parser.add_argument("--eval-cpu", action="store_true")
parser.add_argument("--method", type=str, default="node", help="graph samplers")
parser.add_argument("--sample-coverage", default=20, type=float, help="sample coverage ratio")
parser.add_argument("--size-subgraph", default=1200, type=int, help="subgraph size")
args = parser.parse_args()
if args.method == "rw" or args.method == "mrw":
parser.add_argument("--num-walks", default=50, type=int, help="number of random walks")
parser.add_argument("--walk-length", default=20, type=int, help="random walk length")
parser.add_argument("--size-frontier", default=20, type=int, help="frontier size in multidimensional random walks")
# fmt: on
@staticmethod
def get_args4sampler(args):
args4sampler = {
"method": args.method,
"sample_coverage": args.sample_coverage,
"size_subgraph": args.size_subgraph,
}
if args.method == "rw" or args.method == "mrw":
args4sampler["num_walks"] = args.num_walks
args4sampler["walk_length"] = args.walk_length
if args.method == "mrw":
args4sampler["size_frontier"] = args.size_frontier
return args4sampler
@classmethod
def build_trainer_from_args(cls, args):
return cls(args)
def __init__(self, args):
super(SAINTTrainer, self).__init__(args)
self.args4sampler = self.get_args4sampler(args)
self.eval_cpu = args.eval_cpu if hasattr(args, "eval_cpu") else False
def fit(self, model: SupervisedModel, dataset: Dataset):
self.dataset = dataset
self.data = dataset.data
if self.self_loop:
self.data.add_remaining_self_loops()
self.model = model.to(self.device)
self.evaluator = dataset.get_evaluator()
self.loss_fn = dataset.get_loss_fn()
self.sampler = SAINTSampler(dataset, self.args4sampler)()
# self.train_dataset = SAINTDataset(dataset, self.args_sampler)
# self.train_loader = SAINTDataLoader(
# dataset=train_dataset,
# num_workers=self.num_workers,
# persistent_workers=True,
# pin_memory=True
# )
# self.set_data_model(dataset, model)
self.optimizer = torch.optim.Adam(self.model.parameters(), lr=self.lr, weight_decay=self.weight_decay)
return self.train()
def _train_step(self):
self.data = self.sampler.one_batch("train")
self.data.to(self.device)
self.model = self.model.to(self.device)
self.model.train()
self.optimizer.zero_grad()
mask = self.data.train_mask
if len(self.data.y.shape) > 1:
logits = self.model.predict(self.data)
weight = self.data.norm_loss[mask].unsqueeze(1)
loss = torch.nn.BCEWithLogitsLoss(reduction="sum", weight=weight)(logits[mask], self.data.y[mask].float())
else:
logits = torch.nn.functional.log_softmax(self.model.predict(self.data), dim=-1)
loss = (
torch.nn.NLLLoss(reduction="none")(logits[mask], self.data.y[mask]) * self.data.norm_loss[mask]
).sum()
loss.backward()
self.optimizer.step()
def _test_step(self, split="val"):
self.data = self.sampler.one_batch(split)
if split != "train" and self.eval_cpu:
self.model = self.model.cpu()
else:
self.data.apply(lambda x: x.to(self.device))
self.model.eval()
masks = {"train": self.data.train_mask, "val": self.data.val_mask, "test": self.data.test_mask}
with torch.no_grad():
logits = self.model.predict(self.data)
loss = {key: self.loss_fn(logits[val], self.data.y[val]) for key, val in masks.items()}
metric = {key: self.evaluator(logits[val], self.data.y[val]) for key, val in masks.items()}
return metric, loss
@register_trainer("neighborsampler")
class NeighborSamplingTrainer(SampledTrainer):
model: torch.nn.Module
@staticmethod
def add_args(parser: argparse.ArgumentParser):
"""Add trainer-specific arguments to the parser."""
# fmt: off
SampledTrainer.add_args(parser)
# fmt: on
def __init__(self, args):
super(NeighborSamplingTrainer, self).__init__(args)
self.hidden_size = args.hidden_size
self.sample_size = args.sample_size
def fit(self, model, dataset):
self.data = dataset[0]
if self.self_loop:
self.data.add_remaining_self_loops()
self.evaluator = dataset.get_evaluator()
self.loss_fn = dataset.get_loss_fn()
settings = dict(
batch_size=self.batch_size,
num_workers=self.num_workers,
shuffle=False,
persistent_workers=True,
pin_memory=True,
)
if torch.__version__.split("+")[0] < "1.7.1":
settings.pop("persistent_workers")
self.data.train()
self.train_loader = NeighborSampler(
dataset=dataset,
mask=self.data.train_mask,
sizes=self.sample_size,
**settings,
)
settings["batch_size"] *= 5
self.data.eval()
self.test_loader = NeighborSampler(
dataset=dataset,
mask=None,
sizes=[-1],
**settings,
)
self.model = model.to(self.device)
self.model.set_data_device(self.device)
self.optimizer = torch.optim.Adam(self.model.parameters(), lr=self.lr, weight_decay=self.weight_decay)
best_model = self.train()
self.model = best_model
acc, loss = self._test_step()
return dict(Acc=acc["test"], ValAcc=acc["val"])
def _train_step(self):
self.data.train()
self.model.train()
self.train_loader.shuffle()
x_all = self.data.x.to(self.device)
y_all = self.data.y.to(self.device)
for target_id, n_id, adjs in self.train_loader:
self.optimizer.zero_grad()
n_id = n_id.to(x_all.device)
target_id = target_id.to(y_all.device)
x_src = x_all[n_id].to(self.device)
y = y_all[target_id].to(self.device)
loss = self.model.node_classification_loss(x_src, adjs, y)
loss.backward()
self.optimizer.step()
def _test_step(self, split="val"):
self.model.eval()
self.data.eval()
masks = {"train": self.data.train_mask, "val": self.data.val_mask, "test": self.data.test_mask}
with torch.no_grad():
logits = self.model.inference(self.data.x, self.test_loader)
loss = {key: self.loss_fn(logits[val], self.data.y[val]) for key, val in masks.items()}
acc = {key: self.evaluator(logits[val], self.data.y[val]) for key, val in masks.items()}
return acc, loss
@classmethod
def build_trainer_from_args(cls, args):
return cls(args)
@register_trainer("clustergcn")
class ClusterGCNTrainer(SampledTrainer):
@staticmethod
def add_args(parser: argparse.ArgumentParser):
"""Add trainer-specific arguments to the parser."""
# fmt: off
SampledTrainer.add_args(parser)
parser.add_argument("--n-cluster", type=int, default=1000)
parser.add_argument("--batch-size", type=int, default=20)
# fmt: on
@staticmethod
def get_args4sampler(args):
args4sampler = {
"method": "metis",
"n_cluster": args.n_cluster,
}
return args4sampler
def __init__(self, args):
super(ClusterGCNTrainer, self).__init__(args)
self.n_cluster = args.n_cluster
self.batch_size = args.batch_size
def fit(self, model, dataset):
self.data = dataset[0]
if self.self_loop:
self.data.add_remaining_self_loops()
self.model = model.to(self.device)
self.evaluator = dataset.get_evaluator()
self.loss_fn = dataset.get_loss_fn()
self.optimizer = torch.optim.Adam(self.model.parameters(), lr=self.lr, weight_decay=self.weight_decay)
settings = dict(
batch_size=self.batch_size, num_workers=self.num_workers, persistent_workers=True, pin_memory=True
)
if torch.__version__.split("+")[0] < "1.7.1":
settings.pop("persistent_workers")
self.data.train()
self.train_loader = ClusteredLoader(
dataset,
self.n_cluster,
method="metis",
**settings,
)
best_model = self.train()
self.model = best_model
metric, loss = self._test_step()
return dict(Acc=metric["test"], ValAcc=metric["val"])
def _train_step(self):
self.model.train()
self.data.train()
self.train_loader.shuffle()
total_loss = 0
for batch in self.train_loader:
self.optimizer.zero_grad()
batch = batch.to(self.device)
loss = self.model.node_classification_loss(batch)
loss.backward()
total_loss += loss.item()
self.optimizer.step()
def _test_step(self, split="val"):
self.model.eval()
self.data.eval()
data = self.data
self.model = self.model.cpu()
masks = {"train": self.data.train_mask, "val": self.data.val_mask, "test": self.data.test_mask}
with torch.no_grad():
logits = self.model(data)
loss = {key: self.loss_fn(logits[val], self.data.y[val]) for key, val in masks.items()}
metric = {key: self.evaluator(logits[val], self.data.y[val]) for key, val in masks.items()}
return metric, loss
@register_trainer("random_cluster")
class RandomClusterTrainer(SampledTrainer):
@staticmethod
def add_args(parser):
# fmt: off
SampledTrainer.add_args(parser)
parser.add_argument("--n-cluster", type=int, default=10)
parser.add_argument("--val-n-cluster", type=int, default=-1)
# fmt: on
def __init__(self, args):
super(RandomClusterTrainer, self).__init__(args)
self.patience = args.patience // args.eval_step
self.n_cluster = args.n_cluster
self.val_n_cluster = args.val_n_cluster if hasattr(args, "val_n_cluster") else -1
self.eval_step = args.eval_step
self.data, self.optimizer, self.evaluator, self.loss_fn = None, None, None, None
def fit(self, model, dataset):
self.model = model.to(self.device)
self.data = dataset[0]
if self.self_loop:
self.data.add_remaining_self_loops()
self.loss_fn = dataset.get_loss_fn()
self.evaluator = dataset.get_evaluator()
settings = dict(num_workers=self.num_workers, persistent_workers=True, pin_memory=True)
if torch.__version__.split("+")[0] < "1.7.1":
settings.pop("persistent_workers")
self.train_loader = ClusteredLoader(dataset=dataset, n_cluster=self.n_cluster, method="random", **settings)
if self.val_n_cluster > 0:
self.test_loader = ClusteredLoader(
dataset=dataset,
n_cluster=self.val_n_cluster,
method="random",
num_workers=self.num_workers,
persistent_workers=True,
shuffle=False,
)
self.optimizer = torch.optim.Adam(self.model.parameters(), lr=self.lr, weight_decay=self.weight_decay)
best_model = self.train()
self.model = best_model
metric, loss = self._test_step()
return dict(Acc=metric["test"], ValAcc=metric["val"])
def _train_step(self):
self.model.train()
self.data.train()
self.train_loader.shuffle()
for batch in self.train_loader:
self.optimizer.zero_grad()
batch = batch.to(self.device)
loss_n = self.model.node_classification_loss(batch)
loss_n.backward()
self.optimizer.step()
def _test_step(self, split="val"):
self.model.eval()
self.data.eval()
if self.val_n_cluster > 0:
return self.batch_eval(split)
self.model = self.model.to("cpu")
data = self.data
self.model = self.model.cpu()
masks = {"train": self.data.train_mask, "val": self.data.val_mask, "test": self.data.test_mask}
with torch.no_grad():
logits = self.model.predict(data)
loss = {key: self.loss_fn(logits[val], self.data.y[val]) for key, val in masks.items()}
metric = {key: self.evaluator(logits[val], self.data.y[val]) for key, val in masks.items()}
return metric, loss
def batch_eval(self, split="val"):
preds = {"train": [], "val": [], "test": []}
ys = {"train": [], "val": [], "test": []}
with torch.no_grad():
for batch in self.test_loader:
batch = batch.to(self.device)
pred = self.model.predict(batch)
for item in ["train", "val", "test"]:
preds[item].append(pred[batch[f"{item}_mask"]])
ys[item].append(batch.y[batch[f"{item}_mask"]])
metric = dict()
loss = dict()
for key in preds.keys():
pred = torch.cat(preds[key], dim=0)
y = torch.cat(ys[key], dim=0)
_metric = self.evaluator(pred, y)
_loss = self.loss_fn(pred, y)
metric[key] = _metric
loss[key] = _loss
return metric, loss
|
# Definition for singly-linked list.
# class ListNode:
# def __init__(self, val=0, next=None):
# self.val = val
# self.next = next
class Solution:
def removeElements(self, head: ListNode, val: int) -> ListNode:
prev = dummy = ListNode(-1, head)
cur = head
while cur:
if cur.val == val:
prev.next = cur.next
else:
prev = cur
cur = cur.next
return dummy.next
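# Usage sketch, assuming the commented ListNode definition above:
#   head = ListNode(1, ListNode(2, ListNode(6, ListNode(3, ListNode(6)))))
#   head = Solution().removeElements(head, 6)  # list becomes 1 -> 2 -> 3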
|
import pandas as pd
import numpy as np
import joblib
from sklearn.model_selection import train_test_split
from sklearn.ensemble import RandomForestRegressor
from sklearn.preprocessing import StandardScaler
from sklearn.model_selection import RandomizedSearchCV
from sklearn.metrics import mean_squared_error, r2_score, mean_absolute_error
data = pd.read_csv('Final_Data_5203.csv')
data = data.drop(['imdb_id', 'titleType', 'primaryTitle', 'id', 'original_language'], axis=1)
y = data['weighted_rating'].values
X = data.drop(['weighted_rating', 'revenue', 'Variance'], axis = 1)
X = StandardScaler().fit_transform(X)
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)
#Weighted Rating
# For Weighted Rating
parameter_space = {
'n_estimators': [10,50,100],
'criterion': ['squared_error', 'absolute_error', 'poisson'],
'max_depth': [10,20,30,40,50],
}
clf = RandomizedSearchCV(RandomForestRegressor(), parameter_space, n_iter=15, cv = 3, scoring = "explained_variance", verbose = True)
clf.fit(X_train,y_train)
clf.best_params_
train_pred = clf.predict(X_train) # Train predict
test_pred = clf.predict(X_test) # Test predict
print("For Train:")
print("Mean Square Error is",mean_squared_error(y_train,train_pred)) # Calculating MSE
print("Root Mean Square Error is",mean_squared_error(y_train,train_pred)**(1/2)) # Calculating RMSE
print("Mean Absolute Error is",mean_absolute_error(y_train,train_pred)) # Calculating MAE
print("r2 Score is", r2_score(y_train,train_pred)) # Calculating r2 Score
print("For Test:")
print("Mean Square Error is",mean_squared_error(y_test,test_pred)) # Calculating MSE
print("Root Mean Square Error is",mean_squared_error(y_test,test_pred)**(1/2)) # Calculating RMSE
print("Mean Absolute Error is",mean_absolute_error(y_test,test_pred)) # Calculating MAE
print("r2 Score is", r2_score(y_test,test_pred)) # Calculating r2 Score
#Revenue
# For Revenue
y2 = np.log(data['revenue']).values
X_train, X_test, y_train, y_test = train_test_split(X, y2, test_size=0.2, random_state=42)
clf2 = RandomizedSearchCV(RandomForestRegressor(), parameter_space, n_iter=15, cv = 3, scoring = "explained_variance", verbose = True)
clf2.fit(X_train,y_train)
clf2.best_params_
train_pred2 = clf2.predict(X_train)
test_pred2 = clf2.predict(X_test)
print("For Train:")
print("Mean Square Error is",mean_squared_error(y_train,train_pred2)) # Calculating MSE
print("Root Mean Square Error is",mean_squared_error(y_train,train_pred2)**(1/2)) # Calculating RMSE
print("Mean Absolute Error is",mean_absolute_error(y_train,train_pred2)) # Calculating MAE
print("r2 Score is", r2_score(y_train,train_pred2)) # Calculating r2 Score
print("For Test:")
print("Mean Square Error is",mean_squared_error(y_test,test_pred2)) # Calculating MSE
print("Root Mean Square Error is",mean_squared_error(y_test,test_pred2)**(1/2)) # Calculating RMSE
print("Mean Absolute Error is",mean_absolute_error(y_test,test_pred2)) # Calculating MAE
print("r2 Score is", r2_score(y_test,test_pred2)) # Calculating r2 Score
# save the model to disk
filename = 'random_forest_model.sav'
#joblib.dump(clf2, filename) |
from setuptools import setup
import os
import re
if os.environ.get("CI_COMMIT_TAG"):
version = os.environ["CI_COMMIT_TAG"]
if version.startswith("v"):
version = version[1:]
if not re.search(r"^\d+\.\d+\.\d+$", version):
raise AttributeError(
"given CI_COMMIT_TAG {} incorrect format. It must be vX.Y.Z or X.Y.Z format".format(
os.environ["CI_COMMIT_TAG"]
)
)
elif os.environ.get("CI_JOB_ID"):
version = os.environ["CI_JOB_ID"]
else:
version = None
setup(
zip_safe=True,
name="desafe",
version=version,
author="pjon",
url="https://github.com/joncastro/SafeInCloud",
py_modules=["desafe"],
description="An utility to decrypt Safe in Cloud password files",
use_2to3=True,
license="LICENSE",
install_requires=["pycrypto", "xmltodict", "passlib", "docopt"],
entry_points={"console_scripts": ["desafe = desafe:main"]},
classifiers=[
"Development Status :: 3 - Alpha",
"Environment :: Console",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators",
"Intended Audience :: End Users/Desktop",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Topic :: Security :: Cryptography",
"Topic :: Utilities",
"License :: OSI Approved :: Apache Software License",
"Operating System :: POSIX :: Linux",
"Operating System :: MacOS",
],
)
|
from .lrs_layer import TimeAugmentation
from .rlrs_layer import RecurrentLRS
import numpy as np
import tensorflow as tf
tf.logging.set_verbosity(tf.logging.ERROR)
import keras
from keras import backend as K
from keras.layers import Dense, BatchNormalization, Reshape
def init_rlrs_model(input_shape, num_levels, num_hidden, num_classes, decoupled=False):
model = keras.Sequential()
model.add(keras.layers.InputLayer(input_shape=input_shape))
model.add(TimeAugmentation())
model.add(RecurrentLRS(num_hidden, num_levels, input_shape[-1] + 1, decoupled=decoupled))
model.add(BatchNormalization(axis=-1))
model.add(Dense(num_classes, activation='softmax'))
if not decoupled:
model.name = 'RLRS_M{}_H{}'.format(num_levels, num_hidden)
else:
model.name = 'RLRS2_M{}_H{}'.format(num_levels, num_hidden)
return model |
#===============================================================================
# Copyright 2020-2021 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#===============================================================================
# daal4py KNN regression scikit-learn-compatible classes
from ._base import NeighborsBase, KNeighborsMixin
from sklearn.base import RegressorMixin
from .._utils import sklearn_check_version
if sklearn_check_version("0.22"):
from sklearn.neighbors._regression import KNeighborsRegressor as \
BaseKNeighborsRegressor
from sklearn.neighbors._base import _check_weights
from sklearn.utils.validation import _deprecate_positional_args
else:
from sklearn.neighbors.regression import KNeighborsRegressor as \
BaseKNeighborsRegressor
from sklearn.neighbors.base import _check_weights
def _deprecate_positional_args(f):
return f
if sklearn_check_version("0.24"):
class KNeighborsRegressor_(KNeighborsMixin, RegressorMixin, NeighborsBase):
@_deprecate_positional_args
def __init__(self, n_neighbors=5, *, weights='uniform',
algorithm='auto', leaf_size=30,
p=2, metric='minkowski', metric_params=None, n_jobs=None,
**kwargs):
super().__init__(
n_neighbors=n_neighbors,
algorithm=algorithm,
leaf_size=leaf_size, metric=metric, p=p,
metric_params=metric_params, n_jobs=n_jobs, **kwargs)
else:
if sklearn_check_version("0.22"):
from sklearn.neighbors._base import SupervisedFloatMixin as \
BaseSupervisedFloatMixin
else:
from sklearn.neighbors.base import SupervisedFloatMixin as \
BaseSupervisedFloatMixin
class KNeighborsRegressor_(NeighborsBase, KNeighborsMixin,
BaseSupervisedFloatMixin, RegressorMixin):
@_deprecate_positional_args
def __init__(self, n_neighbors=5, *, weights='uniform',
algorithm='auto', leaf_size=30,
p=2, metric='minkowski', metric_params=None, n_jobs=None,
**kwargs):
super().__init__(
n_neighbors=n_neighbors,
algorithm=algorithm,
leaf_size=leaf_size, metric=metric, p=p,
metric_params=metric_params, n_jobs=n_jobs, **kwargs)
class KNeighborsRegressor(KNeighborsRegressor_):
@_deprecate_positional_args
def __init__(self, n_neighbors=5, *, weights='uniform',
algorithm='auto', leaf_size=30,
p=2, metric='minkowski', metric_params=None, n_jobs=None,
**kwargs):
super().__init__(
n_neighbors=n_neighbors,
algorithm=algorithm,
leaf_size=leaf_size, metric=metric, p=p,
metric_params=metric_params, n_jobs=n_jobs, **kwargs)
self.weights = \
weights if sklearn_check_version("1.0") else _check_weights(weights)
def _more_tags(self):
return BaseKNeighborsRegressor._more_tags(self)
def fit(self, X, y):
return NeighborsBase._fit(self, X, y)
def predict(self, X):
return BaseKNeighborsRegressor.predict(self, X)
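
# Hedged usage sketch (not part of the original sources): exercises the
# accelerated regressor through the stock scikit-learn interface on toy data.
# The values below are illustrative only.
if __name__ == '__main__':
    import numpy as np
    X_demo = np.array([[0.0], [1.0], [2.0], [3.0]])
    y_demo = np.array([0.0, 1.0, 2.0, 3.0])
    reg = KNeighborsRegressor(n_neighbors=2)
    reg.fit(X_demo, y_demo)
    print(reg.predict(np.array([[1.5]])))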
|
'''
Functions for performing Otsu's algorithm.
3 variations:
1. the standard way Otsu's algorithm on the whole image
2. segmenting the image into blocks, and performing Otsu's algorithm on each block
3. use sliding window on the image, and use Otsu's algorithm within the window
'''
import cv2
import numpy as np
def threshold(image: np.ndarray) -> np.ndarray:
'''
Otsu algorithm, done the standard way. (global threshold)
The mask that is obtained can be used to binarize the image, by doing:
np.where(image > threshold_mask, 255, 0)
:params:
- image (np.ndarray): the image to find the threshold of
:return:
- mask of threshold (np.ndarray)
'''
histogram = cv2.calcHist(
images=[image],
channels=[0],
mask=None,
histSize=[256],
ranges=[0, 256],
)
num_pixels = image.size
gray_level_probabilities = [
bin_count[0] / num_pixels
for bin_count in histogram
]
best_threshold = -1
min_intraclass_variance = float('inf')
for threshold in range(len(histogram)): # 0 to 255
lower_group_probability = sum(gray_level_probabilities[:threshold+1])
upper_group_probability = sum(gray_level_probabilities[threshold+1:])
if lower_group_probability == 0 or upper_group_probability == 0:
continue # otherwise, divide by 0
lower_group_mean = sum([
gray_level * probability # gray level == pixel value
for gray_level, probability in enumerate(gray_level_probabilities[:threshold+1])
]) / lower_group_probability
upper_group_mean = sum([
gray_level * probability
for gray_level, probability in enumerate(gray_level_probabilities[threshold+1:])
]) / upper_group_probability
lower_group_variance = sum([
((gray_level - lower_group_mean)**2) * probability
for gray_level, probability in enumerate(gray_level_probabilities[:threshold+1])
]) / lower_group_probability
upper_group_variance = sum([
((gray_level - upper_group_mean)**2) * probability
for gray_level, probability in enumerate(gray_level_probabilities[threshold+1:])
]) / upper_group_probability
intraclass_variance = lower_group_probability * lower_group_variance + \
upper_group_probability * upper_group_variance
if intraclass_variance < min_intraclass_variance:
min_intraclass_variance = intraclass_variance
best_threshold = threshold
return np.full_like(image, fill_value=best_threshold)
def segmented_threshold(
image: np.ndarray,
num_vertical_segments: int,
num_horizontal_segments: int,
) -> np.ndarray:
'''
Otsu threshold, done on segments of the image.
Total number of segments = num_vertical_segments * num_horizontal_segments
The mask that is obtained can be used to binarize the image, by doing:
np.where(image > threshold_mask, 255, 0)
:params:
- image (np.ndarray): the image to find the threshold of
- num_vertical_segments (int): number of times to segment vertically; at least 1
- num_horizontal_segments (int): number of times to segment horizontally; at least 1
:return:
- mask of threshold (np.ndarray)
'''
if num_vertical_segments < 1:
raise ValueError('There must be at least 1 segment in vertical direction')
if num_horizontal_segments < 1:
raise ValueError('There must be at least 1 segment in horizontal direction')
image_height, image_length = image.shape
    # add 1 so the integer division never undershoots and the whole image is covered
segment_height = image_height // num_vertical_segments + 1
segment_length = image_length // num_horizontal_segments + 1
threshold_mask = np.zeros_like(image)
for vertical_offset in range(0, image_height, segment_height):
for horizontal_offset in range(0, image_length, segment_length):
image_segment = image[
vertical_offset: vertical_offset + segment_height,
horizontal_offset: horizontal_offset + segment_length
]
# standard Otsu's algorithm
segmented_threshold_mask = threshold(image_segment)
threshold_mask[
vertical_offset: vertical_offset + segment_height,
horizontal_offset: horizontal_offset + segment_length
] = segmented_threshold_mask
return threshold_mask
def sliding_window_threshold(
image: np.ndarray,
window_height: int,
window_length: int,
vertical_stride: int,
horizontal_stride: int,
) -> np.ndarray:
'''
Otsu threshold, done by using a sliding window over the image.
The mask that is obtained can be used to binarize the image, by doing:
np.where(image > threshold_mask, 255, 0)
:params:
- image (np.ndarray): the image to find the threshold of
- window_height (int): height of the window i.e. vertical width
- window_length (int): length of the window i.e. horizontal width
- vertical_stride (int): number of pixel-wise steps to take in the vertical direction
- horizontal_stride (int): number of pixel-wise steps to take in the horizontal direction
:return:
- mask of threshold (np.ndarray)
'''
if window_height < 1:
raise ValueError('Window must have a height of at least 1 pixel')
if window_length < 1:
raise ValueError('Window must have a length of at least 1 pixel')
if vertical_stride < 1:
raise ValueError('Stride in the vertical direction must be of at least 1 pixel')
if horizontal_stride < 1:
raise ValueError('Stride in the horizontal direction must be of at least 1 pixel')
image_height, image_length = image.shape
threshold_mask = np.zeros_like(image, dtype=np.float64)
    num_repetitions = np.zeros_like(image, dtype=np.float64)  # float avoids uint8 overflow when many windows overlap a pixel
for vertical_offset in range(0, image_height - window_height + vertical_stride, vertical_stride):
for horizontal_offset in range(0, image_length - window_length + horizontal_stride, horizontal_stride):
window = image[
vertical_offset: vertical_offset + window_height,
horizontal_offset: horizontal_offset + window_length
]
# standard Otsu's algorithm
window_threshold_mask = threshold(window)
threshold_mask[
vertical_offset: vertical_offset + window_height,
horizontal_offset: horizontal_offset + window_length
] += np.mean(window_threshold_mask)
num_repetitions[
vertical_offset: vertical_offset + window_height,
horizontal_offset: horizontal_offset + window_length
] += 1
return threshold_mask / num_repetitions # get average
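
# --- Hedged usage sketch (not part of the original module) ---
# Assumes a grayscale image file named 'sample.png'; the filename, segment
# counts, window size and strides are illustrative choices only.
if __name__ == '__main__':
    sample = cv2.imread('sample.png', cv2.IMREAD_GRAYSCALE)
    global_mask = threshold(sample)
    block_mask = segmented_threshold(sample, num_vertical_segments=4, num_horizontal_segments=4)
    window_mask = sliding_window_threshold(sample, window_height=64, window_length=64,
                                           vertical_stride=32, horizontal_stride=32)
    # binarize with any of the masks, as described in the docstrings above
    binary = np.where(sample > global_mask, 255, 0).astype(np.uint8)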
|
from sklearn import metrics
from sklearn.metrics import roc_auc_score, roc_curve, auc
from matplotlib import pyplot as plt
import preprocess
import utils
class Evaluator:
def accuracy(self, Y_test, preds):
        acc = metrics.accuracy_score(Y_test, preds)
        print(f"Classification accuracy is {acc}")
        return acc
def classification_report(self, Y_test, preds):
cr = metrics.classification_report(Y_test, preds)
print(f"Classification report:\n {cr}")
return cr
def confusion_matrix(self, Y_test, preds):
cm = metrics.confusion_matrix(Y_test, preds)
# print(f"Confusion matrix:\n {cm}")
precision = metrics.precision_score(Y_test, preds, average="weighted")
recall = metrics.recall_score(Y_test, preds, average="weighted")
f1_score = metrics.f1_score(Y_test, preds, average="weighted")
print(
f"Precision: {round(precision, 3)}, "
f"recall: {round(recall, 3)}, "
f"F1-score: {round(f1_score, 3)}"
)
return cm
def feature_importance(self, model, classes):
"""Return feature importance - value below zero means that the feature is not important"""
print(model.feature_importances_)
imp = model.coef_[0]
imp, names = zip(*sorted(zip(imp, classes)))
plt.rcParams["figure.figsize"] = (15, 10)
plt.barh(range(len(names)), imp, align="center")
plt.yticks(range(len(names)), names)
plt.show()
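
# Hedged usage sketch (illustrative only, not part of the original module):
# exercises the metric helpers on dummy labels; real labels would come from
# the project's preprocess/utils pipeline.
if __name__ == "__main__":
    _y_true = [0, 1, 1, 0, 1, 0]
    _y_pred = [0, 1, 0, 0, 1, 1]
    evaluator = Evaluator()
    evaluator.accuracy(_y_true, _y_pred)
    evaluator.classification_report(_y_true, _y_pred)
    evaluator.confusion_matrix(_y_true, _y_pred)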
|
# -*- coding: utf-8 -*-
from benedict.core import keypaths as _keypaths
import unittest
class keypaths_test_case(unittest.TestCase):
def test_keypaths(self):
i = {
'a': 1,
'b': {
'c': {
'x': 2,
'y': 3,
},
'd': {
'x': 4,
'y': 5,
},
},
}
o = _keypaths(i)
r = [
'a',
'b',
'b.c',
'b.c.x',
'b.c.y',
'b.d',
'b.d.x',
'b.d.y',
]
self.assertEqual(o, r)
def test_keypaths_with_custom_separator(self):
i = {
'a': 1,
'b': {
'c': {
'x': 2,
'y': 3,
},
'd': {
'x': 4,
'y': 5,
},
},
}
o = _keypaths(i, separator='/')
r = [
'a',
'b',
'b/c',
'b/c/x',
'b/c/y',
'b/d',
'b/d/x',
'b/d/y',
]
self.assertEqual(o, r)
def test_keypaths_with_invalid_separator(self):
i = {
'a': 1,
'b': {
'c': {
'x': 2,
'y': 3,
},
'd': {
'x': 4,
'y': 5,
},
},
}
with self.assertRaises(ValueError):
o = _keypaths(i, separator=True)
def test_keypaths_without_separator(self):
i = {
'a': 1,
'b': {
'c': {
'x': 2,
'y': 3,
},
'd': {
'x': 4,
'y': 5,
},
},
}
# with self.assertRaises(ValueError):
# o = _keypaths(i, separator=None)
o = _keypaths(i)
r = [
'a',
'b',
'b.c',
'b.c.x',
'b.c.y',
'b.d',
'b.d.x',
'b.d.y',
]
self.assertEqual(o, r)
def test_keypaths_with_non_string_keys(self):
i = {
True: {
True: 1,
},
False: {
False: 1,
},
None: {
None: 1,
},
}
o = _keypaths(i)
r = [
'False',
'False.False',
'None',
'None.None',
'True',
'True.True',
]
self.assertEqual(o, r)
def test_keypaths_with_lists_and_indexes_included(self):
i = {
'a': 1,
'b': {
'c': {
'x': 2,
'y': 3,
},
'd': {
'x': 4,
'y': 5,
},
'e': [
{
'x': 1,
'y': -1,
'z': [1, 2, 3],
},
{
'x': 2,
'y': -2,
'z': [2, 3, 4],
},
{
'x': 3,
'y': -3,
'z': [3, 4, 5],
},
]
},
}
o = _keypaths(i, indexes=True)
r = [
'a',
'b',
'b.c',
'b.c.x',
'b.c.y',
'b.d',
'b.d.x',
'b.d.y',
'b.e',
'b.e[0]',
'b.e[0].x',
'b.e[0].y',
'b.e[0].z',
'b.e[0].z[0]',
'b.e[0].z[1]',
'b.e[0].z[2]',
'b.e[1]',
'b.e[1].x',
'b.e[1].y',
'b.e[1].z',
'b.e[1].z[0]',
'b.e[1].z[1]',
'b.e[1].z[2]',
'b.e[2]',
'b.e[2].x',
'b.e[2].y',
'b.e[2].z',
'b.e[2].z[0]',
'b.e[2].z[1]',
'b.e[2].z[2]',
]
self.assertEqual(o, r)
def test_keypaths_with_lists_and_indexes_not_included(self):
i = {
'a': 1,
'b': {
'c': {
'x': 2,
'y': 3,
},
'd': {
'x': 4,
'y': 5,
},
'e': [
{
'x': 1,
'y': -1,
'z': [1, 2, 3],
},
{
'x': 2,
'y': -2,
'z': [2, 3, 4],
},
{
'x': 3,
'y': -3,
'z': [3, 4, 5],
},
]
},
}
o = _keypaths(i, indexes=False)
r = [
'a',
'b',
'b.c',
'b.c.x',
'b.c.y',
'b.d',
'b.d.x',
'b.d.y',
'b.e',
]
self.assertEqual(o, r)
def test_keypaths_with_nested_lists_and_indexes_included(self):
i = {
'a': {
'b': [
[1, 2],
[3, 4, 5],
[
{
'x': 1,
'y': -1,
},
] ,
],
},
}
o = _keypaths(i, indexes=True)
r = [
'a',
'a.b',
'a.b[0]',
'a.b[0][0]',
'a.b[0][1]',
'a.b[1]',
'a.b[1][0]',
'a.b[1][1]',
'a.b[1][2]',
'a.b[2]',
'a.b[2][0]',
'a.b[2][0].x',
'a.b[2][0].y',
]
self.assertEqual(o, r)
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import sys
import os
pp = os.path.abspath('..')
p = os.path.abspath('.')
sys.path.insert(0, p)
sys.path.insert(0, pp)
from DataGenerator import DistributionDataGenerator, Workload
from SyntheticDataGenerators import SyntheticStreamMaker
from QueryGenerator import *
import SketchExperiment
import logging
import Oracle
import random
import numpy as np
import scipy as sp
import copy
import types
import sketches.Sketches as Sketches
logging.getLogger().setLevel(logging.WARNING)
if __name__ == '__main__':
# query for quantiles 5, 10, ..., 95
qg1 = ConfigQueryGenerator(
queries='quantile',
parameters=np.arange(0.05, 1, 0.05),
indices=DataGeneratorSeq(length=10),
)
qg2 = ConfigQueryGenerator(
queries='cdf',
parameters=np.arange(0.05, 1, 0.05),
indices=DataGeneratorSeq(length=10),
)
qg1.name = 'quantile'
qg2.name = 'cdf'
qg = ChainQueryGenerators(generators=[qg1, qg2])
dg1 = DistributionDataGenerator(length=int(1e5), distribution=sp.stats.beta(1,1), name='Uniform')
w1 = Workload(data_generator=dg1, query_generator=qg)
dg2 = DistributionDataGenerator(length=int(1e5), distribution=sp.stats.norm(0.5,0.2), name='Normal')
w2 = Workload(data_generator=dg2, query_generator=qg)
dg3 = SyntheticStreamMaker(n=1e5, order='zoomin')
w3 = Workload(data_generator=dg3, query_generator=qg1)
oracle = Oracle.QuantileOracle(save_dir = "/tmp/answers", as_json=True, read_cache=True)
qg.connectDataGenerator(dg2)
opts = SketchExperiment.ExperimentOptions(
nparallel=8,
ndatasets=1,
nrepetitions=8,
save_answers=True,
)
e = SketchExperiment.SketchMetaExperiment(workloads=[w1, w2, w3],
oracle=oracle,
options=opts,
result_file="tmp_quantile_exp_results.csv",
)
#oracle2 = copy.deepcopy(oracle)
# SketchConfig(sketch=Sketches.KLLSketch, size=range(100,1000,100))
KLL_params = [{'size':s} for s in range(100,1000,200)]
REQ_params = [{'size':s} for s in range(10,100,20)]
Tdigest_params = [{'delta':1/s} for s in range(20,200,40)]
sketches = {'KLL': (Sketches.KLLSketch, KLL_params),
'REQ': (Sketches.REQSketch, REQ_params),
'Tdigest': (Sketches.TDigestSketch, Tdigest_params),
#'Oracle': Sketches.SketchFactory(Sketches.OracleSketch, oracle2),
}
e.addSketches(sketches)
# e.prepare()
e.execute() |
# Generated by Django 3.0.4 on 2021-04-07 10:16
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('blogs', '0003_auto_20210407_0913'),
]
operations = [
migrations.AlterModelOptions(
name='postcomment',
options={},
),
migrations.AlterModelTable(
name='post',
table='post',
),
migrations.AlterModelTable(
name='postcomment',
table='post_comment',
),
migrations.AlterModelTable(
name='postimage',
table='post_image',
),
]
|
#mapdatain_5.py
#to read in ALL data
#TRYING TO FIGURE OUT WHERE PHONY FIGURES ARE COMING FROM
"""
These 3 imports needed for the full program
# from classes import *
# from code import *
"""
import math
import matplotlib.pyplot as plt
from scipy import stats
import numpy as np
import random
import time
# import data
import data as pass_data
from orangeclass_V_3efix import * #import the FUNCTIONS from the CLASSES
# from boxesmap3_1 import *
def filein (fname):
numlines=0
xin=[]
f=open(fname,'r')
for line in f:
#print (line, end='')
xin.append(line)
numlines=numlines+1
f.close()
return xin,numlines
def fileout (filename,filedata):
f2=open(filename,'w')
f2.write(filedata)
f2.close()
def getxy (fname):
data,numlines=filein(fname)
# dataline=['0' for i in range(numlines)]
x=['0' for i in range(numlines)]
y=['0' for i in range(numlines)]
for i in range(numlines):
xline=data[i]
xline2=xline.split('\t')
xline2[-1]=xline2[-1].replace('\n','')
#print ('\nxline2',xline2)
x[i]=eval(xline2[0])
y[i]=eval(xline2[1])
return x,y,numlines
def getx (fname):
data,numlines=filein(fname)
#print ('\ndata\n',data,'\nlines',numlines)
x=['0' for i in range(numlines)]
for i in range(numlines):
x[i]=data[i].replace('\n','')
x[i]=eval(x[i])
return x,numlines
def getxn(fname):
data,numlines=filein(fname)
#print (numlines)
#print (data)
dataline=['0' for i in range(numlines)]
for i in range(numlines):
x=data[i]
y=x.split('\t')
y[-1]=y[-1].replace('\n','')
dataline[i]=y
#print ('\n\nascii-input',dataline)
xdata=dataline[:]
for i in range (numlines):
inline=len(dataline[i])
for j in range (inline):
if xdata[i][j] != '':
xdata[i][j]=eval(xdata[i][j])
else:
xdata[i][j]=None
#print ('dataline',dataline)
#print ('xdata',xdata)
return xdata, numlines
#MODIFYING THIS FUNCTION TO GET THE COLOR
def getxnsecondstring(fname): #get n inputs from each line
    #first column=text, next columns all numbers
#here: variable name, x, y, height, width
data,numlines=filein(fname)
#print (numlines)
#print (data)
dataline=['0' for i in range(numlines)]
for i in range(numlines):
x=data[i]
y=x.split('\t')
y[-1]=y[-1].replace('\n','')
dataline[i]=y
#print ('\n\nascii-input',dataline)
xdata=dataline[:]
for i in range (numlines):
inline=len(dataline[i])
for j in range (2,inline):
if xdata[i][j] != '':
xdata[i][j]=eval(xdata[i][j])
else:
                xdata[i][j]=None
#print ('dataline',dataline)
#print ('xdata',xdata)
return xdata, numlines
#-------------------------------------------------------------------------
def lslin(invars,invar):
print('\ncurrent value of ',invars,' is= ',invar)
outvars=input('\nchange to (def=no change)')
if (outvars==''):
return invar
else:
outvar=eval(outvars)
return outvar
#END OF DEFINED FUNCTIONS---------------
#START DATA INPUT
#give it just a number n, will find files cn.txt, bn.txt, mn.txt, icn.txt, btext and bxy
fast=input('\n ONLY NUMBER n and I will find cn.txt, etc. (#/a, Def=a)')
if fast.isdigit():
fnamec='c'+fast+'.txt'
fnameb='b'+fast+'.txt'
fnamem='m'+fast+'.txt'
fnameic='ic'+fast+'.txt'
fnamebtextbxy='btextbxy'+fast+'.txt'
else:
fname=input('\nfilename for array c [I will add .txt]= ')
fnamec=fname+'.txt'
fname=input('\nfilename for array b [I will add .txt]= ')
fnameb=fname+'.txt'
fname=input('\nfilename for array m [I will add .txt]= ')
fnamem=fname+'.txt'
fname=input('\nfilename for array IC [I will add .txt]= ')
fnameic=fname+'.txt'
fname=input('\nfilename for bxy, btext [I will add .txt]= ')
fnamebtextbxy=fname+'.txt'
#get the files
c,numc=getxn(fnamec)
b,numb=getx(fnameb)
m,numm=getx(fnamem)
ic,numic=getx(fnameic)
btextbxydata,numvar=getxnsecondstring(fnamebtextbxy)
#check for consistency
if (numc**4!=numb*numm*numic*numvar):
print ("\nFATAL WARNING - input issue - numbers c,b,m,ic,bxy,btext don't match")
quit()
#PART ONE make original m, b, c, ic arrays (NOT matrices) and print
ma=np.array(m)
ba=np.array(b)
ca=np.array(c)
ica=np.array(ic)
print ('\nca= ',ca)
print ('\nba= ',ba)
print ('\nma= ',ma)
print ('\nica= ',ica)
change=input('\nWant to CHANGE parameters (y/n), def=n')
if (change=='y' or change=='Y'):
c=lslin('c',c)
b=lslin('b',b)
m=lslin('m',m)
ic=lslin('ic',ic)
ma=np.array(m)
ba=np.array(b)
ca=np.array(c)
ica=np.array(ic)
    print ('\n\nNEW PARAMETER VALUES ARE:')
print ('\nca= ',ca)
print ('\nba= ',ba)
print ('\nma= ',ma)
print ('\nic= ',ica)
else:
pass
#PART TWO read in the variable names and box locations in the plot btext and bxy(x,y,h,w)
print('\n numvar(from btextbxy)= ',numvar)
print('\n btextbxydata= ',btextbxydata)
#COMPUTE (x,y)=[0,1] needed from PPTX
bx=[btextbxydata[i][2] for i in range (numvar)]
by=[btextbxydata[i][3] for i in range (numvar)]
wx=[btextbxydata[i][5] for i in range (numvar)]
hy=[btextbxydata[i][4] for i in range (numvar)]
#note this scaling has changed 2017-07-06
#SCALE as needed for the plot
xp=[0. for i in range(numvar)]
yp=[0. for i in range(numvar)]
xp2=[0. for i in range(numvar)]
yp2=[0. for i in range(numvar)]
for i in range(numvar):
xp[i]=(bx[i] + 0.5*wx[i])
yp[i]=(by[i] + 0.5*hy[i])
maxx,minx=max(xp),min(xp)
maxy,miny=max(yp),min(yp)
for i in range(numvar):
xp2[i]=0.9*(xp[i]-minx)/(maxx-minx)+0.05
yp2[i]=1-(0.9*(yp[i]-miny)/(maxy-miny)+0.05)
bxy=[[xp2[i],yp2[i]] for i in range(numvar)]
print ('\nbxy= ',bxy)
#PARAMETERS NEEDED FOR THE NUMERICAL INTEGRATION
dt=.001
numdata=30000
t=[0. for i in range(numdata)]
z=np.array([ica for i in range (numdata)])
#READY TO PASS ON DATA----------------------------------------------------------
#wrap parameters to pass into function
pass_data.numdata=numdata
pass_data.ca=ca
pass_data.dt=dt
pass_data.ma=ma
pass_data.ba=ba
pass_data.numc=numc
pass_data.z=z
pass_data.t=t
pass_data.ica=ica
#NEW DATA ADDED BELOW
pass_data.fnamec=fnamec
pass_data.fnamem=fnamem
pass_data.fnameb=fnameb
pass_data.fnamebtextbxy=fnamebtextbxy
pass_data.dt=dt
#EVEN MORE DATA ADDED BELOW
pass_data.a=ca
pass_data.bxy=bxy
pass_data.btext=str([btextbxydata[i][0] for i in range(numvar)])
pass_data.b=ica
pass_data.labels=[btextbxydata[i][0] for i in range(numvar)]
#ADDING BOX COLORS
pass_data.boxcolor=[btextbxydata[i][1] for i in range(numvar)]
#this stuff to call data_3.py and make PLOTS!
#FIRST LET'S CHECK INPUT THEN LATER DO THE CALL------------
zzz=App()
#FIRST LET'S CHECK INPUT THEN LATER DO THE CALL------------
# zzz.MakeWindow()
# zzz.MakeSample()
"""
THESE NEEDED FOR THE FULL PROGRAM
# App.recalculate(pass_data)
#---------->call App
# callGUI()
"""
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from irekua_database.models import MimeType
from irekua_rest_api.serializers.base import IrekuaModelSerializer
from irekua_rest_api.serializers.base import IrekuaHyperlinkedModelSerializer
class SelectSerializer(IrekuaModelSerializer):
class Meta:
model = MimeType
fields = (
'url',
'mime_type',
'media_info_schema'
)
class DescriptionSerializer(IrekuaModelSerializer):
class Meta:
model = MimeType
fields = (
'mime_type',
'media_info_schema'
)
class ListSerializer(IrekuaModelSerializer):
class Meta:
model = MimeType
fields = (
'url',
'mime_type',
'media_info_schema',
)
class DetailSerializer(IrekuaHyperlinkedModelSerializer):
class Meta:
model = MimeType
fields = (
'url',
'mime_type',
'media_info_schema',
)
class CreateSerializer(IrekuaModelSerializer):
class Meta:
model = MimeType
fields = (
'mime_type',
'media_info_schema',
)
class UpdateSerializer(IrekuaModelSerializer):
class Meta:
model = MimeType
fields = (
'mime_type',
'media_info_schema',
)
|
import cv2
import numpy as np
import matplotlib.pyplot as plt
from MyDIPUtils.config import *
# check_ol1: circle[i] and circle[j] overlaps too much
# check_ol2: circle[i] lies too much outside original image
# check_ol3: compare the overlapping area between different stepsize
def direction_map(edge, clockwise):
# edge_pad = np.pad(edge, 1, mode='constant', constant_values=255)
# pdb.set_trace()
edge_copy = edge.copy()
flag = np.zeros_like(edge_copy)
edge_y, edge_x = np.nonzero(edge_copy == 0)
leftmost_x = np.min(edge_x)
leftmost_y = np.max(edge_y[edge_x == leftmost_x])
original_point = (leftmost_x, leftmost_y)
points = []
neigh = edge_copy[leftmost_y-1:leftmost_y+2, leftmost_x-1:leftmost_x+2]
if not clockwise:
direction = 0 if neigh[1, 2] == 0 else 7/4
if direction == 0:
next_point = (leftmost_x+1, leftmost_y)
else:
next_point = (leftmost_x+1, leftmost_y+1)
else:
direction = 0 if neigh[1, 2] == 0 else 1/4
if direction == 0:
next_point = (leftmost_x+1, leftmost_y)
else:
next_point = (leftmost_x+1, leftmost_y-1)
points.append((direction, original_point))
# flag[leftmost_y, leftmost_x] = 1
while next_point != original_point:
x, y = next_point
neigh = edge_copy[y-1:y+2, x-1:x+2]
flag_neigh = flag[y-1:y+2, x-1:x+2]
this_point = next_point
direction, next_point = find_next_direction(neigh, this_point, flag_neigh)
points.append((direction, this_point))
flag[this_point[1], this_point[0]] = 1
# dir_map[y, x] = direction
return points
def find_next_direction(neigh, this_point, flag_neigh):
x, y = this_point
neigh[flag_neigh==1] = 255
# 4-neighbour is prior to 8-neighbour
if neigh[0, 1] == 0:
return 1/2, (x, y-1)
if neigh[1, 2] == 0:
return 0, (x+1, y)
if neigh[2, 1] == 0:
return 3/2, (x, y+1)
if neigh[1, 0] == 0:
return 1, (x-1, y)
if neigh[0, 2] == 0:
return 1/4, (x+1, y-1)
if neigh[0, 0] == 0:
return 3/4, (x-1, y-1)
if neigh[2, 0] == 0:
return 5/4, (x-1, y+1)
if neigh[2, 2] == 0:
return 7/4, (x+1, y+1)
def tangent_line(points, seq_num, img, draw=True):
k = 0
angle = 0
for i in range(1, max_neigh):
s0 = 0
for j in range(i):
s0 += points[j+seq_num][0]
angle += s0/i
k += np.tan(s0*np.pi/(i))
angle /= (max_neigh-1)
k /= (max_neigh-1)
x0, y0 = points[seq_num][1]
y0 = img.shape[0] - y0
b = y0-k*x0
if draw:
line_point(k, b, img)
return k, angle, b
def points_sequence(points):
# points should be passed directly from cv2.goodFeaturesToTrack
# shape is (N, 1, 2)
sequence = []
points = np.squeeze(points)
leftmost = np.argmin(points[:, 0])
sequence.append(points[leftmost])
for direction in ['lr', 'ur', 'ul', 'll']:
next_point = find_next_anticlock(sequence[-1], points, direction)
        while next_point is not None:
sequence.append(next_point)
next_point = find_next_anticlock(sequence[-1], points, direction)
return sequence
def find_next_anticlock(point, points, direction):
if direction not in ['lr', 'ur', 'ul', 'll']:
raise ValueError('Unknown direction')
x, y = point
if direction == 'lr':
target = points[points[:, 1] > y]
if len(target) == 0:
return None
return target[np.argmin(target[:, 0])]
if direction == 'ur':
target = points[points[:, 0] > x]
if len(target) == 0:
return None
return target[np.argmax(target[:, 1])]
if direction == 'll':
target = points[points[:, 0] < x]
if len(target) == 0:
return None
return target[np.argmin(target[:, 1])]
if direction == 'ul':
target = points[points[:, 1] < y]
if len(target) == 0:
return None
return target[np.argmax(target[:, 0])]
def find_line(point1, point2, img_size, pb):
x1, y1 = point1
x2, y2 = point2
y1, y2 = img_size - y1, img_size - y2
if pb == True:
if np.abs(y1-y2) > l1_norm_threshold:
k = -(x1-x2)/(y1-y2)
b = (y1+y2)/2 - k*(x1+x2)/2
else:
k = None
b = (x1+x2)/2
else:
if np.abs(x1-x2) > l1_norm_threshold:
k = (y1-y2)/(x1-x2)
b = y2 - k*x2
else:
k = None
b = x1
return k, b
def find_para_line(k, point, img_size):
if k != None:
return -k*point[0]+(img_size-point[1])
else:
return point[0]
def line_point(k, b, img):
if k != None:
if b > 0:
point1 = (0, img.shape[0] - int(b))
else:
point1 = (int(-b/k), img.shape[0])
if k*img.shape[0] + b > img.shape[0]:
point2 = (int((img.shape[0] - b)/k), 0)
else:
point2 = (img.shape[0], int(img.shape[0] - (k*img.shape[0] + b)))
else:
point1 = (b, 0)
point2 = (b, img.shape[0])
cv2.line(img, point1, point2, 0)
# return img
def line_gen_1(k, b, img_size):
# img[i, j]: i->y, j->x
if k != None:
return lambda x, y: k*x-(img_size-y)+b
else:
return lambda x, y: x-b
def line_gen_2(k, b, img_size):
# Warning: if k == None, cannot use this function
assert k != None
return lambda x: img_size-(k*x+b)
def distance(x1, y1, x2, y2, norm='l2'):
if norm == 'l1':
return min(np.abs(x1-x2), np.abs(y1-y2))
else:
return np.sqrt((x1-x2)**2+(y1-y2)**2)
def find_center_and_radius(point1, point2, points, img):
# 1. find the side of the arc
k0, b0 = find_line(point1, point2, img.shape[0], pb=False)
line = line_gen_1(k0, b0, img.shape[0])
for point in points:
if not np.any(np.logical_or(point == point1, point == point2)):
flag = np.sign(line(*point))
break
# 2. mask only the interested arc
arc_ma = np.full_like(img, 255, dtype=np.uint8)
arc_y, arc_x = np.nonzero(img != 255)
for i in range(len(arc_x)):
if flag != np.sign(line(arc_x[i], arc_y[i])):
arc_ma[arc_y[i], arc_x[i]] = 0
# 3. further mask only the area between 2 corner point
k, b = find_line(point1, point2, img.shape[0], pb=True)
b1, b2 = find_para_line(k, point1, img.shape[0]), find_para_line(k, point2, img.shape[0])
line1, line2 = line_gen_1(k, b1, img.shape[0]), line_gen_1(k, b2, img.shape[0])
sgn1, sgn2 = np.sign(line1(*point2)), np.sign(line2(*point1))
arc_y, arc_x = np.nonzero(arc_ma != 255)
for i in range(len(arc_x)):
i_sgn1, i_sgn2 = np.sign(line1(arc_x[i], arc_y[i])), np.sign(line2(arc_x[i], arc_y[i]))
if sgn1 != i_sgn1 or sgn2 != i_sgn2:
arc_ma[arc_y[i], arc_x[i]] = 255
# test = draw_points([tuple(point1), tuple(point2)], arc_ma)
# line_point(k, b, test)
# line_point(k0, b0, test)
# imgshow(test)
# plt.figure()
# plt.imshow(arc_ma, cmap='gray')
# 3.find center and radius
arc_y, arc_x = np.nonzero(arc_ma == 0)
len_arc = len(arc_y)
if len_arc < 5:
return None
if k != None:
lower_x = max((point1[0]+point2[0])//2-max_radius, 0)
upper_x = min((point1[0]+point2[0])//2+max_radius, img.shape[0])
line = line_gen_2(k, b, img.shape[0])
dis_var = []
dis = []
for x in range(lower_x, upper_x):
tmp_dis = []
y = line(x)
for i in range(len_arc):
ay, ax = arc_y[i], arc_x[i]
tmp_dis.append(distance(x, y, ax, ay))
dis_var.append(np.var(tmp_dis))
dis.append(np.mean(tmp_dis))
cur = np.argmin(dis_var)
center_x = lower_x + cur
center_y = int(line(center_x))
radius = dis[cur]
else:
lower_y = max((point1[1]+point2[1])//2-max_radius, 0)
upper_y = min((point1[1]+point2[1])//2+max_radius, img.shape[0])
x = b
dis_var = []
dis = []
for y in range(lower_y, upper_y):
tmp_dis = []
for i in range(len_arc):
ay, ax = arc_y[i], arc_x[i]
tmp_dis.append(distance(x, y, ax, ay))
dis_var.append(np.var(tmp_dis))
dis.append(np.mean(tmp_dis))
cur = np.argmin(dis_var)
center_x = b
center_y = lower_y + cur
radius = dis[cur]
return (int(center_x), int(center_y)), int(radius)
def check_close(circles):
flags = [-1 for _ in range(len(circles))]
count = 0
for i in range(len(circles)):
if flags[i] == -1:
color = count
count += 1
else:
color = flags[i]
flags[i] = color
for j in range(len(circles)):
if j != i and distance(*circles[i][0], *circles[j][0]) < distance_threshold:
flags[j] = color
final = []
for i in range(len(flags)):
if flags[i] != -1:
color = flags[i]
flags[i] = -1
tmp_center = [circles[i][0]]
tmp_radius = [circles[i][1]]
for j in range(i+1, len(flags)):
if flags[j] == color:
tmp_center.append(circles[j][0])
tmp_radius.append(circles[j][1])
flags[j] = -1
mean_center = np.mean(tmp_center, axis=0)
mean_radius = np.mean(tmp_radius)
final.append(((int(mean_center[0]), int(mean_center[1])), int(mean_radius)))
return final
def overlapping(circle1, circle2, img_shape):
tmp1 = np.full(img_shape, 255, dtype=np.uint8)
tmp2 = np.full(img_shape, 255, dtype=np.uint8)
cv2.circle(tmp1, circle1[0], circle1[1], 0, cv2.FILLED)
cv2.circle(tmp2, circle2[0], circle2[1], 0, cv2.FILLED)
ol = np.full(img_shape, 255, dtype=np.uint8)
ol[np.logical_and(tmp1==0, tmp2==0)] = 0
area1 = np.sum(tmp1==0)
area2 = np.sum(tmp2==0)
area_ol = np.sum(ol==0)
return area_ol/area1, area_ol/area2
def check_ol1(circles, shape):
final = []
flags = [-1 for _ in range(len(circles))]
for i in range(len(circles)):
if flags[i] == -1:
for j in range(i+1, len(circles)):
if flags[j] == -1:
ol_i, ol_j = overlapping(circles[i], circles[j], shape)
if max(ol_i, ol_j) > overlapping1_threshold:
if max(ol_i, ol_j) == ol_i:
flags[i] = 0
else:
flags[j] = 0
if flags[i] == -1:
final.append(circles[i])
return final
def check_ol2(circles, ori_img):
final = []
for circle in circles:
tmp = np.full(ori_img.shape, 255, dtype=np.uint8)
cv2.circle(tmp, circle[0], circle[1], 0, cv2.FILLED)
ol = np.full(ori_img.shape, 255, dtype=np.uint8)
ol[np.logical_and(tmp==0, ori_img==0)]=0
if np.sum(ol==0)/np.sum(tmp==0) > overlapping2_threshold:
final.append(circle)
return final
def check_ol3(circles, ori_img):
tmp = np.full(ori_img.shape, 255, dtype=np.uint8)
for circle in circles:
cv2.circle(tmp, circle[0], circle[1], 0, cv2.FILLED)
intersec = np.full(ori_img.shape, 255, dtype=np.uint8)
intersec[np.logical_and(tmp==0, ori_img==0)]=0
ol = np.sum(intersec==0)
tmp[intersec==0] = 255
sub = np.sum(tmp==0)
# return ol/sub or ol-sub?
# problem...
return ol-sub
def find_all_circles(check_close_radius=True, check_overlapping_1=True, check_overlapping_2=True, **kwargs):
# points should be binded with edge
if 'points' in kwargs.keys():
points = kwargs['points']
has_point = True
else:
has_point = False
if 'edge' in kwargs.keys():
edge = kwargs['edge']
has_edge = True
else:
has_edge = False
if 'img' in kwargs.keys():
img = kwargs['img']
has_img = True
else:
has_img = False
flag_edge = has_point^has_edge
if flag_edge:
raise KeyError('Points and edge should be passed concurrently.')
if has_img == False and has_point == False:
raise KeyError('Either image or edge should be passed.')
if has_img == False and check_overlapping_2 == True:
        raise KeyError('Checking overlapping 2 requires original image.')
flag_edge = has_edge
if not has_edge:
img = cv2.GaussianBlur(img,(5,5),0)
# TODO: integrate erosion
edge = cv2.Canny(img, 100, 200)
edge = cv2.bitwise_not(edge)
out = edge.copy()
corners = cv2.goodFeaturesToTrack(img,10,0.1,10)
white = np.zeros_like(img)
for i in corners:
x,y = i.ravel()
cv2.circle(white,(x,y),3,255,-1)
points = corners
else:
out = edge.copy()
# sequence = points_sequence(points)
points += [points[0]]
sequence = np.array(points, dtype=np.int32)
circles = []
if len(sequence) < 3:
left, right, up, down = find_4_points(edge)
center = int((right+left)/2), int((down+up)/2)
radius = int(min((right-left)/2, (down-up)/2))
return [(center, radius)]
for i in range(len(sequence) - 1):
point = find_center_and_radius(sequence[i], sequence[i+1], sequence, edge)
if point != None and point[1] < max_radius:
circles.append(point)
if check_overlapping_2 and has_img:
circles = check_ol2(circles, img)
if check_close_radius:
circles = check_close(circles)
if check_overlapping_1:
        # check overlapping 1 means check if one detected circle is mostly inside another detected circle
# TODO: take average or just choose the larger circle?
circles = check_ol1(circles, edge.shape)
for circle in circles:
center, radius = circle
cv2.circle(out,center,3,0,-1)
cv2.circle(out,center,radius,0)
return circles, out
def imgshow(img):
plt.figure()
plt.imshow(img, cmap='gray')
plt.show()
def draw_points(points, img):
copy = img.copy()
for point in points:
cv2.circle(copy, point, 3, 0, cv2.FILLED)
return copy
def find_feature_points(points_sample):
slope = []
for i in range(len(points_sample)-1):
x1, y1 = points_sample[i]
x2, y2 = points_sample[i+1]
theta = np.arctan((y1-y2)/(x1-x2)) if (x1-x2) != 0 else 0.5*np.pi
print(theta, points_sample[i])
slope.append(theta)
interested = []
for i in range(len(slope)-1):
diff = np.abs(slope[i]-slope[i+1])
if diff > np.pi/2:
diff = np.pi - diff
if diff > slope_lower:
print(slope[i]-slope[i+1])
# imgshow(test)
interested.append(points_sample[i+1])
# cv2.circle(erode, points_sample[i], 2, 0, cv2.FILLED)
flags = [-1 for _ in range(len(interested))]
count = 0
for i in range(len(interested)):
if flags[i] == -1:
color = count
count += 1
else:
color = flags[i]
flags[i] = color
for j in range(len(interested)):
if j != i and distance(*interested[i], *interested[j]) < distance_threshold:
flags[j] = color
final = []
for i in range(len(flags)):
if flags[i] != -1:
color = flags[i]
flags[i] = -1
tmp = [interested[i]]
for j in range(i+1, len(flags)):
if flags[j] == color:
tmp.append(interested[j])
flags[j] = -1
mean = np.mean(tmp, axis=0)
final.append((int(mean[0]), int(mean[1])))
return final
def find_feature_points2(points_sample):
slope = []
slope_rev = []
for i in range(len(points_sample)-1):
x1, y1 = points_sample[i]
x2, y2 = points_sample[i+1]
x3, y3 = points_sample[i-1]
theta = np.arctan((y1-y2)/(x1-x2)) if (x1-x2) != 0 else 0.5*np.pi
theta_rev = np.arctan((y1-y3)/(x1-x3)) if (x1-x3) != 0 else 0.5*np.pi
slope.append(theta)
slope_rev.append(theta_rev)
interested = []
for i in range(len(slope)-1):
diff = np.abs(slope[i]-slope_rev[i])
if diff > slope_lower and diff < slope_upper:
# imgshow(test)
interested.append(points_sample[i])
# cv2.circle(erode, points_sample[i], 2, 0, cv2.FILLED)
flags = [-1 for _ in range(len(interested))]
count = 0
for i in range(len(interested)):
if flags[i] == -1:
color = count
count += 1
else:
color = flags[i]
flags[i] = color
for j in range(len(interested)):
if j != i and distance(*interested[i], *interested[j]) < distance_threshold:
flags[j] = color
final = []
for i in range(len(flags)):
if flags[i] != -1:
color = flags[i]
flags[i] = -1
tmp = [interested[i]]
for j in range(i+1, len(flags)):
if flags[j] == color:
tmp.append(interested[j])
flags[j] = -1
mean = np.mean(tmp, axis=0)
final.append((int(mean[0]), int(mean[1])))
return final
def find_4_points(img):
x, y = np.nonzero(img==0)
left, right = np.min(x), np.max(x)
up, down = np.min(y), np.max(y)
return left, right, up, down
def count_one(img):
left, right, up, down = find_4_points(img)
if np.abs((down-up)-(right-left)) > single_circle_thresh1:
return None
else:
center = int((right+left)/2), int((down+up)/2)
radius = int(min((right-left)/2, (down-up)/2))
test = np.full_like(img, 255)
imgshow(img)
cv2.circle(test, center, radius, 0, cv2.FILLED)
imgshow(test)
ol = np.logical_and(img==0, test==0)
area_ol = np.sum(ol)
test[ol] = 255
area_sub = np.sum(test==0)
area_img = np.sum(img==0)
if area_ol/area_img > single_circle_thresh2 and area_sub/area_img < single_circle_thresh3:
return center, radius
else:
return None
if __name__ == '__main__':
# img = cv2.imread('73.png')
# img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
# find_all_circles(img)
circle1 = ((50, 50), 40)
circle2 = ((80, 80), 40)
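    # Hedged continuation (illustrative, not from the original file): the image
    # path above is commented out, so just exercise overlapping() on the two
    # toy circles defined here; the 150x150 canvas size is an assumption.
    ratio1, ratio2 = overlapping(circle1, circle2, (150, 150))
    print('overlap ratios:', ratio1, ratio2)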
|
from binary_array_to_number import binary_array_to_number
import unittest
class Test(unittest.TestCase):
def test_1(self):
result = binary_array_to_number([0, 0, 0, 1])
self.assertEqual(result, 1)
def test_2(self):
result = binary_array_to_number([0, 0, 1, 0])
self.assertEqual(result, 2)
if __name__ == "__main__":
unittest.main()
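# A minimal reference implementation sketch (assumption only; the real
# binary_array_to_number module is not shown in this file):
# def binary_array_to_number(arr):
#     return int("".join(str(bit) for bit in arr), 2)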
|
from requests import get, post
from subprocess import check_output
from json import dumps
def acessar_dontpad(link_dontpad):
dontpad = get(link_dontpad).text
text = dontpad[dontpad.find('<textarea id="text">' ) + len('<textarea id="text">') : dontpad.find('</textarea>') ]
return text
def listar_processos():
lista_processos = []
comando = "tasklist"
comando_output = check_output(comando, shell=True).decode("latin-1")
comando_output = comando_output.split("\n")
comando_output = comando_output[3:]
for processos in comando_output:
processo = processos.split()
if len(processo) > 0:
if ".exe" in processo[0]:
lista_processos.append({"processo":processo[0], "PID":processo[1]})
lista_processos = dumps(lista_processos)
return lista_processos
def encerrar_processos(nome_exe):
comando = "taskkill /IM " + nome_exe
    check_output(comando, shell=True).decode("latin-1")
return 0
def enviar_processos_dontpad(lista_processos, url):
data = {"text": lista_processos}
post(url, data).text
return 0
def main(args):
url = "http://dontpad.com/testando2015"
lista_de_processos = listar_processos()
enviar_processos_dontpad(lista_de_processos, url)
return 0
if __name__ == '__main__':
import sys
sys.exit(main(sys.argv))
|
import numpy as np
import matplotlib.pyplot as plt
curve_F3 = np.loadtxt('./result/CUM_NN_F3_L100_50000.txt', delimiter = ',', dtype = float)
curve_F9 = np.loadtxt('./result/CUM_NN_F9_L100_50000.txt', delimiter = ',', dtype = float)
curve_TC = np.loadtxt('./result/cum_based_all_L100.txt', delimiter = ',', dtype = float)
#plot the curve of SNR and Total Accuracy
plt.figure()
plt.plot(curve_F3[0,:],curve_F3[1,:],'bo-')
plt.plot(curve_F9[0,:],curve_F9[1,:],'rs-')
plt.plot(curve_TC[0,:],curve_TC[1,:],'y^-')
plt.legend(['FCNN based on 3 features','FCNN based on 9 features','TC Method'],loc=4)
plt.xlabel('SNR/dB')
plt.ylabel('Total Accuracy')
plt.title('Total accuracy and signal-to-noise ratio')
plt.show()
#plot the curve of SNR and Accuracy of each categories
NClass=4
output_label=np.array(['BPSK','QPSK','8PSK','16QAM'])
for i in range(NClass):
plt.subplot(2,2,i+1);
plt.plot(curve_F3[0,:],curve_F3[i+2,:],'bo-')
plt.plot(curve_F9[0,:],curve_F9[i+2,:],'rs-')
plt.legend(['3 features','9 features'],loc=4)
plt.xlabel('SNR/dB')
plt.ylabel(output_label[i]+' accuracy')
plt.title(output_label[i])
plt.subplots_adjust(wspace=0.5, hspace=0.5)
plt.show()
|
# MINLP written by GAMS Convert at 04/21/18 13:55:18
#
# Equation counts
# Total E G L N X C B
# 1853 1105 307 441 0 0 0 0
#
# Variable counts
# x b i s1s s2s sc si
# Total cont binary integer sos1 sos2 scont sint
# 1495 1414 81 0 0 0 0 0
# FX 12 12 0 0 0 0 0 0
#
# Nonzero counts
# Total const NL DLL
# 4918 4009 909 0
#
# Reformulation has removed 1 variable and 1 equation
from pyomo.environ import *
model = m = ConcreteModel()
m.b2 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b3 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b4 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b5 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b6 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b7 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b8 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b9 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b10 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b11 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b12 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b13 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b14 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b15 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b16 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b17 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b18 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b19 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b20 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b21 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b22 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b23 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b24 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b25 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b26 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b27 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b28 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b29 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b30 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b31 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b32 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b33 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b34 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b35 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b36 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b37 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b38 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b39 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b40 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b41 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b42 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b43 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b44 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b45 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b46 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b47 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b48 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b49 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b50 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b51 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b52 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b53 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b54 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b55 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b56 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b57 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b58 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b59 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b60 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b61 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b62 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b63 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b64 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b65 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b66 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b67 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b68 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b69 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b70 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b71 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b72 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b73 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b74 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b75 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b76 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b77 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b78 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b79 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b80 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b81 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b82 = Var(within=Binary,bounds=(0,1),initialize=0)
m.x83 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x84 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x85 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x86 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x87 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x88 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x89 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x90 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x91 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x92 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x93 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x94 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x95 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x96 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x97 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x98 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x99 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x100 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x101 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x102 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x103 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x104 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x105 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x106 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x107 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x108 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x109 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x110 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x111 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x112 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x113 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x114 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x115 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x116 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x117 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x118 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x119 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x120 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x121 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x122 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x123 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x124 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x125 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x126 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x127 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x128 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x129 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x130 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x131 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x132 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x133 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x134 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x135 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x136 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x137 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x138 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x139 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x140 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x141 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x142 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x143 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x144 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x145 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x146 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x147 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x148 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x149 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x150 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x151 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x152 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x153 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x154 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x155 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x156 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x157 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x158 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x159 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x160 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x161 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x162 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x163 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x164 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x165 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x166 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x167 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x168 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x169 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x170 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x171 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x172 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x173 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x174 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x175 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x176 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x177 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x178 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x179 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x180 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x181 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x182 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x183 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x184 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x185 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x186 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x187 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x188 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x189 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x190 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x191 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x192 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x193 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x194 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x195 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x196 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x197 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x198 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x199 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x200 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x201 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x202 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x203 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x204 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x205 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x206 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x207 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x208 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x209 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x210 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x211 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x212 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x213 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x214 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x215 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x216 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x217 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x218 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x219 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x220 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x221 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x222 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x223 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x224 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x225 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x226 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x227 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x228 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x229 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x230 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x231 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x232 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x233 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x234 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x235 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x236 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x237 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x238 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x239 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x240 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x241 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x242 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x243 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x244 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x245 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x246 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x247 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x248 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x249 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x250 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x251 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x252 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x253 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x254 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x255 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x256 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x257 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x258 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x259 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x260 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x261 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x262 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x263 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x264 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x265 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x266 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x267 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x268 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x269 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x270 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x271 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x272 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x273 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x274 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x275 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x276 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x277 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x278 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x279 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x280 = Var(within=Reals,bounds=(None,None),initialize=0)
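# x281-x361: nonnegative variables with upper bound 5; alternate entries in x291-x325 are capped at 2.4 and in x336-x352 at 1.16 (grouping inferred from the bound pattern only).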
m.x281 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x282 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x283 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x284 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x285 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x286 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x287 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x288 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x289 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x290 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x291 = Var(within=Reals,bounds=(0,2.4),initialize=0)
m.x292 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x293 = Var(within=Reals,bounds=(0,2.4),initialize=0)
m.x294 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x295 = Var(within=Reals,bounds=(0,2.4),initialize=0)
m.x296 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x297 = Var(within=Reals,bounds=(0,2.4),initialize=0)
m.x298 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x299 = Var(within=Reals,bounds=(0,2.4),initialize=0)
m.x300 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x301 = Var(within=Reals,bounds=(0,2.4),initialize=0)
m.x302 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x303 = Var(within=Reals,bounds=(0,2.4),initialize=0)
m.x304 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x305 = Var(within=Reals,bounds=(0,2.4),initialize=0)
m.x306 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x307 = Var(within=Reals,bounds=(0,2.4),initialize=0)
m.x308 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x309 = Var(within=Reals,bounds=(0,2.4),initialize=0)
m.x310 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x311 = Var(within=Reals,bounds=(0,2.4),initialize=0)
m.x312 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x313 = Var(within=Reals,bounds=(0,2.4),initialize=0)
m.x314 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x315 = Var(within=Reals,bounds=(0,2.4),initialize=0)
m.x316 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x317 = Var(within=Reals,bounds=(0,2.4),initialize=0)
m.x318 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x319 = Var(within=Reals,bounds=(0,2.4),initialize=0)
m.x320 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x321 = Var(within=Reals,bounds=(0,2.4),initialize=0)
m.x322 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x323 = Var(within=Reals,bounds=(0,2.4),initialize=0)
m.x324 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x325 = Var(within=Reals,bounds=(0,2.4),initialize=0)
m.x326 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x327 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x328 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x329 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x330 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x331 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x332 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x333 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x334 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x335 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x336 = Var(within=Reals,bounds=(0,1.16),initialize=0)
m.x337 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x338 = Var(within=Reals,bounds=(0,1.16),initialize=0)
m.x339 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x340 = Var(within=Reals,bounds=(0,1.16),initialize=0)
m.x341 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x342 = Var(within=Reals,bounds=(0,1.16),initialize=0)
m.x343 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x344 = Var(within=Reals,bounds=(0,1.16),initialize=0)
m.x345 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x346 = Var(within=Reals,bounds=(0,1.16),initialize=0)
m.x347 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x348 = Var(within=Reals,bounds=(0,1.16),initialize=0)
m.x349 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x350 = Var(within=Reals,bounds=(0,1.16),initialize=0)
m.x351 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x352 = Var(within=Reals,bounds=(0,1.16),initialize=0)
m.x353 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x354 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x355 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x356 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x357 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x358 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x359 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x360 = Var(within=Reals,bounds=(0,5),initialize=0)
m.x361 = Var(within=Reals,bounds=(0,5),initialize=0)
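# x362-x415: a variable fixed at 3.5, then x363-x379 in [2, 5]; a variable fixed at 4.1, then x381-x397 in [2.5, 5]; a variable fixed at 4.0, then x399-x415 in [2, 6].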
m.x362 = Var(within=Reals,bounds=(3.5,3.5),initialize=3.5)
m.x363 = Var(within=Reals,bounds=(2,5),initialize=2)
m.x364 = Var(within=Reals,bounds=(2,5),initialize=2)
m.x365 = Var(within=Reals,bounds=(2,5),initialize=2)
m.x366 = Var(within=Reals,bounds=(2,5),initialize=2)
m.x367 = Var(within=Reals,bounds=(2,5),initialize=2)
m.x368 = Var(within=Reals,bounds=(2,5),initialize=2)
m.x369 = Var(within=Reals,bounds=(2,5),initialize=2)
m.x370 = Var(within=Reals,bounds=(2,5),initialize=2)
m.x371 = Var(within=Reals,bounds=(2,5),initialize=2)
m.x372 = Var(within=Reals,bounds=(2,5),initialize=2)
m.x373 = Var(within=Reals,bounds=(2,5),initialize=2)
m.x374 = Var(within=Reals,bounds=(2,5),initialize=2)
m.x375 = Var(within=Reals,bounds=(2,5),initialize=2)
m.x376 = Var(within=Reals,bounds=(2,5),initialize=2)
m.x377 = Var(within=Reals,bounds=(2,5),initialize=2)
m.x378 = Var(within=Reals,bounds=(2,5),initialize=2)
m.x379 = Var(within=Reals,bounds=(2,5),initialize=2)
m.x380 = Var(within=Reals,bounds=(4.1,4.1),initialize=4.1)
m.x381 = Var(within=Reals,bounds=(2.5,5),initialize=2.5)
m.x382 = Var(within=Reals,bounds=(2.5,5),initialize=2.5)
m.x383 = Var(within=Reals,bounds=(2.5,5),initialize=2.5)
m.x384 = Var(within=Reals,bounds=(2.5,5),initialize=2.5)
m.x385 = Var(within=Reals,bounds=(2.5,5),initialize=2.5)
m.x386 = Var(within=Reals,bounds=(2.5,5),initialize=2.5)
m.x387 = Var(within=Reals,bounds=(2.5,5),initialize=2.5)
m.x388 = Var(within=Reals,bounds=(2.5,5),initialize=2.5)
m.x389 = Var(within=Reals,bounds=(2.5,5),initialize=2.5)
m.x390 = Var(within=Reals,bounds=(2.5,5),initialize=2.5)
m.x391 = Var(within=Reals,bounds=(2.5,5),initialize=2.5)
m.x392 = Var(within=Reals,bounds=(2.5,5),initialize=2.5)
m.x393 = Var(within=Reals,bounds=(2.5,5),initialize=2.5)
m.x394 = Var(within=Reals,bounds=(2.5,5),initialize=2.5)
m.x395 = Var(within=Reals,bounds=(2.5,5),initialize=2.5)
m.x396 = Var(within=Reals,bounds=(2.5,5),initialize=2.5)
m.x397 = Var(within=Reals,bounds=(2.5,5),initialize=2.5)
m.x398 = Var(within=Reals,bounds=(4,4),initialize=4)
m.x399 = Var(within=Reals,bounds=(2,6),initialize=2)
m.x400 = Var(within=Reals,bounds=(2,6),initialize=2)
m.x401 = Var(within=Reals,bounds=(2,6),initialize=2)
m.x402 = Var(within=Reals,bounds=(2,6),initialize=2)
m.x403 = Var(within=Reals,bounds=(2,6),initialize=2)
m.x404 = Var(within=Reals,bounds=(2,6),initialize=2)
m.x405 = Var(within=Reals,bounds=(2,6),initialize=2)
m.x406 = Var(within=Reals,bounds=(2,6),initialize=2)
m.x407 = Var(within=Reals,bounds=(2,6),initialize=2)
m.x408 = Var(within=Reals,bounds=(2,6),initialize=2)
m.x409 = Var(within=Reals,bounds=(2,6),initialize=2)
m.x410 = Var(within=Reals,bounds=(2,6),initialize=2)
m.x411 = Var(within=Reals,bounds=(2,6),initialize=2)
m.x412 = Var(within=Reals,bounds=(2,6),initialize=2)
m.x413 = Var(within=Reals,bounds=(2,6),initialize=2)
m.x414 = Var(within=Reals,bounds=(2,6),initialize=2)
m.x415 = Var(within=Reals,bounds=(2,6),initialize=2)
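# x416-x469: variables in [0, 0.8] interleaved with free or [-1000, 1000] variables.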
m.x416 = Var(within=Reals,bounds=(0,0.8),initialize=0)
m.x417 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x418 = Var(within=Reals,bounds=(0,0.8),initialize=0)
m.x419 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x420 = Var(within=Reals,bounds=(0,0.8),initialize=0)
m.x421 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x422 = Var(within=Reals,bounds=(0,0.8),initialize=0)
m.x423 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x424 = Var(within=Reals,bounds=(0,0.8),initialize=0)
m.x425 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x426 = Var(within=Reals,bounds=(0,0.8),initialize=0)
m.x427 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x428 = Var(within=Reals,bounds=(0,0.8),initialize=0)
m.x429 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x430 = Var(within=Reals,bounds=(0,0.8),initialize=0)
m.x431 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x432 = Var(within=Reals,bounds=(0,0.8),initialize=0)
m.x433 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x434 = Var(within=Reals,bounds=(0,0.8),initialize=0)
m.x435 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x436 = Var(within=Reals,bounds=(0,0.8),initialize=0)
m.x437 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x438 = Var(within=Reals,bounds=(0,0.8),initialize=0)
m.x439 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x440 = Var(within=Reals,bounds=(0,0.8),initialize=0)
m.x441 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x442 = Var(within=Reals,bounds=(0,0.8),initialize=0)
m.x443 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x444 = Var(within=Reals,bounds=(0,0.8),initialize=0)
m.x445 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x446 = Var(within=Reals,bounds=(0,0.8),initialize=0)
m.x447 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x448 = Var(within=Reals,bounds=(0,0.8),initialize=0)
m.x449 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x450 = Var(within=Reals,bounds=(0,0.8),initialize=0)
m.x451 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x452 = Var(within=Reals,bounds=(0,0.8),initialize=0)
m.x453 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x454 = Var(within=Reals,bounds=(0,0.8),initialize=0)
m.x455 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x456 = Var(within=Reals,bounds=(0,0.8),initialize=0)
m.x457 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x458 = Var(within=Reals,bounds=(0,0.8),initialize=0)
m.x459 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x460 = Var(within=Reals,bounds=(0,0.8),initialize=0)
m.x461 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x462 = Var(within=Reals,bounds=(0,0.8),initialize=0)
m.x463 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x464 = Var(within=Reals,bounds=(0,0.8),initialize=0)
m.x465 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x466 = Var(within=Reals,bounds=(0,0.8),initialize=0)
m.x467 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x468 = Var(within=Reals,bounds=(0,0.8),initialize=0)
m.x469 = Var(within=Reals,bounds=(None,None),initialize=0)
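# x470-x505: same interleaving, with the bounded entries now in [0, 0.5].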
m.x470 = Var(within=Reals,bounds=(0,0.5),initialize=0)
m.x471 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x472 = Var(within=Reals,bounds=(0,0.5),initialize=0)
m.x473 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x474 = Var(within=Reals,bounds=(0,0.5),initialize=0)
m.x475 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x476 = Var(within=Reals,bounds=(0,0.5),initialize=0)
m.x477 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x478 = Var(within=Reals,bounds=(0,0.5),initialize=0)
m.x479 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x480 = Var(within=Reals,bounds=(0,0.5),initialize=0)
m.x481 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x482 = Var(within=Reals,bounds=(0,0.5),initialize=0)
m.x483 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x484 = Var(within=Reals,bounds=(0,0.5),initialize=0)
m.x485 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x486 = Var(within=Reals,bounds=(0,0.5),initialize=0)
m.x487 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x488 = Var(within=Reals,bounds=(0,0.5),initialize=0)
m.x489 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x490 = Var(within=Reals,bounds=(0,0.5),initialize=0)
m.x491 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x492 = Var(within=Reals,bounds=(0,0.5),initialize=0)
m.x493 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x494 = Var(within=Reals,bounds=(0,0.5),initialize=0)
m.x495 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x496 = Var(within=Reals,bounds=(0,0.5),initialize=0)
m.x497 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x498 = Var(within=Reals,bounds=(0,0.5),initialize=0)
m.x499 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x500 = Var(within=Reals,bounds=(0,0.5),initialize=0)
m.x501 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x502 = Var(within=Reals,bounds=(0,0.5),initialize=0)
m.x503 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x504 = Var(within=Reals,bounds=(0,0.5),initialize=0)
m.x505 = Var(within=Reals,bounds=(None,None),initialize=0)
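# x506-x541: same interleaving, bounded entries in [0, 0.7].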
m.x506 = Var(within=Reals,bounds=(0,0.7),initialize=0)
m.x507 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x508 = Var(within=Reals,bounds=(0,0.7),initialize=0)
m.x509 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x510 = Var(within=Reals,bounds=(0,0.7),initialize=0)
m.x511 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x512 = Var(within=Reals,bounds=(0,0.7),initialize=0)
m.x513 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x514 = Var(within=Reals,bounds=(0,0.7),initialize=0)
m.x515 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x516 = Var(within=Reals,bounds=(0,0.7),initialize=0)
m.x517 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x518 = Var(within=Reals,bounds=(0,0.7),initialize=0)
m.x519 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x520 = Var(within=Reals,bounds=(0,0.7),initialize=0)
m.x521 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x522 = Var(within=Reals,bounds=(0,0.7),initialize=0)
m.x523 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x524 = Var(within=Reals,bounds=(0,0.7),initialize=0)
m.x525 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x526 = Var(within=Reals,bounds=(0,0.7),initialize=0)
m.x527 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x528 = Var(within=Reals,bounds=(0,0.7),initialize=0)
m.x529 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x530 = Var(within=Reals,bounds=(0,0.7),initialize=0)
m.x531 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x532 = Var(within=Reals,bounds=(0,0.7),initialize=0)
m.x533 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x534 = Var(within=Reals,bounds=(0,0.7),initialize=0)
m.x535 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x536 = Var(within=Reals,bounds=(0,0.7),initialize=0)
m.x537 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x538 = Var(within=Reals,bounds=(0,0.7),initialize=0)
m.x539 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x540 = Var(within=Reals,bounds=(0,0.7),initialize=0)
m.x541 = Var(within=Reals,bounds=(None,None),initialize=0)
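# x542-x577: same interleaving, bounded entries in [0, 0.58].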
m.x542 = Var(within=Reals,bounds=(0,0.58),initialize=0)
m.x543 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x544 = Var(within=Reals,bounds=(0,0.58),initialize=0)
m.x545 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x546 = Var(within=Reals,bounds=(0,0.58),initialize=0)
m.x547 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x548 = Var(within=Reals,bounds=(0,0.58),initialize=0)
m.x549 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x550 = Var(within=Reals,bounds=(0,0.58),initialize=0)
m.x551 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x552 = Var(within=Reals,bounds=(0,0.58),initialize=0)
m.x553 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x554 = Var(within=Reals,bounds=(0,0.58),initialize=0)
m.x555 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x556 = Var(within=Reals,bounds=(0,0.58),initialize=0)
m.x557 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x558 = Var(within=Reals,bounds=(0,0.58),initialize=0)
m.x559 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x560 = Var(within=Reals,bounds=(0,0.58),initialize=0)
m.x561 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x562 = Var(within=Reals,bounds=(0,0.58),initialize=0)
m.x563 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x564 = Var(within=Reals,bounds=(0,0.58),initialize=0)
m.x565 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x566 = Var(within=Reals,bounds=(0,0.58),initialize=0)
m.x567 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x568 = Var(within=Reals,bounds=(0,0.58),initialize=0)
m.x569 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x570 = Var(within=Reals,bounds=(0,0.58),initialize=0)
m.x571 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x572 = Var(within=Reals,bounds=(0,0.58),initialize=0)
m.x573 = Var(within=Reals,bounds=(-1000,1000),initialize=0)
m.x574 = Var(within=Reals,bounds=(0,0.58),initialize=0)
m.x575 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x576 = Var(within=Reals,bounds=(0,0.58),initialize=0)
m.x577 = Var(within=Reals,bounds=(None,None),initialize=0)
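# x578-x604: three blocks of nine variables each, bounded in [62, 65], [92.5, 95] and [105, 109].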
m.x578 = Var(within=Reals,bounds=(62,65),initialize=62)
m.x579 = Var(within=Reals,bounds=(62,65),initialize=62)
m.x580 = Var(within=Reals,bounds=(62,65),initialize=62)
m.x581 = Var(within=Reals,bounds=(62,65),initialize=62)
m.x582 = Var(within=Reals,bounds=(62,65),initialize=62)
m.x583 = Var(within=Reals,bounds=(62,65),initialize=62)
m.x584 = Var(within=Reals,bounds=(62,65),initialize=62)
m.x585 = Var(within=Reals,bounds=(62,65),initialize=62)
m.x586 = Var(within=Reals,bounds=(62,65),initialize=62)
m.x587 = Var(within=Reals,bounds=(92.5,95),initialize=92.5)
m.x588 = Var(within=Reals,bounds=(92.5,95),initialize=92.5)
m.x589 = Var(within=Reals,bounds=(92.5,95),initialize=92.5)
m.x590 = Var(within=Reals,bounds=(92.5,95),initialize=92.5)
m.x591 = Var(within=Reals,bounds=(92.5,95),initialize=92.5)
m.x592 = Var(within=Reals,bounds=(92.5,95),initialize=92.5)
m.x593 = Var(within=Reals,bounds=(92.5,95),initialize=92.5)
m.x594 = Var(within=Reals,bounds=(92.5,95),initialize=92.5)
m.x595 = Var(within=Reals,bounds=(92.5,95),initialize=92.5)
m.x596 = Var(within=Reals,bounds=(105,109),initialize=105)
m.x597 = Var(within=Reals,bounds=(105,109),initialize=105)
m.x598 = Var(within=Reals,bounds=(105,109),initialize=105)
m.x599 = Var(within=Reals,bounds=(105,109),initialize=105)
m.x600 = Var(within=Reals,bounds=(105,109),initialize=105)
m.x601 = Var(within=Reals,bounds=(105,109),initialize=105)
m.x602 = Var(within=Reals,bounds=(105,109),initialize=105)
m.x603 = Var(within=Reals,bounds=(105,109),initialize=105)
m.x604 = Var(within=Reals,bounds=(105,109),initialize=105)
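# x605-x667: variables in [0, 1000] interleaved with variables in [-125, 125] or [-100, 100].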
m.x605 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x606 = Var(within=Reals,bounds=(-125,125),initialize=0)
m.x607 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x608 = Var(within=Reals,bounds=(-125,125),initialize=0)
m.x609 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x610 = Var(within=Reals,bounds=(-125,125),initialize=0)
m.x611 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x612 = Var(within=Reals,bounds=(-125,125),initialize=0)
m.x613 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x614 = Var(within=Reals,bounds=(-125,125),initialize=0)
m.x615 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x616 = Var(within=Reals,bounds=(-125,125),initialize=0)
m.x617 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x618 = Var(within=Reals,bounds=(-125,125),initialize=0)
m.x619 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x620 = Var(within=Reals,bounds=(-125,125),initialize=0)
m.x621 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x622 = Var(within=Reals,bounds=(-125,125),initialize=0)
m.x623 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x624 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x625 = Var(within=Reals,bounds=(-100,100),initialize=0)
m.x626 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x627 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x628 = Var(within=Reals,bounds=(-100,100),initialize=0)
m.x629 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x630 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x631 = Var(within=Reals,bounds=(-100,100),initialize=0)
m.x632 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x633 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x634 = Var(within=Reals,bounds=(-100,100),initialize=0)
m.x635 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x636 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x637 = Var(within=Reals,bounds=(-100,100),initialize=0)
m.x638 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x639 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x640 = Var(within=Reals,bounds=(-100,100),initialize=0)
m.x641 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x642 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x643 = Var(within=Reals,bounds=(-100,100),initialize=0)
m.x644 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x645 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x646 = Var(within=Reals,bounds=(-100,100),initialize=0)
m.x647 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x648 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x649 = Var(within=Reals,bounds=(-100,100),initialize=0)
m.x650 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x651 = Var(within=Reals,bounds=(-125,125),initialize=0)
m.x652 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x653 = Var(within=Reals,bounds=(-125,125),initialize=0)
m.x654 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x655 = Var(within=Reals,bounds=(-125,125),initialize=0)
m.x656 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x657 = Var(within=Reals,bounds=(-125,125),initialize=0)
m.x658 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x659 = Var(within=Reals,bounds=(-125,125),initialize=0)
m.x660 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x661 = Var(within=Reals,bounds=(-125,125),initialize=0)
m.x662 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x663 = Var(within=Reals,bounds=(-125,125),initialize=0)
m.x664 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x665 = Var(within=Reals,bounds=(-125,125),initialize=0)
m.x666 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x667 = Var(within=Reals,bounds=(-125,125),initialize=0)
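# x668-x703: variables fixed at 49 alternating with variables in [-49, 1000] (x668-x685), then blocks in [-65, 1000] (x686-x694) and [-95, 1000] (x695-x703).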
m.x668 = Var(within=Reals,bounds=(49,49),initialize=49)
m.x669 = Var(within=Reals,bounds=(-49,1000),initialize=0)
m.x670 = Var(within=Reals,bounds=(49,49),initialize=49)
m.x671 = Var(within=Reals,bounds=(-49,1000),initialize=0)
m.x672 = Var(within=Reals,bounds=(49,49),initialize=49)
m.x673 = Var(within=Reals,bounds=(-49,1000),initialize=0)
m.x674 = Var(within=Reals,bounds=(49,49),initialize=49)
m.x675 = Var(within=Reals,bounds=(-49,1000),initialize=0)
m.x676 = Var(within=Reals,bounds=(49,49),initialize=49)
m.x677 = Var(within=Reals,bounds=(-49,1000),initialize=0)
m.x678 = Var(within=Reals,bounds=(49,49),initialize=49)
m.x679 = Var(within=Reals,bounds=(-49,1000),initialize=0)
m.x680 = Var(within=Reals,bounds=(49,49),initialize=49)
m.x681 = Var(within=Reals,bounds=(-49,1000),initialize=0)
m.x682 = Var(within=Reals,bounds=(49,49),initialize=49)
m.x683 = Var(within=Reals,bounds=(-49,1000),initialize=0)
m.x684 = Var(within=Reals,bounds=(49,49),initialize=49)
m.x685 = Var(within=Reals,bounds=(-49,1000),initialize=0)
m.x686 = Var(within=Reals,bounds=(-65,1000),initialize=0)
m.x687 = Var(within=Reals,bounds=(-65,1000),initialize=0)
m.x688 = Var(within=Reals,bounds=(-65,1000),initialize=0)
m.x689 = Var(within=Reals,bounds=(-65,1000),initialize=0)
m.x690 = Var(within=Reals,bounds=(-65,1000),initialize=0)
m.x691 = Var(within=Reals,bounds=(-65,1000),initialize=0)
m.x692 = Var(within=Reals,bounds=(-65,1000),initialize=0)
m.x693 = Var(within=Reals,bounds=(-65,1000),initialize=0)
m.x694 = Var(within=Reals,bounds=(-65,1000),initialize=0)
m.x695 = Var(within=Reals,bounds=(-95,1000),initialize=0)
m.x696 = Var(within=Reals,bounds=(-95,1000),initialize=0)
m.x697 = Var(within=Reals,bounds=(-95,1000),initialize=0)
m.x698 = Var(within=Reals,bounds=(-95,1000),initialize=0)
m.x699 = Var(within=Reals,bounds=(-95,1000),initialize=0)
m.x700 = Var(within=Reals,bounds=(-95,1000),initialize=0)
m.x701 = Var(within=Reals,bounds=(-95,1000),initialize=0)
m.x702 = Var(within=Reals,bounds=(-95,1000),initialize=0)
m.x703 = Var(within=Reals,bounds=(-95,1000),initialize=0)
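# x704-x820: variables bounded inside the unit interval, in blocks with ranges [0.2, 0.8], [0.25, 0.5], [0.4, 0.7], [0.24, 0.58], [0.6, 1], [0.8, 1], [0.85, 1] and [0.7, 1].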
m.x704 = Var(within=Reals,bounds=(0.2,0.8),initialize=0.2)
m.x705 = Var(within=Reals,bounds=(0.2,0.8),initialize=0.2)
m.x706 = Var(within=Reals,bounds=(0.2,0.8),initialize=0.2)
m.x707 = Var(within=Reals,bounds=(0.2,0.8),initialize=0.2)
m.x708 = Var(within=Reals,bounds=(0.2,0.8),initialize=0.2)
m.x709 = Var(within=Reals,bounds=(0.2,0.8),initialize=0.2)
m.x710 = Var(within=Reals,bounds=(0.2,0.8),initialize=0.2)
m.x711 = Var(within=Reals,bounds=(0.2,0.8),initialize=0.2)
m.x712 = Var(within=Reals,bounds=(0.2,0.8),initialize=0.2)
m.x713 = Var(within=Reals,bounds=(0.2,0.8),initialize=0.2)
m.x714 = Var(within=Reals,bounds=(0.2,0.8),initialize=0.2)
m.x715 = Var(within=Reals,bounds=(0.2,0.8),initialize=0.2)
m.x716 = Var(within=Reals,bounds=(0.2,0.8),initialize=0.2)
m.x717 = Var(within=Reals,bounds=(0.2,0.8),initialize=0.2)
m.x718 = Var(within=Reals,bounds=(0.2,0.8),initialize=0.2)
m.x719 = Var(within=Reals,bounds=(0.2,0.8),initialize=0.2)
m.x720 = Var(within=Reals,bounds=(0.2,0.8),initialize=0.2)
m.x721 = Var(within=Reals,bounds=(0.2,0.8),initialize=0.2)
m.x722 = Var(within=Reals,bounds=(0.2,0.8),initialize=0.2)
m.x723 = Var(within=Reals,bounds=(0.2,0.8),initialize=0.2)
m.x724 = Var(within=Reals,bounds=(0.2,0.8),initialize=0.2)
m.x725 = Var(within=Reals,bounds=(0.2,0.8),initialize=0.2)
m.x726 = Var(within=Reals,bounds=(0.2,0.8),initialize=0.2)
m.x727 = Var(within=Reals,bounds=(0.2,0.8),initialize=0.2)
m.x728 = Var(within=Reals,bounds=(0.2,0.8),initialize=0.2)
m.x729 = Var(within=Reals,bounds=(0.2,0.8),initialize=0.2)
m.x730 = Var(within=Reals,bounds=(0.2,0.8),initialize=0.2)
m.x731 = Var(within=Reals,bounds=(0.25,0.5),initialize=0.25)
m.x732 = Var(within=Reals,bounds=(0.25,0.5),initialize=0.25)
m.x733 = Var(within=Reals,bounds=(0.25,0.5),initialize=0.25)
m.x734 = Var(within=Reals,bounds=(0.25,0.5),initialize=0.25)
m.x735 = Var(within=Reals,bounds=(0.25,0.5),initialize=0.25)
m.x736 = Var(within=Reals,bounds=(0.25,0.5),initialize=0.25)
m.x737 = Var(within=Reals,bounds=(0.25,0.5),initialize=0.25)
m.x738 = Var(within=Reals,bounds=(0.25,0.5),initialize=0.25)
m.x739 = Var(within=Reals,bounds=(0.25,0.5),initialize=0.25)
m.x740 = Var(within=Reals,bounds=(0.25,0.5),initialize=0.25)
m.x741 = Var(within=Reals,bounds=(0.25,0.5),initialize=0.25)
m.x742 = Var(within=Reals,bounds=(0.25,0.5),initialize=0.25)
m.x743 = Var(within=Reals,bounds=(0.25,0.5),initialize=0.25)
m.x744 = Var(within=Reals,bounds=(0.25,0.5),initialize=0.25)
m.x745 = Var(within=Reals,bounds=(0.25,0.5),initialize=0.25)
m.x746 = Var(within=Reals,bounds=(0.25,0.5),initialize=0.25)
m.x747 = Var(within=Reals,bounds=(0.25,0.5),initialize=0.25)
m.x748 = Var(within=Reals,bounds=(0.25,0.5),initialize=0.25)
m.x749 = Var(within=Reals,bounds=(0.4,0.7),initialize=0.4)
m.x750 = Var(within=Reals,bounds=(0.4,0.7),initialize=0.4)
m.x751 = Var(within=Reals,bounds=(0.4,0.7),initialize=0.4)
m.x752 = Var(within=Reals,bounds=(0.4,0.7),initialize=0.4)
m.x753 = Var(within=Reals,bounds=(0.4,0.7),initialize=0.4)
m.x754 = Var(within=Reals,bounds=(0.4,0.7),initialize=0.4)
m.x755 = Var(within=Reals,bounds=(0.4,0.7),initialize=0.4)
m.x756 = Var(within=Reals,bounds=(0.4,0.7),initialize=0.4)
m.x757 = Var(within=Reals,bounds=(0.4,0.7),initialize=0.4)
m.x758 = Var(within=Reals,bounds=(0.4,0.7),initialize=0.4)
m.x759 = Var(within=Reals,bounds=(0.4,0.7),initialize=0.4)
m.x760 = Var(within=Reals,bounds=(0.4,0.7),initialize=0.4)
m.x761 = Var(within=Reals,bounds=(0.4,0.7),initialize=0.4)
m.x762 = Var(within=Reals,bounds=(0.4,0.7),initialize=0.4)
m.x763 = Var(within=Reals,bounds=(0.4,0.7),initialize=0.4)
m.x764 = Var(within=Reals,bounds=(0.4,0.7),initialize=0.4)
m.x765 = Var(within=Reals,bounds=(0.4,0.7),initialize=0.4)
m.x766 = Var(within=Reals,bounds=(0.4,0.7),initialize=0.4)
m.x767 = Var(within=Reals,bounds=(0.24,0.58),initialize=0.24)
m.x768 = Var(within=Reals,bounds=(0.24,0.58),initialize=0.24)
m.x769 = Var(within=Reals,bounds=(0.24,0.58),initialize=0.24)
m.x770 = Var(within=Reals,bounds=(0.24,0.58),initialize=0.24)
m.x771 = Var(within=Reals,bounds=(0.24,0.58),initialize=0.24)
m.x772 = Var(within=Reals,bounds=(0.24,0.58),initialize=0.24)
m.x773 = Var(within=Reals,bounds=(0.24,0.58),initialize=0.24)
m.x774 = Var(within=Reals,bounds=(0.24,0.58),initialize=0.24)
m.x775 = Var(within=Reals,bounds=(0.24,0.58),initialize=0.24)
m.x776 = Var(within=Reals,bounds=(0.24,0.58),initialize=0.24)
m.x777 = Var(within=Reals,bounds=(0.24,0.58),initialize=0.24)
m.x778 = Var(within=Reals,bounds=(0.24,0.58),initialize=0.24)
m.x779 = Var(within=Reals,bounds=(0.24,0.58),initialize=0.24)
m.x780 = Var(within=Reals,bounds=(0.24,0.58),initialize=0.24)
m.x781 = Var(within=Reals,bounds=(0.24,0.58),initialize=0.24)
m.x782 = Var(within=Reals,bounds=(0.24,0.58),initialize=0.24)
m.x783 = Var(within=Reals,bounds=(0.24,0.58),initialize=0.24)
m.x784 = Var(within=Reals,bounds=(0.24,0.58),initialize=0.24)
m.x785 = Var(within=Reals,bounds=(0.6,1),initialize=0.6)
m.x786 = Var(within=Reals,bounds=(0.6,1),initialize=0.6)
m.x787 = Var(within=Reals,bounds=(0.6,1),initialize=0.6)
m.x788 = Var(within=Reals,bounds=(0.6,1),initialize=0.6)
m.x789 = Var(within=Reals,bounds=(0.6,1),initialize=0.6)
m.x790 = Var(within=Reals,bounds=(0.6,1),initialize=0.6)
m.x791 = Var(within=Reals,bounds=(0.6,1),initialize=0.6)
m.x792 = Var(within=Reals,bounds=(0.6,1),initialize=0.6)
m.x793 = Var(within=Reals,bounds=(0.6,1),initialize=0.6)
m.x794 = Var(within=Reals,bounds=(0.8,1),initialize=0.8)
m.x795 = Var(within=Reals,bounds=(0.8,1),initialize=0.8)
m.x796 = Var(within=Reals,bounds=(0.8,1),initialize=0.8)
m.x797 = Var(within=Reals,bounds=(0.8,1),initialize=0.8)
m.x798 = Var(within=Reals,bounds=(0.8,1),initialize=0.8)
m.x799 = Var(within=Reals,bounds=(0.8,1),initialize=0.8)
m.x800 = Var(within=Reals,bounds=(0.8,1),initialize=0.8)
m.x801 = Var(within=Reals,bounds=(0.8,1),initialize=0.8)
m.x802 = Var(within=Reals,bounds=(0.8,1),initialize=0.8)
m.x803 = Var(within=Reals,bounds=(0.85,1),initialize=0.85)
m.x804 = Var(within=Reals,bounds=(0.85,1),initialize=0.85)
m.x805 = Var(within=Reals,bounds=(0.85,1),initialize=0.85)
m.x806 = Var(within=Reals,bounds=(0.85,1),initialize=0.85)
m.x807 = Var(within=Reals,bounds=(0.85,1),initialize=0.85)
m.x808 = Var(within=Reals,bounds=(0.85,1),initialize=0.85)
m.x809 = Var(within=Reals,bounds=(0.85,1),initialize=0.85)
m.x810 = Var(within=Reals,bounds=(0.85,1),initialize=0.85)
m.x811 = Var(within=Reals,bounds=(0.85,1),initialize=0.85)
m.x812 = Var(within=Reals,bounds=(0.7,1),initialize=0.7)
m.x813 = Var(within=Reals,bounds=(0.7,1),initialize=0.7)
m.x814 = Var(within=Reals,bounds=(0.7,1),initialize=0.7)
m.x815 = Var(within=Reals,bounds=(0.7,1),initialize=0.7)
m.x816 = Var(within=Reals,bounds=(0.7,1),initialize=0.7)
m.x817 = Var(within=Reals,bounds=(0.7,1),initialize=0.7)
m.x818 = Var(within=Reals,bounds=(0.7,1),initialize=0.7)
m.x819 = Var(within=Reals,bounds=(0.7,1),initialize=0.7)
m.x820 = Var(within=Reals,bounds=(0.7,1),initialize=0.7)
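# x821-x829: variables in [100, 1000].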
m.x821 = Var(within=Reals,bounds=(100,1000),initialize=100)
m.x822 = Var(within=Reals,bounds=(100,1000),initialize=100)
m.x823 = Var(within=Reals,bounds=(100,1000),initialize=100)
m.x824 = Var(within=Reals,bounds=(100,1000),initialize=100)
m.x825 = Var(within=Reals,bounds=(100,1000),initialize=100)
m.x826 = Var(within=Reals,bounds=(100,1000),initialize=100)
m.x827 = Var(within=Reals,bounds=(100,1000),initialize=100)
m.x828 = Var(within=Reals,bounds=(100,1000),initialize=100)
m.x829 = Var(within=Reals,bounds=(100,1000),initialize=100)
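# x830-x1234: mostly unbounded variables, with scattered nonnegative variables capped at model-specific constants (approximately 54.17, 126.62, 93.05, 217.48, 112.38, 262.69, 42.07 and 98.33).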
m.x830 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x831 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x832 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x833 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x834 = Var(within=Reals,bounds=(0,54.1717996137183),initialize=0)
m.x835 = Var(within=Reals,bounds=(0,54.1717996137183),initialize=0)
m.x836 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x837 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x838 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x839 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x840 = Var(within=Reals,bounds=(0,54.1717996137183),initialize=0)
m.x841 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x842 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x843 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x844 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x845 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x846 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x847 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x848 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x849 = Var(within=Reals,bounds=(0,54.1717996137183),initialize=0)
m.x850 = Var(within=Reals,bounds=(0,54.1717996137183),initialize=0)
m.x851 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x852 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x853 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x854 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x855 = Var(within=Reals,bounds=(0,54.1717996137183),initialize=0)
m.x856 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x857 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x858 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x859 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x860 = Var(within=Reals,bounds=(0,126.620406999846),initialize=0)
m.x861 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x862 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x863 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x864 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x865 = Var(within=Reals,bounds=(0,126.620406999846),initialize=0)
m.x866 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x867 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x868 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x869 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x870 = Var(within=Reals,bounds=(0,126.620406999846),initialize=0)
m.x871 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x872 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x873 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x874 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x875 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x876 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x877 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x878 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x879 = Var(within=Reals,bounds=(0,54.1717996137183),initialize=0)
m.x880 = Var(within=Reals,bounds=(0,54.1717996137183),initialize=0)
m.x881 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x882 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x883 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x884 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x885 = Var(within=Reals,bounds=(0,54.1717996137183),initialize=0)
m.x886 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x887 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x888 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x889 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x890 = Var(within=Reals,bounds=(0,54.1717996137183),initialize=0)
m.x891 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x892 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x893 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x894 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x895 = Var(within=Reals,bounds=(0,54.1717996137183),initialize=0)
m.x896 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x897 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x898 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x899 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x900 = Var(within=Reals,bounds=(0,54.1717996137183),initialize=0)
m.x901 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x902 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x903 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x904 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x905 = Var(within=Reals,bounds=(0,126.620406999846),initialize=0)
m.x906 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x907 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x908 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x909 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x910 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x911 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x912 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x913 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x914 = Var(within=Reals,bounds=(0,126.620406999846),initialize=0)
m.x915 = Var(within=Reals,bounds=(0,126.620406999846),initialize=0)
m.x916 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x917 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x918 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x919 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x920 = Var(within=Reals,bounds=(0,54.1717996137183),initialize=0)
m.x921 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x922 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x923 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x924 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x925 = Var(within=Reals,bounds=(0,54.1717996137183),initialize=0)
m.x926 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x927 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x928 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x929 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x930 = Var(within=Reals,bounds=(0,54.1717996137183),initialize=0)
m.x931 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x932 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x933 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x934 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x935 = Var(within=Reals,bounds=(0,54.1717996137183),initialize=0)
m.x936 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x937 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x938 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x939 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x940 = Var(within=Reals,bounds=(0,54.1717996137183),initialize=0)
m.x941 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x942 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x943 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x944 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x945 = Var(within=Reals,bounds=(0,54.1717996137183),initialize=0)
m.x946 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x947 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x948 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x949 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x950 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x951 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x952 = Var(within=Reals,bounds=(0,126.620406999846),initialize=0)
m.x953 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x954 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x955 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x956 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x957 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x958 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x959 = Var(within=Reals,bounds=(0,126.620406999846),initialize=0)
m.x960 = Var(within=Reals,bounds=(0,126.620406999846),initialize=0)
m.x961 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x962 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x963 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x964 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x965 = Var(within=Reals,bounds=(0,93.045051789432),initialize=0)
m.x966 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x967 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x968 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x969 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x970 = Var(within=Reals,bounds=(0,93.045051789432),initialize=0)
m.x971 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x972 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x973 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x974 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x975 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x976 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x977 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x978 = Var(within=Reals,bounds=(0,93.045051789432),initialize=0)
m.x979 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x980 = Var(within=Reals,bounds=(0,93.045051789432),initialize=0)
m.x981 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x982 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x983 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x984 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x985 = Var(within=Reals,bounds=(0,93.045051789432),initialize=0)
m.x986 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x987 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x988 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x989 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x990 = Var(within=Reals,bounds=(0,93.045051789432),initialize=0)
m.x991 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x992 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x993 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x994 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x995 = Var(within=Reals,bounds=(0,217.482203118763),initialize=0)
m.x996 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x997 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x998 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x999 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1000 = Var(within=Reals,bounds=(0,217.482203118763),initialize=0)
m.x1001 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1002 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1003 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1004 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1005 = Var(within=Reals,bounds=(0,217.482203118763),initialize=0)
m.x1006 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1007 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1008 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1009 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1010 = Var(within=Reals,bounds=(0,93.045051789432),initialize=0)
m.x1011 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1012 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1013 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1014 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1015 = Var(within=Reals,bounds=(0,93.045051789432),initialize=0)
m.x1016 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1017 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1018 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1019 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1020 = Var(within=Reals,bounds=(0,93.045051789432),initialize=0)
m.x1021 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1022 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1023 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1024 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1025 = Var(within=Reals,bounds=(0,93.045051789432),initialize=0)
m.x1026 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1027 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1028 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1029 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1030 = Var(within=Reals,bounds=(0,93.045051789432),initialize=0)
m.x1031 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1032 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1033 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1034 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1035 = Var(within=Reals,bounds=(0,93.045051789432),initialize=0)
m.x1036 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1037 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1038 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1039 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1040 = Var(within=Reals,bounds=(0,217.482203118763),initialize=0)
m.x1041 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1042 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1043 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1044 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1045 = Var(within=Reals,bounds=(0,217.482203118763),initialize=0)
m.x1046 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1047 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1048 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1049 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1050 = Var(within=Reals,bounds=(0,217.482203118763),initialize=0)
m.x1051 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1052 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1053 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1054 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1055 = Var(within=Reals,bounds=(0,112.384987749469),initialize=0)
m.x1056 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1057 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1058 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1059 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1060 = Var(within=Reals,bounds=(0,112.384987749469),initialize=0)
m.x1061 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1062 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1063 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1064 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1065 = Var(within=Reals,bounds=(0,112.384987749469),initialize=0)
m.x1066 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1067 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1068 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1069 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1070 = Var(within=Reals,bounds=(0,112.384987749469),initialize=0)
m.x1071 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1072 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1073 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1074 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1075 = Var(within=Reals,bounds=(0,112.384987749469),initialize=0)
m.x1076 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1077 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1078 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1079 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1080 = Var(within=Reals,bounds=(0,112.384987749469),initialize=0)
m.x1081 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1082 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1083 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1084 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1085 = Var(within=Reals,bounds=(0,262.687099025355),initialize=0)
m.x1086 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1087 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1088 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1089 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1090 = Var(within=Reals,bounds=(0,262.687099025355),initialize=0)
m.x1091 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1092 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1093 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1094 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1095 = Var(within=Reals,bounds=(0,262.687099025355),initialize=0)
m.x1096 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1097 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1098 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1099 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1100 = Var(within=Reals,bounds=(0,112.384987749469),initialize=0)
m.x1101 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1102 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1103 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1104 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1105 = Var(within=Reals,bounds=(0,112.384987749469),initialize=0)
m.x1106 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1107 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1108 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1109 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1110 = Var(within=Reals,bounds=(0,112.384987749469),initialize=0)
m.x1111 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1112 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1113 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1114 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1115 = Var(within=Reals,bounds=(0,112.384987749469),initialize=0)
m.x1116 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1117 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1118 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1119 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1120 = Var(within=Reals,bounds=(0,112.384987749469),initialize=0)
m.x1121 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1122 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1123 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1124 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1125 = Var(within=Reals,bounds=(0,112.384987749469),initialize=0)
m.x1126 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1127 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1128 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1129 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1130 = Var(within=Reals,bounds=(0,262.687099025355),initialize=0)
m.x1131 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1132 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1133 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1134 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1135 = Var(within=Reals,bounds=(0,262.687099025355),initialize=0)
m.x1136 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1137 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1138 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1139 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1140 = Var(within=Reals,bounds=(0,262.687099025355),initialize=0)
m.x1141 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1142 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1143 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1144 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1145 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1146 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1147 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1148 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1149 = Var(within=Reals,bounds=(0,42.066542469172),initialize=0)
m.x1150 = Var(within=Reals,bounds=(0,42.066542469172),initialize=0)
m.x1151 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1152 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1153 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1154 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1155 = Var(within=Reals,bounds=(0,42.066542469172),initialize=0)
m.x1156 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1157 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1158 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1159 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1160 = Var(within=Reals,bounds=(0,42.066542469172),initialize=0)
m.x1161 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1162 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1163 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1164 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1165 = Var(within=Reals,bounds=(0,42.066542469172),initialize=0)
m.x1166 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1167 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1168 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1169 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1170 = Var(within=Reals,bounds=(0,42.066542469172),initialize=0)
m.x1171 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1172 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1173 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1174 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1175 = Var(within=Reals,bounds=(0,98.325748203019),initialize=0)
m.x1176 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1177 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1178 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1179 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1180 = Var(within=Reals,bounds=(0,98.325748203019),initialize=0)
m.x1181 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1182 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1183 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1184 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1185 = Var(within=Reals,bounds=(0,98.325748203019),initialize=0)
m.x1186 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1187 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1188 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1189 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1190 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1191 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1192 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1193 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1194 = Var(within=Reals,bounds=(0,42.066542469172),initialize=0)
m.x1195 = Var(within=Reals,bounds=(0,42.066542469172),initialize=0)
m.x1196 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1197 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1198 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1199 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1200 = Var(within=Reals,bounds=(0,42.066542469172),initialize=0)
m.x1201 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1202 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1203 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1204 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1205 = Var(within=Reals,bounds=(0,42.066542469172),initialize=0)
m.x1206 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1207 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1208 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1209 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1210 = Var(within=Reals,bounds=(0,42.066542469172),initialize=0)
m.x1211 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1212 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1213 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1214 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1215 = Var(within=Reals,bounds=(0,42.066542469172),initialize=0)
m.x1216 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1217 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1218 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1219 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1220 = Var(within=Reals,bounds=(0,98.325748203019),initialize=0)
m.x1221 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1222 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1223 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1224 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1225 = Var(within=Reals,bounds=(0,98.325748203019),initialize=0)
m.x1226 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1227 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1228 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1229 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1230 = Var(within=Reals,bounds=(0,98.325748203019),initialize=0)
m.x1231 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1232 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1233 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x1234 = Var(within=Reals,bounds=(None,None),initialize=0)
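# x1235-x1261: variables in [0, 25].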
m.x1235 = Var(within=Reals,bounds=(0,25),initialize=0)
m.x1236 = Var(within=Reals,bounds=(0,25),initialize=0)
m.x1237 = Var(within=Reals,bounds=(0,25),initialize=0)
m.x1238 = Var(within=Reals,bounds=(0,25),initialize=0)
m.x1239 = Var(within=Reals,bounds=(0,25),initialize=0)
m.x1240 = Var(within=Reals,bounds=(0,25),initialize=0)
m.x1241 = Var(within=Reals,bounds=(0,25),initialize=0)
m.x1242 = Var(within=Reals,bounds=(0,25),initialize=0)
m.x1243 = Var(within=Reals,bounds=(0,25),initialize=0)
m.x1244 = Var(within=Reals,bounds=(0,25),initialize=0)
m.x1245 = Var(within=Reals,bounds=(0,25),initialize=0)
m.x1246 = Var(within=Reals,bounds=(0,25),initialize=0)
m.x1247 = Var(within=Reals,bounds=(0,25),initialize=0)
m.x1248 = Var(within=Reals,bounds=(0,25),initialize=0)
m.x1249 = Var(within=Reals,bounds=(0,25),initialize=0)
m.x1250 = Var(within=Reals,bounds=(0,25),initialize=0)
m.x1251 = Var(within=Reals,bounds=(0,25),initialize=0)
m.x1252 = Var(within=Reals,bounds=(0,25),initialize=0)
m.x1253 = Var(within=Reals,bounds=(0,25),initialize=0)
m.x1254 = Var(within=Reals,bounds=(0,25),initialize=0)
m.x1255 = Var(within=Reals,bounds=(0,25),initialize=0)
m.x1256 = Var(within=Reals,bounds=(0,25),initialize=0)
m.x1257 = Var(within=Reals,bounds=(0,25),initialize=0)
m.x1258 = Var(within=Reals,bounds=(0,25),initialize=0)
m.x1259 = Var(within=Reals,bounds=(0,25),initialize=0)
m.x1260 = Var(within=Reals,bounds=(0,25),initialize=0)
m.x1261 = Var(within=Reals,bounds=(0,25),initialize=0)
m.x1262 = Var(within=Reals,bounds=(0,0.64),initialize=0)
m.x1263 = Var(within=Reals,bounds=(0,0.512),initialize=0)
m.x1264 = Var(within=Reals,bounds=(0,0.64),initialize=0)
m.x1265 = Var(within=Reals,bounds=(0,0.512),initialize=0)
m.x1266 = Var(within=Reals,bounds=(0,0.64),initialize=0)
m.x1267 = Var(within=Reals,bounds=(0,0.512),initialize=0)
m.x1268 = Var(within=Reals,bounds=(0,0.64),initialize=0)
m.x1269 = Var(within=Reals,bounds=(0,0.512),initialize=0)
m.x1270 = Var(within=Reals,bounds=(0,0.64),initialize=0)
m.x1271 = Var(within=Reals,bounds=(0,0.512),initialize=0)
m.x1272 = Var(within=Reals,bounds=(0,0.64),initialize=0)
m.x1273 = Var(within=Reals,bounds=(0,0.512),initialize=0)
m.x1274 = Var(within=Reals,bounds=(0,0.64),initialize=0)
m.x1275 = Var(within=Reals,bounds=(0,0.512),initialize=0)
m.x1276 = Var(within=Reals,bounds=(0,0.64),initialize=0)
m.x1277 = Var(within=Reals,bounds=(0,0.512),initialize=0)
m.x1278 = Var(within=Reals,bounds=(0,0.64),initialize=0)
m.x1279 = Var(within=Reals,bounds=(0,0.512),initialize=0)
m.x1280 = Var(within=Reals,bounds=(0,0.64),initialize=0)
m.x1281 = Var(within=Reals,bounds=(0,0.512),initialize=0)
m.x1282 = Var(within=Reals,bounds=(0,0.64),initialize=0)
m.x1283 = Var(within=Reals,bounds=(0,0.512),initialize=0)
m.x1284 = Var(within=Reals,bounds=(0,0.64),initialize=0)
m.x1285 = Var(within=Reals,bounds=(0,0.512),initialize=0)
m.x1286 = Var(within=Reals,bounds=(0,0.64),initialize=0)
m.x1287 = Var(within=Reals,bounds=(0,0.512),initialize=0)
m.x1288 = Var(within=Reals,bounds=(0,0.64),initialize=0)
m.x1289 = Var(within=Reals,bounds=(0,0.512),initialize=0)
m.x1290 = Var(within=Reals,bounds=(0,0.64),initialize=0)
m.x1291 = Var(within=Reals,bounds=(0,0.512),initialize=0)
m.x1292 = Var(within=Reals,bounds=(0,0.64),initialize=0)
m.x1293 = Var(within=Reals,bounds=(0,0.512),initialize=0)
m.x1294 = Var(within=Reals,bounds=(0,0.64),initialize=0)
m.x1295 = Var(within=Reals,bounds=(0,0.512),initialize=0)
m.x1296 = Var(within=Reals,bounds=(0,0.64),initialize=0)
m.x1297 = Var(within=Reals,bounds=(0,0.512),initialize=0)
m.x1298 = Var(within=Reals,bounds=(0,0.64),initialize=0)
m.x1299 = Var(within=Reals,bounds=(0,0.512),initialize=0)
m.x1300 = Var(within=Reals,bounds=(0,0.64),initialize=0)
m.x1301 = Var(within=Reals,bounds=(0,0.512),initialize=0)
m.x1302 = Var(within=Reals,bounds=(0,0.64),initialize=0)
m.x1303 = Var(within=Reals,bounds=(0,0.512),initialize=0)
m.x1304 = Var(within=Reals,bounds=(0,0.64),initialize=0)
m.x1305 = Var(within=Reals,bounds=(0,0.512),initialize=0)
m.x1306 = Var(within=Reals,bounds=(0,0.64),initialize=0)
m.x1307 = Var(within=Reals,bounds=(0,0.512),initialize=0)
m.x1308 = Var(within=Reals,bounds=(0,0.64),initialize=0)
m.x1309 = Var(within=Reals,bounds=(0,0.512),initialize=0)
m.x1310 = Var(within=Reals,bounds=(0,0.64),initialize=0)
m.x1311 = Var(within=Reals,bounds=(0,0.512),initialize=0)
m.x1312 = Var(within=Reals,bounds=(0,0.64),initialize=0)
m.x1313 = Var(within=Reals,bounds=(0,0.512),initialize=0)
m.x1314 = Var(within=Reals,bounds=(0,0.64),initialize=0)
m.x1315 = Var(within=Reals,bounds=(0,0.512),initialize=0)
m.x1316 = Var(within=Reals,bounds=(0,0.25),initialize=0)
m.x1317 = Var(within=Reals,bounds=(0,0.125),initialize=0)
m.x1318 = Var(within=Reals,bounds=(0,0.25),initialize=0)
m.x1319 = Var(within=Reals,bounds=(0,0.125),initialize=0)
m.x1320 = Var(within=Reals,bounds=(0,0.25),initialize=0)
m.x1321 = Var(within=Reals,bounds=(0,0.125),initialize=0)
m.x1322 = Var(within=Reals,bounds=(0,0.25),initialize=0)
m.x1323 = Var(within=Reals,bounds=(0,0.125),initialize=0)
m.x1324 = Var(within=Reals,bounds=(0,0.25),initialize=0)
m.x1325 = Var(within=Reals,bounds=(0,0.125),initialize=0)
m.x1326 = Var(within=Reals,bounds=(0,0.25),initialize=0)
m.x1327 = Var(within=Reals,bounds=(0,0.125),initialize=0)
m.x1328 = Var(within=Reals,bounds=(0,0.25),initialize=0)
m.x1329 = Var(within=Reals,bounds=(0,0.125),initialize=0)
m.x1330 = Var(within=Reals,bounds=(0,0.25),initialize=0)
m.x1331 = Var(within=Reals,bounds=(0,0.125),initialize=0)
m.x1332 = Var(within=Reals,bounds=(0,0.25),initialize=0)
m.x1333 = Var(within=Reals,bounds=(0,0.125),initialize=0)
m.x1334 = Var(within=Reals,bounds=(0,0.25),initialize=0)
m.x1335 = Var(within=Reals,bounds=(0,0.125),initialize=0)
m.x1336 = Var(within=Reals,bounds=(0,0.25),initialize=0)
m.x1337 = Var(within=Reals,bounds=(0,0.125),initialize=0)
m.x1338 = Var(within=Reals,bounds=(0,0.25),initialize=0)
m.x1339 = Var(within=Reals,bounds=(0,0.125),initialize=0)
m.x1340 = Var(within=Reals,bounds=(0,0.25),initialize=0)
m.x1341 = Var(within=Reals,bounds=(0,0.125),initialize=0)
m.x1342 = Var(within=Reals,bounds=(0,0.25),initialize=0)
m.x1343 = Var(within=Reals,bounds=(0,0.125),initialize=0)
m.x1344 = Var(within=Reals,bounds=(0,0.25),initialize=0)
m.x1345 = Var(within=Reals,bounds=(0,0.125),initialize=0)
m.x1346 = Var(within=Reals,bounds=(0,0.25),initialize=0)
m.x1347 = Var(within=Reals,bounds=(0,0.125),initialize=0)
m.x1348 = Var(within=Reals,bounds=(0,0.25),initialize=0)
m.x1349 = Var(within=Reals,bounds=(0,0.125),initialize=0)
m.x1350 = Var(within=Reals,bounds=(0,0.25),initialize=0)
m.x1351 = Var(within=Reals,bounds=(0,0.125),initialize=0)
m.x1352 = Var(within=Reals,bounds=(0,0.49),initialize=0)
m.x1353 = Var(within=Reals,bounds=(0,0.343),initialize=0)
m.x1354 = Var(within=Reals,bounds=(0,0.49),initialize=0)
m.x1355 = Var(within=Reals,bounds=(0,0.343),initialize=0)
m.x1356 = Var(within=Reals,bounds=(0,0.49),initialize=0)
m.x1357 = Var(within=Reals,bounds=(0,0.343),initialize=0)
m.x1358 = Var(within=Reals,bounds=(0,0.49),initialize=0)
m.x1359 = Var(within=Reals,bounds=(0,0.343),initialize=0)
m.x1360 = Var(within=Reals,bounds=(0,0.49),initialize=0)
m.x1361 = Var(within=Reals,bounds=(0,0.343),initialize=0)
m.x1362 = Var(within=Reals,bounds=(0,0.49),initialize=0)
m.x1363 = Var(within=Reals,bounds=(0,0.343),initialize=0)
m.x1364 = Var(within=Reals,bounds=(0,0.49),initialize=0)
m.x1365 = Var(within=Reals,bounds=(0,0.343),initialize=0)
m.x1366 = Var(within=Reals,bounds=(0,0.49),initialize=0)
m.x1367 = Var(within=Reals,bounds=(0,0.343),initialize=0)
m.x1368 = Var(within=Reals,bounds=(0,0.49),initialize=0)
m.x1369 = Var(within=Reals,bounds=(0,0.343),initialize=0)
m.x1370 = Var(within=Reals,bounds=(0,0.49),initialize=0)
m.x1371 = Var(within=Reals,bounds=(0,0.343),initialize=0)
m.x1372 = Var(within=Reals,bounds=(0,0.49),initialize=0)
m.x1373 = Var(within=Reals,bounds=(0,0.343),initialize=0)
m.x1374 = Var(within=Reals,bounds=(0,0.49),initialize=0)
m.x1375 = Var(within=Reals,bounds=(0,0.343),initialize=0)
m.x1376 = Var(within=Reals,bounds=(0,0.49),initialize=0)
m.x1377 = Var(within=Reals,bounds=(0,0.343),initialize=0)
m.x1378 = Var(within=Reals,bounds=(0,0.49),initialize=0)
m.x1379 = Var(within=Reals,bounds=(0,0.343),initialize=0)
m.x1380 = Var(within=Reals,bounds=(0,0.49),initialize=0)
m.x1381 = Var(within=Reals,bounds=(0,0.343),initialize=0)
m.x1382 = Var(within=Reals,bounds=(0,0.49),initialize=0)
m.x1383 = Var(within=Reals,bounds=(0,0.343),initialize=0)
m.x1384 = Var(within=Reals,bounds=(0,0.49),initialize=0)
m.x1385 = Var(within=Reals,bounds=(0,0.343),initialize=0)
m.x1386 = Var(within=Reals,bounds=(0,0.49),initialize=0)
m.x1387 = Var(within=Reals,bounds=(0,0.343),initialize=0)
m.x1388 = Var(within=Reals,bounds=(0,0.3364),initialize=0)
m.x1389 = Var(within=Reals,bounds=(0,0.195112),initialize=0)
m.x1390 = Var(within=Reals,bounds=(0,0.3364),initialize=0)
m.x1391 = Var(within=Reals,bounds=(0,0.195112),initialize=0)
m.x1392 = Var(within=Reals,bounds=(0,0.3364),initialize=0)
m.x1393 = Var(within=Reals,bounds=(0,0.195112),initialize=0)
m.x1394 = Var(within=Reals,bounds=(0,0.3364),initialize=0)
m.x1395 = Var(within=Reals,bounds=(0,0.195112),initialize=0)
m.x1396 = Var(within=Reals,bounds=(0,0.3364),initialize=0)
m.x1397 = Var(within=Reals,bounds=(0,0.195112),initialize=0)
m.x1398 = Var(within=Reals,bounds=(0,0.3364),initialize=0)
m.x1399 = Var(within=Reals,bounds=(0,0.195112),initialize=0)
m.x1400 = Var(within=Reals,bounds=(0,0.3364),initialize=0)
m.x1401 = Var(within=Reals,bounds=(0,0.195112),initialize=0)
m.x1402 = Var(within=Reals,bounds=(0,0.3364),initialize=0)
m.x1403 = Var(within=Reals,bounds=(0,0.195112),initialize=0)
m.x1404 = Var(within=Reals,bounds=(0,0.3364),initialize=0)
m.x1405 = Var(within=Reals,bounds=(0,0.195112),initialize=0)
m.x1406 = Var(within=Reals,bounds=(0,0.3364),initialize=0)
m.x1407 = Var(within=Reals,bounds=(0,0.195112),initialize=0)
m.x1408 = Var(within=Reals,bounds=(0,0.3364),initialize=0)
m.x1409 = Var(within=Reals,bounds=(0,0.195112),initialize=0)
m.x1410 = Var(within=Reals,bounds=(0,0.3364),initialize=0)
m.x1411 = Var(within=Reals,bounds=(0,0.195112),initialize=0)
m.x1412 = Var(within=Reals,bounds=(0,0.3364),initialize=0)
m.x1413 = Var(within=Reals,bounds=(0,0.195112),initialize=0)
m.x1414 = Var(within=Reals,bounds=(0,0.3364),initialize=0)
m.x1415 = Var(within=Reals,bounds=(0,0.195112),initialize=0)
m.x1416 = Var(within=Reals,bounds=(0,0.3364),initialize=0)
m.x1417 = Var(within=Reals,bounds=(0,0.195112),initialize=0)
m.x1418 = Var(within=Reals,bounds=(0,0.3364),initialize=0)
m.x1419 = Var(within=Reals,bounds=(0,0.195112),initialize=0)
m.x1420 = Var(within=Reals,bounds=(0,0.3364),initialize=0)
m.x1421 = Var(within=Reals,bounds=(0,0.195112),initialize=0)
m.x1422 = Var(within=Reals,bounds=(0,0.3364),initialize=0)
m.x1423 = Var(within=Reals,bounds=(0,0.195112),initialize=0)
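# The variables below carry strictly positive lower bounds and are initialized at
# those bounds (e.g. bounds=(0.36,1) with initialize=0.36).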
m.x1424 = Var(within=Reals,bounds=(0.36,1),initialize=0.36)
m.x1425 = Var(within=Reals,bounds=(0.216,1),initialize=0.216)
m.x1426 = Var(within=Reals,bounds=(0.36,1),initialize=0.36)
m.x1427 = Var(within=Reals,bounds=(0.216,1),initialize=0.216)
m.x1428 = Var(within=Reals,bounds=(0.36,1),initialize=0.36)
m.x1429 = Var(within=Reals,bounds=(0.216,1),initialize=0.216)
m.x1430 = Var(within=Reals,bounds=(0.36,1),initialize=0.36)
m.x1431 = Var(within=Reals,bounds=(0.216,1),initialize=0.216)
m.x1432 = Var(within=Reals,bounds=(0.36,1),initialize=0.36)
m.x1433 = Var(within=Reals,bounds=(0.216,1),initialize=0.216)
m.x1434 = Var(within=Reals,bounds=(0.36,1),initialize=0.36)
m.x1435 = Var(within=Reals,bounds=(0.216,1),initialize=0.216)
m.x1436 = Var(within=Reals,bounds=(0.36,1),initialize=0.36)
m.x1437 = Var(within=Reals,bounds=(0.216,1),initialize=0.216)
m.x1438 = Var(within=Reals,bounds=(0.36,1),initialize=0.36)
m.x1439 = Var(within=Reals,bounds=(0.216,1),initialize=0.216)
m.x1440 = Var(within=Reals,bounds=(0.36,1),initialize=0.36)
m.x1441 = Var(within=Reals,bounds=(0.216,1),initialize=0.216)
m.x1442 = Var(within=Reals,bounds=(0.64,1),initialize=0.64)
m.x1443 = Var(within=Reals,bounds=(0.512,1),initialize=0.512)
m.x1444 = Var(within=Reals,bounds=(0.64,1),initialize=0.64)
m.x1445 = Var(within=Reals,bounds=(0.512,1),initialize=0.512)
m.x1446 = Var(within=Reals,bounds=(0.64,1),initialize=0.64)
m.x1447 = Var(within=Reals,bounds=(0.512,1),initialize=0.512)
m.x1448 = Var(within=Reals,bounds=(0.64,1),initialize=0.64)
m.x1449 = Var(within=Reals,bounds=(0.512,1),initialize=0.512)
m.x1450 = Var(within=Reals,bounds=(0.64,1),initialize=0.64)
m.x1451 = Var(within=Reals,bounds=(0.512,1),initialize=0.512)
m.x1452 = Var(within=Reals,bounds=(0.64,1),initialize=0.64)
m.x1453 = Var(within=Reals,bounds=(0.512,1),initialize=0.512)
m.x1454 = Var(within=Reals,bounds=(0.64,1),initialize=0.64)
m.x1455 = Var(within=Reals,bounds=(0.512,1),initialize=0.512)
m.x1456 = Var(within=Reals,bounds=(0.64,1),initialize=0.64)
m.x1457 = Var(within=Reals,bounds=(0.512,1),initialize=0.512)
m.x1458 = Var(within=Reals,bounds=(0.64,1),initialize=0.64)
m.x1459 = Var(within=Reals,bounds=(0.512,1),initialize=0.512)
m.x1460 = Var(within=Reals,bounds=(0.7225,1),initialize=0.7225)
m.x1461 = Var(within=Reals,bounds=(0.614125,1),initialize=0.614125)
m.x1462 = Var(within=Reals,bounds=(0.7225,1),initialize=0.7225)
m.x1463 = Var(within=Reals,bounds=(0.614125,1),initialize=0.614125)
m.x1464 = Var(within=Reals,bounds=(0.7225,1),initialize=0.7225)
m.x1465 = Var(within=Reals,bounds=(0.614125,1),initialize=0.614125)
m.x1466 = Var(within=Reals,bounds=(0.7225,1),initialize=0.7225)
m.x1467 = Var(within=Reals,bounds=(0.614125,1),initialize=0.614125)
m.x1468 = Var(within=Reals,bounds=(0.7225,1),initialize=0.7225)
m.x1469 = Var(within=Reals,bounds=(0.614125,1),initialize=0.614125)
m.x1470 = Var(within=Reals,bounds=(0.7225,1),initialize=0.7225)
m.x1471 = Var(within=Reals,bounds=(0.614125,1),initialize=0.614125)
m.x1472 = Var(within=Reals,bounds=(0.7225,1),initialize=0.7225)
m.x1473 = Var(within=Reals,bounds=(0.614125,1),initialize=0.614125)
m.x1474 = Var(within=Reals,bounds=(0.7225,1),initialize=0.7225)
m.x1475 = Var(within=Reals,bounds=(0.614125,1),initialize=0.614125)
m.x1476 = Var(within=Reals,bounds=(0.7225,1),initialize=0.7225)
m.x1477 = Var(within=Reals,bounds=(0.614125,1),initialize=0.614125)
m.x1478 = Var(within=Reals,bounds=(0.49,1),initialize=0.49)
m.x1479 = Var(within=Reals,bounds=(0.343,1),initialize=0.343)
m.x1480 = Var(within=Reals,bounds=(0.49,1),initialize=0.49)
m.x1481 = Var(within=Reals,bounds=(0.343,1),initialize=0.343)
m.x1482 = Var(within=Reals,bounds=(0.49,1),initialize=0.49)
m.x1483 = Var(within=Reals,bounds=(0.343,1),initialize=0.343)
m.x1484 = Var(within=Reals,bounds=(0.49,1),initialize=0.49)
m.x1485 = Var(within=Reals,bounds=(0.343,1),initialize=0.343)
m.x1486 = Var(within=Reals,bounds=(0.49,1),initialize=0.49)
m.x1487 = Var(within=Reals,bounds=(0.343,1),initialize=0.343)
m.x1488 = Var(within=Reals,bounds=(0.49,1),initialize=0.49)
m.x1489 = Var(within=Reals,bounds=(0.343,1),initialize=0.343)
m.x1490 = Var(within=Reals,bounds=(0.49,1),initialize=0.49)
m.x1491 = Var(within=Reals,bounds=(0.343,1),initialize=0.343)
m.x1492 = Var(within=Reals,bounds=(0.49,1),initialize=0.49)
m.x1493 = Var(within=Reals,bounds=(0.343,1),initialize=0.343)
m.x1494 = Var(within=Reals,bounds=(0.49,1),initialize=0.49)
m.x1495 = Var(within=Reals,bounds=(0.343,1),initialize=0.343)
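# Objective: minimize the sum of the selected x-variables enumerated below.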
m.obj = Objective(expr= m.x834 + m.x835 + m.x840 + m.x849 + m.x850 + m.x855 + m.x860 + m.x865 + m.x870 + m.x879
+ m.x880 + m.x885 + m.x890 + m.x895 + m.x900 + m.x905 + m.x914 + m.x915 + m.x920 + m.x925
+ m.x930 + m.x935 + m.x940 + m.x945 + m.x952 + m.x959 + m.x960 + m.x965 + m.x970 + m.x978
+ m.x980 + m.x985 + m.x990 + m.x995 + m.x1000 + m.x1005 + m.x1010 + m.x1015 + m.x1020 + m.x1025
+ m.x1030 + m.x1035 + m.x1040 + m.x1045 + m.x1050 + m.x1055 + m.x1060 + m.x1065 + m.x1070
+ m.x1075 + m.x1080 + m.x1085 + m.x1090 + m.x1095 + m.x1100 + m.x1105 + m.x1110 + m.x1115
+ m.x1120 + m.x1125 + m.x1130 + m.x1135 + m.x1140 + m.x1149 + m.x1150 + m.x1155 + m.x1160
+ m.x1165 + m.x1170 + m.x1175 + m.x1180 + m.x1185 + m.x1194 + m.x1195 + m.x1200 + m.x1205
+ m.x1210 + m.x1215 + m.x1220 + m.x1225 + m.x1230, sense=minimize)
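# Constraints. c2-c82 are four-term linear equalities with fixed numeric coefficients;
# later blocks couple the continuous variables to the binaries b2-b82 (see c189 onward).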
m.c2 = Constraint(expr= m.x417 + 37.5407324*m.x419 - 57.2814121*m.x421 + 27.42831624*m.x423 == 0)
m.c3 = Constraint(expr= m.x425 + 27.42831624*m.x427 - 57.2814121*m.x429 + 37.5407324*m.x431 == 0)
m.c4 = Constraint(expr= m.x433 - 57.2814121*m.x435 + 37.5407324*m.x437 + 27.42831624*m.x439 == 0)
m.c5 = Constraint(expr= m.x441 - 57.2814121*m.x443 + 37.5407324*m.x445 + 27.42831624*m.x447 == 0)
m.c6 = Constraint(expr= m.x449 - 57.2814121*m.x451 + 37.5407324*m.x453 + 27.42831624*m.x455 == 0)
m.c7 = Constraint(expr= m.x457 + 27.42831624*m.x459 - 57.2814121*m.x461 + 37.5407324*m.x463 == 0)
m.c8 = Constraint(expr= m.x465 - 57.2814121*m.x467 + 27.42831624*m.x469 + 37.5407324*m.x471 == 0)
m.c9 = Constraint(expr= m.x473 - 57.2814121*m.x475 + 37.5407324*m.x477 + 27.42831624*m.x479 == 0)
m.c10 = Constraint(expr= m.x481 + 27.42831624*m.x483 - 57.2814121*m.x485 + 37.5407324*m.x487 == 0)
m.c11 = Constraint(expr= - 57.2814121*m.x421 + m.x489 + 37.5407324*m.x491 + 27.42831624*m.x493 == 0)
m.c12 = Constraint(expr= - 57.2814121*m.x429 + m.x495 + 37.5407324*m.x497 + 27.42831624*m.x499 == 0)
m.c13 = Constraint(expr= - 57.2814121*m.x435 + m.x501 + 37.5407324*m.x503 + 27.42831624*m.x505 == 0)
m.c14 = Constraint(expr= - 57.2814121*m.x443 + m.x507 + 37.5407324*m.x509 + 27.42831624*m.x511 == 0)
m.c15 = Constraint(expr= - 57.2814121*m.x451 + m.x513 + 27.42831624*m.x515 + 37.5407324*m.x517 == 0)
m.c16 = Constraint(expr= - 57.2814121*m.x461 + m.x519 + 37.5407324*m.x521 + 27.42831624*m.x523 == 0)
m.c17 = Constraint(expr= - 57.2814121*m.x467 + m.x525 + 37.5407324*m.x527 + 27.42831624*m.x529 == 0)
m.c18 = Constraint(expr= - 57.2814121*m.x475 + m.x531 + 27.42831624*m.x533 + 37.5407324*m.x535 == 0)
m.c19 = Constraint(expr= - 57.2814121*m.x485 + m.x537 + 37.5407324*m.x539 + 27.42831624*m.x541 == 0)
m.c20 = Constraint(expr= - 57.2814121*m.x421 + m.x543 + 37.5407324*m.x545 + 27.42831624*m.x547 == 0)
m.c21 = Constraint(expr= - 57.2814121*m.x429 + m.x549 + 27.42831624*m.x551 + 37.5407324*m.x553 == 0)
m.c22 = Constraint(expr= - 57.2814121*m.x435 + m.x555 + 27.42831624*m.x557 + 37.5407324*m.x559 == 0)
m.c23 = Constraint(expr= - 57.2814121*m.x443 + m.x561 + 37.5407324*m.x563 + 27.42831624*m.x565 == 0)
m.c24 = Constraint(expr= - 57.2814121*m.x451 + m.x567 + 27.42831624*m.x569 + 37.5407324*m.x571 == 0)
m.c25 = Constraint(expr= - 57.2814121*m.x461 + m.x573 + 27.42831624*m.x575 + 37.5407324*m.x577 == 0)
m.c26 = Constraint(expr= m.x83 + 37.5407324*m.x84 + 27.42831624*m.x85 - 57.2814121*m.x467 == 0)
m.c27 = Constraint(expr= m.x86 + 27.42831624*m.x87 + 37.5407324*m.x88 - 57.2814121*m.x475 == 0)
m.c28 = Constraint(expr= m.x89 + 37.5407324*m.x90 + 27.42831624*m.x91 - 57.2814121*m.x485 == 0)
m.c29 = Constraint(expr= m.x92 + 43.14087708*m.x93 - 76.45219958*m.x94 + 50.37356589*m.x95 == 0)
m.c30 = Constraint(expr= m.x96 + 50.37356589*m.x97 - 76.45219958*m.x98 + 43.14087708*m.x99 == 0)
m.c31 = Constraint(expr= m.x100 + 43.14087708*m.x101 - 76.45219958*m.x102 + 50.37356589*m.x103 == 0)
m.c32 = Constraint(expr= m.x104 + 43.14087708*m.x105 - 76.45219958*m.x106 + 50.37356589*m.x107 == 0)
m.c33 = Constraint(expr= m.x108 - 76.45219958*m.x109 + 50.37356589*m.x110 + 43.14087708*m.x111 == 0)
m.c34 = Constraint(expr= m.x112 + 50.37356589*m.x113 - 76.45219958*m.x114 + 43.14087708*m.x115 == 0)
m.c35 = Constraint(expr= m.x116 + 43.14087708*m.x117 - 76.45219958*m.x118 + 50.37356589*m.x119 == 0)
m.c36 = Constraint(expr= m.x120 + 43.14087708*m.x121 - 76.45219958*m.x122 + 50.37356589*m.x123 == 0)
m.c37 = Constraint(expr= m.x124 + 50.37356589*m.x125 + 43.14087708*m.x126 - 76.45219958*m.x127 == 0)
m.c38 = Constraint(expr= - 76.45219958*m.x94 + m.x128 + 43.14087708*m.x129 + 50.37356589*m.x130 == 0)
m.c39 = Constraint(expr= - 76.45219958*m.x98 + m.x131 + 50.37356589*m.x132 + 43.14087708*m.x133 == 0)
m.c40 = Constraint(expr= - 76.45219958*m.x102 + m.x134 + 43.14087708*m.x135 + 50.37356589*m.x136 == 0)
m.c41 = Constraint(expr= - 76.45219958*m.x106 + m.x137 + 43.14087708*m.x138 + 50.37356589*m.x139 == 0)
m.c42 = Constraint(expr= - 76.45219958*m.x109 + m.x140 + 50.37356589*m.x141 + 43.14087708*m.x142 == 0)
m.c43 = Constraint(expr= - 76.45219958*m.x114 + m.x143 + 50.37356589*m.x144 + 43.14087708*m.x145 == 0)
m.c44 = Constraint(expr= - 76.45219958*m.x118 + m.x146 + 43.14087708*m.x147 + 50.37356589*m.x148 == 0)
m.c45 = Constraint(expr= - 76.45219958*m.x122 + m.x149 + 43.14087708*m.x150 + 50.37356589*m.x151 == 0)
m.c46 = Constraint(expr= - 76.45219958*m.x127 + m.x152 + 50.37356589*m.x153 + 43.14087708*m.x154 == 0)
m.c47 = Constraint(expr= m.x155 - 69.39622571*m.x156 + 58.31011875*m.x157 - 25.39911174*m.x158 == 0)
m.c48 = Constraint(expr= m.x159 + 58.31011875*m.x160 - 25.39911174*m.x161 - 69.39622571*m.x162 == 0)
m.c49 = Constraint(expr= m.x163 - 25.39911174*m.x164 - 69.39622571*m.x165 + 58.31011875*m.x166 == 0)
m.c50 = Constraint(expr= m.x167 - 25.39911174*m.x168 - 69.39622571*m.x169 + 58.31011875*m.x170 == 0)
m.c51 = Constraint(expr= m.x171 - 69.39622571*m.x172 + 58.31011875*m.x173 - 25.39911174*m.x174 == 0)
m.c52 = Constraint(expr= m.x175 - 25.39911174*m.x176 - 69.39622571*m.x177 + 58.31011875*m.x178 == 0)
m.c53 = Constraint(expr= m.x179 - 69.39622571*m.x180 + 58.31011875*m.x181 - 25.39911174*m.x182 == 0)
m.c54 = Constraint(expr= m.x183 - 69.39622571*m.x184 + 58.31011875*m.x185 - 25.39911174*m.x186 == 0)
m.c55 = Constraint(expr= m.x187 - 69.39622571*m.x188 + 58.31011875*m.x189 - 25.39911174*m.x190 == 0)
m.c56 = Constraint(expr= - 69.39622571*m.x156 + m.x191 - 25.39911174*m.x192 + 58.31011875*m.x193 == 0)
m.c57 = Constraint(expr= - 69.39622571*m.x162 + m.x194 - 25.39911174*m.x195 + 58.31011875*m.x196 == 0)
m.c58 = Constraint(expr= - 69.39622571*m.x165 + m.x197 + 58.31011875*m.x198 - 25.39911174*m.x199 == 0)
m.c59 = Constraint(expr= - 69.39622571*m.x169 + m.x200 + 58.31011875*m.x201 - 25.39911174*m.x202 == 0)
m.c60 = Constraint(expr= - 69.39622571*m.x172 + m.x203 + 58.31011875*m.x204 - 25.39911174*m.x205 == 0)
m.c61 = Constraint(expr= - 69.39622571*m.x177 + m.x206 - 25.39911174*m.x207 + 58.31011875*m.x208 == 0)
m.c62 = Constraint(expr= - 69.39622571*m.x180 + m.x209 - 25.39911174*m.x210 + 58.31011875*m.x211 == 0)
m.c63 = Constraint(expr= - 69.39622571*m.x184 + m.x212 - 25.39911174*m.x213 + 58.31011875*m.x214 == 0)
m.c64 = Constraint(expr= - 69.39622571*m.x188 + m.x215 + 58.31011875*m.x216 - 25.39911174*m.x217 == 0)
m.c65 = Constraint(expr= m.x218 - 34.92732674*m.x219 - 2.03724124*m.x220 + 63.61644904*m.x221 == 0)
m.c66 = Constraint(expr= m.x222 + 63.61644904*m.x223 - 34.92732674*m.x224 - 2.03724124*m.x225 == 0)
m.c67 = Constraint(expr= m.x226 - 34.92732674*m.x227 - 2.03724124*m.x228 + 63.61644904*m.x229 == 0)
m.c68 = Constraint(expr= m.x230 + 63.61644904*m.x231 - 2.03724124*m.x232 - 34.92732674*m.x233 == 0)
m.c69 = Constraint(expr= m.x234 + 63.61644904*m.x235 - 34.92732674*m.x236 - 2.03724124*m.x237 == 0)
m.c70 = Constraint(expr= m.x238 + 63.61644904*m.x239 - 34.92732674*m.x240 - 2.03724124*m.x241 == 0)
m.c71 = Constraint(expr= m.x242 - 2.03724124*m.x243 - 34.92732674*m.x244 + 63.61644904*m.x245 == 0)
m.c72 = Constraint(expr= m.x246 + 63.61644904*m.x247 - 34.92732674*m.x248 - 2.03724124*m.x249 == 0)
m.c73 = Constraint(expr= m.x250 - 34.92732674*m.x251 - 2.03724124*m.x252 + 63.61644904*m.x253 == 0)
m.c74 = Constraint(expr= - 34.92732674*m.x219 + m.x254 + 63.61644904*m.x255 - 2.03724124*m.x256 == 0)
m.c75 = Constraint(expr= - 34.92732674*m.x224 + m.x257 + 63.61644904*m.x258 - 2.03724124*m.x259 == 0)
m.c76 = Constraint(expr= - 34.92732674*m.x227 + m.x260 - 2.03724124*m.x261 + 63.61644904*m.x262 == 0)
m.c77 = Constraint(expr= - 34.92732674*m.x233 + m.x263 - 2.03724124*m.x264 + 63.61644904*m.x265 == 0)
m.c78 = Constraint(expr= - 34.92732674*m.x236 + m.x266 + 63.61644904*m.x267 - 2.03724124*m.x268 == 0)
m.c79 = Constraint(expr= - 34.92732674*m.x240 + m.x269 + 63.61644904*m.x270 - 2.03724124*m.x271 == 0)
m.c80 = Constraint(expr= - 34.92732674*m.x244 + m.x272 - 2.03724124*m.x273 + 63.61644904*m.x274 == 0)
m.c81 = Constraint(expr= - 34.92732674*m.x248 + m.x275 - 2.03724124*m.x276 + 63.61644904*m.x277 == 0)
m.c82 = Constraint(expr= - 34.92732674*m.x251 + m.x278 + 63.61644904*m.x279 - 2.03724124*m.x280 == 0)
m.c83 = Constraint(expr= m.x281 + m.x282 + m.x283 + m.x284 + m.x285 + m.x286 + m.x287 + m.x288 + m.x289
>= 3.723333333)
m.c84 = Constraint(expr= - m.x290 + m.x291 == 0)
m.c85 = Constraint(expr= - m.x292 + m.x293 == 0)
m.c86 = Constraint(expr= - m.x294 + m.x295 == 0)
m.c87 = Constraint(expr= - m.x296 + m.x297 == 0)
m.c88 = Constraint(expr= - m.x298 + m.x299 == 0)
m.c89 = Constraint(expr= - m.x300 + m.x301 == 0)
m.c90 = Constraint(expr= - m.x302 + m.x303 == 0)
m.c91 = Constraint(expr= - m.x304 + m.x305 == 0)
m.c92 = Constraint(expr= - m.x306 + m.x307 == 0)
m.c93 = Constraint(expr= - m.x308 + m.x309 == 0)
m.c94 = Constraint(expr= - m.x310 + m.x311 == 0)
m.c95 = Constraint(expr= - m.x312 + m.x313 == 0)
m.c96 = Constraint(expr= - m.x314 + m.x315 == 0)
m.c97 = Constraint(expr= - m.x316 + m.x317 == 0)
m.c98 = Constraint(expr= - m.x318 + m.x319 == 0)
m.c99 = Constraint(expr= - m.x320 + m.x321 == 0)
m.c100 = Constraint(expr= - m.x322 + m.x323 == 0)
m.c101 = Constraint(expr= - m.x324 + m.x325 == 0)
m.c102 = Constraint(expr= m.x308 - m.x326 == 0)
m.c103 = Constraint(expr= m.x310 - m.x327 == 0)
m.c104 = Constraint(expr= m.x312 - m.x328 == 0)
m.c105 = Constraint(expr= m.x314 - m.x329 == 0)
m.c106 = Constraint(expr= m.x316 - m.x330 == 0)
m.c107 = Constraint(expr= m.x318 - m.x331 == 0)
m.c108 = Constraint(expr= m.x320 - m.x332 == 0)
m.c109 = Constraint(expr= m.x322 - m.x333 == 0)
m.c110 = Constraint(expr= m.x324 - m.x334 == 0)
m.c111 = Constraint(expr= - m.x335 + m.x336 == 0)
m.c112 = Constraint(expr= - m.x337 + m.x338 == 0)
m.c113 = Constraint(expr= - m.x339 + m.x340 == 0)
m.c114 = Constraint(expr= - m.x341 + m.x342 == 0)
m.c115 = Constraint(expr= - m.x343 + m.x344 == 0)
m.c116 = Constraint(expr= - m.x345 + m.x346 == 0)
m.c117 = Constraint(expr= - m.x347 + m.x348 == 0)
m.c118 = Constraint(expr= - m.x349 + m.x350 == 0)
m.c119 = Constraint(expr= - m.x351 + m.x352 == 0)
m.c120 = Constraint(expr= m.x353 == 0.296666667)
m.c121 = Constraint(expr= m.x354 == 0.294444444)
m.c122 = Constraint(expr= m.x355 == 0.283888889)
m.c123 = Constraint(expr= m.x356 == 0.277222222)
m.c124 = Constraint(expr= m.x357 == 0.293333333)
m.c125 = Constraint(expr= m.x358 == 0.306944444)
m.c126 = Constraint(expr= m.x359 == 0.595555556)
m.c127 = Constraint(expr= m.x360 == 0.641388889)
m.c128 = Constraint(expr= m.x361 == 0.733888889)
m.c129 = Constraint(expr= m.x281 - m.x291 == 0)
m.c130 = Constraint(expr= m.x282 - m.x293 == 0)
m.c131 = Constraint(expr= m.x283 - m.x295 == 0)
m.c132 = Constraint(expr= m.x284 - m.x297 == 0)
m.c133 = Constraint(expr= m.x285 - m.x299 == 0)
m.c134 = Constraint(expr= m.x286 - m.x301 == 0)
m.c135 = Constraint(expr= m.x287 - m.x303 == 0)
m.c136 = Constraint(expr= m.x288 - m.x305 == 0)
m.c137 = Constraint(expr= m.x289 - m.x307 == 0)
m.c138 = Constraint(expr= 3600*m.x290 - 3600*m.x309 + 1800*m.x362 - 1800*m.x363 == 0)
m.c139 = Constraint(expr= 3600*m.x292 - 3600*m.x311 + 1800*m.x364 - 1800*m.x365 == 0)
m.c140 = Constraint(expr= 3600*m.x294 - 3600*m.x313 + 1800*m.x366 - 1800*m.x367 == 0)
m.c141 = Constraint(expr= 3600*m.x296 - 3600*m.x315 + 1800*m.x368 - 1800*m.x369 == 0)
m.c142 = Constraint(expr= 3600*m.x298 - 3600*m.x317 + 1800*m.x370 - 1800*m.x371 == 0)
m.c143 = Constraint(expr= 3600*m.x300 - 3600*m.x319 + 1800*m.x372 - 1800*m.x373 == 0)
m.c144 = Constraint(expr= 3600*m.x302 - 3600*m.x321 + 1800*m.x374 - 1800*m.x375 == 0)
m.c145 = Constraint(expr= 3600*m.x304 - 3600*m.x323 + 1800*m.x376 - 1800*m.x377 == 0)
m.c146 = Constraint(expr= 3600*m.x306 - 3600*m.x325 + 1800*m.x378 - 1800*m.x379 == 0)
m.c147 = Constraint(expr= 3600*m.x326 - 3600*m.x336 + 720*m.x380 - 720*m.x381 == 0)
m.c148 = Constraint(expr= 3600*m.x327 - 3600*m.x338 + 720*m.x382 - 720*m.x383 == 0)
m.c149 = Constraint(expr= 3600*m.x328 - 3600*m.x340 + 720*m.x384 - 720*m.x385 == 0)
m.c150 = Constraint(expr= 3600*m.x329 - 3600*m.x342 + 720*m.x386 - 720*m.x387 == 0)
m.c151 = Constraint(expr= 3600*m.x330 - 3600*m.x344 + 720*m.x388 - 720*m.x389 == 0)
m.c152 = Constraint(expr= 3600*m.x331 - 3600*m.x346 + 720*m.x390 - 720*m.x391 == 0)
m.c153 = Constraint(expr= 3600*m.x332 - 3600*m.x348 + 720*m.x392 - 720*m.x393 == 0)
m.c154 = Constraint(expr= 3600*m.x333 - 3600*m.x350 + 720*m.x394 - 720*m.x395 == 0)
m.c155 = Constraint(expr= 3600*m.x334 - 3600*m.x352 + 720*m.x396 - 720*m.x397 == 0)
m.c156 = Constraint(expr= 3600*m.x335 - 3600*m.x353 + 1600*m.x398 - 1600*m.x399 == 0)
m.c157 = Constraint(expr= 3600*m.x337 - 3600*m.x354 + 1600*m.x400 - 1600*m.x401 == 0)
m.c158 = Constraint(expr= 3600*m.x339 - 3600*m.x355 + 1600*m.x402 - 1600*m.x403 == 0)
m.c159 = Constraint(expr= 3600*m.x341 - 3600*m.x356 + 1600*m.x404 - 1600*m.x405 == 0)
m.c160 = Constraint(expr= 3600*m.x343 - 3600*m.x357 + 1600*m.x406 - 1600*m.x407 == 0)
m.c161 = Constraint(expr= 3600*m.x345 - 3600*m.x358 + 1600*m.x408 - 1600*m.x409 == 0)
m.c162 = Constraint(expr= 3600*m.x347 - 3600*m.x359 + 1600*m.x410 - 1600*m.x411 == 0)
m.c163 = Constraint(expr= 3600*m.x349 - 3600*m.x360 + 1600*m.x412 - 1600*m.x413 == 0)
m.c164 = Constraint(expr= 3600*m.x351 - 3600*m.x361 + 1600*m.x414 - 1600*m.x415 == 0)
m.c165 = Constraint(expr= - m.x363 + m.x364 == 0)
m.c166 = Constraint(expr= - m.x365 + m.x366 == 0)
m.c167 = Constraint(expr= - m.x367 + m.x368 == 0)
m.c168 = Constraint(expr= - m.x369 + m.x370 == 0)
m.c169 = Constraint(expr= - m.x371 + m.x372 == 0)
m.c170 = Constraint(expr= - m.x373 + m.x374 == 0)
m.c171 = Constraint(expr= - m.x375 + m.x376 == 0)
m.c172 = Constraint(expr= - m.x377 + m.x378 == 0)
m.c173 = Constraint(expr= - m.x381 + m.x382 == 0)
m.c174 = Constraint(expr= - m.x383 + m.x384 == 0)
m.c175 = Constraint(expr= - m.x385 + m.x386 == 0)
m.c176 = Constraint(expr= - m.x387 + m.x388 == 0)
m.c177 = Constraint(expr= - m.x389 + m.x390 == 0)
m.c178 = Constraint(expr= - m.x391 + m.x392 == 0)
m.c179 = Constraint(expr= - m.x393 + m.x394 == 0)
m.c180 = Constraint(expr= - m.x395 + m.x396 == 0)
m.c181 = Constraint(expr= - m.x399 + m.x400 == 0)
m.c182 = Constraint(expr= - m.x401 + m.x402 == 0)
m.c183 = Constraint(expr= - m.x403 + m.x404 == 0)
m.c184 = Constraint(expr= - m.x405 + m.x406 == 0)
m.c185 = Constraint(expr= - m.x407 + m.x408 == 0)
m.c186 = Constraint(expr= - m.x409 + m.x410 == 0)
m.c187 = Constraint(expr= - m.x411 + m.x412 == 0)
m.c188 = Constraint(expr= - m.x413 + m.x414 == 0)
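# c189-c350 appear to be semicontinuous-style linking constraints: each pairs a lower
# bound x >= l*b (c189-c269) with an upper bound x <= u*b (c270-c350), so x is forced
# to 0 when its binary is off and into [l, u] when it is on.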
m.c189 = Constraint(expr= - 0.2*m.b2 + m.x416 >= 0)
m.c190 = Constraint(expr= - 0.2*m.b3 + m.x418 >= 0)
m.c191 = Constraint(expr= - 0.2*m.b4 + m.x420 >= 0)
m.c192 = Constraint(expr= - 0.2*m.b5 + m.x422 >= 0)
m.c193 = Constraint(expr= - 0.2*m.b6 + m.x424 >= 0)
m.c194 = Constraint(expr= - 0.2*m.b7 + m.x426 >= 0)
m.c195 = Constraint(expr= - 0.2*m.b8 + m.x428 >= 0)
m.c196 = Constraint(expr= - 0.2*m.b9 + m.x430 >= 0)
m.c197 = Constraint(expr= - 0.2*m.b10 + m.x432 >= 0)
m.c198 = Constraint(expr= - 0.2*m.b11 + m.x434 >= 0)
m.c199 = Constraint(expr= - 0.2*m.b12 + m.x436 >= 0)
m.c200 = Constraint(expr= - 0.2*m.b13 + m.x438 >= 0)
m.c201 = Constraint(expr= - 0.2*m.b14 + m.x440 >= 0)
m.c202 = Constraint(expr= - 0.2*m.b15 + m.x442 >= 0)
m.c203 = Constraint(expr= - 0.2*m.b16 + m.x444 >= 0)
m.c204 = Constraint(expr= - 0.2*m.b17 + m.x446 >= 0)
m.c205 = Constraint(expr= - 0.2*m.b18 + m.x448 >= 0)
m.c206 = Constraint(expr= - 0.2*m.b19 + m.x450 >= 0)
m.c207 = Constraint(expr= - 0.2*m.b20 + m.x452 >= 0)
m.c208 = Constraint(expr= - 0.2*m.b21 + m.x454 >= 0)
m.c209 = Constraint(expr= - 0.2*m.b22 + m.x456 >= 0)
m.c210 = Constraint(expr= - 0.2*m.b23 + m.x458 >= 0)
m.c211 = Constraint(expr= - 0.2*m.b24 + m.x460 >= 0)
m.c212 = Constraint(expr= - 0.2*m.b25 + m.x462 >= 0)
m.c213 = Constraint(expr= - 0.2*m.b26 + m.x464 >= 0)
m.c214 = Constraint(expr= - 0.2*m.b27 + m.x466 >= 0)
m.c215 = Constraint(expr= - 0.2*m.b28 + m.x468 >= 0)
m.c216 = Constraint(expr= - 0.25*m.b29 + m.x470 >= 0)
m.c217 = Constraint(expr= - 0.25*m.b30 + m.x472 >= 0)
m.c218 = Constraint(expr= - 0.25*m.b31 + m.x474 >= 0)
m.c219 = Constraint(expr= - 0.25*m.b32 + m.x476 >= 0)
m.c220 = Constraint(expr= - 0.25*m.b33 + m.x478 >= 0)
m.c221 = Constraint(expr= - 0.25*m.b34 + m.x480 >= 0)
m.c222 = Constraint(expr= - 0.25*m.b35 + m.x482 >= 0)
m.c223 = Constraint(expr= - 0.25*m.b36 + m.x484 >= 0)
m.c224 = Constraint(expr= - 0.25*m.b37 + m.x486 >= 0)
m.c225 = Constraint(expr= - 0.25*m.b38 + m.x488 >= 0)
m.c226 = Constraint(expr= - 0.25*m.b39 + m.x490 >= 0)
m.c227 = Constraint(expr= - 0.25*m.b40 + m.x492 >= 0)
m.c228 = Constraint(expr= - 0.25*m.b41 + m.x494 >= 0)
m.c229 = Constraint(expr= - 0.25*m.b42 + m.x496 >= 0)
m.c230 = Constraint(expr= - 0.25*m.b43 + m.x498 >= 0)
m.c231 = Constraint(expr= - 0.25*m.b44 + m.x500 >= 0)
m.c232 = Constraint(expr= - 0.25*m.b45 + m.x502 >= 0)
m.c233 = Constraint(expr= - 0.25*m.b46 + m.x504 >= 0)
m.c234 = Constraint(expr= - 0.4*m.b47 + m.x506 >= 0)
m.c235 = Constraint(expr= - 0.4*m.b48 + m.x508 >= 0)
m.c236 = Constraint(expr= - 0.4*m.b49 + m.x510 >= 0)
m.c237 = Constraint(expr= - 0.4*m.b50 + m.x512 >= 0)
m.c238 = Constraint(expr= - 0.4*m.b51 + m.x514 >= 0)
m.c239 = Constraint(expr= - 0.4*m.b52 + m.x516 >= 0)
m.c240 = Constraint(expr= - 0.4*m.b53 + m.x518 >= 0)
m.c241 = Constraint(expr= - 0.4*m.b54 + m.x520 >= 0)
m.c242 = Constraint(expr= - 0.4*m.b55 + m.x522 >= 0)
m.c243 = Constraint(expr= - 0.4*m.b56 + m.x524 >= 0)
m.c244 = Constraint(expr= - 0.4*m.b57 + m.x526 >= 0)
m.c245 = Constraint(expr= - 0.4*m.b58 + m.x528 >= 0)
m.c246 = Constraint(expr= - 0.4*m.b59 + m.x530 >= 0)
m.c247 = Constraint(expr= - 0.4*m.b60 + m.x532 >= 0)
m.c248 = Constraint(expr= - 0.4*m.b61 + m.x534 >= 0)
m.c249 = Constraint(expr= - 0.4*m.b62 + m.x536 >= 0)
m.c250 = Constraint(expr= - 0.4*m.b63 + m.x538 >= 0)
m.c251 = Constraint(expr= - 0.4*m.b64 + m.x540 >= 0)
m.c252 = Constraint(expr= - 0.24*m.b65 + m.x542 >= 0)
m.c253 = Constraint(expr= - 0.24*m.b66 + m.x544 >= 0)
m.c254 = Constraint(expr= - 0.24*m.b67 + m.x546 >= 0)
m.c255 = Constraint(expr= - 0.24*m.b68 + m.x548 >= 0)
m.c256 = Constraint(expr= - 0.24*m.b69 + m.x550 >= 0)
m.c257 = Constraint(expr= - 0.24*m.b70 + m.x552 >= 0)
m.c258 = Constraint(expr= - 0.24*m.b71 + m.x554 >= 0)
m.c259 = Constraint(expr= - 0.24*m.b72 + m.x556 >= 0)
m.c260 = Constraint(expr= - 0.24*m.b73 + m.x558 >= 0)
m.c261 = Constraint(expr= - 0.24*m.b74 + m.x560 >= 0)
m.c262 = Constraint(expr= - 0.24*m.b75 + m.x562 >= 0)
m.c263 = Constraint(expr= - 0.24*m.b76 + m.x564 >= 0)
m.c264 = Constraint(expr= - 0.24*m.b77 + m.x566 >= 0)
m.c265 = Constraint(expr= - 0.24*m.b78 + m.x568 >= 0)
m.c266 = Constraint(expr= - 0.24*m.b79 + m.x570 >= 0)
m.c267 = Constraint(expr= - 0.24*m.b80 + m.x572 >= 0)
m.c268 = Constraint(expr= - 0.24*m.b81 + m.x574 >= 0)
m.c269 = Constraint(expr= - 0.24*m.b82 + m.x576 >= 0)
m.c270 = Constraint(expr= - 0.8*m.b2 + m.x416 <= 0)
m.c271 = Constraint(expr= - 0.8*m.b3 + m.x418 <= 0)
m.c272 = Constraint(expr= - 0.8*m.b4 + m.x420 <= 0)
m.c273 = Constraint(expr= - 0.8*m.b5 + m.x422 <= 0)
m.c274 = Constraint(expr= - 0.8*m.b6 + m.x424 <= 0)
m.c275 = Constraint(expr= - 0.8*m.b7 + m.x426 <= 0)
m.c276 = Constraint(expr= - 0.8*m.b8 + m.x428 <= 0)
m.c277 = Constraint(expr= - 0.8*m.b9 + m.x430 <= 0)
m.c278 = Constraint(expr= - 0.8*m.b10 + m.x432 <= 0)
m.c279 = Constraint(expr= - 0.8*m.b11 + m.x434 <= 0)
m.c280 = Constraint(expr= - 0.8*m.b12 + m.x436 <= 0)
m.c281 = Constraint(expr= - 0.8*m.b13 + m.x438 <= 0)
m.c282 = Constraint(expr= - 0.8*m.b14 + m.x440 <= 0)
m.c283 = Constraint(expr= - 0.8*m.b15 + m.x442 <= 0)
m.c284 = Constraint(expr= - 0.8*m.b16 + m.x444 <= 0)
m.c285 = Constraint(expr= - 0.8*m.b17 + m.x446 <= 0)
m.c286 = Constraint(expr= - 0.8*m.b18 + m.x448 <= 0)
m.c287 = Constraint(expr= - 0.8*m.b19 + m.x450 <= 0)
m.c288 = Constraint(expr= - 0.8*m.b20 + m.x452 <= 0)
m.c289 = Constraint(expr= - 0.8*m.b21 + m.x454 <= 0)
m.c290 = Constraint(expr= - 0.8*m.b22 + m.x456 <= 0)
m.c291 = Constraint(expr= - 0.8*m.b23 + m.x458 <= 0)
m.c292 = Constraint(expr= - 0.8*m.b24 + m.x460 <= 0)
m.c293 = Constraint(expr= - 0.8*m.b25 + m.x462 <= 0)
m.c294 = Constraint(expr= - 0.8*m.b26 + m.x464 <= 0)
m.c295 = Constraint(expr= - 0.8*m.b27 + m.x466 <= 0)
m.c296 = Constraint(expr= - 0.8*m.b28 + m.x468 <= 0)
m.c297 = Constraint(expr= - 0.5*m.b29 + m.x470 <= 0)
m.c298 = Constraint(expr= - 0.5*m.b30 + m.x472 <= 0)
m.c299 = Constraint(expr= - 0.5*m.b31 + m.x474 <= 0)
m.c300 = Constraint(expr= - 0.5*m.b32 + m.x476 <= 0)
m.c301 = Constraint(expr= - 0.5*m.b33 + m.x478 <= 0)
m.c302 = Constraint(expr= - 0.5*m.b34 + m.x480 <= 0)
m.c303 = Constraint(expr= - 0.5*m.b35 + m.x482 <= 0)
m.c304 = Constraint(expr= - 0.5*m.b36 + m.x484 <= 0)
m.c305 = Constraint(expr= - 0.5*m.b37 + m.x486 <= 0)
m.c306 = Constraint(expr= - 0.5*m.b38 + m.x488 <= 0)
m.c307 = Constraint(expr= - 0.5*m.b39 + m.x490 <= 0)
m.c308 = Constraint(expr= - 0.5*m.b40 + m.x492 <= 0)
m.c309 = Constraint(expr= - 0.5*m.b41 + m.x494 <= 0)
m.c310 = Constraint(expr= - 0.5*m.b42 + m.x496 <= 0)
m.c311 = Constraint(expr= - 0.5*m.b43 + m.x498 <= 0)
m.c312 = Constraint(expr= - 0.5*m.b44 + m.x500 <= 0)
m.c313 = Constraint(expr= - 0.5*m.b45 + m.x502 <= 0)
m.c314 = Constraint(expr= - 0.5*m.b46 + m.x504 <= 0)
m.c315 = Constraint(expr= - 0.7*m.b47 + m.x506 <= 0)
m.c316 = Constraint(expr= - 0.7*m.b48 + m.x508 <= 0)
m.c317 = Constraint(expr= - 0.7*m.b49 + m.x510 <= 0)
m.c318 = Constraint(expr= - 0.7*m.b50 + m.x512 <= 0)
m.c319 = Constraint(expr= - 0.7*m.b51 + m.x514 <= 0)
m.c320 = Constraint(expr= - 0.7*m.b52 + m.x516 <= 0)
m.c321 = Constraint(expr= - 0.7*m.b53 + m.x518 <= 0)
m.c322 = Constraint(expr= - 0.7*m.b54 + m.x520 <= 0)
m.c323 = Constraint(expr= - 0.7*m.b55 + m.x522 <= 0)
m.c324 = Constraint(expr= - 0.7*m.b56 + m.x524 <= 0)
m.c325 = Constraint(expr= - 0.7*m.b57 + m.x526 <= 0)
m.c326 = Constraint(expr= - 0.7*m.b58 + m.x528 <= 0)
m.c327 = Constraint(expr= - 0.7*m.b59 + m.x530 <= 0)
m.c328 = Constraint(expr= - 0.7*m.b60 + m.x532 <= 0)
m.c329 = Constraint(expr= - 0.7*m.b61 + m.x534 <= 0)
m.c330 = Constraint(expr= - 0.7*m.b62 + m.x536 <= 0)
m.c331 = Constraint(expr= - 0.7*m.b63 + m.x538 <= 0)
m.c332 = Constraint(expr= - 0.7*m.b64 + m.x540 <= 0)
m.c333 = Constraint(expr= - 0.58*m.b65 + m.x542 <= 0)
m.c334 = Constraint(expr= - 0.58*m.b66 + m.x544 <= 0)
m.c335 = Constraint(expr= - 0.58*m.b67 + m.x546 <= 0)
m.c336 = Constraint(expr= - 0.58*m.b68 + m.x548 <= 0)
m.c337 = Constraint(expr= - 0.58*m.b69 + m.x550 <= 0)
m.c338 = Constraint(expr= - 0.58*m.b70 + m.x552 <= 0)
m.c339 = Constraint(expr= - 0.58*m.b71 + m.x554 <= 0)
m.c340 = Constraint(expr= - 0.58*m.b72 + m.x556 <= 0)
m.c341 = Constraint(expr= - 0.58*m.b73 + m.x558 <= 0)
m.c342 = Constraint(expr= - 0.58*m.b74 + m.x560 <= 0)
m.c343 = Constraint(expr= - 0.58*m.b75 + m.x562 <= 0)
m.c344 = Constraint(expr= - 0.58*m.b76 + m.x564 <= 0)
m.c345 = Constraint(expr= - 0.58*m.b77 + m.x566 <= 0)
m.c346 = Constraint(expr= - 0.58*m.b78 + m.x568 <= 0)
m.c347 = Constraint(expr= - 0.58*m.b79 + m.x570 <= 0)
m.c348 = Constraint(expr= - 0.58*m.b80 + m.x572 <= 0)
m.c349 = Constraint(expr= - 0.58*m.b81 + m.x574 <= 0)
m.c350 = Constraint(expr= - 0.58*m.b82 + m.x576 <= 0)
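# c351-c377: offset equalities that fix one variable at another variable plus a
# constant (60, 90 or 103, depending on the group).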
m.c351 = Constraint(expr= - m.x362 + m.x578 == 60)
m.c352 = Constraint(expr= - m.x364 + m.x579 == 60)
m.c353 = Constraint(expr= - m.x366 + m.x580 == 60)
m.c354 = Constraint(expr= - m.x368 + m.x581 == 60)
m.c355 = Constraint(expr= - m.x370 + m.x582 == 60)
m.c356 = Constraint(expr= - m.x372 + m.x583 == 60)
m.c357 = Constraint(expr= - m.x374 + m.x584 == 60)
m.c358 = Constraint(expr= - m.x376 + m.x585 == 60)
m.c359 = Constraint(expr= - m.x378 + m.x586 == 60)
m.c360 = Constraint(expr= - m.x380 + m.x587 == 90)
m.c361 = Constraint(expr= - m.x382 + m.x588 == 90)
m.c362 = Constraint(expr= - m.x384 + m.x589 == 90)
m.c363 = Constraint(expr= - m.x386 + m.x590 == 90)
m.c364 = Constraint(expr= - m.x388 + m.x591 == 90)
m.c365 = Constraint(expr= - m.x390 + m.x592 == 90)
m.c366 = Constraint(expr= - m.x392 + m.x593 == 90)
m.c367 = Constraint(expr= - m.x394 + m.x594 == 90)
m.c368 = Constraint(expr= - m.x396 + m.x595 == 90)
m.c369 = Constraint(expr= - m.x398 + m.x596 == 103)
m.c370 = Constraint(expr= - m.x400 + m.x597 == 103)
m.c371 = Constraint(expr= - m.x402 + m.x598 == 103)
m.c372 = Constraint(expr= - m.x404 + m.x599 == 103)
m.c373 = Constraint(expr= - m.x406 + m.x600 == 103)
m.c374 = Constraint(expr= - m.x408 + m.x601 == 103)
m.c375 = Constraint(expr= - m.x410 + m.x602 == 103)
m.c376 = Constraint(expr= - m.x412 + m.x603 == 103)
m.c377 = Constraint(expr= - m.x414 + m.x604 == 103)
m.c378 = Constraint(expr= - m.x578 + m.x605 - m.x606 == 0)
m.c379 = Constraint(expr= - m.x579 + m.x607 - m.x608 == 0)
m.c380 = Constraint(expr= - m.x580 + m.x609 - m.x610 == 0)
m.c381 = Constraint(expr= - m.x581 + m.x611 - m.x612 == 0)
m.c382 = Constraint(expr= - m.x582 + m.x613 - m.x614 == 0)
m.c383 = Constraint(expr= - m.x583 + m.x615 - m.x616 == 0)
m.c384 = Constraint(expr= - m.x584 + m.x617 - m.x618 == 0)
m.c385 = Constraint(expr= - m.x585 + m.x619 - m.x620 == 0)
m.c386 = Constraint(expr= - m.x586 + m.x621 - m.x622 == 0)
m.c387 = Constraint(expr= m.x623 - m.x624 - m.x625 == 0)
m.c388 = Constraint(expr= m.x626 - m.x627 - m.x628 == 0)
m.c389 = Constraint(expr= m.x629 - m.x630 - m.x631 == 0)
m.c390 = Constraint(expr= m.x632 - m.x633 - m.x634 == 0)
m.c391 = Constraint(expr= m.x635 - m.x636 - m.x637 == 0)
m.c392 = Constraint(expr= m.x638 - m.x639 - m.x640 == 0)
m.c393 = Constraint(expr= m.x641 - m.x642 - m.x643 == 0)
m.c394 = Constraint(expr= m.x644 - m.x645 - m.x646 == 0)
m.c395 = Constraint(expr= m.x647 - m.x648 - m.x649 == 0)
m.c396 = Constraint(expr= - m.x596 + m.x650 - m.x651 == 0)
m.c397 = Constraint(expr= - m.x597 + m.x652 - m.x653 == 0)
m.c398 = Constraint(expr= - m.x598 + m.x654 - m.x655 == 0)
m.c399 = Constraint(expr= - m.x599 + m.x656 - m.x657 == 0)
m.c400 = Constraint(expr= - m.x600 + m.x658 - m.x659 == 0)
m.c401 = Constraint(expr= - m.x601 + m.x660 - m.x661 == 0)
m.c402 = Constraint(expr= - m.x602 + m.x662 - m.x663 == 0)
m.c403 = Constraint(expr= - m.x603 + m.x664 - m.x665 == 0)
m.c404 = Constraint(expr= - m.x604 + m.x666 - m.x667 == 0)
m.c405 = Constraint(expr= m.x605 - m.x668 - m.x669 == 0)
m.c406 = Constraint(expr= m.x607 - m.x670 - m.x671 == 0)
m.c407 = Constraint(expr= m.x609 - m.x672 - m.x673 == 0)
m.c408 = Constraint(expr= m.x611 - m.x674 - m.x675 == 0)
m.c409 = Constraint(expr= m.x613 - m.x676 - m.x677 == 0)
m.c410 = Constraint(expr= m.x615 - m.x678 - m.x679 == 0)
m.c411 = Constraint(expr= m.x617 - m.x680 - m.x681 == 0)
m.c412 = Constraint(expr= m.x619 - m.x682 - m.x683 == 0)
m.c413 = Constraint(expr= m.x621 - m.x684 - m.x685 == 0)
m.c414 = Constraint(expr= - m.x578 + m.x623 - m.x686 == 0)
m.c415 = Constraint(expr= - m.x579 + m.x626 - m.x687 == 0)
m.c416 = Constraint(expr= - m.x580 + m.x629 - m.x688 == 0)
m.c417 = Constraint(expr= - m.x581 + m.x632 - m.x689 == 0)
m.c418 = Constraint(expr= - m.x582 + m.x635 - m.x690 == 0)
m.c419 = Constraint(expr= - m.x583 + m.x638 - m.x691 == 0)
m.c420 = Constraint(expr= - m.x584 + m.x641 - m.x692 == 0)
m.c421 = Constraint(expr= - m.x585 + m.x644 - m.x693 == 0)
m.c422 = Constraint(expr= - m.x586 + m.x647 - m.x694 == 0)
m.c423 = Constraint(expr= - m.x587 + m.x650 - m.x695 == 0)
m.c424 = Constraint(expr= - m.x588 + m.x652 - m.x696 == 0)
m.c425 = Constraint(expr= - m.x589 + m.x654 - m.x697 == 0)
m.c426 = Constraint(expr= - m.x590 + m.x656 - m.x698 == 0)
m.c427 = Constraint(expr= - m.x591 + m.x658 - m.x699 == 0)
m.c428 = Constraint(expr= - m.x592 + m.x660 - m.x700 == 0)
m.c429 = Constraint(expr= - m.x593 + m.x662 - m.x701 == 0)
m.c430 = Constraint(expr= - m.x594 + m.x664 - m.x702 == 0)
m.c431 = Constraint(expr= - m.x595 + m.x666 - m.x703 == 0)
m.c432 = Constraint(expr= 0.2*m.b2 - m.x416 + m.x704 <= 0.2)
m.c433 = Constraint(expr= 0.2*m.b3 - m.x418 + m.x705 <= 0.2)
m.c434 = Constraint(expr= 0.2*m.b4 - m.x420 + m.x706 <= 0.2)
m.c435 = Constraint(expr= 0.2*m.b5 - m.x422 + m.x707 <= 0.2)
m.c436 = Constraint(expr= 0.2*m.b6 - m.x424 + m.x708 <= 0.2)
m.c437 = Constraint(expr= 0.2*m.b7 - m.x426 + m.x709 <= 0.2)
m.c438 = Constraint(expr= 0.2*m.b8 - m.x428 + m.x710 <= 0.2)
m.c439 = Constraint(expr= 0.2*m.b9 - m.x430 + m.x711 <= 0.2)
m.c440 = Constraint(expr= 0.2*m.b10 - m.x432 + m.x712 <= 0.2)
m.c441 = Constraint(expr= 0.2*m.b11 - m.x434 + m.x713 <= 0.2)
m.c442 = Constraint(expr= 0.2*m.b12 - m.x436 + m.x714 <= 0.2)
m.c443 = Constraint(expr= 0.2*m.b13 - m.x438 + m.x715 <= 0.2)
m.c444 = Constraint(expr= 0.2*m.b14 - m.x440 + m.x716 <= 0.2)
m.c445 = Constraint(expr= 0.2*m.b15 - m.x442 + m.x717 <= 0.2)
m.c446 = Constraint(expr= 0.2*m.b16 - m.x444 + m.x718 <= 0.2)
m.c447 = Constraint(expr= 0.2*m.b17 - m.x446 + m.x719 <= 0.2)
m.c448 = Constraint(expr= 0.2*m.b18 - m.x448 + m.x720 <= 0.2)
m.c449 = Constraint(expr= 0.2*m.b19 - m.x450 + m.x721 <= 0.2)
m.c450 = Constraint(expr= 0.2*m.b20 - m.x452 + m.x722 <= 0.2)
m.c451 = Constraint(expr= 0.2*m.b21 - m.x454 + m.x723 <= 0.2)
m.c452 = Constraint(expr= 0.2*m.b22 - m.x456 + m.x724 <= 0.2)
m.c453 = Constraint(expr= 0.2*m.b23 - m.x458 + m.x725 <= 0.2)
m.c454 = Constraint(expr= 0.2*m.b24 - m.x460 + m.x726 <= 0.2)
m.c455 = Constraint(expr= 0.2*m.b25 - m.x462 + m.x727 <= 0.2)
m.c456 = Constraint(expr= 0.2*m.b26 - m.x464 + m.x728 <= 0.2)
m.c457 = Constraint(expr= 0.2*m.b27 - m.x466 + m.x729 <= 0.2)
m.c458 = Constraint(expr= 0.2*m.b28 - m.x468 + m.x730 <= 0.2)
m.c459 = Constraint(expr= 0.25*m.b29 - m.x470 + m.x731 <= 0.25)
m.c460 = Constraint(expr= 0.25*m.b30 - m.x472 + m.x732 <= 0.25)
m.c461 = Constraint(expr= 0.25*m.b31 - m.x474 + m.x733 <= 0.25)
m.c462 = Constraint(expr= 0.25*m.b32 - m.x476 + m.x734 <= 0.25)
m.c463 = Constraint(expr= 0.25*m.b33 - m.x478 + m.x735 <= 0.25)
m.c464 = Constraint(expr= 0.25*m.b34 - m.x480 + m.x736 <= 0.25)
m.c465 = Constraint(expr= 0.25*m.b35 - m.x482 + m.x737 <= 0.25)
m.c466 = Constraint(expr= 0.25*m.b36 - m.x484 + m.x738 <= 0.25)
m.c467 = Constraint(expr= 0.25*m.b37 - m.x486 + m.x739 <= 0.25)
m.c468 = Constraint(expr= 0.25*m.b38 - m.x488 + m.x740 <= 0.25)
m.c469 = Constraint(expr= 0.25*m.b39 - m.x490 + m.x741 <= 0.25)
m.c470 = Constraint(expr= 0.25*m.b40 - m.x492 + m.x742 <= 0.25)
m.c471 = Constraint(expr= 0.25*m.b41 - m.x494 + m.x743 <= 0.25)
m.c472 = Constraint(expr= 0.25*m.b42 - m.x496 + m.x744 <= 0.25)
m.c473 = Constraint(expr= 0.25*m.b43 - m.x498 + m.x745 <= 0.25)
m.c474 = Constraint(expr= 0.25*m.b44 - m.x500 + m.x746 <= 0.25)
m.c475 = Constraint(expr= 0.25*m.b45 - m.x502 + m.x747 <= 0.25)
m.c476 = Constraint(expr= 0.25*m.b46 - m.x504 + m.x748 <= 0.25)
m.c477 = Constraint(expr= 0.4*m.b47 - m.x506 + m.x749 <= 0.4)
m.c478 = Constraint(expr= 0.4*m.b48 - m.x508 + m.x750 <= 0.4)
m.c479 = Constraint(expr= 0.4*m.b49 - m.x510 + m.x751 <= 0.4)
m.c480 = Constraint(expr= 0.4*m.b50 - m.x512 + m.x752 <= 0.4)
m.c481 = Constraint(expr= 0.4*m.b51 - m.x514 + m.x753 <= 0.4)
m.c482 = Constraint(expr= 0.4*m.b52 - m.x516 + m.x754 <= 0.4)
m.c483 = Constraint(expr= 0.4*m.b53 - m.x518 + m.x755 <= 0.4)
m.c484 = Constraint(expr= 0.4*m.b54 - m.x520 + m.x756 <= 0.4)
m.c485 = Constraint(expr= 0.4*m.b55 - m.x522 + m.x757 <= 0.4)
m.c486 = Constraint(expr= 0.4*m.b56 - m.x524 + m.x758 <= 0.4)
m.c487 = Constraint(expr= 0.4*m.b57 - m.x526 + m.x759 <= 0.4)
m.c488 = Constraint(expr= 0.4*m.b58 - m.x528 + m.x760 <= 0.4)
m.c489 = Constraint(expr= 0.4*m.b59 - m.x530 + m.x761 <= 0.4)
m.c490 = Constraint(expr= 0.4*m.b60 - m.x532 + m.x762 <= 0.4)
m.c491 = Constraint(expr= 0.4*m.b61 - m.x534 + m.x763 <= 0.4)
m.c492 = Constraint(expr= 0.4*m.b62 - m.x536 + m.x764 <= 0.4)
m.c493 = Constraint(expr= 0.4*m.b63 - m.x538 + m.x765 <= 0.4)
m.c494 = Constraint(expr= 0.4*m.b64 - m.x540 + m.x766 <= 0.4)
m.c495 = Constraint(expr= 0.24*m.b65 - m.x542 + m.x767 <= 0.24)
m.c496 = Constraint(expr= 0.24*m.b66 - m.x544 + m.x768 <= 0.24)
m.c497 = Constraint(expr= 0.24*m.b67 - m.x546 + m.x769 <= 0.24)
m.c498 = Constraint(expr= 0.24*m.b68 - m.x548 + m.x770 <= 0.24)
m.c499 = Constraint(expr= 0.24*m.b69 - m.x550 + m.x771 <= 0.24)
m.c500 = Constraint(expr= 0.24*m.b70 - m.x552 + m.x772 <= 0.24)
m.c501 = Constraint(expr= 0.24*m.b71 - m.x554 + m.x773 <= 0.24)
m.c502 = Constraint(expr= 0.24*m.b72 - m.x556 + m.x774 <= 0.24)
m.c503 = Constraint(expr= 0.24*m.b73 - m.x558 + m.x775 <= 0.24)
m.c504 = Constraint(expr= 0.24*m.b74 - m.x560 + m.x776 <= 0.24)
m.c505 = Constraint(expr= 0.24*m.b75 - m.x562 + m.x777 <= 0.24)
m.c506 = Constraint(expr= 0.24*m.b76 - m.x564 + m.x778 <= 0.24)
m.c507 = Constraint(expr= 0.24*m.b77 - m.x566 + m.x779 <= 0.24)
m.c508 = Constraint(expr= 0.24*m.b78 - m.x568 + m.x780 <= 0.24)
m.c509 = Constraint(expr= 0.24*m.b79 - m.x570 + m.x781 <= 0.24)
m.c510 = Constraint(expr= 0.24*m.b80 - m.x572 + m.x782 <= 0.24)
m.c511 = Constraint(expr= 0.24*m.b81 - m.x574 + m.x783 <= 0.24)
m.c512 = Constraint(expr= 0.24*m.b82 - m.x576 + m.x784 <= 0.24)
m.c513 = Constraint(expr= - m.x416 + m.x704 >= 0)
m.c514 = Constraint(expr= - m.x418 + m.x705 >= 0)
m.c515 = Constraint(expr= - m.x420 + m.x706 >= 0)
m.c516 = Constraint(expr= - m.x422 + m.x707 >= 0)
m.c517 = Constraint(expr= - m.x424 + m.x708 >= 0)
m.c518 = Constraint(expr= - m.x426 + m.x709 >= 0)
m.c519 = Constraint(expr= - m.x428 + m.x710 >= 0)
m.c520 = Constraint(expr= - m.x430 + m.x711 >= 0)
m.c521 = Constraint(expr= - m.x432 + m.x712 >= 0)
m.c522 = Constraint(expr= - m.x434 + m.x713 >= 0)
m.c523 = Constraint(expr= - m.x436 + m.x714 >= 0)
m.c524 = Constraint(expr= - m.x438 + m.x715 >= 0)
m.c525 = Constraint(expr= - m.x440 + m.x716 >= 0)
m.c526 = Constraint(expr= - m.x442 + m.x717 >= 0)
m.c527 = Constraint(expr= - m.x444 + m.x718 >= 0)
m.c528 = Constraint(expr= - m.x446 + m.x719 >= 0)
m.c529 = Constraint(expr= - m.x448 + m.x720 >= 0)
m.c530 = Constraint(expr= - m.x450 + m.x721 >= 0)
m.c531 = Constraint(expr= - m.x452 + m.x722 >= 0)
m.c532 = Constraint(expr= - m.x454 + m.x723 >= 0)
m.c533 = Constraint(expr= - m.x456 + m.x724 >= 0)
m.c534 = Constraint(expr= - m.x458 + m.x725 >= 0)
m.c535 = Constraint(expr= - m.x460 + m.x726 >= 0)
m.c536 = Constraint(expr= - m.x462 + m.x727 >= 0)
m.c537 = Constraint(expr= - m.x464 + m.x728 >= 0)
m.c538 = Constraint(expr= - m.x466 + m.x729 >= 0)
m.c539 = Constraint(expr= - m.x468 + m.x730 >= 0)
m.c540 = Constraint(expr= - m.x470 + m.x731 >= 0)
m.c541 = Constraint(expr= - m.x472 + m.x732 >= 0)
m.c542 = Constraint(expr= - m.x474 + m.x733 >= 0)
m.c543 = Constraint(expr= - m.x476 + m.x734 >= 0)
m.c544 = Constraint(expr= - m.x478 + m.x735 >= 0)
m.c545 = Constraint(expr= - m.x480 + m.x736 >= 0)
m.c546 = Constraint(expr= - m.x482 + m.x737 >= 0)
m.c547 = Constraint(expr= - m.x484 + m.x738 >= 0)
m.c548 = Constraint(expr= - m.x486 + m.x739 >= 0)
m.c549 = Constraint(expr= - m.x488 + m.x740 >= 0)
m.c550 = Constraint(expr= - m.x490 + m.x741 >= 0)
m.c551 = Constraint(expr= - m.x492 + m.x742 >= 0)
m.c552 = Constraint(expr= - m.x494 + m.x743 >= 0)
m.c553 = Constraint(expr= - m.x496 + m.x744 >= 0)
m.c554 = Constraint(expr= - m.x498 + m.x745 >= 0)
m.c555 = Constraint(expr= - m.x500 + m.x746 >= 0)
m.c556 = Constraint(expr= - m.x502 + m.x747 >= 0)
m.c557 = Constraint(expr= - m.x504 + m.x748 >= 0)
m.c558 = Constraint(expr= - m.x506 + m.x749 >= 0)
m.c559 = Constraint(expr= - m.x508 + m.x750 >= 0)
m.c560 = Constraint(expr= - m.x510 + m.x751 >= 0)
m.c561 = Constraint(expr= - m.x512 + m.x752 >= 0)
m.c562 = Constraint(expr= - m.x514 + m.x753 >= 0)
m.c563 = Constraint(expr= - m.x516 + m.x754 >= 0)
m.c564 = Constraint(expr= - m.x518 + m.x755 >= 0)
m.c565 = Constraint(expr= - m.x520 + m.x756 >= 0)
m.c566 = Constraint(expr= - m.x522 + m.x757 >= 0)
m.c567 = Constraint(expr= - m.x524 + m.x758 >= 0)
m.c568 = Constraint(expr= - m.x526 + m.x759 >= 0)
m.c569 = Constraint(expr= - m.x528 + m.x760 >= 0)
m.c570 = Constraint(expr= - m.x530 + m.x761 >= 0)
m.c571 = Constraint(expr= - m.x532 + m.x762 >= 0)
m.c572 = Constraint(expr= - m.x534 + m.x763 >= 0)
m.c573 = Constraint(expr= - m.x536 + m.x764 >= 0)
m.c574 = Constraint(expr= - m.x538 + m.x765 >= 0)
m.c575 = Constraint(expr= - m.x540 + m.x766 >= 0)
m.c576 = Constraint(expr= - m.x542 + m.x767 >= 0)
m.c577 = Constraint(expr= - m.x544 + m.x768 >= 0)
m.c578 = Constraint(expr= - m.x546 + m.x769 >= 0)
m.c579 = Constraint(expr= - m.x548 + m.x770 >= 0)
m.c580 = Constraint(expr= - m.x550 + m.x771 >= 0)
m.c581 = Constraint(expr= - m.x552 + m.x772 >= 0)
m.c582 = Constraint(expr= - m.x554 + m.x773 >= 0)
m.c583 = Constraint(expr= - m.x556 + m.x774 >= 0)
m.c584 = Constraint(expr= - m.x558 + m.x775 >= 0)
m.c585 = Constraint(expr= - m.x560 + m.x776 >= 0)
m.c586 = Constraint(expr= - m.x562 + m.x777 >= 0)
m.c587 = Constraint(expr= - m.x564 + m.x778 >= 0)
m.c588 = Constraint(expr= - m.x566 + m.x779 >= 0)
m.c589 = Constraint(expr= - m.x568 + m.x780 >= 0)
m.c590 = Constraint(expr= - m.x570 + m.x781 >= 0)
m.c591 = Constraint(expr= - m.x572 + m.x782 >= 0)
m.c592 = Constraint(expr= - m.x574 + m.x783 >= 0)
m.c593 = Constraint(expr= - m.x576 + m.x784 >= 0)
m.c594 = Constraint(expr= - 0.6*m.b2 + m.x704 <= 0.2)
m.c595 = Constraint(expr= - 0.6*m.b3 + m.x705 <= 0.2)
m.c596 = Constraint(expr= - 0.6*m.b4 + m.x706 <= 0.2)
m.c597 = Constraint(expr= - 0.6*m.b5 + m.x707 <= 0.2)
m.c598 = Constraint(expr= - 0.6*m.b6 + m.x708 <= 0.2)
m.c599 = Constraint(expr= - 0.6*m.b7 + m.x709 <= 0.2)
m.c600 = Constraint(expr= - 0.6*m.b8 + m.x710 <= 0.2)
m.c601 = Constraint(expr= - 0.6*m.b9 + m.x711 <= 0.2)
m.c602 = Constraint(expr= - 0.6*m.b10 + m.x712 <= 0.2)
m.c603 = Constraint(expr= - 0.6*m.b11 + m.x713 <= 0.2)
m.c604 = Constraint(expr= - 0.6*m.b12 + m.x714 <= 0.2)
m.c605 = Constraint(expr= - 0.6*m.b13 + m.x715 <= 0.2)
m.c606 = Constraint(expr= - 0.6*m.b14 + m.x716 <= 0.2)
m.c607 = Constraint(expr= - 0.6*m.b15 + m.x717 <= 0.2)
m.c608 = Constraint(expr= - 0.6*m.b16 + m.x718 <= 0.2)
m.c609 = Constraint(expr= - 0.6*m.b17 + m.x719 <= 0.2)
m.c610 = Constraint(expr= - 0.6*m.b18 + m.x720 <= 0.2)
m.c611 = Constraint(expr= - 0.6*m.b19 + m.x721 <= 0.2)
m.c612 = Constraint(expr= - 0.6*m.b20 + m.x722 <= 0.2)
m.c613 = Constraint(expr= - 0.6*m.b21 + m.x723 <= 0.2)
m.c614 = Constraint(expr= - 0.6*m.b22 + m.x724 <= 0.2)
m.c615 = Constraint(expr= - 0.6*m.b23 + m.x725 <= 0.2)
m.c616 = Constraint(expr= - 0.6*m.b24 + m.x726 <= 0.2)
m.c617 = Constraint(expr= - 0.6*m.b25 + m.x727 <= 0.2)
m.c618 = Constraint(expr= - 0.6*m.b26 + m.x728 <= 0.2)
m.c619 = Constraint(expr= - 0.6*m.b27 + m.x729 <= 0.2)
m.c620 = Constraint(expr= - 0.6*m.b28 + m.x730 <= 0.2)
m.c621 = Constraint(expr= - 0.25*m.b29 + m.x731 <= 0.25)
m.c622 = Constraint(expr= - 0.25*m.b30 + m.x732 <= 0.25)
m.c623 = Constraint(expr= - 0.25*m.b31 + m.x733 <= 0.25)
m.c624 = Constraint(expr= - 0.25*m.b32 + m.x734 <= 0.25)
m.c625 = Constraint(expr= - 0.25*m.b33 + m.x735 <= 0.25)
m.c626 = Constraint(expr= - 0.25*m.b34 + m.x736 <= 0.25)
m.c627 = Constraint(expr= - 0.25*m.b35 + m.x737 <= 0.25)
m.c628 = Constraint(expr= - 0.25*m.b36 + m.x738 <= 0.25)
m.c629 = Constraint(expr= - 0.25*m.b37 + m.x739 <= 0.25)
m.c630 = Constraint(expr= - 0.25*m.b38 + m.x740 <= 0.25)
m.c631 = Constraint(expr= - 0.25*m.b39 + m.x741 <= 0.25)
m.c632 = Constraint(expr= - 0.25*m.b40 + m.x742 <= 0.25)
m.c633 = Constraint(expr= - 0.25*m.b41 + m.x743 <= 0.25)
m.c634 = Constraint(expr= - 0.25*m.b42 + m.x744 <= 0.25)
m.c635 = Constraint(expr= - 0.25*m.b43 + m.x745 <= 0.25)
m.c636 = Constraint(expr= - 0.25*m.b44 + m.x746 <= 0.25)
m.c637 = Constraint(expr= - 0.25*m.b45 + m.x747 <= 0.25)
m.c638 = Constraint(expr= - 0.25*m.b46 + m.x748 <= 0.25)
m.c639 = Constraint(expr= - 0.3*m.b47 + m.x749 <= 0.4)
m.c640 = Constraint(expr= - 0.3*m.b48 + m.x750 <= 0.4)
m.c641 = Constraint(expr= - 0.3*m.b49 + m.x751 <= 0.4)
m.c642 = Constraint(expr= - 0.3*m.b50 + m.x752 <= 0.4)
m.c643 = Constraint(expr= - 0.3*m.b51 + m.x753 <= 0.4)
m.c644 = Constraint(expr= - 0.3*m.b52 + m.x754 <= 0.4)
m.c645 = Constraint(expr= - 0.3*m.b53 + m.x755 <= 0.4)
m.c646 = Constraint(expr= - 0.3*m.b54 + m.x756 <= 0.4)
m.c647 = Constraint(expr= - 0.3*m.b55 + m.x757 <= 0.4)
m.c648 = Constraint(expr= - 0.3*m.b56 + m.x758 <= 0.4)
m.c649 = Constraint(expr= - 0.3*m.b57 + m.x759 <= 0.4)
m.c650 = Constraint(expr= - 0.3*m.b58 + m.x760 <= 0.4)
m.c651 = Constraint(expr= - 0.3*m.b59 + m.x761 <= 0.4)
m.c652 = Constraint(expr= - 0.3*m.b60 + m.x762 <= 0.4)
m.c653 = Constraint(expr= - 0.3*m.b61 + m.x763 <= 0.4)
m.c654 = Constraint(expr= - 0.3*m.b62 + m.x764 <= 0.4)
m.c655 = Constraint(expr= - 0.3*m.b63 + m.x765 <= 0.4)
m.c656 = Constraint(expr= - 0.3*m.b64 + m.x766 <= 0.4)
m.c657 = Constraint(expr= - 0.34*m.b65 + m.x767 <= 0.24)
m.c658 = Constraint(expr= - 0.34*m.b66 + m.x768 <= 0.24)
m.c659 = Constraint(expr= - 0.34*m.b67 + m.x769 <= 0.24)
m.c660 = Constraint(expr= - 0.34*m.b68 + m.x770 <= 0.24)
m.c661 = Constraint(expr= - 0.34*m.b69 + m.x771 <= 0.24)
m.c662 = Constraint(expr= - 0.34*m.b70 + m.x772 <= 0.24)
m.c663 = Constraint(expr= - 0.34*m.b71 + m.x773 <= 0.24)
m.c664 = Constraint(expr= - 0.34*m.b72 + m.x774 <= 0.24)
m.c665 = Constraint(expr= - 0.34*m.b73 + m.x775 <= 0.24)
m.c666 = Constraint(expr= - 0.34*m.b74 + m.x776 <= 0.24)
m.c667 = Constraint(expr= - 0.34*m.b75 + m.x777 <= 0.24)
m.c668 = Constraint(expr= - 0.34*m.b76 + m.x778 <= 0.24)
m.c669 = Constraint(expr= - 0.34*m.b77 + m.x779 <= 0.24)
m.c670 = Constraint(expr= - 0.34*m.b78 + m.x780 <= 0.24)
m.c671 = Constraint(expr= - 0.34*m.b79 + m.x781 <= 0.24)
m.c672 = Constraint(expr= - 0.34*m.b80 + m.x782 <= 0.24)
m.c673 = Constraint(expr= - 0.34*m.b81 + m.x783 <= 0.24)
m.c674 = Constraint(expr= - 0.34*m.b82 + m.x784 <= 0.24)
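# c675-c710: variable upper bounds on x785-x820 that relax by the binary's
# coefficient when the matching b is 1 (x <= const + coef*b); likely the same
# bound-linking family as c610-c674 above, applied to a second variable block.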
m.c675 = Constraint(expr= - 0.4*m.b2 + m.x785 <= 0.6)
m.c676 = Constraint(expr= - 0.4*m.b3 + m.x786 <= 0.6)
m.c677 = Constraint(expr= - 0.4*m.b4 + m.x787 <= 0.6)
m.c678 = Constraint(expr= - 0.4*m.b5 + m.x788 <= 0.6)
m.c679 = Constraint(expr= - 0.4*m.b6 + m.x789 <= 0.6)
m.c680 = Constraint(expr= - 0.4*m.b7 + m.x790 <= 0.6)
m.c681 = Constraint(expr= - 0.4*m.b8 + m.x791 <= 0.6)
m.c682 = Constraint(expr= - 0.4*m.b9 + m.x792 <= 0.6)
m.c683 = Constraint(expr= - 0.4*m.b10 + m.x793 <= 0.6)
m.c684 = Constraint(expr= - 0.2*m.b29 + m.x794 <= 0.8)
m.c685 = Constraint(expr= - 0.2*m.b30 + m.x795 <= 0.8)
m.c686 = Constraint(expr= - 0.2*m.b31 + m.x796 <= 0.8)
m.c687 = Constraint(expr= - 0.2*m.b32 + m.x797 <= 0.8)
m.c688 = Constraint(expr= - 0.2*m.b33 + m.x798 <= 0.8)
m.c689 = Constraint(expr= - 0.2*m.b34 + m.x799 <= 0.8)
m.c690 = Constraint(expr= - 0.2*m.b35 + m.x800 <= 0.8)
m.c691 = Constraint(expr= - 0.2*m.b36 + m.x801 <= 0.8)
m.c692 = Constraint(expr= - 0.2*m.b37 + m.x802 <= 0.8)
m.c693 = Constraint(expr= - 0.15*m.b47 + m.x803 <= 0.85)
m.c694 = Constraint(expr= - 0.15*m.b48 + m.x804 <= 0.85)
m.c695 = Constraint(expr= - 0.15*m.b49 + m.x805 <= 0.85)
m.c696 = Constraint(expr= - 0.15*m.b50 + m.x806 <= 0.85)
m.c697 = Constraint(expr= - 0.15*m.b51 + m.x807 <= 0.85)
m.c698 = Constraint(expr= - 0.15*m.b52 + m.x808 <= 0.85)
m.c699 = Constraint(expr= - 0.15*m.b53 + m.x809 <= 0.85)
m.c700 = Constraint(expr= - 0.15*m.b54 + m.x810 <= 0.85)
m.c701 = Constraint(expr= - 0.15*m.b55 + m.x811 <= 0.85)
m.c702 = Constraint(expr= - 0.3*m.b65 + m.x812 <= 0.7)
m.c703 = Constraint(expr= - 0.3*m.b66 + m.x813 <= 0.7)
m.c704 = Constraint(expr= - 0.3*m.b67 + m.x814 <= 0.7)
m.c705 = Constraint(expr= - 0.3*m.b68 + m.x815 <= 0.7)
m.c706 = Constraint(expr= - 0.3*m.b69 + m.x816 <= 0.7)
m.c707 = Constraint(expr= - 0.3*m.b70 + m.x817 <= 0.7)
m.c708 = Constraint(expr= - 0.3*m.b71 + m.x818 <= 0.7)
m.c709 = Constraint(expr= - 0.3*m.b72 + m.x819 <= 0.7)
m.c710 = Constraint(expr= - 0.3*m.b73 + m.x820 <= 0.7)
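# c711-c755: ordering constraints on the binaries (b_i >= b_j), so each later
# binary in a chain can only switch on if its predecessor is already on.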
m.c711 = Constraint(expr= m.b2 - m.b11 >= 0)
m.c712 = Constraint(expr= m.b3 - m.b12 >= 0)
m.c713 = Constraint(expr= m.b4 - m.b13 >= 0)
m.c714 = Constraint(expr= m.b5 - m.b14 >= 0)
m.c715 = Constraint(expr= m.b6 - m.b15 >= 0)
m.c716 = Constraint(expr= m.b7 - m.b16 >= 0)
m.c717 = Constraint(expr= m.b8 - m.b17 >= 0)
m.c718 = Constraint(expr= m.b9 - m.b18 >= 0)
m.c719 = Constraint(expr= m.b10 - m.b19 >= 0)
m.c720 = Constraint(expr= m.b11 - m.b20 >= 0)
m.c721 = Constraint(expr= m.b12 - m.b21 >= 0)
m.c722 = Constraint(expr= m.b13 - m.b22 >= 0)
m.c723 = Constraint(expr= m.b14 - m.b23 >= 0)
m.c724 = Constraint(expr= m.b15 - m.b24 >= 0)
m.c725 = Constraint(expr= m.b16 - m.b25 >= 0)
m.c726 = Constraint(expr= m.b17 - m.b26 >= 0)
m.c727 = Constraint(expr= m.b18 - m.b27 >= 0)
m.c728 = Constraint(expr= m.b19 - m.b28 >= 0)
m.c729 = Constraint(expr= m.b29 - m.b38 >= 0)
m.c730 = Constraint(expr= m.b30 - m.b39 >= 0)
m.c731 = Constraint(expr= m.b31 - m.b40 >= 0)
m.c732 = Constraint(expr= m.b32 - m.b41 >= 0)
m.c733 = Constraint(expr= m.b33 - m.b42 >= 0)
m.c734 = Constraint(expr= m.b34 - m.b43 >= 0)
m.c735 = Constraint(expr= m.b35 - m.b44 >= 0)
m.c736 = Constraint(expr= m.b36 - m.b45 >= 0)
m.c737 = Constraint(expr= m.b37 - m.b46 >= 0)
m.c738 = Constraint(expr= m.b47 - m.b56 >= 0)
m.c739 = Constraint(expr= m.b48 - m.b57 >= 0)
m.c740 = Constraint(expr= m.b49 - m.b58 >= 0)
m.c741 = Constraint(expr= m.b50 - m.b59 >= 0)
m.c742 = Constraint(expr= m.b51 - m.b60 >= 0)
m.c743 = Constraint(expr= m.b52 - m.b61 >= 0)
m.c744 = Constraint(expr= m.b53 - m.b62 >= 0)
m.c745 = Constraint(expr= m.b54 - m.b63 >= 0)
m.c746 = Constraint(expr= m.b55 - m.b64 >= 0)
m.c747 = Constraint(expr= m.b65 - m.b74 >= 0)
m.c748 = Constraint(expr= m.b66 - m.b75 >= 0)
m.c749 = Constraint(expr= m.b67 - m.b76 >= 0)
m.c750 = Constraint(expr= m.b68 - m.b77 >= 0)
m.c751 = Constraint(expr= m.b69 - m.b78 >= 0)
m.c752 = Constraint(expr= m.b70 - m.b79 >= 0)
m.c753 = Constraint(expr= m.b71 - m.b80 >= 0)
m.c754 = Constraint(expr= m.b72 - m.b81 >= 0)
m.c755 = Constraint(expr= m.b73 - m.b82 >= 0)
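# c756-c782: balance equalities equating each of the x291-x352 variables used
# here to the sum of a fixed group of x416-x576 variables.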
m.c756 = Constraint(expr= m.x291 - m.x416 - m.x434 - m.x452 == 0)
m.c757 = Constraint(expr= m.x293 - m.x418 - m.x436 - m.x454 == 0)
m.c758 = Constraint(expr= m.x295 - m.x420 - m.x438 - m.x456 == 0)
m.c759 = Constraint(expr= m.x297 - m.x422 - m.x440 - m.x458 == 0)
m.c760 = Constraint(expr= m.x299 - m.x424 - m.x442 - m.x460 == 0)
m.c761 = Constraint(expr= m.x301 - m.x426 - m.x444 - m.x462 == 0)
m.c762 = Constraint(expr= m.x303 - m.x428 - m.x446 - m.x464 == 0)
m.c763 = Constraint(expr= m.x305 - m.x430 - m.x448 - m.x466 == 0)
m.c764 = Constraint(expr= m.x307 - m.x432 - m.x450 - m.x468 == 0)
m.c765 = Constraint(expr= m.x309 - m.x470 - m.x488 - m.x506 - m.x524 == 0)
m.c766 = Constraint(expr= m.x311 - m.x472 - m.x490 - m.x508 - m.x526 == 0)
m.c767 = Constraint(expr= m.x313 - m.x474 - m.x492 - m.x510 - m.x528 == 0)
m.c768 = Constraint(expr= m.x315 - m.x476 - m.x494 - m.x512 - m.x530 == 0)
m.c769 = Constraint(expr= m.x317 - m.x478 - m.x496 - m.x514 - m.x532 == 0)
m.c770 = Constraint(expr= m.x319 - m.x480 - m.x498 - m.x516 - m.x534 == 0)
m.c771 = Constraint(expr= m.x321 - m.x482 - m.x500 - m.x518 - m.x536 == 0)
m.c772 = Constraint(expr= m.x323 - m.x484 - m.x502 - m.x520 - m.x538 == 0)
m.c773 = Constraint(expr= m.x325 - m.x486 - m.x504 - m.x522 - m.x540 == 0)
m.c774 = Constraint(expr= m.x336 - m.x542 - m.x560 == 0)
m.c775 = Constraint(expr= m.x338 - m.x544 - m.x562 == 0)
m.c776 = Constraint(expr= m.x340 - m.x546 - m.x564 == 0)
m.c777 = Constraint(expr= m.x342 - m.x548 - m.x566 == 0)
m.c778 = Constraint(expr= m.x344 - m.x550 - m.x568 == 0)
m.c779 = Constraint(expr= m.x346 - m.x552 - m.x570 == 0)
m.c780 = Constraint(expr= m.x348 - m.x554 - m.x572 == 0)
m.c781 = Constraint(expr= m.x350 - m.x556 - m.x574 == 0)
m.c782 = Constraint(expr= m.x352 - m.x558 - m.x576 == 0)
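# c783-c863: big-M lower bounds with M = 2000. Each constraint reads
# x_i - x_j >= -2000*(1 - b), i.e. x_i >= x_j is enforced only when the
# associated binary b equals 1 and is relaxed otherwise.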
m.c783 = Constraint(expr= - 2000*m.b2 + m.x417 - m.x669 >= -2000)
m.c784 = Constraint(expr= - 2000*m.b3 + m.x425 - m.x671 >= -2000)
m.c785 = Constraint(expr= - 2000*m.b4 + m.x433 - m.x673 >= -2000)
m.c786 = Constraint(expr= - 2000*m.b5 + m.x441 - m.x675 >= -2000)
m.c787 = Constraint(expr= - 2000*m.b6 + m.x449 - m.x677 >= -2000)
m.c788 = Constraint(expr= - 2000*m.b7 + m.x457 - m.x679 >= -2000)
m.c789 = Constraint(expr= - 2000*m.b8 + m.x465 - m.x681 >= -2000)
m.c790 = Constraint(expr= - 2000*m.b9 + m.x473 - m.x683 >= -2000)
m.c791 = Constraint(expr= - 2000*m.b10 + m.x481 - m.x685 >= -2000)
m.c792 = Constraint(expr= - 2000*m.b11 + m.x489 - m.x669 >= -2000)
m.c793 = Constraint(expr= - 2000*m.b12 + m.x495 - m.x671 >= -2000)
m.c794 = Constraint(expr= - 2000*m.b13 + m.x501 - m.x673 >= -2000)
m.c795 = Constraint(expr= - 2000*m.b14 + m.x507 - m.x675 >= -2000)
m.c796 = Constraint(expr= - 2000*m.b15 + m.x513 - m.x677 >= -2000)
m.c797 = Constraint(expr= - 2000*m.b16 + m.x519 - m.x679 >= -2000)
m.c798 = Constraint(expr= - 2000*m.b17 + m.x525 - m.x681 >= -2000)
m.c799 = Constraint(expr= - 2000*m.b18 + m.x531 - m.x683 >= -2000)
m.c800 = Constraint(expr= - 2000*m.b19 + m.x537 - m.x685 >= -2000)
m.c801 = Constraint(expr= - 2000*m.b20 + m.x543 - m.x669 >= -2000)
m.c802 = Constraint(expr= - 2000*m.b21 + m.x549 - m.x671 >= -2000)
m.c803 = Constraint(expr= - 2000*m.b22 + m.x555 - m.x673 >= -2000)
m.c804 = Constraint(expr= - 2000*m.b23 + m.x561 - m.x675 >= -2000)
m.c805 = Constraint(expr= - 2000*m.b24 + m.x567 - m.x677 >= -2000)
m.c806 = Constraint(expr= - 2000*m.b25 + m.x573 - m.x679 >= -2000)
m.c807 = Constraint(expr= - 2000*m.b26 + m.x83 - m.x681 >= -2000)
m.c808 = Constraint(expr= - 2000*m.b27 + m.x86 - m.x683 >= -2000)
m.c809 = Constraint(expr= - 2000*m.b28 + m.x89 - m.x685 >= -2000)
m.c810 = Constraint(expr= - 2000*m.b29 + m.x92 - m.x686 >= -2000)
m.c811 = Constraint(expr= - 2000*m.b30 + m.x96 - m.x687 >= -2000)
m.c812 = Constraint(expr= - 2000*m.b31 + m.x100 - m.x688 >= -2000)
m.c813 = Constraint(expr= - 2000*m.b32 + m.x104 - m.x689 >= -2000)
m.c814 = Constraint(expr= - 2000*m.b33 + m.x108 - m.x690 >= -2000)
m.c815 = Constraint(expr= - 2000*m.b34 + m.x112 - m.x691 >= -2000)
m.c816 = Constraint(expr= - 2000*m.b35 + m.x116 - m.x692 >= -2000)
m.c817 = Constraint(expr= - 2000*m.b36 + m.x120 - m.x693 >= -2000)
m.c818 = Constraint(expr= - 2000*m.b37 + m.x124 - m.x694 >= -2000)
m.c819 = Constraint(expr= - 2000*m.b38 + m.x128 - m.x686 >= -2000)
m.c820 = Constraint(expr= - 2000*m.b39 + m.x131 - m.x687 >= -2000)
m.c821 = Constraint(expr= - 2000*m.b40 + m.x134 - m.x688 >= -2000)
m.c822 = Constraint(expr= - 2000*m.b41 + m.x137 - m.x689 >= -2000)
m.c823 = Constraint(expr= - 2000*m.b42 + m.x140 - m.x690 >= -2000)
m.c824 = Constraint(expr= - 2000*m.b43 + m.x143 - m.x691 >= -2000)
m.c825 = Constraint(expr= - 2000*m.b44 + m.x146 - m.x692 >= -2000)
m.c826 = Constraint(expr= - 2000*m.b45 + m.x149 - m.x693 >= -2000)
m.c827 = Constraint(expr= - 2000*m.b46 + m.x152 - m.x694 >= -2000)
m.c828 = Constraint(expr= - 2000*m.b47 + m.x155 - m.x686 >= -2000)
m.c829 = Constraint(expr= - 2000*m.b48 + m.x159 - m.x687 >= -2000)
m.c830 = Constraint(expr= - 2000*m.b49 + m.x163 - m.x688 >= -2000)
m.c831 = Constraint(expr= - 2000*m.b50 + m.x167 - m.x689 >= -2000)
m.c832 = Constraint(expr= - 2000*m.b51 + m.x171 - m.x690 >= -2000)
m.c833 = Constraint(expr= - 2000*m.b52 + m.x175 - m.x691 >= -2000)
m.c834 = Constraint(expr= - 2000*m.b53 + m.x179 - m.x692 >= -2000)
m.c835 = Constraint(expr= - 2000*m.b54 + m.x183 - m.x693 >= -2000)
m.c836 = Constraint(expr= - 2000*m.b55 + m.x187 - m.x694 >= -2000)
m.c837 = Constraint(expr= - 2000*m.b56 + m.x191 - m.x686 >= -2000)
m.c838 = Constraint(expr= - 2000*m.b57 + m.x194 - m.x687 >= -2000)
m.c839 = Constraint(expr= - 2000*m.b58 + m.x197 - m.x688 >= -2000)
m.c840 = Constraint(expr= - 2000*m.b59 + m.x200 - m.x689 >= -2000)
m.c841 = Constraint(expr= - 2000*m.b60 + m.x203 - m.x690 >= -2000)
m.c842 = Constraint(expr= - 2000*m.b61 + m.x206 - m.x691 >= -2000)
m.c843 = Constraint(expr= - 2000*m.b62 + m.x209 - m.x692 >= -2000)
m.c844 = Constraint(expr= - 2000*m.b63 + m.x212 - m.x693 >= -2000)
m.c845 = Constraint(expr= - 2000*m.b64 + m.x215 - m.x694 >= -2000)
m.c846 = Constraint(expr= - 2000*m.b65 + m.x218 - m.x695 >= -2000)
m.c847 = Constraint(expr= - 2000*m.b66 + m.x222 - m.x696 >= -2000)
m.c848 = Constraint(expr= - 2000*m.b67 + m.x226 - m.x697 >= -2000)
m.c849 = Constraint(expr= - 2000*m.b68 + m.x230 - m.x698 >= -2000)
m.c850 = Constraint(expr= - 2000*m.b69 + m.x234 - m.x699 >= -2000)
m.c851 = Constraint(expr= - 2000*m.b70 + m.x238 - m.x700 >= -2000)
m.c852 = Constraint(expr= - 2000*m.b71 + m.x242 - m.x701 >= -2000)
m.c853 = Constraint(expr= - 2000*m.b72 + m.x246 - m.x702 >= -2000)
m.c854 = Constraint(expr= - 2000*m.b73 + m.x250 - m.x703 >= -2000)
m.c855 = Constraint(expr= - 2000*m.b74 + m.x254 - m.x695 >= -2000)
m.c856 = Constraint(expr= - 2000*m.b75 + m.x257 - m.x696 >= -2000)
m.c857 = Constraint(expr= - 2000*m.b76 + m.x260 - m.x697 >= -2000)
m.c858 = Constraint(expr= - 2000*m.b77 + m.x263 - m.x698 >= -2000)
m.c859 = Constraint(expr= - 2000*m.b78 + m.x266 - m.x699 >= -2000)
m.c860 = Constraint(expr= - 2000*m.b79 + m.x269 - m.x700 >= -2000)
m.c861 = Constraint(expr= - 2000*m.b80 + m.x272 - m.x701 >= -2000)
m.c862 = Constraint(expr= - 2000*m.b81 + m.x275 - m.x702 >= -2000)
m.c863 = Constraint(expr= - 2000*m.b82 + m.x278 - m.x703 >= -2000)
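# c864-c944: matching big-M upper bounds (M = 1049, 1065 or 1095 by block),
# x_i - x_j <= M*(1 - b); together with c783-c863 these pin x_i to x_j
# whenever the binary is 1.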
m.c864 = Constraint(expr= 1049*m.b2 + m.x417 - m.x669 <= 1049)
m.c865 = Constraint(expr= 1049*m.b3 + m.x425 - m.x671 <= 1049)
m.c866 = Constraint(expr= 1049*m.b4 + m.x433 - m.x673 <= 1049)
m.c867 = Constraint(expr= 1049*m.b5 + m.x441 - m.x675 <= 1049)
m.c868 = Constraint(expr= 1049*m.b6 + m.x449 - m.x677 <= 1049)
m.c869 = Constraint(expr= 1049*m.b7 + m.x457 - m.x679 <= 1049)
m.c870 = Constraint(expr= 1049*m.b8 + m.x465 - m.x681 <= 1049)
m.c871 = Constraint(expr= 1049*m.b9 + m.x473 - m.x683 <= 1049)
m.c872 = Constraint(expr= 1049*m.b10 + m.x481 - m.x685 <= 1049)
m.c873 = Constraint(expr= 1049*m.b11 + m.x489 - m.x669 <= 1049)
m.c874 = Constraint(expr= 1049*m.b12 + m.x495 - m.x671 <= 1049)
m.c875 = Constraint(expr= 1049*m.b13 + m.x501 - m.x673 <= 1049)
m.c876 = Constraint(expr= 1049*m.b14 + m.x507 - m.x675 <= 1049)
m.c877 = Constraint(expr= 1049*m.b15 + m.x513 - m.x677 <= 1049)
m.c878 = Constraint(expr= 1049*m.b16 + m.x519 - m.x679 <= 1049)
m.c879 = Constraint(expr= 1049*m.b17 + m.x525 - m.x681 <= 1049)
m.c880 = Constraint(expr= 1049*m.b18 + m.x531 - m.x683 <= 1049)
m.c881 = Constraint(expr= 1049*m.b19 + m.x537 - m.x685 <= 1049)
m.c882 = Constraint(expr= 1049*m.b20 + m.x543 - m.x669 <= 1049)
m.c883 = Constraint(expr= 1049*m.b21 + m.x549 - m.x671 <= 1049)
m.c884 = Constraint(expr= 1049*m.b22 + m.x555 - m.x673 <= 1049)
m.c885 = Constraint(expr= 1049*m.b23 + m.x561 - m.x675 <= 1049)
m.c886 = Constraint(expr= 1049*m.b24 + m.x567 - m.x677 <= 1049)
m.c887 = Constraint(expr= 1049*m.b25 + m.x573 - m.x679 <= 1049)
m.c888 = Constraint(expr= 1049*m.b26 + m.x83 - m.x681 <= 1049)
m.c889 = Constraint(expr= 1049*m.b27 + m.x86 - m.x683 <= 1049)
m.c890 = Constraint(expr= 1049*m.b28 + m.x89 - m.x685 <= 1049)
m.c891 = Constraint(expr= 1065*m.b29 + m.x92 - m.x686 <= 1065)
m.c892 = Constraint(expr= 1065*m.b30 + m.x96 - m.x687 <= 1065)
m.c893 = Constraint(expr= 1065*m.b31 + m.x100 - m.x688 <= 1065)
m.c894 = Constraint(expr= 1065*m.b32 + m.x104 - m.x689 <= 1065)
m.c895 = Constraint(expr= 1065*m.b33 + m.x108 - m.x690 <= 1065)
m.c896 = Constraint(expr= 1065*m.b34 + m.x112 - m.x691 <= 1065)
m.c897 = Constraint(expr= 1065*m.b35 + m.x116 - m.x692 <= 1065)
m.c898 = Constraint(expr= 1065*m.b36 + m.x120 - m.x693 <= 1065)
m.c899 = Constraint(expr= 1065*m.b37 + m.x124 - m.x694 <= 1065)
m.c900 = Constraint(expr= 1065*m.b38 + m.x128 - m.x686 <= 1065)
m.c901 = Constraint(expr= 1065*m.b39 + m.x131 - m.x687 <= 1065)
m.c902 = Constraint(expr= 1065*m.b40 + m.x134 - m.x688 <= 1065)
m.c903 = Constraint(expr= 1065*m.b41 + m.x137 - m.x689 <= 1065)
m.c904 = Constraint(expr= 1065*m.b42 + m.x140 - m.x690 <= 1065)
m.c905 = Constraint(expr= 1065*m.b43 + m.x143 - m.x691 <= 1065)
m.c906 = Constraint(expr= 1065*m.b44 + m.x146 - m.x692 <= 1065)
m.c907 = Constraint(expr= 1065*m.b45 + m.x149 - m.x693 <= 1065)
m.c908 = Constraint(expr= 1065*m.b46 + m.x152 - m.x694 <= 1065)
m.c909 = Constraint(expr= 1065*m.b47 + m.x155 - m.x686 <= 1065)
m.c910 = Constraint(expr= 1065*m.b48 + m.x159 - m.x687 <= 1065)
m.c911 = Constraint(expr= 1065*m.b49 + m.x163 - m.x688 <= 1065)
m.c912 = Constraint(expr= 1065*m.b50 + m.x167 - m.x689 <= 1065)
m.c913 = Constraint(expr= 1065*m.b51 + m.x171 - m.x690 <= 1065)
m.c914 = Constraint(expr= 1065*m.b52 + m.x175 - m.x691 <= 1065)
m.c915 = Constraint(expr= 1065*m.b53 + m.x179 - m.x692 <= 1065)
m.c916 = Constraint(expr= 1065*m.b54 + m.x183 - m.x693 <= 1065)
m.c917 = Constraint(expr= 1065*m.b55 + m.x187 - m.x694 <= 1065)
m.c918 = Constraint(expr= 1065*m.b56 + m.x191 - m.x686 <= 1065)
m.c919 = Constraint(expr= 1065*m.b57 + m.x194 - m.x687 <= 1065)
m.c920 = Constraint(expr= 1065*m.b58 + m.x197 - m.x688 <= 1065)
m.c921 = Constraint(expr= 1065*m.b59 + m.x200 - m.x689 <= 1065)
m.c922 = Constraint(expr= 1065*m.b60 + m.x203 - m.x690 <= 1065)
m.c923 = Constraint(expr= 1065*m.b61 + m.x206 - m.x691 <= 1065)
m.c924 = Constraint(expr= 1065*m.b62 + m.x209 - m.x692 <= 1065)
m.c925 = Constraint(expr= 1065*m.b63 + m.x212 - m.x693 <= 1065)
m.c926 = Constraint(expr= 1065*m.b64 + m.x215 - m.x694 <= 1065)
m.c927 = Constraint(expr= 1095*m.b65 + m.x218 - m.x695 <= 1095)
m.c928 = Constraint(expr= 1095*m.b66 + m.x222 - m.x696 <= 1095)
m.c929 = Constraint(expr= 1095*m.b67 + m.x226 - m.x697 <= 1095)
m.c930 = Constraint(expr= 1095*m.b68 + m.x230 - m.x698 <= 1095)
m.c931 = Constraint(expr= 1095*m.b69 + m.x234 - m.x699 <= 1095)
m.c932 = Constraint(expr= 1095*m.b70 + m.x238 - m.x700 <= 1095)
m.c933 = Constraint(expr= 1095*m.b71 + m.x242 - m.x701 <= 1095)
m.c934 = Constraint(expr= 1095*m.b72 + m.x246 - m.x702 <= 1095)
m.c935 = Constraint(expr= 1095*m.b73 + m.x250 - m.x703 <= 1095)
m.c936 = Constraint(expr= 1095*m.b74 + m.x254 - m.x695 <= 1095)
m.c937 = Constraint(expr= 1095*m.b75 + m.x257 - m.x696 <= 1095)
m.c938 = Constraint(expr= 1095*m.b76 + m.x260 - m.x697 <= 1095)
m.c939 = Constraint(expr= 1095*m.b77 + m.x263 - m.x698 <= 1095)
m.c940 = Constraint(expr= 1095*m.b78 + m.x266 - m.x699 <= 1095)
m.c941 = Constraint(expr= 1095*m.b79 + m.x269 - m.x700 <= 1095)
m.c942 = Constraint(expr= 1095*m.b80 + m.x272 - m.x701 <= 1095)
m.c943 = Constraint(expr= 1095*m.b81 + m.x275 - m.x702 <= 1095)
m.c944 = Constraint(expr= 1095*m.b82 + m.x278 - m.x703 <= 1095)
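# c945-c962: pairwise ordering requirements between continuous variables
# (x624 >= x587, ..., x596 >= x821, ...).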
m.c945 = Constraint(expr= - m.x587 + m.x624 >= 0)
m.c946 = Constraint(expr= - m.x588 + m.x627 >= 0)
m.c947 = Constraint(expr= - m.x589 + m.x630 >= 0)
m.c948 = Constraint(expr= - m.x590 + m.x633 >= 0)
m.c949 = Constraint(expr= - m.x591 + m.x636 >= 0)
m.c950 = Constraint(expr= - m.x592 + m.x639 >= 0)
m.c951 = Constraint(expr= - m.x593 + m.x642 >= 0)
m.c952 = Constraint(expr= - m.x594 + m.x645 >= 0)
m.c953 = Constraint(expr= - m.x595 + m.x648 >= 0)
m.c954 = Constraint(expr= m.x596 - m.x821 >= 0)
m.c955 = Constraint(expr= m.x597 - m.x822 >= 0)
m.c956 = Constraint(expr= m.x598 - m.x823 >= 0)
m.c957 = Constraint(expr= m.x599 - m.x824 >= 0)
m.c958 = Constraint(expr= m.x600 - m.x825 >= 0)
m.c959 = Constraint(expr= m.x601 - m.x826 >= 0)
m.c960 = Constraint(expr= m.x602 - m.x827 >= 0)
m.c961 = Constraint(expr= m.x603 - m.x828 >= 0)
m.c962 = Constraint(expr= m.x604 - m.x829 >= 0)
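# c963-c1043: multi-term linear inequalities over the auxiliary variables
# (x830 onward). The coefficient sets repeat in blocks
# (13.95/24.47/-7.29/-23.58, then 29.29/-108.39/442.22/-454.58, etc.), with
# only the small leading coefficient (0.3098... vs 0.1326...) varying; the
# auxiliaries themselves are defined as powers and products of the model
# variables by the constraints from c1044 onward.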
m.c963 = Constraint(expr= 13.94696158*m.x830 + 24.46510819*m.x831 - 7.28623839*m.x832 - 23.57687014*m.x833
- 0.309838295393634*m.x834 <= 0)
m.c964 = Constraint(expr= - 0.309838295393634*m.x835 + 13.94696158*m.x836 + 24.46510819*m.x837 - 7.28623839*m.x838
- 23.57687014*m.x839 <= 0)
m.c965 = Constraint(expr= - 0.309838295393634*m.x840 + 13.94696158*m.x841 + 24.46510819*m.x842 - 7.28623839*m.x843
- 23.57687014*m.x844 <= 0)
m.c966 = Constraint(expr= 13.94696158*m.x845 + 24.46510819*m.x846 - 7.28623839*m.x847 - 23.57687014*m.x848
- 0.309838295393634*m.x849 <= 0)
m.c967 = Constraint(expr= - 0.309838295393634*m.x850 + 13.94696158*m.x851 + 24.46510819*m.x852 - 7.28623839*m.x853
- 23.57687014*m.x854 <= 0)
m.c968 = Constraint(expr= - 0.309838295393634*m.x855 + 13.94696158*m.x856 + 24.46510819*m.x857 - 7.28623839*m.x858
- 23.57687014*m.x859 <= 0)
m.c969 = Constraint(expr= - 0.132557606221724*m.x860 + 13.94696158*m.x861 + 24.46510819*m.x862 - 7.28623839*m.x863
- 23.57687014*m.x864 <= 0)
m.c970 = Constraint(expr= - 0.132557606221724*m.x865 + 13.94696158*m.x866 + 24.46510819*m.x867 - 7.28623839*m.x868
- 23.57687014*m.x869 <= 0)
m.c971 = Constraint(expr= - 0.132557606221724*m.x870 - 23.57687014*m.x871 + 13.94696158*m.x872 + 24.46510819*m.x873
- 7.28623839*m.x874 <= 0)
m.c972 = Constraint(expr= 13.94696158*m.x875 + 24.46510819*m.x876 - 7.28623839*m.x877 - 23.57687014*m.x878
- 0.309838295393634*m.x879 <= 0)
m.c973 = Constraint(expr= - 0.309838295393634*m.x880 + 13.94696158*m.x881 + 24.46510819*m.x882 - 7.28623839*m.x883
- 23.57687014*m.x884 <= 0)
m.c974 = Constraint(expr= - 0.309838295393634*m.x885 + 13.94696158*m.x886 + 24.46510819*m.x887 - 7.28623839*m.x888
- 23.57687014*m.x889 <= 0)
m.c975 = Constraint(expr= - 0.309838295393634*m.x890 + 13.94696158*m.x891 + 24.46510819*m.x892 - 7.28623839*m.x893
- 23.57687014*m.x894 <= 0)
m.c976 = Constraint(expr= - 0.309838295393634*m.x895 + 13.94696158*m.x896 + 24.46510819*m.x897 - 7.28623839*m.x898
- 23.57687014*m.x899 <= 0)
m.c977 = Constraint(expr= - 0.309838295393634*m.x900 + 13.94696158*m.x901 + 24.46510819*m.x902 - 7.28623839*m.x903
- 23.57687014*m.x904 <= 0)
m.c978 = Constraint(expr= - 0.132557606221724*m.x905 + 13.94696158*m.x906 + 24.46510819*m.x907 - 7.28623839*m.x908
- 23.57687014*m.x909 <= 0)
m.c979 = Constraint(expr= 13.94696158*m.x910 + 24.46510819*m.x911 - 7.28623839*m.x912 - 23.57687014*m.x913
- 0.132557606221724*m.x914 <= 0)
m.c980 = Constraint(expr= - 0.132557606221724*m.x915 + 13.94696158*m.x916 + 24.46510819*m.x917 - 7.28623839*m.x918
- 23.57687014*m.x919 <= 0)
m.c981 = Constraint(expr= - 0.309838295393634*m.x920 + 13.94696158*m.x921 + 24.46510819*m.x922 - 7.28623839*m.x923
- 23.57687014*m.x924 <= 0)
m.c982 = Constraint(expr= - 0.309838295393634*m.x925 + 13.94696158*m.x926 + 24.46510819*m.x927 - 7.28623839*m.x928
- 23.57687014*m.x929 <= 0)
m.c983 = Constraint(expr= - 0.309838295393634*m.x930 + 24.46510819*m.x931 + 13.94696158*m.x932 - 7.28623839*m.x933
- 23.57687014*m.x934 <= 0)
m.c984 = Constraint(expr= - 0.309838295393634*m.x935 + 13.94696158*m.x936 + 24.46510819*m.x937 - 7.28623839*m.x938
- 23.57687014*m.x939 <= 0)
m.c985 = Constraint(expr= - 0.309838295393634*m.x940 + 13.94696158*m.x941 + 24.46510819*m.x942 - 7.28623839*m.x943
- 23.57687014*m.x944 <= 0)
m.c986 = Constraint(expr= - 0.309838295393634*m.x945 + 13.94696158*m.x946 + 24.46510819*m.x947 - 7.28623839*m.x948
- 23.57687014*m.x949 <= 0)
m.c987 = Constraint(expr= - 7.28623839*m.x950 - 23.57687014*m.x951 - 0.132557606221724*m.x952 + 13.94696158*m.x953
+ 24.46510819*m.x954 <= 0)
m.c988 = Constraint(expr= 13.94696158*m.x955 + 24.46510819*m.x956 - 7.28623839*m.x957 - 23.57687014*m.x958
- 0.132557606221724*m.x959 <= 0)
m.c989 = Constraint(expr= - 0.132557606221724*m.x960 + 13.94696158*m.x961 + 24.46510819*m.x962 - 7.28623839*m.x963
- 23.57687014*m.x964 <= 0)
m.c990 = Constraint(expr= - 0.309838295393634*m.x965 + 29.29404529*m.x966 - 108.39408287*m.x967 + 442.21990639*m.x968
- 454.58448169*m.x969 <= 0)
m.c991 = Constraint(expr= - 0.309838295393634*m.x970 + 29.29404529*m.x971 - 108.39408287*m.x972 + 442.21990639*m.x973
- 454.58448169*m.x974 <= 0)
m.c992 = Constraint(expr= 29.29404529*m.x975 + 442.21990639*m.x976 - 454.58448169*m.x977 - 0.309838295393634*m.x978
- 108.39408287*m.x979 <= 0)
m.c993 = Constraint(expr= - 0.309838295393634*m.x980 + 29.29404529*m.x981 - 108.39408287*m.x982 + 442.21990639*m.x983
- 454.58448169*m.x984 <= 0)
m.c994 = Constraint(expr= - 0.309838295393634*m.x985 + 29.29404529*m.x986 - 108.39408287*m.x987 + 442.21990639*m.x988
- 454.58448169*m.x989 <= 0)
m.c995 = Constraint(expr= - 0.309838295393634*m.x990 + 29.29404529*m.x991 - 108.39408287*m.x992 + 442.21990639*m.x993
- 454.58448169*m.x994 <= 0)
m.c996 = Constraint(expr= - 0.132557606221724*m.x995 + 29.29404529*m.x996 - 108.39408287*m.x997 + 442.21990639*m.x998
- 454.58448169*m.x999 <= 0)
m.c997 = Constraint(expr= - 0.132557606221724*m.x1000 + 29.29404529*m.x1001 - 108.39408287*m.x1002
+ 442.21990639*m.x1003 - 454.58448169*m.x1004 <= 0)
m.c998 = Constraint(expr= - 0.132557606221724*m.x1005 + 29.29404529*m.x1006 - 108.39408287*m.x1007
+ 442.21990639*m.x1008 - 454.58448169*m.x1009 <= 0)
m.c999 = Constraint(expr= - 0.309838295393634*m.x1010 + 29.29404529*m.x1011 - 108.39408287*m.x1012
+ 442.21990639*m.x1013 - 454.58448169*m.x1014 <= 0)
m.c1000 = Constraint(expr= - 0.309838295393634*m.x1015 + 29.29404529*m.x1016 - 108.39408287*m.x1017
+ 442.21990639*m.x1018 - 454.58448169*m.x1019 <= 0)
m.c1001 = Constraint(expr= - 0.309838295393634*m.x1020 + 29.29404529*m.x1021 - 108.39408287*m.x1022
+ 442.21990639*m.x1023 - 454.58448169*m.x1024 <= 0)
m.c1002 = Constraint(expr= - 0.309838295393634*m.x1025 + 29.29404529*m.x1026 - 108.39408287*m.x1027
+ 442.21990639*m.x1028 - 454.58448169*m.x1029 <= 0)
m.c1003 = Constraint(expr= - 0.309838295393634*m.x1030 + 29.29404529*m.x1031 - 108.39408287*m.x1032
+ 442.21990639*m.x1033 - 454.58448169*m.x1034 <= 0)
m.c1004 = Constraint(expr= - 0.309838295393634*m.x1035 + 29.29404529*m.x1036 - 108.39408287*m.x1037
+ 442.21990639*m.x1038 - 454.58448169*m.x1039 <= 0)
m.c1005 = Constraint(expr= - 0.132557606221724*m.x1040 + 29.29404529*m.x1041 - 108.39408287*m.x1042
+ 442.21990639*m.x1043 - 454.58448169*m.x1044 <= 0)
m.c1006 = Constraint(expr= - 0.132557606221724*m.x1045 + 29.29404529*m.x1046 - 108.39408287*m.x1047
+ 442.21990639*m.x1048 - 454.58448169*m.x1049 <= 0)
m.c1007 = Constraint(expr= - 0.132557606221724*m.x1050 + 29.29404529*m.x1051 - 108.39408287*m.x1052
+ 442.21990639*m.x1053 - 454.58448169*m.x1054 <= 0)
m.c1008 = Constraint(expr= - 0.309838295393634*m.x1055 + 25.92674585*m.x1056 + 18.13482123*m.x1057 + 22.12766012*m.x1058
- 42.68950769*m.x1059 <= 0)
m.c1009 = Constraint(expr= - 0.309838295393634*m.x1060 + 25.92674585*m.x1061 + 18.13482123*m.x1062 + 22.12766012*m.x1063
- 42.68950769*m.x1064 <= 0)
m.c1010 = Constraint(expr= - 0.309838295393634*m.x1065 + 25.92674585*m.x1066 + 18.13482123*m.x1067 + 22.12766012*m.x1068
- 42.68950769*m.x1069 <= 0)
m.c1011 = Constraint(expr= - 0.309838295393634*m.x1070 + 25.92674585*m.x1071 + 18.13482123*m.x1072 + 22.12766012*m.x1073
- 42.68950769*m.x1074 <= 0)
m.c1012 = Constraint(expr= - 0.309838295393634*m.x1075 + 25.92674585*m.x1076 + 18.13482123*m.x1077 + 22.12766012*m.x1078
- 42.68950769*m.x1079 <= 0)
m.c1013 = Constraint(expr= - 0.309838295393634*m.x1080 + 25.92674585*m.x1081 + 18.13482123*m.x1082 + 22.12766012*m.x1083
- 42.68950769*m.x1084 <= 0)
m.c1014 = Constraint(expr= - 0.132557606221724*m.x1085 + 25.92674585*m.x1086 + 18.13482123*m.x1087 + 22.12766012*m.x1088
- 42.68950769*m.x1089 <= 0)
m.c1015 = Constraint(expr= - 0.132557606221724*m.x1090 + 25.92674585*m.x1091 + 18.13482123*m.x1092 + 22.12766012*m.x1093
- 42.68950769*m.x1094 <= 0)
m.c1016 = Constraint(expr= - 0.132557606221724*m.x1095 + 25.92674585*m.x1096 + 18.13482123*m.x1097 + 22.12766012*m.x1098
- 42.68950769*m.x1099 <= 0)
m.c1017 = Constraint(expr= - 0.309838295393634*m.x1100 + 25.92674585*m.x1101 + 18.13482123*m.x1102 + 22.12766012*m.x1103
- 42.68950769*m.x1104 <= 0)
m.c1018 = Constraint(expr= - 0.309838295393634*m.x1105 + 25.92674585*m.x1106 + 18.13482123*m.x1107 + 22.12766012*m.x1108
- 42.68950769*m.x1109 <= 0)
m.c1019 = Constraint(expr= - 0.309838295393634*m.x1110 + 25.92674585*m.x1111 + 18.13482123*m.x1112 + 22.12766012*m.x1113
- 42.68950769*m.x1114 <= 0)
m.c1020 = Constraint(expr= - 0.309838295393634*m.x1115 + 25.92674585*m.x1116 + 18.13482123*m.x1117 + 22.12766012*m.x1118
- 42.68950769*m.x1119 <= 0)
m.c1021 = Constraint(expr= - 0.309838295393634*m.x1120 + 25.92674585*m.x1121 + 18.13482123*m.x1122 + 22.12766012*m.x1123
- 42.68950769*m.x1124 <= 0)
m.c1022 = Constraint(expr= - 0.309838295393634*m.x1125 + 25.92674585*m.x1126 + 18.13482123*m.x1127 + 22.12766012*m.x1128
- 42.68950769*m.x1129 <= 0)
m.c1023 = Constraint(expr= - 0.132557606221724*m.x1130 + 25.92674585*m.x1131 + 18.13482123*m.x1132 + 22.12766012*m.x1133
- 42.68950769*m.x1134 <= 0)
m.c1024 = Constraint(expr= - 0.132557606221724*m.x1135 + 25.92674585*m.x1136 + 18.13482123*m.x1137 + 22.12766012*m.x1138
- 42.68950769*m.x1139 <= 0)
m.c1025 = Constraint(expr= - 0.132557606221724*m.x1140 + 25.92674585*m.x1141 + 18.13482123*m.x1142 + 22.12766012*m.x1143
- 42.68950769*m.x1144 <= 0)
m.c1026 = Constraint(expr= 17.4714791*m.x1145 - 39.98407808*m.x1146 + 134.55943082*m.x1147 - 135.88441782*m.x1148
- 0.309838295393634*m.x1149 <= 0)
m.c1027 = Constraint(expr= - 0.309838295393634*m.x1150 + 17.4714791*m.x1151 - 39.98407808*m.x1152 + 134.55943082*m.x1153
- 135.88441782*m.x1154 <= 0)
m.c1028 = Constraint(expr= - 0.309838295393634*m.x1155 + 17.4714791*m.x1156 - 39.98407808*m.x1157 + 134.55943082*m.x1158
- 135.88441782*m.x1159 <= 0)
m.c1029 = Constraint(expr= - 0.309838295393634*m.x1160 + 17.4714791*m.x1161 - 39.98407808*m.x1162 + 134.55943082*m.x1163
- 135.88441782*m.x1164 <= 0)
m.c1030 = Constraint(expr= - 0.309838295393634*m.x1165 + 17.4714791*m.x1166 - 39.98407808*m.x1167 + 134.55943082*m.x1168
- 135.88441782*m.x1169 <= 0)
m.c1031 = Constraint(expr= - 0.309838295393634*m.x1170 + 17.4714791*m.x1171 - 39.98407808*m.x1172 + 134.55943082*m.x1173
- 135.88441782*m.x1174 <= 0)
m.c1032 = Constraint(expr= - 0.132557606221724*m.x1175 + 17.4714791*m.x1176 - 39.98407808*m.x1177 + 134.55943082*m.x1178
- 135.88441782*m.x1179 <= 0)
m.c1033 = Constraint(expr= - 0.132557606221724*m.x1180 + 17.4714791*m.x1181 - 39.98407808*m.x1182 + 134.55943082*m.x1183
- 135.88441782*m.x1184 <= 0)
m.c1034 = Constraint(expr= - 0.132557606221724*m.x1185 + 17.4714791*m.x1186 - 39.98407808*m.x1187 + 134.55943082*m.x1188
- 135.88441782*m.x1189 <= 0)
m.c1035 = Constraint(expr= 17.4714791*m.x1190 - 39.98407808*m.x1191 + 134.55943082*m.x1192 - 135.88441782*m.x1193
- 0.309838295393634*m.x1194 <= 0)
m.c1036 = Constraint(expr= - 0.309838295393634*m.x1195 + 17.4714791*m.x1196 - 39.98407808*m.x1197 + 134.55943082*m.x1198
- 135.88441782*m.x1199 <= 0)
m.c1037 = Constraint(expr= - 0.309838295393634*m.x1200 + 17.4714791*m.x1201 - 39.98407808*m.x1202 + 134.55943082*m.x1203
- 135.88441782*m.x1204 <= 0)
m.c1038 = Constraint(expr= - 0.309838295393634*m.x1205 + 17.4714791*m.x1206 - 39.98407808*m.x1207 + 134.55943082*m.x1208
- 135.88441782*m.x1209 <= 0)
m.c1039 = Constraint(expr= - 0.309838295393634*m.x1210 + 17.4714791*m.x1211 - 39.98407808*m.x1212 + 134.55943082*m.x1213
- 135.88441782*m.x1214 <= 0)
m.c1040 = Constraint(expr= - 0.309838295393634*m.x1215 + 17.4714791*m.x1216 - 39.98407808*m.x1217 + 134.55943082*m.x1218
- 135.88441782*m.x1219 <= 0)
m.c1041 = Constraint(expr= - 0.132557606221724*m.x1220 + 17.4714791*m.x1221 - 39.98407808*m.x1222 + 134.55943082*m.x1223
- 135.88441782*m.x1224 <= 0)
m.c1042 = Constraint(expr= - 0.132557606221724*m.x1225 + 17.4714791*m.x1226 - 39.98407808*m.x1227 + 134.55943082*m.x1228
- 135.88441782*m.x1229 <= 0)
m.c1043 = Constraint(expr= - 0.132557606221724*m.x1230 + 17.4714791*m.x1231 - 39.98407808*m.x1232 + 134.55943082*m.x1233
- 135.88441782*m.x1234 <= 0)
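# c1044-c1421: definitions of the squared/cubed auxiliaries. Each pair first
# sets an x12xx variable equal to a model variable squared or cubed, then ties
# another variable to it (e.g. x606 = 5*x290**2, x833 = x416**3).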
m.c1044 = Constraint(expr=m.x290**2 - m.x1235 == 0)
m.c1045 = Constraint(expr= m.x606 - 5*m.x1235 == 0)
m.c1046 = Constraint(expr=m.x292**2 - m.x1236 == 0)
m.c1047 = Constraint(expr= m.x608 - 5*m.x1236 == 0)
m.c1048 = Constraint(expr=m.x294**2 - m.x1237 == 0)
m.c1049 = Constraint(expr= m.x610 - 5*m.x1237 == 0)
m.c1050 = Constraint(expr=m.x296**2 - m.x1238 == 0)
m.c1051 = Constraint(expr= m.x612 - 5*m.x1238 == 0)
m.c1052 = Constraint(expr=m.x298**2 - m.x1239 == 0)
m.c1053 = Constraint(expr= m.x614 - 5*m.x1239 == 0)
m.c1054 = Constraint(expr=m.x300**2 - m.x1240 == 0)
m.c1055 = Constraint(expr= m.x616 - 5*m.x1240 == 0)
m.c1056 = Constraint(expr=m.x302**2 - m.x1241 == 0)
m.c1057 = Constraint(expr= m.x618 - 5*m.x1241 == 0)
m.c1058 = Constraint(expr=m.x304**2 - m.x1242 == 0)
m.c1059 = Constraint(expr= m.x620 - 5*m.x1242 == 0)
m.c1060 = Constraint(expr=m.x306**2 - m.x1243 == 0)
m.c1061 = Constraint(expr= m.x622 - 5*m.x1243 == 0)
m.c1062 = Constraint(expr=m.x308**2 - m.x1244 == 0)
m.c1063 = Constraint(expr= m.x625 - 4*m.x1244 == 0)
m.c1064 = Constraint(expr=m.x310**2 - m.x1245 == 0)
m.c1065 = Constraint(expr= m.x628 - 4*m.x1245 == 0)
m.c1066 = Constraint(expr=m.x312**2 - m.x1246 == 0)
m.c1067 = Constraint(expr= m.x631 - 4*m.x1246 == 0)
m.c1068 = Constraint(expr=m.x314**2 - m.x1247 == 0)
m.c1069 = Constraint(expr= m.x634 - 4*m.x1247 == 0)
m.c1070 = Constraint(expr=m.x316**2 - m.x1248 == 0)
m.c1071 = Constraint(expr= m.x637 - 4*m.x1248 == 0)
m.c1072 = Constraint(expr=m.x318**2 - m.x1249 == 0)
m.c1073 = Constraint(expr= m.x640 - 4*m.x1249 == 0)
m.c1074 = Constraint(expr=m.x320**2 - m.x1250 == 0)
m.c1075 = Constraint(expr= m.x643 - 4*m.x1250 == 0)
m.c1076 = Constraint(expr=m.x322**2 - m.x1251 == 0)
m.c1077 = Constraint(expr= m.x646 - 4*m.x1251 == 0)
m.c1078 = Constraint(expr=m.x324**2 - m.x1252 == 0)
m.c1079 = Constraint(expr= m.x649 - 4*m.x1252 == 0)
m.c1080 = Constraint(expr=m.x335**2 - m.x1253 == 0)
m.c1081 = Constraint(expr= m.x651 - 5*m.x1253 == 0)
m.c1082 = Constraint(expr=m.x337**2 - m.x1254 == 0)
m.c1083 = Constraint(expr= m.x653 - 5*m.x1254 == 0)
m.c1084 = Constraint(expr=m.x339**2 - m.x1255 == 0)
m.c1085 = Constraint(expr= m.x655 - 5*m.x1255 == 0)
m.c1086 = Constraint(expr=m.x341**2 - m.x1256 == 0)
m.c1087 = Constraint(expr= m.x657 - 5*m.x1256 == 0)
m.c1088 = Constraint(expr=m.x343**2 - m.x1257 == 0)
m.c1089 = Constraint(expr= m.x659 - 5*m.x1257 == 0)
m.c1090 = Constraint(expr=m.x345**2 - m.x1258 == 0)
m.c1091 = Constraint(expr= m.x661 - 5*m.x1258 == 0)
m.c1092 = Constraint(expr=m.x347**2 - m.x1259 == 0)
m.c1093 = Constraint(expr= m.x663 - 5*m.x1259 == 0)
m.c1094 = Constraint(expr=m.x349**2 - m.x1260 == 0)
m.c1095 = Constraint(expr= m.x665 - 5*m.x1260 == 0)
m.c1096 = Constraint(expr=m.x351**2 - m.x1261 == 0)
m.c1097 = Constraint(expr= m.x667 - 5*m.x1261 == 0)
m.c1098 = Constraint(expr=m.x416**2 - m.x1262 == 0)
m.c1099 = Constraint(expr= m.x423 - m.x1262 == 0)
m.c1100 = Constraint(expr=m.x416**3 - m.x1263 == 0)
m.c1101 = Constraint(expr= m.x833 - m.x1263 == 0)
m.c1102 = Constraint(expr=m.x418**2 - m.x1264 == 0)
m.c1103 = Constraint(expr= m.x427 - m.x1264 == 0)
m.c1104 = Constraint(expr=m.x418**3 - m.x1265 == 0)
m.c1105 = Constraint(expr= m.x839 - m.x1265 == 0)
m.c1106 = Constraint(expr=m.x420**2 - m.x1266 == 0)
m.c1107 = Constraint(expr= m.x439 - m.x1266 == 0)
m.c1108 = Constraint(expr=m.x420**3 - m.x1267 == 0)
m.c1109 = Constraint(expr= m.x844 - m.x1267 == 0)
m.c1110 = Constraint(expr=m.x422**2 - m.x1268 == 0)
m.c1111 = Constraint(expr= m.x447 - m.x1268 == 0)
m.c1112 = Constraint(expr=m.x422**3 - m.x1269 == 0)
m.c1113 = Constraint(expr= m.x848 - m.x1269 == 0)
m.c1114 = Constraint(expr=m.x424**2 - m.x1270 == 0)
m.c1115 = Constraint(expr= m.x455 - m.x1270 == 0)
m.c1116 = Constraint(expr=m.x424**3 - m.x1271 == 0)
m.c1117 = Constraint(expr= m.x854 - m.x1271 == 0)
m.c1118 = Constraint(expr=m.x426**2 - m.x1272 == 0)
m.c1119 = Constraint(expr= m.x459 - m.x1272 == 0)
m.c1120 = Constraint(expr=m.x426**3 - m.x1273 == 0)
m.c1121 = Constraint(expr= m.x859 - m.x1273 == 0)
m.c1122 = Constraint(expr=m.x428**2 - m.x1274 == 0)
m.c1123 = Constraint(expr= m.x469 - m.x1274 == 0)
m.c1124 = Constraint(expr=m.x428**3 - m.x1275 == 0)
m.c1125 = Constraint(expr= m.x864 - m.x1275 == 0)
m.c1126 = Constraint(expr=m.x430**2 - m.x1276 == 0)
m.c1127 = Constraint(expr= m.x479 - m.x1276 == 0)
m.c1128 = Constraint(expr=m.x430**3 - m.x1277 == 0)
m.c1129 = Constraint(expr= m.x869 - m.x1277 == 0)
m.c1130 = Constraint(expr=m.x432**2 - m.x1278 == 0)
m.c1131 = Constraint(expr= m.x483 - m.x1278 == 0)
m.c1132 = Constraint(expr=m.x432**3 - m.x1279 == 0)
m.c1133 = Constraint(expr= m.x871 - m.x1279 == 0)
m.c1134 = Constraint(expr=m.x434**2 - m.x1280 == 0)
m.c1135 = Constraint(expr= m.x493 - m.x1280 == 0)
m.c1136 = Constraint(expr=m.x434**3 - m.x1281 == 0)
m.c1137 = Constraint(expr= m.x878 - m.x1281 == 0)
m.c1138 = Constraint(expr=m.x436**2 - m.x1282 == 0)
m.c1139 = Constraint(expr= m.x499 - m.x1282 == 0)
m.c1140 = Constraint(expr=m.x436**3 - m.x1283 == 0)
m.c1141 = Constraint(expr= m.x884 - m.x1283 == 0)
m.c1142 = Constraint(expr=m.x438**2 - m.x1284 == 0)
m.c1143 = Constraint(expr= m.x505 - m.x1284 == 0)
m.c1144 = Constraint(expr=m.x438**3 - m.x1285 == 0)
m.c1145 = Constraint(expr= m.x889 - m.x1285 == 0)
m.c1146 = Constraint(expr=m.x440**2 - m.x1286 == 0)
m.c1147 = Constraint(expr= m.x511 - m.x1286 == 0)
m.c1148 = Constraint(expr=m.x440**3 - m.x1287 == 0)
m.c1149 = Constraint(expr= m.x894 - m.x1287 == 0)
m.c1150 = Constraint(expr=m.x442**2 - m.x1288 == 0)
m.c1151 = Constraint(expr= m.x515 - m.x1288 == 0)
m.c1152 = Constraint(expr=m.x442**3 - m.x1289 == 0)
m.c1153 = Constraint(expr= m.x899 - m.x1289 == 0)
m.c1154 = Constraint(expr=m.x444**2 - m.x1290 == 0)
m.c1155 = Constraint(expr= m.x523 - m.x1290 == 0)
m.c1156 = Constraint(expr=m.x444**3 - m.x1291 == 0)
m.c1157 = Constraint(expr= m.x904 - m.x1291 == 0)
m.c1158 = Constraint(expr=m.x446**2 - m.x1292 == 0)
m.c1159 = Constraint(expr= m.x529 - m.x1292 == 0)
m.c1160 = Constraint(expr=m.x446**3 - m.x1293 == 0)
m.c1161 = Constraint(expr= m.x909 - m.x1293 == 0)
m.c1162 = Constraint(expr=m.x448**2 - m.x1294 == 0)
m.c1163 = Constraint(expr= m.x533 - m.x1294 == 0)
m.c1164 = Constraint(expr=m.x448**3 - m.x1295 == 0)
m.c1165 = Constraint(expr= m.x913 - m.x1295 == 0)
m.c1166 = Constraint(expr=m.x450**2 - m.x1296 == 0)
m.c1167 = Constraint(expr= m.x541 - m.x1296 == 0)
m.c1168 = Constraint(expr=m.x450**3 - m.x1297 == 0)
m.c1169 = Constraint(expr= m.x919 - m.x1297 == 0)
m.c1170 = Constraint(expr=m.x452**2 - m.x1298 == 0)
m.c1171 = Constraint(expr= m.x547 - m.x1298 == 0)
m.c1172 = Constraint(expr=m.x452**3 - m.x1299 == 0)
m.c1173 = Constraint(expr= m.x924 - m.x1299 == 0)
m.c1174 = Constraint(expr=m.x454**2 - m.x1300 == 0)
m.c1175 = Constraint(expr= m.x551 - m.x1300 == 0)
m.c1176 = Constraint(expr=m.x454**3 - m.x1301 == 0)
m.c1177 = Constraint(expr= m.x929 - m.x1301 == 0)
m.c1178 = Constraint(expr=m.x456**2 - m.x1302 == 0)
m.c1179 = Constraint(expr= m.x557 - m.x1302 == 0)
m.c1180 = Constraint(expr=m.x456**3 - m.x1303 == 0)
m.c1181 = Constraint(expr= m.x934 - m.x1303 == 0)
m.c1182 = Constraint(expr=m.x458**2 - m.x1304 == 0)
m.c1183 = Constraint(expr= m.x565 - m.x1304 == 0)
m.c1184 = Constraint(expr=m.x458**3 - m.x1305 == 0)
m.c1185 = Constraint(expr= m.x939 - m.x1305 == 0)
m.c1186 = Constraint(expr=m.x460**2 - m.x1306 == 0)
m.c1187 = Constraint(expr= m.x569 - m.x1306 == 0)
m.c1188 = Constraint(expr=m.x460**3 - m.x1307 == 0)
m.c1189 = Constraint(expr= m.x944 - m.x1307 == 0)
m.c1190 = Constraint(expr=m.x462**2 - m.x1308 == 0)
m.c1191 = Constraint(expr= m.x575 - m.x1308 == 0)
m.c1192 = Constraint(expr=m.x462**3 - m.x1309 == 0)
m.c1193 = Constraint(expr= m.x949 - m.x1309 == 0)
m.c1194 = Constraint(expr=m.x464**2 - m.x1310 == 0)
m.c1195 = Constraint(expr= m.x85 - m.x1310 == 0)
m.c1196 = Constraint(expr=m.x464**3 - m.x1311 == 0)
m.c1197 = Constraint(expr= m.x951 - m.x1311 == 0)
m.c1198 = Constraint(expr=m.x466**2 - m.x1312 == 0)
m.c1199 = Constraint(expr= m.x87 - m.x1312 == 0)
m.c1200 = Constraint(expr=m.x466**3 - m.x1313 == 0)
m.c1201 = Constraint(expr= m.x958 - m.x1313 == 0)
m.c1202 = Constraint(expr=m.x468**2 - m.x1314 == 0)
m.c1203 = Constraint(expr= m.x91 - m.x1314 == 0)
m.c1204 = Constraint(expr=m.x468**3 - m.x1315 == 0)
m.c1205 = Constraint(expr= m.x964 - m.x1315 == 0)
m.c1206 = Constraint(expr=m.x470**2 - m.x1316 == 0)
m.c1207 = Constraint(expr= m.x95 - m.x1316 == 0)
m.c1208 = Constraint(expr=m.x470**3 - m.x1317 == 0)
m.c1209 = Constraint(expr= m.x969 - m.x1317 == 0)
m.c1210 = Constraint(expr=m.x472**2 - m.x1318 == 0)
m.c1211 = Constraint(expr= m.x97 - m.x1318 == 0)
m.c1212 = Constraint(expr=m.x472**3 - m.x1319 == 0)
m.c1213 = Constraint(expr= m.x974 - m.x1319 == 0)
m.c1214 = Constraint(expr=m.x474**2 - m.x1320 == 0)
m.c1215 = Constraint(expr= m.x103 - m.x1320 == 0)
m.c1216 = Constraint(expr=m.x474**3 - m.x1321 == 0)
m.c1217 = Constraint(expr= m.x977 - m.x1321 == 0)
m.c1218 = Constraint(expr=m.x476**2 - m.x1322 == 0)
m.c1219 = Constraint(expr= m.x107 - m.x1322 == 0)
m.c1220 = Constraint(expr=m.x476**3 - m.x1323 == 0)
m.c1221 = Constraint(expr= m.x984 - m.x1323 == 0)
m.c1222 = Constraint(expr=m.x478**2 - m.x1324 == 0)
m.c1223 = Constraint(expr= m.x110 - m.x1324 == 0)
m.c1224 = Constraint(expr=m.x478**3 - m.x1325 == 0)
m.c1225 = Constraint(expr= m.x989 - m.x1325 == 0)
m.c1226 = Constraint(expr=m.x480**2 - m.x1326 == 0)
m.c1227 = Constraint(expr= m.x113 - m.x1326 == 0)
m.c1228 = Constraint(expr=m.x480**3 - m.x1327 == 0)
m.c1229 = Constraint(expr= m.x994 - m.x1327 == 0)
m.c1230 = Constraint(expr=m.x482**2 - m.x1328 == 0)
m.c1231 = Constraint(expr= m.x119 - m.x1328 == 0)
m.c1232 = Constraint(expr=m.x482**3 - m.x1329 == 0)
m.c1233 = Constraint(expr= m.x999 - m.x1329 == 0)
m.c1234 = Constraint(expr=m.x484**2 - m.x1330 == 0)
m.c1235 = Constraint(expr= m.x123 - m.x1330 == 0)
m.c1236 = Constraint(expr=m.x484**3 - m.x1331 == 0)
m.c1237 = Constraint(expr= m.x1004 - m.x1331 == 0)
m.c1238 = Constraint(expr=m.x486**2 - m.x1332 == 0)
m.c1239 = Constraint(expr= m.x125 - m.x1332 == 0)
m.c1240 = Constraint(expr=m.x486**3 - m.x1333 == 0)
m.c1241 = Constraint(expr= m.x1009 - m.x1333 == 0)
m.c1242 = Constraint(expr=m.x488**2 - m.x1334 == 0)
m.c1243 = Constraint(expr= m.x130 - m.x1334 == 0)
m.c1244 = Constraint(expr=m.x488**3 - m.x1335 == 0)
m.c1245 = Constraint(expr= m.x1014 - m.x1335 == 0)
m.c1246 = Constraint(expr=m.x490**2 - m.x1336 == 0)
m.c1247 = Constraint(expr= m.x132 - m.x1336 == 0)
m.c1248 = Constraint(expr=m.x490**3 - m.x1337 == 0)
m.c1249 = Constraint(expr= m.x1019 - m.x1337 == 0)
m.c1250 = Constraint(expr=m.x492**2 - m.x1338 == 0)
m.c1251 = Constraint(expr= m.x136 - m.x1338 == 0)
m.c1252 = Constraint(expr=m.x492**3 - m.x1339 == 0)
m.c1253 = Constraint(expr= m.x1024 - m.x1339 == 0)
m.c1254 = Constraint(expr=m.x494**2 - m.x1340 == 0)
m.c1255 = Constraint(expr= m.x139 - m.x1340 == 0)
m.c1256 = Constraint(expr=m.x494**3 - m.x1341 == 0)
m.c1257 = Constraint(expr= m.x1029 - m.x1341 == 0)
m.c1258 = Constraint(expr=m.x496**2 - m.x1342 == 0)
m.c1259 = Constraint(expr= m.x141 - m.x1342 == 0)
m.c1260 = Constraint(expr=m.x496**3 - m.x1343 == 0)
m.c1261 = Constraint(expr= m.x1034 - m.x1343 == 0)
m.c1262 = Constraint(expr=m.x498**2 - m.x1344 == 0)
m.c1263 = Constraint(expr= m.x144 - m.x1344 == 0)
m.c1264 = Constraint(expr=m.x498**3 - m.x1345 == 0)
m.c1265 = Constraint(expr= m.x1039 - m.x1345 == 0)
m.c1266 = Constraint(expr=m.x500**2 - m.x1346 == 0)
m.c1267 = Constraint(expr= m.x148 - m.x1346 == 0)
m.c1268 = Constraint(expr=m.x500**3 - m.x1347 == 0)
m.c1269 = Constraint(expr= m.x1044 - m.x1347 == 0)
m.c1270 = Constraint(expr=m.x502**2 - m.x1348 == 0)
m.c1271 = Constraint(expr= m.x151 - m.x1348 == 0)
m.c1272 = Constraint(expr=m.x502**3 - m.x1349 == 0)
m.c1273 = Constraint(expr= m.x1049 - m.x1349 == 0)
m.c1274 = Constraint(expr=m.x504**2 - m.x1350 == 0)
m.c1275 = Constraint(expr= m.x153 - m.x1350 == 0)
m.c1276 = Constraint(expr=m.x504**3 - m.x1351 == 0)
m.c1277 = Constraint(expr= m.x1054 - m.x1351 == 0)
m.c1278 = Constraint(expr=m.x506**2 - m.x1352 == 0)
m.c1279 = Constraint(expr= m.x158 - m.x1352 == 0)
m.c1280 = Constraint(expr=m.x506**3 - m.x1353 == 0)
m.c1281 = Constraint(expr= m.x1059 - m.x1353 == 0)
m.c1282 = Constraint(expr=m.x508**2 - m.x1354 == 0)
m.c1283 = Constraint(expr= m.x161 - m.x1354 == 0)
m.c1284 = Constraint(expr=m.x508**3 - m.x1355 == 0)
m.c1285 = Constraint(expr= m.x1064 - m.x1355 == 0)
m.c1286 = Constraint(expr=m.x510**2 - m.x1356 == 0)
m.c1287 = Constraint(expr= m.x164 - m.x1356 == 0)
m.c1288 = Constraint(expr=m.x510**3 - m.x1357 == 0)
m.c1289 = Constraint(expr= m.x1069 - m.x1357 == 0)
m.c1290 = Constraint(expr=m.x512**2 - m.x1358 == 0)
m.c1291 = Constraint(expr= m.x168 - m.x1358 == 0)
m.c1292 = Constraint(expr=m.x512**3 - m.x1359 == 0)
m.c1293 = Constraint(expr= m.x1074 - m.x1359 == 0)
m.c1294 = Constraint(expr=m.x514**2 - m.x1360 == 0)
m.c1295 = Constraint(expr= m.x174 - m.x1360 == 0)
m.c1296 = Constraint(expr=m.x514**3 - m.x1361 == 0)
m.c1297 = Constraint(expr= m.x1079 - m.x1361 == 0)
m.c1298 = Constraint(expr=m.x516**2 - m.x1362 == 0)
m.c1299 = Constraint(expr= m.x176 - m.x1362 == 0)
m.c1300 = Constraint(expr=m.x516**3 - m.x1363 == 0)
m.c1301 = Constraint(expr= m.x1084 - m.x1363 == 0)
m.c1302 = Constraint(expr=m.x518**2 - m.x1364 == 0)
m.c1303 = Constraint(expr= m.x182 - m.x1364 == 0)
m.c1304 = Constraint(expr=m.x518**3 - m.x1365 == 0)
m.c1305 = Constraint(expr= m.x1089 - m.x1365 == 0)
m.c1306 = Constraint(expr=m.x520**2 - m.x1366 == 0)
m.c1307 = Constraint(expr= m.x186 - m.x1366 == 0)
m.c1308 = Constraint(expr=m.x520**3 - m.x1367 == 0)
m.c1309 = Constraint(expr= m.x1094 - m.x1367 == 0)
m.c1310 = Constraint(expr=m.x522**2 - m.x1368 == 0)
m.c1311 = Constraint(expr= m.x190 - m.x1368 == 0)
m.c1312 = Constraint(expr=m.x522**3 - m.x1369 == 0)
m.c1313 = Constraint(expr= m.x1099 - m.x1369 == 0)
m.c1314 = Constraint(expr=m.x524**2 - m.x1370 == 0)
m.c1315 = Constraint(expr= m.x192 - m.x1370 == 0)
m.c1316 = Constraint(expr=m.x524**3 - m.x1371 == 0)
m.c1317 = Constraint(expr= m.x1104 - m.x1371 == 0)
m.c1318 = Constraint(expr=m.x526**2 - m.x1372 == 0)
m.c1319 = Constraint(expr= m.x195 - m.x1372 == 0)
m.c1320 = Constraint(expr=m.x526**3 - m.x1373 == 0)
m.c1321 = Constraint(expr= m.x1109 - m.x1373 == 0)
m.c1322 = Constraint(expr=m.x528**2 - m.x1374 == 0)
m.c1323 = Constraint(expr= m.x199 - m.x1374 == 0)
m.c1324 = Constraint(expr=m.x528**3 - m.x1375 == 0)
m.c1325 = Constraint(expr= m.x1114 - m.x1375 == 0)
m.c1326 = Constraint(expr=m.x530**2 - m.x1376 == 0)
m.c1327 = Constraint(expr= m.x202 - m.x1376 == 0)
m.c1328 = Constraint(expr=m.x530**3 - m.x1377 == 0)
m.c1329 = Constraint(expr= m.x1119 - m.x1377 == 0)
m.c1330 = Constraint(expr=m.x532**2 - m.x1378 == 0)
m.c1331 = Constraint(expr= m.x205 - m.x1378 == 0)
m.c1332 = Constraint(expr=m.x532**3 - m.x1379 == 0)
m.c1333 = Constraint(expr= m.x1124 - m.x1379 == 0)
m.c1334 = Constraint(expr=m.x534**2 - m.x1380 == 0)
m.c1335 = Constraint(expr= m.x207 - m.x1380 == 0)
m.c1336 = Constraint(expr=m.x534**3 - m.x1381 == 0)
m.c1337 = Constraint(expr= m.x1129 - m.x1381 == 0)
m.c1338 = Constraint(expr=m.x536**2 - m.x1382 == 0)
m.c1339 = Constraint(expr= m.x210 - m.x1382 == 0)
m.c1340 = Constraint(expr=m.x536**3 - m.x1383 == 0)
m.c1341 = Constraint(expr= m.x1134 - m.x1383 == 0)
m.c1342 = Constraint(expr=m.x538**2 - m.x1384 == 0)
m.c1343 = Constraint(expr= m.x213 - m.x1384 == 0)
m.c1344 = Constraint(expr=m.x538**3 - m.x1385 == 0)
m.c1345 = Constraint(expr= m.x1139 - m.x1385 == 0)
m.c1346 = Constraint(expr=m.x540**2 - m.x1386 == 0)
m.c1347 = Constraint(expr= m.x217 - m.x1386 == 0)
m.c1348 = Constraint(expr=m.x540**3 - m.x1387 == 0)
m.c1349 = Constraint(expr= m.x1144 - m.x1387 == 0)
m.c1350 = Constraint(expr=m.x542**2 - m.x1388 == 0)
m.c1351 = Constraint(expr= m.x221 - m.x1388 == 0)
m.c1352 = Constraint(expr=m.x542**3 - m.x1389 == 0)
m.c1353 = Constraint(expr= m.x1148 - m.x1389 == 0)
m.c1354 = Constraint(expr=m.x544**2 - m.x1390 == 0)
m.c1355 = Constraint(expr= m.x223 - m.x1390 == 0)
m.c1356 = Constraint(expr=m.x544**3 - m.x1391 == 0)
m.c1357 = Constraint(expr= m.x1154 - m.x1391 == 0)
m.c1358 = Constraint(expr=m.x546**2 - m.x1392 == 0)
m.c1359 = Constraint(expr= m.x229 - m.x1392 == 0)
m.c1360 = Constraint(expr=m.x546**3 - m.x1393 == 0)
m.c1361 = Constraint(expr= m.x1159 - m.x1393 == 0)
m.c1362 = Constraint(expr=m.x548**2 - m.x1394 == 0)
m.c1363 = Constraint(expr= m.x231 - m.x1394 == 0)
m.c1364 = Constraint(expr=m.x548**3 - m.x1395 == 0)
m.c1365 = Constraint(expr= m.x1164 - m.x1395 == 0)
m.c1366 = Constraint(expr=m.x550**2 - m.x1396 == 0)
m.c1367 = Constraint(expr= m.x235 - m.x1396 == 0)
m.c1368 = Constraint(expr=m.x550**3 - m.x1397 == 0)
m.c1369 = Constraint(expr= m.x1169 - m.x1397 == 0)
m.c1370 = Constraint(expr=m.x552**2 - m.x1398 == 0)
m.c1371 = Constraint(expr= m.x239 - m.x1398 == 0)
m.c1372 = Constraint(expr=m.x552**3 - m.x1399 == 0)
m.c1373 = Constraint(expr= m.x1174 - m.x1399 == 0)
m.c1374 = Constraint(expr=m.x554**2 - m.x1400 == 0)
m.c1375 = Constraint(expr= m.x245 - m.x1400 == 0)
m.c1376 = Constraint(expr=m.x554**3 - m.x1401 == 0)
m.c1377 = Constraint(expr= m.x1179 - m.x1401 == 0)
m.c1378 = Constraint(expr=m.x556**2 - m.x1402 == 0)
m.c1379 = Constraint(expr= m.x247 - m.x1402 == 0)
m.c1380 = Constraint(expr=m.x556**3 - m.x1403 == 0)
m.c1381 = Constraint(expr= m.x1184 - m.x1403 == 0)
m.c1382 = Constraint(expr=m.x558**2 - m.x1404 == 0)
m.c1383 = Constraint(expr= m.x253 - m.x1404 == 0)
m.c1384 = Constraint(expr=m.x558**3 - m.x1405 == 0)
m.c1385 = Constraint(expr= m.x1189 - m.x1405 == 0)
m.c1386 = Constraint(expr=m.x560**2 - m.x1406 == 0)
m.c1387 = Constraint(expr= m.x255 - m.x1406 == 0)
m.c1388 = Constraint(expr=m.x560**3 - m.x1407 == 0)
m.c1389 = Constraint(expr= m.x1193 - m.x1407 == 0)
m.c1390 = Constraint(expr=m.x562**2 - m.x1408 == 0)
m.c1391 = Constraint(expr= m.x258 - m.x1408 == 0)
m.c1392 = Constraint(expr=m.x562**3 - m.x1409 == 0)
m.c1393 = Constraint(expr= m.x1199 - m.x1409 == 0)
m.c1394 = Constraint(expr=m.x564**2 - m.x1410 == 0)
m.c1395 = Constraint(expr= m.x262 - m.x1410 == 0)
m.c1396 = Constraint(expr=m.x564**3 - m.x1411 == 0)
m.c1397 = Constraint(expr= m.x1204 - m.x1411 == 0)
m.c1398 = Constraint(expr=m.x566**2 - m.x1412 == 0)
m.c1399 = Constraint(expr= m.x265 - m.x1412 == 0)
m.c1400 = Constraint(expr=m.x566**3 - m.x1413 == 0)
m.c1401 = Constraint(expr= m.x1209 - m.x1413 == 0)
m.c1402 = Constraint(expr=m.x568**2 - m.x1414 == 0)
m.c1403 = Constraint(expr= m.x267 - m.x1414 == 0)
m.c1404 = Constraint(expr=m.x568**3 - m.x1415 == 0)
m.c1405 = Constraint(expr= m.x1214 - m.x1415 == 0)
m.c1406 = Constraint(expr=m.x570**2 - m.x1416 == 0)
m.c1407 = Constraint(expr= m.x270 - m.x1416 == 0)
m.c1408 = Constraint(expr=m.x570**3 - m.x1417 == 0)
m.c1409 = Constraint(expr= m.x1219 - m.x1417 == 0)
m.c1410 = Constraint(expr=m.x572**2 - m.x1418 == 0)
m.c1411 = Constraint(expr= m.x274 - m.x1418 == 0)
m.c1412 = Constraint(expr=m.x572**3 - m.x1419 == 0)
m.c1413 = Constraint(expr= m.x1224 - m.x1419 == 0)
m.c1414 = Constraint(expr=m.x574**2 - m.x1420 == 0)
m.c1415 = Constraint(expr= m.x277 - m.x1420 == 0)
m.c1416 = Constraint(expr=m.x574**3 - m.x1421 == 0)
m.c1417 = Constraint(expr= m.x1229 - m.x1421 == 0)
m.c1418 = Constraint(expr=m.x576**2 - m.x1422 == 0)
m.c1419 = Constraint(expr= m.x279 - m.x1422 == 0)
m.c1420 = Constraint(expr=m.x576**3 - m.x1423 == 0)
m.c1421 = Constraint(expr= m.x1234 - m.x1423 == 0)
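# c1422 onward: bilinear and higher-order product definitions, e.g.
# x419 = x416*x785, x831 = x416*x785**2 (via x1424 = x785**2) and
# x830 = b2*x785**3 (via x1425 = x785**3).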
m.c1422 = Constraint(expr=m.x416*m.x785 - m.x419 == 0)
m.c1423 = Constraint(expr=m.x785*m.x1262 - m.x832 == 0)
m.c1424 = Constraint(expr=m.x434*m.x785 - m.x491 == 0)
m.c1425 = Constraint(expr=m.x785*m.x1280 - m.x877 == 0)
m.c1426 = Constraint(expr=m.x452*m.x785 - m.x545 == 0)
m.c1427 = Constraint(expr=m.x785*m.x1298 - m.x923 == 0)
m.c1428 = Constraint(expr=m.x785**2 - m.x1424 == 0)
m.c1429 = Constraint(expr= m.x421 - m.x1424 == 0)
m.c1430 = Constraint(expr=m.x416*m.x1424 - m.x831 == 0)
m.c1431 = Constraint(expr=m.x434*m.x1424 - m.x876 == 0)
m.c1432 = Constraint(expr=m.x452*m.x1424 - m.x922 == 0)
m.c1433 = Constraint(expr=m.x785**3 - m.x1425 == 0)
m.c1434 = Constraint(expr=m.b2*m.x1425 - m.x830 == 0)
m.c1435 = Constraint(expr=m.b11*m.x1425 - m.x875 == 0)
m.c1436 = Constraint(expr=m.b20*m.x1425 - m.x921 == 0)
m.c1437 = Constraint(expr=m.x418*m.x786 - m.x431 == 0)
m.c1438 = Constraint(expr=m.x786*m.x1264 - m.x838 == 0)
m.c1439 = Constraint(expr=m.x436*m.x786 - m.x497 == 0)
m.c1440 = Constraint(expr=m.x786*m.x1282 - m.x883 == 0)
m.c1441 = Constraint(expr=m.x454*m.x786 - m.x553 == 0)
m.c1442 = Constraint(expr=m.x786*m.x1300 - m.x928 == 0)
m.c1443 = Constraint(expr=m.x786**2 - m.x1426 == 0)
m.c1444 = Constraint(expr= m.x429 - m.x1426 == 0)
m.c1445 = Constraint(expr=m.x418*m.x1426 - m.x837 == 0)
m.c1446 = Constraint(expr=m.x436*m.x1426 - m.x882 == 0)
m.c1447 = Constraint(expr=m.x454*m.x1426 - m.x927 == 0)
m.c1448 = Constraint(expr=m.x786**3 - m.x1427 == 0)
m.c1449 = Constraint(expr=m.b3*m.x1427 - m.x836 == 0)
m.c1450 = Constraint(expr=m.b12*m.x1427 - m.x881 == 0)
m.c1451 = Constraint(expr=m.b21*m.x1427 - m.x926 == 0)
m.c1452 = Constraint(expr=m.x420*m.x787 - m.x437 == 0)
m.c1453 = Constraint(expr=m.x787*m.x1266 - m.x843 == 0)
m.c1454 = Constraint(expr=m.x438*m.x787 - m.x503 == 0)
m.c1455 = Constraint(expr=m.x787*m.x1284 - m.x888 == 0)
m.c1456 = Constraint(expr=m.x456*m.x787 - m.x559 == 0)
m.c1457 = Constraint(expr=m.x787*m.x1302 - m.x933 == 0)
m.c1458 = Constraint(expr=m.x787**2 - m.x1428 == 0)
m.c1459 = Constraint(expr= m.x435 - m.x1428 == 0)
m.c1460 = Constraint(expr=m.x420*m.x1428 - m.x842 == 0)
m.c1461 = Constraint(expr=m.x438*m.x1428 - m.x887 == 0)
m.c1462 = Constraint(expr=m.x456*m.x1428 - m.x931 == 0)
m.c1463 = Constraint(expr=m.x787**3 - m.x1429 == 0)
m.c1464 = Constraint(expr=m.b4*m.x1429 - m.x841 == 0)
m.c1465 = Constraint(expr=m.b13*m.x1429 - m.x886 == 0)
m.c1466 = Constraint(expr=m.b22*m.x1429 - m.x932 == 0)
m.c1467 = Constraint(expr=m.x422*m.x788 - m.x445 == 0)
m.c1468 = Constraint(expr=m.x788*m.x1268 - m.x847 == 0)
m.c1469 = Constraint(expr=m.x440*m.x788 - m.x509 == 0)
m.c1470 = Constraint(expr=m.x788*m.x1286 - m.x893 == 0)
m.c1471 = Constraint(expr=m.x458*m.x788 - m.x563 == 0)
m.c1472 = Constraint(expr=m.x788*m.x1304 - m.x938 == 0)
m.c1473 = Constraint(expr=m.x788**2 - m.x1430 == 0)
m.c1474 = Constraint(expr= m.x443 - m.x1430 == 0)
m.c1475 = Constraint(expr=m.x422*m.x1430 - m.x846 == 0)
m.c1476 = Constraint(expr=m.x440*m.x1430 - m.x892 == 0)
m.c1477 = Constraint(expr=m.x458*m.x1430 - m.x937 == 0)
m.c1478 = Constraint(expr=m.x788**3 - m.x1431 == 0)
m.c1479 = Constraint(expr=m.b5*m.x1431 - m.x845 == 0)
m.c1480 = Constraint(expr=m.b14*m.x1431 - m.x891 == 0)
m.c1481 = Constraint(expr=m.b23*m.x1431 - m.x936 == 0)
m.c1482 = Constraint(expr=m.x424*m.x789 - m.x453 == 0)
m.c1483 = Constraint(expr=m.x789*m.x1270 - m.x853 == 0)
m.c1484 = Constraint(expr=m.x442*m.x789 - m.x517 == 0)
m.c1485 = Constraint(expr=m.x789*m.x1288 - m.x898 == 0)
m.c1486 = Constraint(expr=m.x460*m.x789 - m.x571 == 0)
m.c1487 = Constraint(expr=m.x789*m.x1306 - m.x943 == 0)
m.c1488 = Constraint(expr=m.x789**2 - m.x1432 == 0)
m.c1489 = Constraint(expr= m.x451 - m.x1432 == 0)
m.c1490 = Constraint(expr=m.x424*m.x1432 - m.x852 == 0)
m.c1491 = Constraint(expr=m.x442*m.x1432 - m.x897 == 0)
m.c1492 = Constraint(expr=m.x460*m.x1432 - m.x942 == 0)
m.c1493 = Constraint(expr=m.x789**3 - m.x1433 == 0)
m.c1494 = Constraint(expr=m.b6*m.x1433 - m.x851 == 0)
m.c1495 = Constraint(expr=m.b15*m.x1433 - m.x896 == 0)
m.c1496 = Constraint(expr=m.b24*m.x1433 - m.x941 == 0)
m.c1497 = Constraint(expr=m.x426*m.x790 - m.x463 == 0)
m.c1498 = Constraint(expr=m.x790*m.x1272 - m.x858 == 0)
m.c1499 = Constraint(expr=m.x444*m.x790 - m.x521 == 0)
m.c1500 = Constraint(expr=m.x790*m.x1290 - m.x903 == 0)
m.c1501 = Constraint(expr=m.x462*m.x790 - m.x577 == 0)
m.c1502 = Constraint(expr=m.x790*m.x1308 - m.x948 == 0)
m.c1503 = Constraint(expr=m.x790**2 - m.x1434 == 0)
m.c1504 = Constraint(expr= m.x461 - m.x1434 == 0)
m.c1505 = Constraint(expr=m.x426*m.x1434 - m.x857 == 0)
m.c1506 = Constraint(expr=m.x444*m.x1434 - m.x902 == 0)
m.c1507 = Constraint(expr=m.x462*m.x1434 - m.x947 == 0)
m.c1508 = Constraint(expr=m.x790**3 - m.x1435 == 0)
m.c1509 = Constraint(expr=m.b7*m.x1435 - m.x856 == 0)
m.c1510 = Constraint(expr=m.b16*m.x1435 - m.x901 == 0)
m.c1511 = Constraint(expr=m.b25*m.x1435 - m.x946 == 0)
m.c1512 = Constraint(expr=m.x428*m.x791 - m.x471 == 0)
m.c1513 = Constraint(expr=m.x791*m.x1274 - m.x863 == 0)
m.c1514 = Constraint(expr=m.x446*m.x791 - m.x527 == 0)
m.c1515 = Constraint(expr=m.x791*m.x1292 - m.x908 == 0)
m.c1516 = Constraint(expr=m.x464*m.x791 - m.x84 == 0)
m.c1517 = Constraint(expr=m.x791*m.x1310 - m.x950 == 0)
m.c1518 = Constraint(expr=m.x791**2 - m.x1436 == 0)
m.c1519 = Constraint(expr= m.x467 - m.x1436 == 0)
m.c1520 = Constraint(expr=m.x428*m.x1436 - m.x862 == 0)
m.c1521 = Constraint(expr=m.x446*m.x1436 - m.x907 == 0)
m.c1522 = Constraint(expr=m.x464*m.x1436 - m.x954 == 0)
m.c1523 = Constraint(expr=m.x791**3 - m.x1437 == 0)
m.c1524 = Constraint(expr=m.b8*m.x1437 - m.x861 == 0)
m.c1525 = Constraint(expr=m.b17*m.x1437 - m.x906 == 0)
m.c1526 = Constraint(expr=m.b26*m.x1437 - m.x953 == 0)
m.c1527 = Constraint(expr=m.x430*m.x792 - m.x477 == 0)
m.c1528 = Constraint(expr=m.x792*m.x1276 - m.x868 == 0)
m.c1529 = Constraint(expr=m.x448*m.x792 - m.x535 == 0)
m.c1530 = Constraint(expr=m.x792*m.x1294 - m.x912 == 0)
m.c1531 = Constraint(expr=m.x466*m.x792 - m.x88 == 0)
m.c1532 = Constraint(expr=m.x792*m.x1312 - m.x957 == 0)
m.c1533 = Constraint(expr=m.x792**2 - m.x1438 == 0)
m.c1534 = Constraint(expr= m.x475 - m.x1438 == 0)
m.c1535 = Constraint(expr=m.x430*m.x1438 - m.x867 == 0)
m.c1536 = Constraint(expr=m.x448*m.x1438 - m.x911 == 0)
m.c1537 = Constraint(expr=m.x466*m.x1438 - m.x956 == 0)
m.c1538 = Constraint(expr=m.x792**3 - m.x1439 == 0)
m.c1539 = Constraint(expr=m.b9*m.x1439 - m.x866 == 0)
m.c1540 = Constraint(expr=m.b18*m.x1439 - m.x910 == 0)
m.c1541 = Constraint(expr=m.b27*m.x1439 - m.x955 == 0)
m.c1542 = Constraint(expr=m.x432*m.x793 - m.x487 == 0)
m.c1543 = Constraint(expr=m.x793*m.x1278 - m.x874 == 0)
m.c1544 = Constraint(expr=m.x450*m.x793 - m.x539 == 0)
m.c1545 = Constraint(expr=m.x793*m.x1296 - m.x918 == 0)
m.c1546 = Constraint(expr=m.x468*m.x793 - m.x90 == 0)
m.c1547 = Constraint(expr=m.x793*m.x1314 - m.x963 == 0)
m.c1548 = Constraint(expr=m.x793**2 - m.x1440 == 0)
m.c1549 = Constraint(expr= m.x485 - m.x1440 == 0)
m.c1550 = Constraint(expr=m.x432*m.x1440 - m.x873 == 0)
m.c1551 = Constraint(expr=m.x450*m.x1440 - m.x917 == 0)
m.c1552 = Constraint(expr=m.x468*m.x1440 - m.x962 == 0)
m.c1553 = Constraint(expr=m.x793**3 - m.x1441 == 0)
m.c1554 = Constraint(expr=m.b10*m.x1441 - m.x872 == 0)
m.c1555 = Constraint(expr=m.b19*m.x1441 - m.x916 == 0)
m.c1556 = Constraint(expr=m.b28*m.x1441 - m.x961 == 0)
m.c1557 = Constraint(expr=m.x470*m.x794 - m.x93 == 0)
m.c1558 = Constraint(expr=m.x794*m.x1316 - m.x968 == 0)
m.c1559 = Constraint(expr=m.x488*m.x794 - m.x129 == 0)
m.c1560 = Constraint(expr=m.x794*m.x1334 - m.x1013 == 0)
m.c1561 = Constraint(expr=m.x794**2 - m.x1442 == 0)
m.c1562 = Constraint(expr= m.x94 - m.x1442 == 0)
m.c1563 = Constraint(expr=m.x470*m.x1442 - m.x967 == 0)
m.c1564 = Constraint(expr=m.x488*m.x1442 - m.x1012 == 0)
m.c1565 = Constraint(expr=m.x794**3 - m.x1443 == 0)
m.c1566 = Constraint(expr=m.b29*m.x1443 - m.x966 == 0)
m.c1567 = Constraint(expr=m.b38*m.x1443 - m.x1011 == 0)
m.c1568 = Constraint(expr=m.x472*m.x795 - m.x99 == 0)
m.c1569 = Constraint(expr=m.x795*m.x1318 - m.x973 == 0)
m.c1570 = Constraint(expr=m.x490*m.x795 - m.x133 == 0)
m.c1571 = Constraint(expr=m.x795*m.x1336 - m.x1018 == 0)
m.c1572 = Constraint(expr=m.x795**2 - m.x1444 == 0)
m.c1573 = Constraint(expr= m.x98 - m.x1444 == 0)
m.c1574 = Constraint(expr=m.x472*m.x1444 - m.x972 == 0)
m.c1575 = Constraint(expr=m.x490*m.x1444 - m.x1017 == 0)
m.c1576 = Constraint(expr=m.x795**3 - m.x1445 == 0)
m.c1577 = Constraint(expr=m.b30*m.x1445 - m.x971 == 0)
m.c1578 = Constraint(expr=m.b39*m.x1445 - m.x1016 == 0)
m.c1579 = Constraint(expr=m.x474*m.x796 - m.x101 == 0)
m.c1580 = Constraint(expr=m.x796*m.x1320 - m.x976 == 0)
m.c1581 = Constraint(expr=m.x492*m.x796 - m.x135 == 0)
m.c1582 = Constraint(expr=m.x796*m.x1338 - m.x1023 == 0)
m.c1583 = Constraint(expr=m.x796**2 - m.x1446 == 0)
m.c1584 = Constraint(expr= m.x102 - m.x1446 == 0)
m.c1585 = Constraint(expr=m.x474*m.x1446 - m.x979 == 0)
m.c1586 = Constraint(expr=m.x492*m.x1446 - m.x1022 == 0)
m.c1587 = Constraint(expr=m.x796**3 - m.x1447 == 0)
m.c1588 = Constraint(expr=m.b31*m.x1447 - m.x975 == 0)
m.c1589 = Constraint(expr=m.b40*m.x1447 - m.x1021 == 0)
m.c1590 = Constraint(expr=m.x476*m.x797 - m.x105 == 0)
m.c1591 = Constraint(expr=m.x797*m.x1322 - m.x983 == 0)
m.c1592 = Constraint(expr=m.x494*m.x797 - m.x138 == 0)
m.c1593 = Constraint(expr=m.x797*m.x1340 - m.x1028 == 0)
m.c1594 = Constraint(expr=m.x797**2 - m.x1448 == 0)
m.c1595 = Constraint(expr= m.x106 - m.x1448 == 0)
m.c1596 = Constraint(expr=m.x476*m.x1448 - m.x982 == 0)
m.c1597 = Constraint(expr=m.x494*m.x1448 - m.x1027 == 0)
m.c1598 = Constraint(expr=m.x797**3 - m.x1449 == 0)
m.c1599 = Constraint(expr=m.b32*m.x1449 - m.x981 == 0)
m.c1600 = Constraint(expr=m.b41*m.x1449 - m.x1026 == 0)
m.c1601 = Constraint(expr=m.x478*m.x798 - m.x111 == 0)
m.c1602 = Constraint(expr=m.x798*m.x1324 - m.x988 == 0)
m.c1603 = Constraint(expr=m.x496*m.x798 - m.x142 == 0)
m.c1604 = Constraint(expr=m.x798*m.x1342 - m.x1033 == 0)
m.c1605 = Constraint(expr=m.x798**2 - m.x1450 == 0)
m.c1606 = Constraint(expr= m.x109 - m.x1450 == 0)
m.c1607 = Constraint(expr=m.x478*m.x1450 - m.x987 == 0)
m.c1608 = Constraint(expr=m.x496*m.x1450 - m.x1032 == 0)
m.c1609 = Constraint(expr=m.x798**3 - m.x1451 == 0)
m.c1610 = Constraint(expr=m.b33*m.x1451 - m.x986 == 0)
m.c1611 = Constraint(expr=m.b42*m.x1451 - m.x1031 == 0)
m.c1612 = Constraint(expr=m.x480*m.x799 - m.x115 == 0)
m.c1613 = Constraint(expr=m.x799*m.x1326 - m.x993 == 0)
m.c1614 = Constraint(expr=m.x498*m.x799 - m.x145 == 0)
m.c1615 = Constraint(expr=m.x799*m.x1344 - m.x1038 == 0)
m.c1616 = Constraint(expr=m.x799**2 - m.x1452 == 0)
m.c1617 = Constraint(expr= m.x114 - m.x1452 == 0)
m.c1618 = Constraint(expr=m.x480*m.x1452 - m.x992 == 0)
m.c1619 = Constraint(expr=m.x498*m.x1452 - m.x1037 == 0)
m.c1620 = Constraint(expr=m.x799**3 - m.x1453 == 0)
m.c1621 = Constraint(expr=m.b34*m.x1453 - m.x991 == 0)
m.c1622 = Constraint(expr=m.b43*m.x1453 - m.x1036 == 0)
m.c1623 = Constraint(expr=m.x482*m.x800 - m.x117 == 0)
m.c1624 = Constraint(expr=m.x800*m.x1328 - m.x998 == 0)
m.c1625 = Constraint(expr=m.x500*m.x800 - m.x147 == 0)
m.c1626 = Constraint(expr=m.x800*m.x1346 - m.x1043 == 0)
m.c1627 = Constraint(expr=m.x800**2 - m.x1454 == 0)
m.c1628 = Constraint(expr= m.x118 - m.x1454 == 0)
m.c1629 = Constraint(expr=m.x482*m.x1454 - m.x997 == 0)
m.c1630 = Constraint(expr=m.x500*m.x1454 - m.x1042 == 0)
m.c1631 = Constraint(expr=m.x800**3 - m.x1455 == 0)
m.c1632 = Constraint(expr=m.b35*m.x1455 - m.x996 == 0)
m.c1633 = Constraint(expr=m.b44*m.x1455 - m.x1041 == 0)
m.c1634 = Constraint(expr=m.x484*m.x801 - m.x121 == 0)
m.c1635 = Constraint(expr=m.x801*m.x1330 - m.x1003 == 0)
m.c1636 = Constraint(expr=m.x502*m.x801 - m.x150 == 0)
m.c1637 = Constraint(expr=m.x801*m.x1348 - m.x1048 == 0)
m.c1638 = Constraint(expr=m.x801**2 - m.x1456 == 0)
m.c1639 = Constraint(expr= m.x122 - m.x1456 == 0)
m.c1640 = Constraint(expr=m.x484*m.x1456 - m.x1002 == 0)
m.c1641 = Constraint(expr=m.x502*m.x1456 - m.x1047 == 0)
m.c1642 = Constraint(expr=m.x801**3 - m.x1457 == 0)
m.c1643 = Constraint(expr=m.b36*m.x1457 - m.x1001 == 0)
m.c1644 = Constraint(expr=m.b45*m.x1457 - m.x1046 == 0)
m.c1645 = Constraint(expr=m.x486*m.x802 - m.x126 == 0)
m.c1646 = Constraint(expr=m.x802*m.x1332 - m.x1008 == 0)
m.c1647 = Constraint(expr=m.x504*m.x802 - m.x154 == 0)
m.c1648 = Constraint(expr=m.x802*m.x1350 - m.x1053 == 0)
m.c1649 = Constraint(expr=m.x802**2 - m.x1458 == 0)
m.c1650 = Constraint(expr= m.x127 - m.x1458 == 0)
m.c1651 = Constraint(expr=m.x486*m.x1458 - m.x1007 == 0)
m.c1652 = Constraint(expr=m.x504*m.x1458 - m.x1052 == 0)
m.c1653 = Constraint(expr=m.x802**3 - m.x1459 == 0)
m.c1654 = Constraint(expr=m.b37*m.x1459 - m.x1006 == 0)
m.c1655 = Constraint(expr=m.b46*m.x1459 - m.x1051 == 0)
m.c1656 = Constraint(expr=m.x506*m.x803 - m.x157 == 0)
m.c1657 = Constraint(expr=m.x803*m.x1352 - m.x1058 == 0)
m.c1658 = Constraint(expr=m.x524*m.x803 - m.x193 == 0)
m.c1659 = Constraint(expr=m.x803*m.x1370 - m.x1103 == 0)
m.c1660 = Constraint(expr=m.x803**2 - m.x1460 == 0)
m.c1661 = Constraint(expr= m.x156 - m.x1460 == 0)
m.c1662 = Constraint(expr=m.x506*m.x1460 - m.x1057 == 0)
m.c1663 = Constraint(expr=m.x524*m.x1460 - m.x1102 == 0)
m.c1664 = Constraint(expr=m.x803**3 - m.x1461 == 0)
m.c1665 = Constraint(expr=m.b47*m.x1461 - m.x1056 == 0)
m.c1666 = Constraint(expr=m.b56*m.x1461 - m.x1101 == 0)
m.c1667 = Constraint(expr=m.x508*m.x804 - m.x160 == 0)
m.c1668 = Constraint(expr=m.x804*m.x1354 - m.x1063 == 0)
m.c1669 = Constraint(expr=m.x526*m.x804 - m.x196 == 0)
m.c1670 = Constraint(expr=m.x804*m.x1372 - m.x1108 == 0)
m.c1671 = Constraint(expr=m.x804**2 - m.x1462 == 0)
m.c1672 = Constraint(expr= m.x162 - m.x1462 == 0)
m.c1673 = Constraint(expr=m.x508*m.x1462 - m.x1062 == 0)
m.c1674 = Constraint(expr=m.x526*m.x1462 - m.x1107 == 0)
m.c1675 = Constraint(expr=m.x804**3 - m.x1463 == 0)
m.c1676 = Constraint(expr=m.b48*m.x1463 - m.x1061 == 0)
m.c1677 = Constraint(expr=m.b57*m.x1463 - m.x1106 == 0)
m.c1678 = Constraint(expr=m.x510*m.x805 - m.x166 == 0)
m.c1679 = Constraint(expr=m.x805*m.x1356 - m.x1068 == 0)
m.c1680 = Constraint(expr=m.x528*m.x805 - m.x198 == 0)
m.c1681 = Constraint(expr=m.x805*m.x1374 - m.x1113 == 0)
m.c1682 = Constraint(expr=m.x805**2 - m.x1464 == 0)
m.c1683 = Constraint(expr= m.x165 - m.x1464 == 0)
m.c1684 = Constraint(expr=m.x510*m.x1464 - m.x1067 == 0)
m.c1685 = Constraint(expr=m.x528*m.x1464 - m.x1112 == 0)
m.c1686 = Constraint(expr=m.x805**3 - m.x1465 == 0)
m.c1687 = Constraint(expr=m.b49*m.x1465 - m.x1066 == 0)
m.c1688 = Constraint(expr=m.b58*m.x1465 - m.x1111 == 0)
m.c1689 = Constraint(expr=m.x512*m.x806 - m.x170 == 0)
m.c1690 = Constraint(expr=m.x806*m.x1358 - m.x1073 == 0)
m.c1691 = Constraint(expr=m.x530*m.x806 - m.x201 == 0)
m.c1692 = Constraint(expr=m.x806*m.x1376 - m.x1118 == 0)
m.c1693 = Constraint(expr=m.x806**2 - m.x1466 == 0)
m.c1694 = Constraint(expr= m.x169 - m.x1466 == 0)
m.c1695 = Constraint(expr=m.x512*m.x1466 - m.x1072 == 0)
m.c1696 = Constraint(expr=m.x530*m.x1466 - m.x1117 == 0)
m.c1697 = Constraint(expr=m.x806**3 - m.x1467 == 0)
m.c1698 = Constraint(expr=m.b50*m.x1467 - m.x1071 == 0)
m.c1699 = Constraint(expr=m.b59*m.x1467 - m.x1116 == 0)
m.c1700 = Constraint(expr=m.x514*m.x807 - m.x173 == 0)
m.c1701 = Constraint(expr=m.x807*m.x1360 - m.x1078 == 0)
m.c1702 = Constraint(expr=m.x532*m.x807 - m.x204 == 0)
m.c1703 = Constraint(expr=m.x807*m.x1378 - m.x1123 == 0)
m.c1704 = Constraint(expr=m.x807**2 - m.x1468 == 0)
m.c1705 = Constraint(expr= m.x172 - m.x1468 == 0)
m.c1706 = Constraint(expr=m.x514*m.x1468 - m.x1077 == 0)
m.c1707 = Constraint(expr=m.x532*m.x1468 - m.x1122 == 0)
m.c1708 = Constraint(expr=m.x807**3 - m.x1469 == 0)
m.c1709 = Constraint(expr=m.b51*m.x1469 - m.x1076 == 0)
m.c1710 = Constraint(expr=m.b60*m.x1469 - m.x1121 == 0)
m.c1711 = Constraint(expr=m.x516*m.x808 - m.x178 == 0)
m.c1712 = Constraint(expr=m.x808*m.x1362 - m.x1083 == 0)
m.c1713 = Constraint(expr=m.x534*m.x808 - m.x208 == 0)
m.c1714 = Constraint(expr=m.x808*m.x1380 - m.x1128 == 0)
m.c1715 = Constraint(expr=m.x808**2 - m.x1470 == 0)
m.c1716 = Constraint(expr= m.x177 - m.x1470 == 0)
m.c1717 = Constraint(expr=m.x516*m.x1470 - m.x1082 == 0)
m.c1718 = Constraint(expr=m.x534*m.x1470 - m.x1127 == 0)
m.c1719 = Constraint(expr=m.x808**3 - m.x1471 == 0)
m.c1720 = Constraint(expr=m.b52*m.x1471 - m.x1081 == 0)
m.c1721 = Constraint(expr=m.b61*m.x1471 - m.x1126 == 0)
m.c1722 = Constraint(expr=m.x518*m.x809 - m.x181 == 0)
m.c1723 = Constraint(expr=m.x809*m.x1364 - m.x1088 == 0)
m.c1724 = Constraint(expr=m.x536*m.x809 - m.x211 == 0)
m.c1725 = Constraint(expr=m.x809*m.x1382 - m.x1133 == 0)
m.c1726 = Constraint(expr=m.x809**2 - m.x1472 == 0)
m.c1727 = Constraint(expr= m.x180 - m.x1472 == 0)
m.c1728 = Constraint(expr=m.x518*m.x1472 - m.x1087 == 0)
m.c1729 = Constraint(expr=m.x536*m.x1472 - m.x1132 == 0)
m.c1730 = Constraint(expr=m.x809**3 - m.x1473 == 0)
m.c1731 = Constraint(expr=m.b53*m.x1473 - m.x1086 == 0)
m.c1732 = Constraint(expr=m.b62*m.x1473 - m.x1131 == 0)
m.c1733 = Constraint(expr=m.x520*m.x810 - m.x185 == 0)
m.c1734 = Constraint(expr=m.x810*m.x1366 - m.x1093 == 0)
m.c1735 = Constraint(expr=m.x538*m.x810 - m.x214 == 0)
m.c1736 = Constraint(expr=m.x810*m.x1384 - m.x1138 == 0)
m.c1737 = Constraint(expr=m.x810**2 - m.x1474 == 0)
m.c1738 = Constraint(expr= m.x184 - m.x1474 == 0)
m.c1739 = Constraint(expr=m.x520*m.x1474 - m.x1092 == 0)
m.c1740 = Constraint(expr=m.x538*m.x1474 - m.x1137 == 0)
m.c1741 = Constraint(expr=m.x810**3 - m.x1475 == 0)
m.c1742 = Constraint(expr=m.b54*m.x1475 - m.x1091 == 0)
m.c1743 = Constraint(expr=m.b63*m.x1475 - m.x1136 == 0)
m.c1744 = Constraint(expr=m.x522*m.x811 - m.x189 == 0)
m.c1745 = Constraint(expr=m.x811*m.x1368 - m.x1098 == 0)
m.c1746 = Constraint(expr=m.x540*m.x811 - m.x216 == 0)
m.c1747 = Constraint(expr=m.x811*m.x1386 - m.x1143 == 0)
m.c1748 = Constraint(expr=m.x811**2 - m.x1476 == 0)
m.c1749 = Constraint(expr= m.x188 - m.x1476 == 0)
m.c1750 = Constraint(expr=m.x522*m.x1476 - m.x1097 == 0)
m.c1751 = Constraint(expr=m.x540*m.x1476 - m.x1142 == 0)
m.c1752 = Constraint(expr=m.x811**3 - m.x1477 == 0)
m.c1753 = Constraint(expr=m.b55*m.x1477 - m.x1096 == 0)
m.c1754 = Constraint(expr=m.b64*m.x1477 - m.x1141 == 0)
m.c1755 = Constraint(expr=m.x542*m.x812 - m.x220 == 0)
m.c1756 = Constraint(expr=m.x812*m.x1388 - m.x1147 == 0)
m.c1757 = Constraint(expr=m.x560*m.x812 - m.x256 == 0)
m.c1758 = Constraint(expr=m.x812*m.x1406 - m.x1192 == 0)
m.c1759 = Constraint(expr=m.x812**2 - m.x1478 == 0)
m.c1760 = Constraint(expr= m.x219 - m.x1478 == 0)
m.c1761 = Constraint(expr=m.x542*m.x1478 - m.x1146 == 0)
m.c1762 = Constraint(expr=m.x560*m.x1478 - m.x1191 == 0)
m.c1763 = Constraint(expr=m.x812**3 - m.x1479 == 0)
m.c1764 = Constraint(expr=m.b65*m.x1479 - m.x1145 == 0)
m.c1765 = Constraint(expr=m.b74*m.x1479 - m.x1190 == 0)
m.c1766 = Constraint(expr=m.x544*m.x813 - m.x225 == 0)
m.c1767 = Constraint(expr=m.x813*m.x1390 - m.x1153 == 0)
m.c1768 = Constraint(expr=m.x562*m.x813 - m.x259 == 0)
m.c1769 = Constraint(expr=m.x813*m.x1408 - m.x1198 == 0)
m.c1770 = Constraint(expr=m.x813**2 - m.x1480 == 0)
m.c1771 = Constraint(expr= m.x224 - m.x1480 == 0)
m.c1772 = Constraint(expr=m.x544*m.x1480 - m.x1152 == 0)
m.c1773 = Constraint(expr=m.x562*m.x1480 - m.x1197 == 0)
m.c1774 = Constraint(expr=m.x813**3 - m.x1481 == 0)
m.c1775 = Constraint(expr=m.b66*m.x1481 - m.x1151 == 0)
m.c1776 = Constraint(expr=m.b75*m.x1481 - m.x1196 == 0)
m.c1777 = Constraint(expr=m.x546*m.x814 - m.x228 == 0)
m.c1778 = Constraint(expr=m.x814*m.x1392 - m.x1158 == 0)
m.c1779 = Constraint(expr=m.x564*m.x814 - m.x261 == 0)
m.c1780 = Constraint(expr=m.x814*m.x1410 - m.x1203 == 0)
m.c1781 = Constraint(expr=m.x814**2 - m.x1482 == 0)
m.c1782 = Constraint(expr= m.x227 - m.x1482 == 0)
m.c1783 = Constraint(expr=m.x546*m.x1482 - m.x1157 == 0)
m.c1784 = Constraint(expr=m.x564*m.x1482 - m.x1202 == 0)
m.c1785 = Constraint(expr=m.x814**3 - m.x1483 == 0)
m.c1786 = Constraint(expr=m.b67*m.x1483 - m.x1156 == 0)
m.c1787 = Constraint(expr=m.b76*m.x1483 - m.x1201 == 0)
m.c1788 = Constraint(expr=m.x548*m.x815 - m.x232 == 0)
m.c1789 = Constraint(expr=m.x815*m.x1394 - m.x1163 == 0)
m.c1790 = Constraint(expr=m.x566*m.x815 - m.x264 == 0)
m.c1791 = Constraint(expr=m.x815*m.x1412 - m.x1208 == 0)
m.c1792 = Constraint(expr=m.x815**2 - m.x1484 == 0)
m.c1793 = Constraint(expr= m.x233 - m.x1484 == 0)
m.c1794 = Constraint(expr=m.x548*m.x1484 - m.x1162 == 0)
m.c1795 = Constraint(expr=m.x566*m.x1484 - m.x1207 == 0)
m.c1796 = Constraint(expr=m.x815**3 - m.x1485 == 0)
m.c1797 = Constraint(expr=m.b68*m.x1485 - m.x1161 == 0)
m.c1798 = Constraint(expr=m.b77*m.x1485 - m.x1206 == 0)
m.c1799 = Constraint(expr=m.x550*m.x816 - m.x237 == 0)
m.c1800 = Constraint(expr=m.x816*m.x1396 - m.x1168 == 0)
m.c1801 = Constraint(expr=m.x568*m.x816 - m.x268 == 0)
m.c1802 = Constraint(expr=m.x816*m.x1414 - m.x1213 == 0)
m.c1803 = Constraint(expr=m.x816**2 - m.x1486 == 0)
m.c1804 = Constraint(expr= m.x236 - m.x1486 == 0)
m.c1805 = Constraint(expr=m.x550*m.x1486 - m.x1167 == 0)
m.c1806 = Constraint(expr=m.x568*m.x1486 - m.x1212 == 0)
m.c1807 = Constraint(expr=m.x816**3 - m.x1487 == 0)
m.c1808 = Constraint(expr=m.b69*m.x1487 - m.x1166 == 0)
m.c1809 = Constraint(expr=m.b78*m.x1487 - m.x1211 == 0)
m.c1810 = Constraint(expr=m.x552*m.x817 - m.x241 == 0)
m.c1811 = Constraint(expr=m.x817*m.x1398 - m.x1173 == 0)
m.c1812 = Constraint(expr=m.x570*m.x817 - m.x271 == 0)
m.c1813 = Constraint(expr=m.x817*m.x1416 - m.x1218 == 0)
m.c1814 = Constraint(expr=m.x817**2 - m.x1488 == 0)
m.c1815 = Constraint(expr= m.x240 - m.x1488 == 0)
m.c1816 = Constraint(expr=m.x552*m.x1488 - m.x1172 == 0)
m.c1817 = Constraint(expr=m.x570*m.x1488 - m.x1217 == 0)
m.c1818 = Constraint(expr=m.x817**3 - m.x1489 == 0)
m.c1819 = Constraint(expr=m.b70*m.x1489 - m.x1171 == 0)
m.c1820 = Constraint(expr=m.b79*m.x1489 - m.x1216 == 0)
m.c1821 = Constraint(expr=m.x554*m.x818 - m.x243 == 0)
m.c1822 = Constraint(expr=m.x818*m.x1400 - m.x1178 == 0)
m.c1823 = Constraint(expr=m.x572*m.x818 - m.x273 == 0)
m.c1824 = Constraint(expr=m.x818*m.x1418 - m.x1223 == 0)
m.c1825 = Constraint(expr=m.x818**2 - m.x1490 == 0)
m.c1826 = Constraint(expr= m.x244 - m.x1490 == 0)
m.c1827 = Constraint(expr=m.x554*m.x1490 - m.x1177 == 0)
m.c1828 = Constraint(expr=m.x572*m.x1490 - m.x1222 == 0)
m.c1829 = Constraint(expr=m.x818**3 - m.x1491 == 0)
m.c1830 = Constraint(expr=m.b71*m.x1491 - m.x1176 == 0)
m.c1831 = Constraint(expr=m.b80*m.x1491 - m.x1221 == 0)
m.c1832 = Constraint(expr=m.x556*m.x819 - m.x249 == 0)
m.c1833 = Constraint(expr=m.x819*m.x1402 - m.x1183 == 0)
m.c1834 = Constraint(expr=m.x574*m.x819 - m.x276 == 0)
m.c1835 = Constraint(expr=m.x819*m.x1420 - m.x1228 == 0)
m.c1836 = Constraint(expr=m.x819**2 - m.x1492 == 0)
m.c1837 = Constraint(expr= m.x248 - m.x1492 == 0)
m.c1838 = Constraint(expr=m.x556*m.x1492 - m.x1182 == 0)
m.c1839 = Constraint(expr=m.x574*m.x1492 - m.x1227 == 0)
m.c1840 = Constraint(expr=m.x819**3 - m.x1493 == 0)
m.c1841 = Constraint(expr=m.b72*m.x1493 - m.x1181 == 0)
m.c1842 = Constraint(expr=m.b81*m.x1493 - m.x1226 == 0)
m.c1843 = Constraint(expr=m.x558*m.x820 - m.x252 == 0)
m.c1844 = Constraint(expr=m.x820*m.x1404 - m.x1188 == 0)
m.c1845 = Constraint(expr=m.x576*m.x820 - m.x280 == 0)
m.c1846 = Constraint(expr=m.x820*m.x1422 - m.x1233 == 0)
m.c1847 = Constraint(expr=m.x820**2 - m.x1494 == 0)
m.c1848 = Constraint(expr= m.x251 - m.x1494 == 0)
m.c1849 = Constraint(expr=m.x558*m.x1494 - m.x1187 == 0)
m.c1850 = Constraint(expr=m.x576*m.x1494 - m.x1232 == 0)
m.c1851 = Constraint(expr=m.x820**3 - m.x1495 == 0)
m.c1852 = Constraint(expr=m.b73*m.x1495 - m.x1186 == 0)
m.c1853 = Constraint(expr=m.b82*m.x1495 - m.x1231 == 0)
|
import os, sys, getopt
import itertools as it
sys.path.append(os.path.dirname(os.path.realpath(__file__)) + "/src")
from pandocLatex import *
##################
### Formatting ###
##################
# Some global parameters
latexEngine="lualatex"
highlightstyle='"tango"' #pygments, kate, monochrome, espresso, zenburn, haddock, tango
template="template/document.latex"
beamer="template/beamer.latex"
letter="template/letter.latex"
# Format documents with yaml files
docFormat = [
"template/format_simple.yaml",
"template/color_unihd.yaml",
"template/objects.yaml",
"template/titlepage.yaml",
"template/typography_sans.yaml"
]
# Pandoc parameters
docOptions = [
"-t latex",
"-s ",
# "--columns 1",
# "--filter pandoc-include-code",
# "--filter pandoc-include",
# "--filter pandoc-csv2table",
# "--filter pandoc-crossref",
# "--filter diagrams-pandoc",
# "--filter pandoc-placetable", # compiled with inlinemarkdown
"--filter pandoc-citeproc",
"--template={template}".format(template=template),
"--highlight-style={hls}".format(hls=highlightstyle),
"--pdf-engine={le}".format(le=latexEngine),
"--number-sections",
"--biblatex",
# "--chapter",
# "-M synctex:yes"
"-M codeBlockCaptions:yes",
]
# Presentations
beamerFormat = [
"template/format_beamer.yaml",
"template/color_unihd.yaml",
"template/objects.yaml",
"template/typography_lmodern.yaml"
]
beamerOptions = [
"-t beamer",
"-s ",
# "-o {out}".format(out=texFile),
# "--columns 1",
# "--filter pandoc-include",
# "--filter pandoc-csv2table",
# "--filter pandoc-crossref",
# "--filter diagrams-pandoc",
# "--filter pandoc-placetable", # compiled with inlinemarkdown
"--filter pandoc-citeproc",
"--template={template}".format(template=beamer),
"--highlight-style={hls}".format(hls=highlightstyle),
"--pdf-engine={le}".format(le=latexEngine),
"--number-sections",
"--biblatex",
"-M synctex:yes"
]
# Tikzfiles
# Tikz images can be generated from separate files using the same formatting as their documents.
# This way, each figure is generated in a separate PDF.
tikzFormat = [
"template/format_tikz.yaml", # Just this is different....
"template/color_unihd.yaml",
"template/objects.yaml",
"template/typography_sans.yaml"
]
tikzOptions= [
"-t latex",
"-s ",
# "--columns 1",
"--template={template}".format(template=template),
"--highlight-style={hls}".format(hls=highlightstyle),
"--pdf-engine={le}".format(le=latexEngine),
]
# PSGraf files
# Files generated with PSGraf - split into a .pdf and a .tex file - can be placed in PSGrafIN to generate a standalone image
psgrafFormat = [
"template/format_psgraf.yaml", # Just this is different....
"template/color_unihd.yaml",
"template/objects.yaml",
"template/typography_sans.yaml"
]
psgrafOptions= [
"-t latex",
"-s ",
# "--columns 1",
"--template={template}".format(template=template),
"--highlight-style={hls}".format(hls=highlightstyle),
"--pdf-engine={le}".format(le=latexEngine),
]
inkscapeFormat = [
"template/format_inkscape.yaml", # Just this is different....
"template/color_unihd.yaml",
"template/objects.yaml",
"template/typography_sans.yaml"
]
inkscapeOptions= [
"-t latex",
"-s ",
# "--columns 1",
"--template={template}".format(template=template),
"--highlight-style={hls}".format(hls=highlightstyle),
"--pdf-engine={le}".format(le=latexEngine),
]
letterFormat = [
"template/format_letter.yaml",
"template/color_unihd.yaml",
"template/objects.yaml",
"template/titlepage.yaml",
"template/typography_sans.yaml"
]
# Pandoc parameters
letterOptions = [
"-t latex",
"-s ",
# "--columns 1",
# "--filter pandoc-include",
# "--filter pandoc-csv2table",
# "--filter pandoc-crossref",
# "--filter diagrams-pandoc",
# "--filter pandoc-placetable", # compiled with inlinemarkdown
"--filter pandoc-citeproc",
"--template={template}".format(template=letter),
"--highlight-style={hls}".format(hls=highlightstyle),
"--pdf-engine={le}".format(le=latexEngine),
"--number-sections",
"--biblatex",
# "--chapter",
# "-M synctex:yes"
"-M codeBlockCaptions:yes",
"-M documentclass:scrlttr2"
]
################
### Commands ###
################
# Commands are created using the cmdObj defined in src/pandocLatex.py
# cmdObj(cmd, shellOutput, before, after)
# cmd: command string to be executed
# shellOutput: True/False
# before: action performed before execution
# after: action performed after execution
#
# before and after take a list of tuples with functions, arguments, and keyword arguments
# E.g.
# before = [
# ...
# (func,(arg1,arg2,arg3),{kw1: karg1, kw2: kwarg2}),
# ...
# ]
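# A minimal, hypothetical sketch of a complete cmdObj call following the scheme above
# (the echo command and messages are illustrative only, not part of this build):
# exampleCmd = cmdObj(
#     "echo hello",                                      # cmd: command string to be executed
#     True,                                               # shellOutput
#     before=[(print,("-----\t","Echo\t","-----"),{})],
#     after =[(print,("-----\t","Done\t","-----"),{})]
# )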
def createDirMaybe(name=""):
dir=os.path.dirname(name)
if not os.path.exists(dir):
os.makedirs(dir)
# Convert Markdown to Latex, using a latex template and YAML configuration
def genPandocCmd(out, src, format, options):
return cmdObj(
" ".join(["pandoc"] +
format +
src +
["-o {out}".format(out=out)] +
options),
True,
before=[
(createDirMaybe,(),{"name": out}),
(print,("-----\t","Pandoc\t{out}\t".format(out=out),"-----"),{})],
# Adjust appearance of tables and figures
# applyOnFile is defined in src/pandocLatex.py
# Each line in file is passed through a list of filters...
#after =[(applyOnFile, ([adjustLongtableFilter,centerFigureFilter],texFile) ,{})]
after =[(applyOnFile, ([adjustLongtableFilter,adjustBeamerFootnote,adjustFigureCaption],out) ,{})]
)
# Run latex
def genLatexCmd(src, options, pushDir=""):
return cmdObj(
" ".join([latexEngine] + options +[src]),
True,
before=[
(print,("-----\t","Latex\t{out}\t".format(out=src),"-----"),{})],
after =[],
pushDir =pushDir
)
# Run biber
def genBiberCmd(src, options):
return cmdObj(
" ".join(["biber"] + options + [src]),
True,
before=[(print,("-----\t","Biber\t{out}\t".format(out=src),"-----"),{})],
after =[]
)
# Make Glossary
def genGlossaryCmd(src, options, pushDir=""):
return cmdObj(
" ".join(["makeglossaries"] + options + [src]),
True,
before=[(print,("-----\t","Glossary\t{out}\t".format(out=src),"-----"),{})],
after =[],
pushDir =pushDir
)
def genPSGrafWrapCmd(out, src, format, options):
return cmdObj(
#"echo \"\\begin{{minipage}}[b]{{2.0\\textwidth}}\\PDFfigure{{}}{{{fn}}}{{}}{{}}{{b}}{{T}}{{f}}\\end{{minipage}}\" > {out}".format(fn=src,out=out),
#"echo \"\\PDFfigure{{}}{{{fn}}}{{}}{{}}{{b}}{{T}}{{f}}\" > {out}".format(fn=src,out=out),
"mkdir -p {dir}\n".format(dir=os.path.dirname(out)) +
"echo \"\\PDFfigureStandaloneSTL{{{fn}}}{{T}}\" > {out}".format(fn=src,out=out),
True,
before=[
(createDirMaybe,(),{"name": out}),
(print,("-----\t","Create PSGraf Wrapper File \t{out}\t".format(out=out),"-----"),{})],
after =[]
)
def genInkscapeWrapCmd(out, src, format, options):
return cmdObj(
"mkdir -p {dir}\n".format(dir=os.path.dirname(out)) +
"echo \"\\input{{{fn}}}\" > {out}".format(fn=src,out=out),
True,
before=[
(createDirMaybe,(),{"name": out}),
(print,("-----\t","Create Inkscape Wrapper File \t{out}\t".format(out=out),"-----"),{})],
after =[]
)
def recursiveListDir(path):
p2 = os.path.dirname(path)
for f in os.listdir(p2):
fp = p2+'/'+f;
if(os.path.isdir(fp)):
for f2 in recursiveListDir(fp+'/'):
yield f2
else:
yield fp
def recursiveListDirNoFiles(path):
p2 = os.path.dirname(path)
for f in os.listdir(p2):
fp = p2+'/'+f;
if(os.path.isdir(fp)):
yield(fp)
for f2 in recursiveListDirNoFiles(fp+'/'):
yield f2
#################
### Documents ###
#################
# Take each filename in tikz/ ending with .md and add string to tikzFiles
filterFilesInDir = lambda d, filterf: ("/".join( (".".join(f[:-1])).split("/")[1:] ) for f in map(lambda fn: fn.split("."),
recursiveListDir(d)) if filterf(f))
mdFilesInDir = lambda d: filterFilesInDir(d,lambda f: f[-1] == 'md')
pdfFilesInDir = lambda d: filterFilesInDir(d,lambda f: f[-1] == 'pdf')
pdfTexFilesInDir = lambda d: filterFilesInDir(d,lambda f: f[-1] == 'pdf_tex')
splitPathFile = lambda d: (lambda sl: ( '/'.join(sl[:-1]), sl[-1]))(d.split('/'))
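# Illustrative example (assuming a file tikz/figures/plot.md exists):
#   list(mdFilesInDir("tikz/"))   -> ["figures/plot"]   (directory prefix and ".md" suffix stripped)
#   splitPathFile("figures/plot") -> ("figures", "plot")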
# Has to end with "/"
docInDir = "doc/"
docOutDir = "out/doc/"
presInDir = "pres/"
presOutDir = "out/pres/"
tikzInDir = "tikz/"
tikzOutDir = "out/tikz/"
psgrafInDir = "PSGraf/"
psgrafOutDir = "out/PSGraf"
inkscapeInDir = "Inkscape/"
inkscapeOutDir = "out/Inkscape"
letterInDir = "letter/"
letterOutDir = "out/letter/"
# Materialize the key generators as lists so they can be iterated more than once (by docs below and by docGroups)
tikzFiles = {"prefix": tikzInDir, "keys": list(mdFilesInDir(tikzInDir)), "suffix": ".md"}
psgrafFiles = {"prefix": psgrafInDir, "keys": list(pdfFilesInDir(psgrafInDir)), "suffix": ".pdf"}
inkscapeFiles = {"prefix": inkscapeInDir, "keys": list(pdfTexFilesInDir(inkscapeInDir)), "suffix": ".pdf_tex"}
docFiles = {"prefix": docInDir, "keys": list(mdFilesInDir(docInDir)), "suffix": ".md"}
presFiles = {"prefix": presInDir, "keys": list(mdFilesInDir(presInDir)), "suffix": ".md"}
letterFiles = {"prefix": letterInDir, "keys": list(mdFilesInDir(letterInDir)), "suffix": ".md"}
# Global document object storing all keys with a list of cmdObjs
docs = {
# **{
# "mydoc": [
# genPandocCmd(
# **{
# "out" : "out/mydoc.tex",
# "src" : [
# "doc/mydoc.md"
# ],
# "format": docFormat,
# "options": docOptions
# }),
# # Just uncomment to not rerun it each time
# genBiberCmd(
# **{
# "src" : "out/mydoc",
# "options": [],
# }),
# genLatexCmd(
# **{
# "src" : "out/mydoc.tex",
# "options": ["-synctex=1",
# "--output-directory=./out/"],
# #"options": ["--interaction=batchmode","-synctex=1"],
# }),
# ]},
# docFiles
**{
keyprefix+key+keysuffix : (
genPandocCmd(
**{
"out": docOutDir+key+".tex",
"src": [
docInDir+key+".md",
],
"format": docFormat,
"options": docOptions,
}),
# Just uncomment to not rerun it each time
genBiberCmd(
**{
"src" : docOutDir+key,
"options": [],
}),
genGlossaryCmd(
**{
"src": key+".glo",
"options": [],
"pushDir": os.path.abspath(docOutDir)
}),
genLatexCmd(
**{
"src": docOutDir+key+".tex",
"options": ["--output-directory="+docOutDir],
#"options": ["--interaction=batchmode","-synctex=1"],
}),
)
for (keyprefix,keys,keysuffix) in [(docFiles["prefix"],docFiles["keys"],docFiles["suffix"])] for key in keys
},
# presFiles
**{
keyprefix+key+keysuffix: (
genPandocCmd(
**{
"out": presOutDir+key+".tex",
"src": [
presInDir+key+".md",
],
"format": beamerFormat,
"options": beamerOptions,
}),
# Just uncomment to not rerun it each time
genBiberCmd(
**{
"src" : presOutDir+key,
"options": [],
}),
genLatexCmd(
**{
"src": presOutDir+key+".tex",
"options": ["--output-directory="+presOutDir],
#"options": ["--interaction=batchmode","-synctex=1"],
}),
)
for (keyprefix,keys,keysuffix) in [(presFiles["prefix"],presFiles["keys"],presFiles["suffix"])] for key in keys
},
# tikzFiles
**{
keyprefix+fullkey+keysuffix: (
genPandocCmd(
**{
"out": tikzOutDir+fullkey+".tex",
"src": [
tikzInDir+fullkey+".md",
],
"format": tikzFormat,
"options": tikzOptions,
}),
genLatexCmd(
**{
"src": tikzOutDir+fullkey+".tex",
"options": ["--output-directory="+os.path.abspath(os.path.dirname(tikzOutDir+fullkey))]
#"pushDir": os.path.abspath(keyprefix+keydir)
}),
)
for (keyprefix,keys,keysuffix) in [(tikzFiles["prefix"],tikzFiles["keys"],tikzFiles["suffix"])] for fullkey in keys for (keydir,key) in [splitPathFile(fullkey)]
},
# psGrafFiles
**{
keyprefix+key+keysuffix: (
genPSGrafWrapCmd(
**{
"out": psgrafOutDir+'/'+key+".md",
"src": keyprefix+key,
"format": [],
"options": [],
}),
genPandocCmd(
**{
"out": psgrafOutDir+'/'+key+".tex",
"src": [
psgrafOutDir+'/'+key+".md",
],
"format": psgrafFormat,
"options": psgrafOptions,
}),
genLatexCmd(
**{
"src": psgrafOutDir+'/'+key+".tex",
#"options": ["--output-directory="+os.path.dirname(psgrafOutDir+'/'.join(key.split('/')[1:]))],
"options": ["--output-directory="+os.path.dirname(psgrafOutDir+'/'+key)],
}),
)
for (keyprefix,keys,keysuffix) in [(psgrafFiles["prefix"],psgrafFiles["keys"],psgrafFiles["suffix"])] for key in keys
},
# inkscapeFiles
**{
keyprefix+fullkey+keysuffix: (
genInkscapeWrapCmd(
**{
"out": inkscapeOutDir+'/'+fullkey+".md",
"src": key+keysuffix,
"format": [],
"options": [],
}),
genPandocCmd(
**{
"out": inkscapeOutDir+'/'+fullkey+".tex",
"src": [
inkscapeOutDir+'/'+fullkey+".md",
],
"format": inkscapeFormat,
"options": inkscapeOptions,
}),
genLatexCmd(
**{
"src": os.path.abspath(inkscapeOutDir+'/'+fullkey+".tex"),
"options": ["--output-directory="+os.path.abspath(os.path.dirname(inkscapeOutDir+'/'+fullkey))],
"pushDir": os.path.abspath(keyprefix+keydir)
}),
)
for (keyprefix,keys,keysuffix) in [(inkscapeFiles["prefix"],inkscapeFiles["keys"],inkscapeFiles["suffix"])] for fullkey in keys for (keydir,key) in [splitPathFile(fullkey)]
},
# letterFiles
**{
keyprefix+key+keysuffix : (
genPandocCmd(
**{
"out": letterOutDir+key+".tex",
"src": [
letterInDir+key+".md",
],
"format": letterFormat,
"options": letterOptions,
}),
# Just uncomment to not rerun it each time
genBiberCmd(
**{
"src" : letterOutDir+key,
"options": [],
}),
genGlossaryCmd(
**{
"src": key+".glo",
"options": [],
"pushDir": os.path.abspath(letterOutDir)
}),
genLatexCmd(
**{
"src": letterOutDir+key+".tex",
"options": ["--output-directory="+letterOutDir],
#"options": ["--interaction=batchmode","-synctex=1"],
}),
)
for (keyprefix,keys,keysuffix) in [(letterFiles["prefix"],letterFiles["keys"],letterFiles["suffix"])] for key in keys
},
}
# Form some groups of keys if desired
docGroups={}
docGroups["all"] = it.chain(*(docs[k] for k in docs.keys()))
docGroups["docs"] = it.chain(*(docs[docFiles["prefix"]+k+docFiles["suffix"]] for k in docFiles["keys"]))
for d in recursiveListDirNoFiles(docInDir):
docGroups[d] = it.chain(*(docs[docFiles["prefix"]+k+docFiles["suffix"]] for k in mdFilesInDir(d+'/')))
docGroups["pres"] = it.chain(*(docs[presFiles["prefix"]+k+presFiles["suffix"]] for k in presFiles["keys"]))
for d in recursiveListDirNoFiles(presInDir):
docGroups[d] = it.chain(*(docs[presFiles["prefix"]+k+presFiles["suffix"]] for k in mdFilesInDir(d+'/')))
docGroups["tikz"] = it.chain(*(docs[tikzFiles["prefix"]+k+tikzFiles["suffix"]] for k in tikzFiles["keys"]))
for d in recursiveListDirNoFiles(tikzInDir):
docGroups[d] = it.chain(*(docs[tikzFiles["prefix"]+k+tikzFiles["suffix"]] for k in mdFilesInDir(d+'/')))
docGroups["psgraf"] = it.chain(*(docs[psGrafFiles["prefix"]+k+psgrafFiles["suffix"]] for k in psgrafFiles["keys"]))
for d in recursiveListDirNoFiles(psgrafInDir):
docGroups[d] = it.chain(*(docs[psgrafFiles["prefix"]+k+psgrafFiles["suffix"]] for k in pdfFilesInDir(d+'/')))
docGroups["inkscape"] = it.chain(*(docs[inkscapeFiles["prefix"]+k+inkscapeFiles["suffix"]] for k in inkscapeFiles["keys"]))
for d in recursiveListDirNoFiles(inkscapeInDir):
docGroups[d] = it.chain(*(docs[inkscapeFiles["prefix"]+k+inkscapeFiles["suffix"]] for k in pdfTexFilesInDir(d+'/')))
docGroups["letter"] = it.chain(*(docs[letterFiles["prefix"]+k+letterFiles["suffix"]] for k in letterFiles["keys"]))
for d in recursiveListDirNoFiles(letterInDir):
docGroups[d] = it.chain(*(docs[letterFiles["prefix"]+k+letterFiles["suffix"]] for k in mdFilesInDir(d+'/')))
def printUsage():
print(bold("Usage:"))
print("\t./run [DocKey] [-g GroupKey]\n")
print(bold("Documets:"))
for k in docs.keys():
print("\t{k}".format(k=k))
print("")
print(bold("Groups:"))
for k in docGroups.keys():
print("\t{k}".format(k=k))
def printKeysNotFound(keys):
print(bold(red("Keys not found:")))
for k in keys:
print(yellow("\t{k}".format(k=k)))
print("")
print(underline("".join(it.repeat(" ",40))))
print("")
printUsage()
bold = lambda str: "\033[1m" + str + "\033[0m"
underline = lambda str: "\033[4m" + str + "\033[0m"
red = lambda str: "\033[31m" + str + "\033[0m"
yellow = lambda str: "\033[33m" + str + "\033[0m"
def main(argv=None):
if argv is None:
argv = sys.argv
try:
opts, args = getopt.getopt(argv[1:], "hg:", ["help","group="])
except getopt.GetoptError as msg:
printUsage()
sys.exit(2)
if(len(argv)==1):
printUsage()
sys.exit(2)
for opt, arg in opts:
if opt in ['-h','--help']:
printUsage()
sys.exit()
if opt in ['-g','--group']:
if arg in docGroups.keys():
processCommands(docGroups[arg])
else:
printKeysNotFound([arg])
sys.exit()
notfound = list(filter(lambda k: not(k in docs.keys()), args))
if len(notfound)>0:
printKeysNotFound(notfound)
sys.exit()
processCommands(it.chain(*(docs[k] for k in args)))
if __name__== "__main__":
sys.exit(main())
|
#!/usr/bin/python
import sys
for line in sys.stdin:
sys.stdout.write(line)
# use like this: ps -ef | ./pipe-in.py
|
#from django.shortcuts import render
import random
from rest_framework import viewsets, status
from rest_framework.response import Response
from rest_framework.views import APIView
#from rest_framework.status import HTTP_200_OK
from .serializers import ProductSerializer
from .models import Product
from .models import User
# Create your views here.
class ProductViewSet(viewsets.ViewSet):
def list (self, request):
products = Product.objects.all()
serializer = ProductSerializer(products, many=True)
return Response(serializer.data,status=status.HTTP_200_OK)
def create(self, request):
serializer = ProductSerializer(data=request.data)
serializer.is_valid(raise_exception=True)
serializer.save()
return Response(serializer.data, status=status.HTTP_201_CREATED)
def retrieve(self, request, pk=None):
product = Product.objects.get(id=pk)
serializer = ProductSerializer(product)
return Response(serializer.data,status=status.HTTP_200_OK)
def update(self, request, pk=None):
product = Product.objects.get(id=pk)
serializer = ProductSerializer(instance=product, data=request.data)
serializer.is_valid(raise_exception=True)
serializer.save()
return Response(serializer.data, status=status.HTTP_202_ACCEPTED)
def destroy(self, request, pk=None):
product = Product.objects.get(id=pk)
product.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
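# Hypothetical routing sketch for this ViewSet (the project's urls.py is not part of this file):
# from rest_framework.routers import DefaultRouter
# router = DefaultRouter()
# router.register('products', ProductViewSet, basename='product')
# urlpatterns = router.urls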
class UserAPIView(APIView):
def get(self, _):
        users = User.objects.all()
user = random.choice(users)
return Response({
'id': user.id
}) |
from locust import HttpLocust, TaskSet, task
import os
import json
class ActionHubTaskSet(TaskSet):
@task
def get_something(self):
uri = "/actions/debug/execute"
data = {
"type": "query",
"form_params": {
"sleep": 0,
"simulated_download_url": os.getenv('ACTION_HUB_LOAD_TESTING_SIMULATED_DOWNLOAD_URL')
}
}
headers = {}
headers['Content-Type'] = "application/json"
headers['User-Agent'] = "looker-actions-load-test/0.1"
headers['X-Looker-Instance'] = "looker-actions-load-test-simulation"
headers['X-Looker-Webhook-Id'] = "looker-actions-load-test-simulation"
headers['Authorization'] = 'Token token="' + os.getenv('ACTION_HUB_LOAD_TESTING_API_KEY') + '"'
with self.client.post(uri, catch_response=True, json=data, headers=headers) as response:
if response.status_code == 200:
rj = json.loads(response.text)
if not rj['looker']['success']:
response.failure(rj['looker']['message'] or response.text)
print("Response status:", response.status_code)
print("Response:", response.text)
class MyLocust(HttpLocust):
task_set = ActionHubTaskSet
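# Usage sketch (assuming the ACTION_HUB_LOAD_TESTING_* environment variables are set and this
# file is saved as locustfile.py): run e.g. `locust -f locustfile.py --host=https://<action-hub-host>`
# and start the swarm from the Locust web UI.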
|
# -*- coding: utf-8 -*-
"""
Created on Sat Jan 4 05:59:46 2020
@author: yoelr
"""
__all__ = ('H2O_CAS',)
H2O_CAS = '7732-18-5' |
from django.test import override_settings
from django.urls import reverse
from knox.auth import TokenAuthentication as KnoxTokenAuthentication
from rest_framework.test import APITestCase
from social_core.utils import parse_qs
from .base import BaseFacebookAPITestCase, BaseTwitterApiTestCase
knox_override_settings = dict(
INSTALLED_APPS=[
'django.contrib.contenttypes',
'rest_framework',
'social_django',
'rest_social_auth',
'knox', # For django-rest-knox
'users',
],
MIDDLEWARE=[
],
)
@override_settings(**knox_override_settings)
class TestSocialAuth1Knox(APITestCase, BaseTwitterApiTestCase):
def test_login_social_oauth1_knox(self):
resp = self.client.post(
reverse('login_social_knox_user'), data={'provider': 'twitter'})
self.assertEqual(resp.status_code, 200)
self.assertEqual(resp.data, parse_qs(self.request_token_body))
resp = self.client.post(reverse('login_social_knox_user'), data={
'provider': 'twitter',
'oauth_token': 'foobar',
'oauth_verifier': 'overifier'
})
self.assertEqual(resp.status_code, 200)
@override_settings(**knox_override_settings)
class TestSocialAuth2Knox(APITestCase, BaseFacebookAPITestCase):
def _check_login_social_knox_only(self, url, data):
resp = self.client.post(url, data)
self.assertEqual(resp.status_code, 200)
# check token valid
knox_auth = KnoxTokenAuthentication()
user, auth_data = knox_auth.authenticate_credentials(resp.data['token'].encode('utf8'))
self.assertEqual(user.email, self.email)
def _check_login_social_knox_user(self, url, data):
resp = self.client.post(url, data)
self.assertEqual(resp.status_code, 200)
self.assertEqual(resp.data['email'], self.email)
# check token valid
knox_auth = KnoxTokenAuthentication()
user, auth_data = knox_auth.authenticate_credentials(resp.data['token'].encode('utf8'))
self.assertEqual(user.email, self.email)
def test_login_social_knox_only(self):
self._check_login_social_knox_only(
reverse('login_social_knox'),
data={'provider': 'facebook', 'code': '3D52VoM1uiw94a1ETnGvYlCw'})
def test_login_social_knox_only_provider_in_url(self):
self._check_login_social_knox_only(
reverse('login_social_knox', kwargs={'provider': 'facebook'}),
data={'code': '3D52VoM1uiw94a1ETnGvYlCw'})
def test_login_social_knox_user(self):
self._check_login_social_knox_user(
reverse('login_social_knox_user'),
data={'provider': 'facebook', 'code': '3D52VoM1uiw94a1ETnGvYlCw'})
def test_login_social_knox_user_provider_in_url(self):
self._check_login_social_knox_user(
reverse('login_social_knox_user', kwargs={'provider': 'facebook'}),
data={'code': '3D52VoM1uiw94a1ETnGvYlCw'})
|
from data_utils import load_tl_extracts, load_tl_extracts_hkkim
#import numpy as np
from tl_classifier_cnn import TLClassifierCNN, TLLabelConverter
# load data
desired_dim = (32,32)
#data_dirs = ['data/tl-extract-test']
data_dirs = 'data/tl-extract-test'
imgs, labels_gt = load_tl_extracts_hkkim(data_dirs, desired_dim)
# imgs are images in OpenCV imread format: uint8 pixels from 0 to 255, shape (H, W, C), channels ordered BGR
# labels_gt are label strings like 'green', etc.
# keep only the data whose labels are relevant for us
converter = TLLabelConverter()
imgs, labels_gt = converter.filter(imgs, labels_gt)
# load the model
tlc = TLClassifierCNN()
model_dir = 'model'
tlc.load_model(model_dir)
import cv2
import numpy as np
image = cv2.imread('data/tl-extract-test/000001_green.png')
resized = cv2.resize(image, (32,32), interpolation=cv2.INTER_LINEAR)
assert (resized.shape == (32, 32, 3))
labels, probabilities = tlc.predict(np.array([resized]), batch_size=1)
if labels[0]=='green':
print('correct')
else:
print('incorrect')
# run predictions
batch_size = 50
labels_predict, probs_predict = tlc.predict(imgs, batch_size=batch_size)
# calculate accuracy
correct = sum([1 if labels_gt[i]==labels_predict[i] else 0 for i in range(len(labels_gt))])
accuracy = float(correct) / len(labels_gt)
print('accuracy: {}. correct {} out of {}'.format(accuracy, correct, len(labels_gt)))
tlc.close_session()
|
import json
import datetime
import sys
import os
import random
from utils.node_rpc_wrapper import NodeRpcWrapper
from utils.telegram_wrapper import TelegramWrapper
from utils.discord_wrapper import DiscordWrapper
def check_and_send_reward_collection_message(telegram, discord, cfg, cached_epoch, new_epoch):
channel_id = cfg['telegram_channel_id']
dev_chat_id = cfg['telegram_dev_chat_id']
discord_webhook_url = cfg['discord_channel_webhook']
if new_epoch > cached_epoch:
m = create_reward_collection_message(new_epoch)
if 'error' in m:
handle_error(telegram, dev_chat_id, m['error'])
else:
r = telegram.bot_send_message_to_chat(channel_id, m['message'])
print(
f'Reward collection message sent to Telegram: {r.status_code}')
if len(discord_webhook_url) > 0:
r = discord.webhook_send_message_to_channel(
discord_webhook_url, m['message'])
print(
f'Reward collection message sent to Discord: {r.status_code}')
def check_and_send_missed_momentums_message(telegram, discord, cfg, cached_pillars, new_pillars, cached_momentum_status_data, cache_file):
channel_id = cfg['telegram_channel_id']
dev_chat_id = cfg['telegram_dev_chat_id']
discord_webhook_url = cfg['discord_channel_webhook']
new_momentum_status_data = {}
inactive_pillars = []
for owner_address in new_pillars:
pillar_name = new_pillars[owner_address]['name']
missed_momentums_in_a_row = 0
is_producing = True
if owner_address in cached_pillars and owner_address in cached_momentum_status_data:
# Get the pillar's information from cache
missed_momentums_in_a_row = cached_momentum_status_data[owner_address]['missedMomentums']
is_producing = cached_momentum_status_data[owner_address]['isProducing']
previous_produced_momentums = cached_pillars[
owner_address]['currentStats']['producedMomentums']
current_produced_momentums = new_pillars[owner_address]['currentStats']['producedMomentums']
previous_expected_momentums = cached_pillars[
owner_address]['currentStats']['expectedMomentums']
current_expected_momentums = new_pillars[owner_address]['currentStats']['expectedMomentums']
# Handle epoch change
if current_produced_momentums == 0 and previous_produced_momentums > 0:
if not is_producing:
inactive_pillars.append(owner_address)
# Handle normal case
else:
# Check if pillar has produced new momentums
if current_produced_momentums == previous_produced_momentums:
# Check if the amount of expected momentums has changed
if current_expected_momentums != previous_expected_momentums:
missed_momentums_in_a_row = missed_momentums_in_a_row + 1
if missed_momentums_in_a_row >= 3:
inactive_pillars.append(owner_address)
is_producing = False
                    # If the expected momentum amount has not changed and the pillar was previously inactive, add it to the inactive pillars list
elif not is_producing:
inactive_pillars.append(owner_address)
# If pillar has produced new momentums set missed momentum count to zero
else:
missed_momentums_in_a_row = 0
is_producing = True
# Add pillar's new momentum status data
new_momentum_status_data[owner_address] = {'name': pillar_name, 'missedMomentums': missed_momentums_in_a_row, 'isProducing': is_producing}
l = []
for address in inactive_pillars:
if address in new_momentum_status_data:
l.append(new_momentum_status_data[address]['name'])
if len(l) > 0:
print('Inactive pillars: ' + str(l))
# Save new data
write_to_file_as_json({'data': new_momentum_status_data, 'timestamp': str(datetime.datetime.now())}, cache_file)
for address in inactive_pillars:
        # Verify the inactive pillar was previously producing momentums before sending a message
if address in cached_momentum_status_data and cached_momentum_status_data[address]['isProducing']:
m = create_pillar_inactive_message(cached_momentum_status_data[address]['name'])
if 'error' in m:
handle_error(telegram, dev_chat_id, m['error'])
else:
r = telegram.bot_send_message_to_chat(channel_id, m['message'])
print(
f'Pillar inactive message sent to Telegram: {r.status_code}')
if len(discord_webhook_url) > 0:
r = discord.webhook_send_message_to_channel(discord_webhook_url, m['message'])
print(
f'Pillar inactive message sent to Discord: {r.status_code}')
for address in cached_momentum_status_data:
# Check if a previously inactive pillar is not in the inactive pillars list anymore
if not cached_momentum_status_data[address]['isProducing'] and address not in inactive_pillars:
m = create_pillar_active_message(cached_momentum_status_data[address]['name'])
if 'error' in m:
handle_error(telegram, dev_chat_id, m['error'])
else:
r = telegram.bot_send_message_to_chat(channel_id, m['message'])
print(
f'Pillar active again message sent to Telegram: {r.status_code}')
if len(discord_webhook_url) > 0:
r = discord.webhook_send_message_to_channel(discord_webhook_url, m['message'])
print(
f'Pillar active again message sent to Discord: {r.status_code}')
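# Illustrative layout of the momentum status cache file written above (hypothetical address/values):
# {
#     "data": {
#         "z1q...owneraddress": {"name": "SomePillar", "missedMomentums": 0, "isProducing": true},
#         ...
#     },
#     "timestamp": "2022-01-01 12:00:00.000000"
# }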
def check_and_send_pillar_events(telegram, discord, cfg, cached_pillars, new_pillars):
channel_id = cfg['telegram_channel_id']
dev_chat_id = cfg['telegram_dev_chat_id']
discord_webhook_url = cfg['discord_channel_webhook']
# TODO: Needs work on how to determine a dismantled Pillar.
# Check for dismantled Pillars. Assume Pillar is dismantled if the owner address is not present anymore in the new data.
# for owner_address in cached_pillars:
# if owner_address not in new_pillars and len(new_pillars) < len(cached_pillars):
# m = create_dismantled_pillar_message(
# cached_pillars[owner_address])
# if 'error' in m:
# handle_error(telegram, dev_chat_id, m['error'])
# else:
# name = cached_pillars[owner_address]['name']
# r = telegram.bot_send_message_to_chat(channel_id, m['message'])
# print(
# f'Pillar dismantled message sent to Telegram ({name}): {r.status_code}')
#
# if len(discord_webhook_url) > 0:
# r = discord.webhook_send_message_to_channel(
# discord_webhook_url, m['message'])
# print(
# f'Pillar dismantled message sent to Discord ({name}): {r.status_code}')
# Check for new Pillars. Assume Pillar is new if the owner address was not present in the cached data.
for owner_address in new_pillars:
if owner_address not in cached_pillars and len(new_pillars) > len(cached_pillars):
m = create_new_pillar_message(
new_pillars[owner_address])
if 'error' in m:
handle_error(telegram, dev_chat_id, m['error'])
else:
name = new_pillars[owner_address]['name']
r = telegram.bot_send_message_to_chat(channel_id, m['message'])
print(
f'Pillar created message sent to Telegram ({name}): {r.status_code}')
if len(discord_webhook_url) > 0:
r = discord.webhook_send_message_to_channel(
discord_webhook_url, m['message'])
print(
f'Pillar created message sent to Discord ({name}): {r.status_code}')
# Check for Pillar name changes
for owner_address in new_pillars:
if owner_address in cached_pillars:
# Get current and cached name
current_name = new_pillars[owner_address]['name']
cached_name = cached_pillars[owner_address]['name']
if current_name != cached_name:
m = create_pillar_name_changed_message(
cached_name, current_name)
if 'error' in m:
handle_error(telegram, dev_chat_id, m['error'])
else:
r = telegram.bot_send_message_to_chat(
channel_id, m['message'])
print(
f'Pillar name changed message sent to Telegram ({cached_name} -> {current_name}): {r.status_code}')
if len(discord_webhook_url) > 0:
r = discord.webhook_send_message_to_channel(
discord_webhook_url, m['message'])
print(
f'Pillar name changed message sent to Discord ({cached_name} -> {current_name}): {r.status_code}')
# Check for changes in reward sharing
for owner_address in new_pillars:
if owner_address in cached_pillars:
old_momentum_percentage = cached_pillars[
owner_address]['giveMomentumRewardPercentage']
new_momentum_percentage = new_pillars[owner_address]['giveMomentumRewardPercentage']
old_delegate_percentage = cached_pillars[
owner_address]['giveDelegateRewardPercentage']
new_delegate_percentage = new_pillars[owner_address]['giveDelegateRewardPercentage']
name = new_pillars[owner_address]['name']
owner_address = new_pillars[owner_address]['ownerAddress']
changed_shares_data = {}
if old_momentum_percentage != new_momentum_percentage:
changed_shares_data['name'] = name
changed_shares_data['ownerAddress'] = owner_address
changed_shares_data['momentumRewards'] = {
'oldMomentumPercentage': old_momentum_percentage, 'newMomentumPercentage': new_momentum_percentage}
if old_delegate_percentage != new_delegate_percentage:
changed_shares_data['name'] = name
changed_shares_data['ownerAddress'] = owner_address
changed_shares_data['delegateRewards'] = {
'oldDelegatePercentage': old_delegate_percentage, 'newDelegatePercentage': new_delegate_percentage}
if changed_shares_data != {}:
if 'momentumRewards' not in changed_shares_data:
changed_shares_data['momentumRewards'] = {
'oldMomentumPercentage': old_momentum_percentage}
if 'delegateRewards' not in changed_shares_data:
changed_shares_data['delegateRewards'] = {
'oldDelegatePercentage': old_delegate_percentage}
m = create_reward_share_changed_message(changed_shares_data)
if 'error' in m:
handle_error(telegram, dev_chat_id, m['error'])
else:
name = new_pillars[owner_address]['name']
r = telegram.bot_send_message_to_chat(
channel_id, m['message'])
print(
f'Reward share changed message sent to Telegram ({name}): {r.status_code}')
if len(discord_webhook_url) > 0:
r = discord.webhook_send_message_to_channel(
discord_webhook_url, m['message'])
print(
f'Reward share changed message sent to Discord ({name}): {r.status_code}')
def create_dismantled_pillar_message(pillar_data):
try:
m = 'Pillar dismantled!\n'
m = m + 'Pillar: ' + pillar_data['name']
return {'message': m}
except KeyError:
return {'error': 'KeyError: create_dismantled_pillar_message'}
def create_new_pillar_message(pillar_data):
try:
m = 'New pillar spawned!\n'
m = m + 'Say hello to ' + pillar_data['name'] + '\n'
m = m + 'Momentum rewards sharing: ' + \
str(pillar_data['giveMomentumRewardPercentage']) + '%\n'
m = m + 'Delegate rewards sharing: ' + \
str(pillar_data['giveDelegateRewardPercentage']) + '%\n'
return {'message': m}
except KeyError:
return {'error': 'KeyError: create_new_pillar_message'}
def create_pillar_name_changed_message(cached_name, current_name):
try:
m = 'Pillar name changed!\n'
m = m + cached_name + ' \U000027A1 ' + current_name
return {'message': m}
except KeyError:
return {'error': 'KeyError: create_pillar_name_changed_message'}
def create_reward_share_changed_message(changed_shares_data):
try:
m = 'Pillar: ' + changed_shares_data['name'] + '\n'
old_momentum_percentage = changed_shares_data['momentumRewards']['oldMomentumPercentage']
if ('newMomentumPercentage' in changed_shares_data['momentumRewards']):
new_momentum_percentage = changed_shares_data['momentumRewards']['newMomentumPercentage']
m = m + 'Momentum rewards sharing: ' + str(old_momentum_percentage) + \
'% \U000027A1 ' + str(new_momentum_percentage) + '%\n'
else:
m = m + 'Momentum rewards sharing: ' + \
str(old_momentum_percentage) + '%\n'
old_delegate_percentage = changed_shares_data['delegateRewards']['oldDelegatePercentage']
if ('newDelegatePercentage' in changed_shares_data['delegateRewards']):
new_delegate_percentage = changed_shares_data['delegateRewards']['newDelegatePercentage']
m = m + 'Delegate rewards sharing: ' + \
str(old_delegate_percentage) + '% \U000027A1 ' + \
str(new_delegate_percentage) + '%'
else:
m = m + 'Delegate rewards sharing: ' + \
str(old_delegate_percentage) + '%'
return {'message': m}
except KeyError:
return {'error': 'KeyError: create_reward_share_changed_message'}
def create_pinned_stats_message(pillars, momentum_height):
try:
# Only show top 70 Pillars because of Telegram's message character limit (4096 characters)
if len(pillars) > 70:
m = 'Pillar reward sharing rates (top 70)\n'
else:
m = 'Pillar reward sharing rates\n'
m = m + 'Last updated: ' + \
str(datetime.datetime.now(datetime.timezone.utc).strftime(
'%Y-%m-%d %H:%M:%S')) + ' (UTC)\n'
m = m + 'Momentum height: ' + str(momentum_height) + '\n'
m = m + 'M = momentum reward sharing %\n'
m = m + 'D = delegate reward sharing %\n'
m = m + 'W = pillar weight (ZNN) \n'
m = m + 'P/E = produced/expected momentums\n\n'
for owner_address in pillars:
if pillars[owner_address]['rank'] < 70:
weight = int(
round(pillars[owner_address]['weight'] / 100000000))
m = m + str(pillars[owner_address]['rank'] + 1) + ' - ' + str(pillars[owner_address]['name']) + ' -> M: ' + str(pillars[owner_address]['giveMomentumRewardPercentage']) + '% D: ' + str(pillars[owner_address]['giveDelegateRewardPercentage']
) + '% W: ' + str(weight) + ' P/E: ' + str(pillars[owner_address]['currentStats']['producedMomentums']) + '/' + str(pillars[owner_address]['currentStats']['expectedMomentums']) + '\n'
return {'message': m}
except KeyError:
return {'error': 'KeyError: create_pinned_stats_message'}
def create_reward_collection_message(reward_epoch):
try:
emoji = get_emoji(reward_epoch)
m = 'Rewards for epoch ' + \
str(reward_epoch) + ' can now be collected! ' + emoji
return {'message': m}
except KeyError:
return {'error': 'KeyError: create_reward_collection_message'}
def create_pillar_inactive_message(pillar_name):
try:
m = 'Heads up! ' + pillar_name + ' has stopped producing momentums.\n'
m = m + 'The pillar operator should make sure that everything is running smoothly.'
return {'message': m}
except KeyError:
return {'error': 'KeyError: create_pillar_inactive_message'}
def create_pillar_active_message(pillar_name):
try:
m = pillar_name + ' is producing momentums as expected again! \U0001F680'
return {'message': m}
except KeyError:
return {'error': 'KeyError: create_pillar_active_message '}
def get_emoji(epoch):
emojis = ['\U0001f300', '\U0001F680', '\U0001F47D', '\U0001F60E',
'\U0001F525', '\U0001F389', '\U0001F31F', '\U0001F440']
return random.choice(emojis)
def read_file(file_path):
f = open(file_path)
content = json.load(f)
f.close()
return content
def write_to_file_as_json(data, file_name):
with open(file_name, 'w') as outfile:
json.dump(data, outfile, indent=4)
def handle_error(telegram, dev_chat_id, message):
print(message)
# Send the developer a message if a developer chat ID is configured
if len(dev_chat_id) != 0:
telegram.bot_send_message_to_chat(chat_id=dev_chat_id, message=message)
# Exit script on error
sys.exit()
def main():
# Get current file path
path = os.path.dirname(os.path.abspath(__file__))
# Read config
cfg = read_file(f'{path}/config/config.json')
# Data store directory
DATA_STORE_DIR = f'{path}/data_store'
# Pillar cache file
PILLAR_CACHE_FILE = f'{DATA_STORE_DIR}/pillar_data.json'
# Epoch cache file
EPOCH_CACHE_FILE = f'{DATA_STORE_DIR}/epoch_data.json'
# Momentum status cache file
MOMENTUM_STATUS_CACHE_FILE = f'{DATA_STORE_DIR}/momentum_status_data.json'
# Check and create data store directory
if not os.path.exists(DATA_STORE_DIR):
os.makedirs(DATA_STORE_DIR, exist_ok=True)
# Check and create pillar cache file
if not os.path.exists(PILLAR_CACHE_FILE):
open(PILLAR_CACHE_FILE, 'w+').close()
# Check and create epoch cache file
if not os.path.exists(EPOCH_CACHE_FILE):
open(EPOCH_CACHE_FILE, 'w+').close()
# Check and create momentum status cache file
if not os.path.exists(MOMENTUM_STATUS_CACHE_FILE):
open(MOMENTUM_STATUS_CACHE_FILE, 'w+').close()
# Create wrappers
node = NodeRpcWrapper(node_url=cfg['node_url_http'])
telegram = TelegramWrapper(
bot_api_key=cfg['telegram_bot_api_key'])
discord = DiscordWrapper()
# Get latest momentum
latest_momentum = node.get_latest_momentum()
if 'error' in latest_momentum:
handle_error(
telegram, cfg['telegram_dev_chat_id'], latest_momentum['error'])
# Get latest Pillar data
new_pillar_data = node.get_all_pillars()
if 'error' in new_pillar_data:
handle_error(
telegram, cfg['telegram_dev_chat_id'], new_pillar_data['error'])
# Get reward epoch
new_epoch_data = node.get_reward_epoch(cfg['reference_reward_address'])
if 'error' in new_epoch_data:
handle_error(
telegram, cfg['telegram_dev_chat_id'], new_epoch_data['error'])
# Get cached Pillar data from file
if os.stat(PILLAR_CACHE_FILE).st_size != 0:
cached_pillar_data = read_file(PILLAR_CACHE_FILE)
else:
cached_pillar_data = None
# Get cached epoch data from file
if os.stat(EPOCH_CACHE_FILE).st_size != 0:
cached_epoch_data = read_file(EPOCH_CACHE_FILE)
else:
cached_epoch_data = None
# Get cached momentum status data from file
if os.stat(MOMENTUM_STATUS_CACHE_FILE).st_size != 0:
cached_momentum_status_data = read_file(MOMENTUM_STATUS_CACHE_FILE)
else:
cached_momentum_status_data = {'data': {}}
# Cache current Pillar data to file
write_to_file_as_json(new_pillar_data, PILLAR_CACHE_FILE)
# Cache current epoch data to file
write_to_file_as_json(new_epoch_data, EPOCH_CACHE_FILE)
# Create and update the pinned stats message
pinned_stats_message = create_pinned_stats_message(
new_pillar_data['pillars'], latest_momentum['height'])
if 'error' in pinned_stats_message:
handle_error(telegram, cfg['telegram_dev_chat_id'],
pinned_stats_message['error'])
else:
r = telegram.bot_edit_message(
chat_id=cfg['telegram_channel_id'], message_id=cfg['telegram_pinned_message_id'], message=pinned_stats_message['message'])
print(f'Pinned message updated on Telegram: {r.status_code}')
# Check for new Pillar events if cached data exists
if cached_pillar_data is not None:
check_and_send_pillar_events(
telegram, discord, cfg, cached_pillar_data['pillars'], new_pillar_data['pillars'])
# Check if new rewards are available
if cached_epoch_data is not None:
check_and_send_reward_collection_message(
telegram, discord, cfg, cached_epoch_data['epoch'], new_epoch_data['epoch'])
# Check for missed momentums
# TODO: Fix so that momentum status cache is stored on first run as well
if cached_pillar_data is not None:
check_and_send_missed_momentums_message(
telegram, discord, cfg, cached_pillar_data['pillars'], new_pillar_data['pillars'], cached_momentum_status_data['data'], MOMENTUM_STATUS_CACHE_FILE)
if __name__ == '__main__':
print(f'{str(datetime.datetime.now())}: Starting')
main()
print(f'{str(datetime.datetime.now())}: Completed')
|
from osu_parser.beatmap import Beatmap
from osu.ctb.difficulty import Difficulty
from ppCalc import calculate_pp
import os
import threading
import socket
import struct
import sys
import traceback
import time
CMD_KEEP_ALIVE = 0
CMD_KEEP_ALIVE_OK = 1
CMD_CALCULATE_CTB = 2
TIMEOUT = 3
def process_exist(imagename):
p = os.popen('tasklist /FI "IMAGENAME eq %s"' % imagename).read()
if imagename in p:
return True
return False
def check_process_exist():
if not process_exist('Sync.exe'):
os._exit(0)
timer = threading.Timer(3, check_process_exist)
timer.start()
timer = threading.Timer(3, check_process_exist)
timer.start()
def read_string(sock,count,encoding='utf-8'):
total_bytes = b""
while True:
recv_bytes = sock.recv(count - len(total_bytes))
total_bytes += recv_bytes
if len(total_bytes) >= count:
break
return total_bytes.decode(encoding)
def process_tcp(sock):
"""
    cmd - 4 bytes (int)
    content_count - 4 bytes (int)
    content - content_count bytes (utf-8 string)
    mods - 4 bytes (int)
"""
try:
while True:
cmd_bytes = sock.recv(4)
cmd = int.from_bytes(cmd_bytes,byteorder="little")
if cmd == CMD_KEEP_ALIVE:
sock.send(struct.pack("<i",CMD_KEEP_ALIVE_OK))
continue
content_count_bytes = sock.recv(4)
content_count = int.from_bytes(content_count_bytes,byteorder="little")
content = read_string(sock,content_count)
mods_bytes = sock.recv(4)
mods = int.from_bytes(mods_bytes,byteorder="little")
beatmap = Beatmap(content)
difficulty = Difficulty(beatmap, mods)
send_ctb_result(sock,beatmap,difficulty)
except Exception as identifier:
traceback.print_exc()
print("[ERROR]%s" % identifier,file=sys.stderr)
finally:
sock.close()
def send_ctb_result(sock,beatmap,difficulty):
"""
stars - 8 bytes (double)
pp - 8 bytes (double)
full_combo 4 bytes (int)
ar - 8 bytes (double)
"""
stars = difficulty.star_rating
sock.send(struct.pack("<d", stars))
sock.send(struct.pack("<i", beatmap.max_combo))
sock.send(struct.pack("<d", difficulty.beatmap.difficulty["ApproachRate"]))
quit_self = False
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind(('127.0.0.1', 11800))
s.listen(5)
while not quit_self:
sock,addr = s.accept()
sock.settimeout(TIMEOUT)
t = threading.Thread(target=process_tcp,args=(sock,))
t.start()
s.close() |
from molinterrogator.target import _target_df
from molinterrogator.target import _compound_df
from molinterrogator.target import _compound_from_target_df
def _target_id_2_card_dict(db_id=None, client=None):
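    # Build a one-row "card" dict for a ChEMBL target: name, type, organism,
    # ChEMBL id and cross-references (PDB, UniProt, IntAct, InterPro, BindingDB).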
result = client.target.filter(target_chembl_id__in=db_id)[0]
tmp_dict = {}
tmp_dict['Name'] = result['pref_name']
tmp_dict['Type'] = result['target_type']
tmp_dict['Organism'] = result['organism']
tmp_dict['ChEMBL'] = result['target_chembl_id']
    if len(result['target_components']) > 1:
        print('Target', tmp_dict['ChEMBL'],
              'has more than one target_components entry; not sure what that means.')
    xref_key_map = {'PDBe': 'PDB', 'UniProt': 'UniProt',
                    'IntAct': 'IntAct', 'InterPro': 'InterPro'}
    for xref in result['target_components'][0]['target_component_xrefs']:
        src_db = xref['xref_src_db']
        id_db = xref['xref_id']
        if src_db in xref_key_map:
            tmp_dict.setdefault(xref_key_map[src_db], []).append(id_db)
tmp_dict['BindingDB']=tmp_dict['UniProt']
del(result)
return tmp_dict
def _compound_from_target_2_card_dict(compound_result=None, client=None):
tmp_dict = {}
tmp_dict['Name'] = compound_result['molecule_pref_name']
tmp_dict['Smile'] = compound_result['canonical_smiles']
tmp_dict['Compound ChemBL'] = compound_result['molecule_chembl_id']
tmp_dict['Assay ChemBL'] = compound_result['assay_chembl_id']
tmp_dict['Document ChemBL'] = compound_result['document_chembl_id']
if compound_result['standard_type']=='IC50':
tmp_dict['IC50']=compound_result['standard_value']+' '+compound_result['standard_units']
else:
print('Type of compound value not known for molecule_chembl_id:', compound_result['molecule_chembl_id'])
return tmp_dict
class _target_query():
def __init__(self, query=None):
self.string = query
self.query = None
self.card = _target_df.copy()
self.run_query()
self.update_results(index_result=0)
def run_query(self):
from chembl_webresource_client.new_client import new_client
self.query = new_client.target.filter(target_synonym__icontains=self.string)
del(new_client)
def info_results(self):
from chembl_webresource_client.new_client import new_client
tmp_df = _target_df.copy()
for result in self.query:
chembl_id = result['target_chembl_id']
tmp_df =tmp_df.append(_target_id_2_card_dict(chembl_id, new_client),ignore_index=True)
del(new_client)
return tmp_df
def update_results(self,index_result=0):
from chembl_webresource_client.new_client import new_client
chembl_id = self.query[index_result]['target_chembl_id']
self.card = _target_df.copy()
self.card =self.card.append(_target_id_2_card_dict(chembl_id, new_client),ignore_index=True)
self.activity_filter = new_client.molecule.filter(target_chembl_id__in=chembl_id)
self.molecule_filter = new_client.activity.filter(target_chembl_id__in=chembl_id)
self.compounds = _compound_from_target_df.copy()
for result in self.molecule_filter:
tmp_dict = _compound_from_target_2_card_dict(result, new_client)
self.compounds = self.compounds.append(tmp_dict,ignore_index=True)
|
import json
from urllib.parse import urlparse
from abstract_json_storage import AbstractJsonStorage
class JsonStorageStore(AbstractJsonStorage):
def __init__(self):
super().__init__()
def get_name(self):
return "jsonstorage.net"
def create(self, session, json_data=None, store_id=None):
return self._request(session, "post", "/api/items", json_data)
def update(self, session, json_data=None, store_id=None):
return self._request(session, "put", store_id or self.store_id, json_data)
def read(self, session, json_data=None, store_id=None):
return self._request(session, "get", store_id or self.store_id)
def result_callback(self, method_name, result_content):
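        # The "create" response body contains the new item's URI; remember its
        # path so later update/read calls can target the same stored document.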
print(f"{method_name} : {result_content}")
if method_name == "create":
content = json.loads(result_content)
path_parts = urlparse(content["uri"])
object_id = path_parts.path
self._set_store_id(object_id)
elif method_name == "read":
return result_content
def _get_api_root(self):
# return "https://jsonstorage.net/api"
return "https://jsonstorage.net"
|
import unittest
import sqlalchemy
from records_mover.db.redshift.sql import schema_sql_from_admin_views
from mock import patch, Mock
class TestSQL(unittest.TestCase):
@patch('records_mover.db.redshift.sql.logger')
def test_schema_sql_from_admin_views_not_installed(self,
mock_logger):
mock_db = Mock(name='db')
mock_table = Mock(name='table')
mock_schema = Mock(name='schema')
mock_db.execute.side_effect = sqlalchemy.exc.ProgrammingError('statement', {}, {})
out = schema_sql_from_admin_views(mock_schema, mock_table, mock_db)
self.assertIsNone(out)
mock_logger.debug.assert_called_with('Error while generating SQL', exc_info=True)
mock_logger.warning.\
assert_called_with("To be able to save SQL to a records directory, "
"please install and grant access to 'admin.v_generate_tbl_ddl' "
"from https://github.com/awslabs/amazon-redshift-utils/"
"blob/master/src/AdminViews/v_generate_tbl_ddl.sql")
|
# parsetab.py
# This file is automatically generated. Do not edit.
_tabversion = '3.10'
_lr_method = 'LALR'
_lr_signature = 'PLUS MINUS TIMES DIVIDE LPAREN RPAREN SPACE COMMENT STARTP FINISHP ASSIGN LSQB RSQB SEMICOLON ELESS LESS EGREATER GREATER EQUAL NOTEQUAL DOT COMMA LOGIC_AND LOGIC_OR LOGIC_NOT IF THEN ELSE WHILE FOR DO BEGIN END RANGE TURN CMDARG_ON CMDARG_OFF SWITCH_STATE_TO DIMMER IDENT NUMBER STATE LBRACE RBRACE OCTOTHORPE\n programm_struct : COMMENT programm_body\n | programm_body\n iot_device : IDENTiot_dev_control : IDENT\n iot_command : TURN\n | SWITCH_STATE_TO\n | DIMMER\n \n toggle_cmd_args : CMDARG_ON\n | CMDARG_OFF\n \n iot_cmd_argument : toggle_cmd_args\n | NUMBER\n | RANGE DOT LSQB NUMBER COMMA NUMBER RSQB\n \n iot_object_expr : iot_device OCTOTHORPE iot_dev_control\n \n get_device_info : STATE\n \n iot_device_get_info : iot_device OCTOTHORPE iot_dev_control OCTOTHORPE get_device_info\n | iot_device DOT iot_dev_control DOT iot_command DOT LSQB RANGE DOT LSQB NUMBER COMMA NUMBER RSQB RSQB\n \n assigment : iot_object_expr ASSIGN NUMBER\n | iot_object_expr ASSIGN toggle_cmd_args\n \n assigment_stmts : assigment_stmts assigment SEMICOLON\n | assigment SEMICOLON\n \n logical_comp : EGREATER\n | GREATER\n | ELESS\n | LESS\n | EQUAL\n | NOTEQUAL\n \n logical_operator : LOGIC_AND\n | LOGIC_OR\n \n logical_cond : logical_comp\n | logical_operator\n \n condition : LPAREN iot_device_get_info logical_comp iot_device_get_info RPAREN\n | LPAREN iot_device_get_info logical_comp NUMBER RPAREN\n | LPAREN iot_device_get_info logical_comp toggle_cmd_args RPAREN\n \n condition_list : condition_list logical_operator condition\n | condition\n | LPAREN condition_list RPAREN\n \n if_stmt : IF LBRACE condition_list RBRACE THEN\n \n end_if : END\n \n condition_instr : if_stmt BEGIN assigment_stmts end_if SEMICOLON\n | if_stmt BEGIN assigment_stmts end_if ELSE BEGIN assigment_stmts end_if SEMICOLON\n \n programm_body : programm_body condition_instr\n | condition_instr\n | programm_body assigment_stmts\n | assigment_stmts\n '
_lr_action_items = {'LBRACE':([9,],[19,]),'RSQB':([83,84,],[84,85,]),'IDENT':([0,2,4,5,10,12,13,14,16,18,20,21,22,30,41,44,47,48,49,50,51,52,53,54,55,56,62,75,],[3,3,3,-42,3,3,3,-41,23,-20,3,3,-19,3,-39,3,3,-21,-22,-26,-23,-25,-24,23,23,3,3,-40,]),'NOTEQUAL':([39,69,70,85,],[50,-14,-15,-16,]),'$end':([2,4,5,11,13,14,18,20,22,41,75,],[-2,-44,-42,0,-43,-41,-20,-1,-19,-39,-40,]),'CMDARG_OFF':([17,47,48,49,50,51,52,53,],[27,27,-21,-22,-26,-23,-25,-24,]),'SEMICOLON':([8,15,25,26,27,28,32,33,68,],[18,22,-17,-18,-9,-8,41,-38,75,]),'RPAREN':([27,28,31,38,43,46,57,58,59,63,64,65,69,70,85,],[-9,-8,-35,46,-34,-36,63,64,65,-33,-32,-31,-14,-15,-16,]),'COMMENT':([0,],[10,]),'LPAREN':([19,30,34,35,36,],[30,30,-27,44,-28,]),'BEGIN':([1,42,45,],[12,56,-37,]),'LOGIC_AND':([29,31,38,43,46,63,64,65,],[34,-35,34,-34,-36,-33,-32,-31,]),'RBRACE':([29,31,43,46,63,64,65,],[37,-35,-34,-36,-33,-32,-31,]),'STATE':([66,],[69,]),'GREATER':([39,69,70,85,],[49,-14,-15,-16,]),'END':([18,21,22,62,],[-20,33,-19,33,]),'CMDARG_ON':([17,47,48,49,50,51,52,53,],[28,28,-21,-22,-26,-23,-25,-24,]),'DIMMER':([67,],[71,]),'SWITCH_STATE_TO':([67,],[72,]),'OCTOTHORPE':([3,6,23,40,60,],[-3,16,-4,54,66,]),'ASSIGN':([7,23,24,],[17,-4,-13,]),'EQUAL':([39,69,70,85,],[52,-14,-15,-16,]),'LSQB':([76,79,],[77,80,]),'DOT':([3,23,40,61,71,72,73,74,78,],[-3,-4,55,67,-7,-6,-5,76,79,]),'COMMA':([81,],[82,]),'TURN':([67,],[73,]),'THEN':([37,],[45,]),'NUMBER':([17,47,48,49,50,51,52,53,80,82,],[25,58,-21,-22,-26,-23,-25,-24,81,83,]),'RANGE':([77,],[78,]),'ELESS':([39,69,70,85,],[51,-14,-15,-16,]),'LOGIC_OR':([29,31,38,43,46,63,64,65,],[36,-35,36,-34,-36,-33,-32,-31,]),'ELSE':([32,33,],[42,-38,]),'IF':([0,2,4,5,10,13,14,18,20,22,41,75,],[9,9,-44,-42,9,-43,-41,-20,9,-19,-39,-40,]),'EGREATER':([39,69,70,85,],[48,-14,-15,-16,]),'LESS':([39,69,70,85,],[53,-14,-15,-16,]),}
_lr_action = {}
for _k, _v in _lr_action_items.items():
for _x,_y in zip(_v[0],_v[1]):
if not _x in _lr_action: _lr_action[_x] = {}
_lr_action[_x][_k] = _y
del _lr_action_items
_lr_goto_items = {'logical_comp':([39,],[47,]),'if_stmt':([0,2,10,20,],[1,1,1,1,]),'programm_body':([0,10,],[2,20,]),'end_if':([21,62,],[32,68,]),'condition':([19,30,35,],[31,31,43,]),'assigment':([0,2,4,10,12,13,20,21,56,62,],[8,8,15,8,8,15,8,15,8,15,]),'toggle_cmd_args':([17,47,],[26,57,]),'logical_operator':([29,38,],[35,35,]),'condition_list':([19,30,],[29,38,]),'get_device_info':([66,],[70,]),'iot_device_get_info':([30,44,47,],[39,39,59,]),'programm_struct':([0,],[11,]),'iot_command':([67,],[74,]),'iot_object_expr':([0,2,4,10,12,13,20,21,56,62,],[7,7,7,7,7,7,7,7,7,7,]),'assigment_stmts':([0,2,10,12,20,56,],[4,13,4,21,13,62,]),'condition_instr':([0,2,10,20,],[5,14,5,14,]),'iot_device':([0,2,4,10,12,13,20,21,30,44,47,56,62,],[6,6,6,6,6,6,6,6,40,40,40,6,6,]),'iot_dev_control':([16,54,55,],[24,60,61,]),}
_lr_goto = {}
for _k, _v in _lr_goto_items.items():
for _x, _y in zip(_v[0], _v[1]):
if not _x in _lr_goto: _lr_goto[_x] = {}
_lr_goto[_x][_k] = _y
del _lr_goto_items
_lr_productions = [
("S' -> programm_struct","S'",1,None,None,None),
('programm_struct -> COMMENT programm_body','programm_struct',2,'p_programm_struct','flexAndBison.py',238),
('programm_struct -> programm_body','programm_struct',1,'p_programm_struct','flexAndBison.py',239),
('iot_device -> IDENT','iot_device',1,'p_iot_device','flexAndBison.py',243),
('iot_dev_control -> IDENT','iot_dev_control',1,'p_iot_dev_control','flexAndBison.py',248),
('iot_command -> TURN','iot_command',1,'p_iot_command','flexAndBison.py',254),
('iot_command -> SWITCH_STATE_TO','iot_command',1,'p_iot_command','flexAndBison.py',255),
('iot_command -> DIMMER','iot_command',1,'p_iot_command','flexAndBison.py',256),
('toggle_cmd_args -> CMDARG_ON','toggle_cmd_args',1,'p_toggle_cmd_args','flexAndBison.py',263),
('toggle_cmd_args -> CMDARG_OFF','toggle_cmd_args',1,'p_toggle_cmd_args','flexAndBison.py',264),
('iot_cmd_argument -> toggle_cmd_args','iot_cmd_argument',1,'p_iot_cmd_argument','flexAndBison.py',271),
('iot_cmd_argument -> NUMBER','iot_cmd_argument',1,'p_iot_cmd_argument','flexAndBison.py',272),
('iot_cmd_argument -> RANGE DOT LSQB NUMBER COMMA NUMBER RSQB','iot_cmd_argument',7,'p_iot_cmd_argument','flexAndBison.py',273),
('iot_object_expr -> iot_device OCTOTHORPE iot_dev_control','iot_object_expr',3,'p_iot_object_expr','flexAndBison.py',279),
('get_device_info -> STATE','get_device_info',1,'p_get_device_info','flexAndBison.py',286),
('iot_device_get_info -> iot_device OCTOTHORPE iot_dev_control OCTOTHORPE get_device_info','iot_device_get_info',5,'p_iot_device_get_info','flexAndBison.py',291),
('iot_device_get_info -> iot_device DOT iot_dev_control DOT iot_command DOT LSQB RANGE DOT LSQB NUMBER COMMA NUMBER RSQB RSQB','iot_device_get_info',15,'p_iot_device_get_info','flexAndBison.py',292),
('assigment -> iot_object_expr ASSIGN NUMBER','assigment',3,'p_assigment','flexAndBison.py',299),
('assigment -> iot_object_expr ASSIGN toggle_cmd_args','assigment',3,'p_assigment','flexAndBison.py',300),
('assigment_stmts -> assigment_stmts assigment SEMICOLON','assigment_stmts',3,'p_assigment_stmts','flexAndBison.py',306),
('assigment_stmts -> assigment SEMICOLON','assigment_stmts',2,'p_assigment_stmts','flexAndBison.py',307),
('logical_comp -> EGREATER','logical_comp',1,'p_logical_comp','flexAndBison.py',312),
('logical_comp -> GREATER','logical_comp',1,'p_logical_comp','flexAndBison.py',313),
('logical_comp -> ELESS','logical_comp',1,'p_logical_comp','flexAndBison.py',314),
('logical_comp -> LESS','logical_comp',1,'p_logical_comp','flexAndBison.py',315),
('logical_comp -> EQUAL','logical_comp',1,'p_logical_comp','flexAndBison.py',316),
('logical_comp -> NOTEQUAL','logical_comp',1,'p_logical_comp','flexAndBison.py',317),
('logical_operator -> LOGIC_AND','logical_operator',1,'p_logical_operator','flexAndBison.py',322),
('logical_operator -> LOGIC_OR','logical_operator',1,'p_logical_operator','flexAndBison.py',323),
('logical_cond -> logical_comp','logical_cond',1,'p_logical_cond','flexAndBison.py',328),
('logical_cond -> logical_operator','logical_cond',1,'p_logical_cond','flexAndBison.py',329),
('condition -> LPAREN iot_device_get_info logical_comp iot_device_get_info RPAREN','condition',5,'p_condition','flexAndBison.py',334),
('condition -> LPAREN iot_device_get_info logical_comp NUMBER RPAREN','condition',5,'p_condition','flexAndBison.py',335),
('condition -> LPAREN iot_device_get_info logical_comp toggle_cmd_args RPAREN','condition',5,'p_condition','flexAndBison.py',336),
('condition_list -> condition_list logical_operator condition','condition_list',3,'p_condition_list','flexAndBison.py',343),
('condition_list -> condition','condition_list',1,'p_condition_list','flexAndBison.py',344),
('condition_list -> LPAREN condition_list RPAREN','condition_list',3,'p_condition_list','flexAndBison.py',345),
('if_stmt -> IF LBRACE condition_list RBRACE THEN','if_stmt',5,'p_if_stmt','flexAndBison.py',350),
('end_if -> END','end_if',1,'p_end_if','flexAndBison.py',355),
('condition_instr -> if_stmt BEGIN assigment_stmts end_if SEMICOLON','condition_instr',5,'p_condition_instr','flexAndBison.py',369),
('condition_instr -> if_stmt BEGIN assigment_stmts end_if ELSE BEGIN assigment_stmts end_if SEMICOLON','condition_instr',9,'p_condition_instr','flexAndBison.py',370),
('programm_body -> programm_body condition_instr','programm_body',2,'p_programm_body','flexAndBison.py',375),
('programm_body -> condition_instr','programm_body',1,'p_programm_body','flexAndBison.py',376),
('programm_body -> programm_body assigment_stmts','programm_body',2,'p_programm_body','flexAndBison.py',377),
('programm_body -> assigment_stmts','programm_body',1,'p_programm_body','flexAndBison.py',378),
]
|
#!/usr/bin/env python
"""
Created on Apr 10, 2015
@author: Chen Yang
This script generates simulated Oxford Nanopore 2D reads.
"""
from __future__ import print_function
from __future__ import with_statement
import sys
import getopt
import random
import re
from time import strftime
try:
from six.moves import xrange
except ImportError:
pass
try:
import numpy as np
except ImportError:
sys.exit("""You need numpy!
install it from http://www.numpy.org/""")
import mixed_models as mm
PYTHON_VERSION = sys.version_info
VERSION = "1.0.0"
PRORAM = "NanoSim"
AUTHOR = "Chen Yang (UBC & BCGSC)"
CONTACT = "cheny@bcgsc.ca"
BASES = ['A', 'T', 'C', 'G']
# Usage information
def usage():
usage_message = "./simulator.py [command] <options>\n" \
"[command] circular | linear\n" \
"Do not choose 'circular' when there is more than one sequence in the reference\n" \
"<options>: \n" \
"-h : print usage message\n" \
"-r : reference genome in fasta file, specify path and file name, REQUIRED\n" \
"-c : The prefix of training set profiles, same as the output prefix in read_analysis.py, default = training\n" \
"-o : The prefix of output file, default = 'simulated'\n" \
"-n : Number of generated reads, default = 20,000 reads\n" \
"--max_len : Maximum read length, default = Inf\n" \
"--min_len : Minimum read length, default = 50\n" \
"--perfect: Output perfect reads, no mutations, default = False\n" \
"--KmerBias: prohibits homopolymers with length >= n bases in output reads, default = 6\n"
sys.stderr.write(usage_message)
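# Example invocation (paths are hypothetical):
#   ./simulator.py linear -r reference.fasta -c training -o simulated -n 1000 --perfect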
def read_ecdf(profile):
    # We need to count the number of zeros: if there are more than 10, l_len/l_ratio need to be raised,
    # because it is almost impossible for the ratio to be much lower than the lowest heuristic value.
header = profile.readline()
header_info = header.strip().split()
ecdf_dict = {}
lanes = len(header_info[1:])
for i in header_info[1:]:
boundaries = i.split('-')
ecdf_dict[(int(boundaries[0])), int(boundaries[1])] = {}
ecdf_key = sorted(ecdf_dict.keys())
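    # ecdf_dict is keyed by (bucket_low, bucket_high) lane boundaries; each value
    # maps cumulative-probability intervals (p_low, p_high) to value intervals
    # (v_low, v_high) that are later used for inverse-transform sampling.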
l_prob = [0.0] * lanes
l_ratio = [0.0] * lanes
for line in profile:
new = line.strip().split('\t')
ratio = [float(x) for x in new[0].split('-')]
prob = [float(x) for x in new[1:]]
for i in xrange(lanes):
if prob[i] == l_prob[i]:
continue
else:
if l_prob[i] != 0:
ecdf_dict[ecdf_key[i]][(l_prob[i], prob[i])] = (l_ratio[i], ratio[1])
else:
ecdf_dict[ecdf_key[i]][(l_prob[i], prob[i])] \
= (max(l_ratio[i], ratio[1] - 10 * (ratio[1] - ratio[0])), ratio[1])
l_ratio[i] = ratio[1]
l_prob[i] = prob[i]
for i in xrange(0, len(ecdf_key)):
last_key = sorted(ecdf_dict[ecdf_key[i]].keys())[-1]
last_value = ecdf_dict[ecdf_key[i]][last_key]
ecdf_dict[ecdf_key[i]][last_key] = (last_value[0], ratio[1])
return ecdf_dict
def get_length(len_dict, num, max_l, min_l):
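    # Draw `num` lengths from the first bucket of the ECDF dict by inverse-transform
    # sampling, rejecting draws that fall outside (min_l, max_l].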
length_list = []
for i in xrange(num):
middle_ref = 0
key = tuple(len_dict.keys())[0]
while middle_ref <= min_l or middle_ref > max_l:
p = random.random()
for k_p, v_p in len_dict[key].items():
if k_p[0] <= p < k_p[1]:
middle_ref = int(round((p - k_p[0])/(k_p[1] - k_p[0]) * (v_p[1] - v_p[0]) + v_p[0]))
break
length_list.append(middle_ref)
return length_list
def read_profile(number, model_prefix, per, max_l, min_l):
global unaligned_length, number_aligned, aligned_dict
global match_ht_list, align_ratio, ht_dict, error_par
global trans_error_pr, match_markov_model
# Read model profile for match, mismatch, insertion and deletions
sys.stdout.write(strftime("%Y-%m-%d %H:%M:%S") + ": Read error profile\n")
sys.stdout.flush()
error_par = {}
model_profile = model_prefix + "_model_profile"
with open(model_profile, 'r') as mod_profile:
mod_profile.readline()
for line in mod_profile:
new_line = line.strip().split("\t")
if "mismatch" in line:
error_par["mis"] = [float(x) for x in new_line[1:]]
elif "insertion" in line:
error_par["ins"] = [float(x) for x in new_line[1:]]
else:
error_par["del"] = [float(x) for x in new_line[1:]]
trans_error_pr = {}
with open(model_prefix + "_error_markov_model", "r") as error_markov:
error_markov.readline()
for line in error_markov:
info = line.strip().split()
k = info[0]
trans_error_pr[k] = {}
trans_error_pr[k][(0, float(info[1]))] = "mis"
trans_error_pr[k][(float(info[1]), float(info[1]) + float(info[2]))] = "ins"
trans_error_pr[k][(1 - float(info[3]), 1)] = "del"
with open(model_prefix + "_first_match.hist", 'r') as fm_profile:
match_ht_list = read_ecdf(fm_profile)
with open(model_prefix + "_match_markov_model", 'r') as mm_profile:
match_markov_model = read_ecdf(mm_profile)
# Read length of unaligned reads
sys.stdout.write(strftime("%Y-%m-%d %H:%M:%S") + ": Read ECDF of unaligned reads\n")
sys.stdout.flush()
unaligned_length = []
with open(model_prefix + "_unaligned_length_ecdf", 'r') as u_profile:
new = u_profile.readline().strip()
rate = new.split('\t')[1]
# if parameter perfect is used, all reads should be aligned, number_aligned equals total number of reads.
if per or rate == "100%":
number_aligned = number
else:
number_aligned = int(round(number * float(rate) / (float(rate) + 1)))
number_unaligned = number - number_aligned
unaligned_dict = read_ecdf(u_profile)
unaligned_length = get_length(unaligned_dict, number_unaligned, max_l, min_l)
unaligned_dict.clear()
# Read profile of aligned reads
sys.stdout.write(strftime("%Y-%m-%d %H:%M:%S") + ": Read ECDF of aligned reads\n")
sys.stdout.flush()
# Read align ratio profile
with open(model_prefix + "_align_ratio", 'r') as a_profile:
align_ratio = read_ecdf(a_profile)
# Read head/unaligned region ratio
with open(model_prefix + "_ht_ratio", 'r') as ht_profile:
ht_dict = read_ecdf(ht_profile)
# Read length of aligned reads
# If "perfect" is chosen, just use the total length ecdf profile, else use the length of aligned region on reference
if per:
length_profile = model_prefix + "_aligned_reads_ecdf"
else:
length_profile = model_prefix + "_aligned_length_ecdf"
with open(length_profile, 'r') as align_profile:
aligned_dict = read_ecdf(align_profile)
def collapse_homo(seq, k):
read = re.sub("A" * k + "+", "A" * (k - 1), seq)
read = re.sub("C" * k + "+", "C" * (k - 1), read)
read = re.sub("T" * k + "+", "T" * (k - 1), read)
read = re.sub("G" * k + "+", "G" * (k - 1), read)
return read
# Taken from https://github.com/lh3/readfq
def readfq(fp): # this is a generator function
last = None # this is a buffer keeping the last unprocessed line
while True: # mimic closure; is it a bad idea?
if not last: # the first record or a record following a fastq
for l in fp: # search for the start of the next record
if l[0] in '>@': # fasta/q header line
last = l[:-1] # save this line
break
if not last:
break
name, seqs, last = last[1:].partition(" ")[0], [], None
for l in fp: # read the sequence
if l[0] in '@+>':
last = l[:-1]
break
seqs.append(l[:-1])
if not last or last[0] != '+': # this is a fasta record
yield name, ''.join(seqs), None # yield a fasta record
if not last:
break
else: # this is a fastq record
seq, leng, seqs = ''.join(seqs), 0, []
for l in fp: # read the quality
seqs.append(l[:-1])
leng += len(l) - 1
if leng >= len(seq): # have read enough quality
last = None
yield name, seq, ''.join(seqs) # yield a fastq record
break
if last: # reach EOF before reading enough quality
yield name, seq, None # yield a fasta record instead
break
def simulation(ref, out, dna_type, per, kmer_bias, max_l, min_l):
global unaligned_length, number_aligned, aligned_dict
global genome_len, seq_dict, seq_len
global match_ht_list, align_ratio, ht_dict, match_markov_model
global trans_error_pr, error_par
sys.stdout.write(strftime("%Y-%m-%d %H:%M:%S") + ": Read in reference genome\n")
sys.stdout.flush()
seq_dict = {}
seq_len = {}
# Read in the reference genome
with open(ref, 'r') as infile:
for seqN, seqS, seqQ in readfq(infile):
info = re.split(r'[_\s]\s*', seqN)
chr_name = "-".join(info)
seq_dict[chr_name] = seqS
sys.stdout.write(".")
sys.stdout.flush()
sys.stdout.write("\n")
sys.stdout.flush()
if len(seq_dict) > 1 and dna_type == "circular":
sys.stderr.write("Do not choose circular if there is more than one chromosome in the genome!")
sys.exit(1)
for key in seq_dict.keys():
seq_len[key] = len(seq_dict[key])
genome_len = sum(seq_len.values())
# Start simulation
sys.stdout.write(strftime("%Y-%m-%d %H:%M:%S") + ": Start simulation of random reads\n")
sys.stdout.flush()
out_reads = open(out + "_reads.fasta", 'w')
out_error = open(out + "_error_profile", 'w')
out_error.write("Seq_name\tSeq_pos\terror_type\terror_length\tref_base\tseq_base\n")
# Simulate unaligned reads
num_unaligned_length = len(unaligned_length)
for i in xrange(num_unaligned_length):
unaligned = unaligned_length[i]
unaligned, error_dict = unaligned_error_list(unaligned, error_par)
new_read, new_read_name = extract_read(dna_type, unaligned)
new_read_name = new_read_name + "_unaligned_" + str(i)
# Change lowercase to uppercase and replace N with any base
new_read = case_convert(new_read)
read_mutated = mutate_read(new_read, new_read_name, out_error, error_dict, kmer_bias, False)
# Reverse complement half of the reads
p = random.random()
if p < 0.5:
read_mutated = reverse_complement(read_mutated)
new_read_name += "_R"
else:
new_read_name += "_F"
out_reads.write(">" + new_read_name + "_0_" + str(unaligned) + "_0" + '\n')
out_reads.write(read_mutated + "\n")
del unaligned_length
# Simulate aligned reads
sys.stdout.write(strftime("%Y-%m-%d %H:%M:%S") + ": Start simulation of aligned reads\n")
sys.stdout.flush()
if per:
ref_length = get_length(aligned_dict, number_aligned, max_l, min_l)
del aligned_dict
for i in xrange(number_aligned):
new_read, new_read_name = extract_read(dna_type, ref_length[i])
new_read_name = new_read_name + "_perfect_" + str(i)
# Reverse complement half of the reads
p = random.random()
if p < 0.5:
new_read = reverse_complement(new_read)
new_read_name += "_R"
else:
new_read_name += "_F"
out_reads.write(">" + new_read_name + "_0_" + str(ref_length[i]) + "_0" + '\n')
# Change lowercase to uppercase and replace N with any base
new_read = case_convert(new_read)
out_reads.write(new_read + "\n")
out_reads.close()
out_error.close()
return
i = 0
while i < number_aligned:
ref = get_length(aligned_dict, 1, max_l, min_l)[0]
middle, middle_ref, error_dict = error_list(ref, match_markov_model, match_ht_list, error_par,
trans_error_pr)
for k_align in sorted(align_ratio.keys()):
if k_align[0] <= middle < k_align[1]:
break
p = random.random()
for k_r, v_r in align_ratio[k_align].items():
if k_r[0] <= p < k_r[1]:
a_ratio = (p - k_r[0])/(k_r[1] - k_r[0]) * (v_r[1] - v_r[0]) + v_r[0]
total = int(round(middle / a_ratio))
remainder = total - int(round(middle))
break
if total > max_l:
continue
if remainder == 0:
head = 0
tail = 0
else:
for k_ht in sorted(ht_dict.keys()):
if k_ht[0] <= remainder < k_ht[1]:
p = random.random()
for k_h, v_h in ht_dict[k_ht].items():
if k_h[0] <= p < k_h[1]:
ratio = (p - k_h[0])/(k_h[1] - k_h[0]) * (v_h[1] - v_h[0]) + v_h[0]
head = int(round(remainder * ratio))
tail = remainder - head
break
break
# if remainder is larger than any empirical value, then randomly divide it into head and tail
try:
head
except NameError:
p = random.random()
head = int(round(remainder * p))
tail = remainder - head
# Extract middle region from reference genome
new_read, new_read_name = extract_read(dna_type, middle_ref)
new_read_name = new_read_name + "_aligned_" + str(i + num_unaligned_length)
# Mutate read
new_read = case_convert(new_read)
read_mutated = mutate_read(new_read, new_read_name, out_error, error_dict, kmer_bias)
# Reverse complement half of the reads
p = random.random()
if p < 0.5:
read_mutated = reverse_complement(read_mutated)
new_read_name += "_R"
else:
new_read_name += "_F"
# Add head and tail region
read_mutated = ''.join(np.random.choice(BASES, head)) + read_mutated
read_mutated += ''.join(np.random.choice(BASES, tail))
if kmer_bias:
read_mutated = collapse_homo(read_mutated, kmer_bias)
out_reads.write(">" + new_read_name + "_" + str(head) + "_" + str(middle_ref) + "_" +
str(tail) + '\n')
out_reads.write(read_mutated + '\n')
i += 1
out_reads.close()
out_error.close()
align_ratio.clear()
ht_dict.clear()
def reverse_complement(seq):
comp = {'A': 'T', 'T': 'A', 'C': 'G', 'G': 'C'}
seq_list = list(seq)
reverse_seq_list = reversed([comp.get(base, base) for base in seq_list])
reverse_seq = ''.join(reverse_seq_list)
return reverse_seq
def extract_read(dna_type, length):
global seq_dict, seq_len, genome_len
if length > max(seq_len.values()):
length = max(seq_len.values())
# Extract the aligned region from reference
if dna_type == "circular":
ref_pos = random.randint(0, genome_len)
chromosome = list(seq_dict.keys())[0]
new_read_name = chromosome + "_" + str(ref_pos)
if length + ref_pos <= genome_len:
new_read = seq_dict[chromosome][ref_pos: ref_pos + length]
else:
new_read = seq_dict[chromosome][ref_pos:]
new_read = new_read + seq_dict[chromosome][0: length - genome_len + ref_pos]
else:
# Generate a random number within the size of the genome. Suppose chromosomes are connected
# tail to head one by one in the order of the dictionary. If the start position fits in one
# chromosome, but the end position does not, then restart generating random number.
while True:
new_read = ""
ref_pos = random.randint(0, genome_len)
for key in seq_len.keys():
if ref_pos + length <= seq_len[key]:
new_read = seq_dict[key][ref_pos: ref_pos + length]
new_read_name = key + "_" + str(ref_pos)
break
elif ref_pos < seq_len[key]:
break
else:
ref_pos -= seq_len[key]
if new_read != "":
break
return new_read, new_read_name
def unaligned_error_list(length, error_p):
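    # Build an error profile for an unaligned read: walk along the read, drawing an
    # error type per position from fixed probabilities and an error length from the
    # mixture models; returns the adjusted length and a position -> [type, length] dict.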
e_dict = {}
error_rate = {(0, 0.4): "match", (0.4, 0.7): "mis", (0.7, 0.85): "ins", (0.85, 1): "del"}
pos = 0
last_is_ins = False
while pos < length:
p = random.random()
for k_error in error_rate.keys():
if k_error[0] <= p < k_error[1]:
error_type = error_rate[k_error]
break
if error_type == "match":
step = 1
elif error_type == "mis":
step = mm.pois_geom(error_p["mis"][0], error_p["mis"][2], error_p["mis"][3])
e_dict[pos] = ["mis", step]
elif error_type == "ins":
step = mm.wei_geom(error_p["ins"][0], error_p["ins"][1], error_p["ins"][2], error_p["ins"][3])
if last_is_ins:
e_dict[pos + 0.1][1] += step
else:
e_dict[pos + 0.1] = ["ins", step]
last_is_ins = True
else:
step = mm.wei_geom(error_p["del"][0], error_p["del"][1], error_p["del"][2], error_p["del"][3])
e_dict[pos] = ["del", step]
if error_type != "ins":
pos += step
last_is_ins = False
if pos > length:
length = pos
return length, e_dict
def error_list(m_ref, m_model, m_ht_list, error_p, trans_p):
    # m_ref is the original length; l_new is used to track the new length after introducing errors
l_new = m_ref
pos = 0
e_dict = {}
middle_ref = m_ref
prev_error = "start"
    # The first match comes from m_ht_list
p = random.random()
k1 = list(m_ht_list.keys())[0]
for k2, v2 in m_ht_list[k1].items():
if k2[0] < p <= k2[1]:
prev_match = int(np.floor((p - k2[0])/(k2[1] - k2[0]) * (v2[1] - v2[0]) + v2[0]))
if prev_match < 2:
prev_match = 2
pos += prev_match
# Select an error, then the step size, and then a match and so on so forth.
while pos < middle_ref:
# pick the error based on Markov chain
p = random.random()
for k in trans_p[prev_error].keys():
if k[0] <= p < k[1]:
error = trans_p[prev_error][k]
break
if error == "mis":
step = mm.pois_geom(error_p["mis"][0], error_p["mis"][2], error_p["mis"][3])
elif error == "ins":
step = mm.wei_geom(error_p[error][0], error_p[error][1], error_p[error][2], error_p[error][3])
l_new += step
else:
step = mm.wei_geom(error_p[error][0], error_p[error][1], error_p[error][2], error_p[error][3])
l_new -= step
if error != "ins":
e_dict[pos] = [error, step]
pos += step
if pos >= middle_ref:
l_new += pos - middle_ref
middle_ref = pos
else:
e_dict[pos - 0.5] = [error, step]
prev_error = error
# Randomly select a match length
for k1 in m_model.keys():
if k1[0] <= prev_match < k1[1]:
break
p = random.random()
for k2, v2 in m_model[k1].items():
if k2[0] < p <= k2[1]:
step = int(np.floor((p - k2[0])/(k2[1] - k2[0]) * (v2[1] - v2[0]) + v2[0]))
break
# there are no two 0 base matches together
if prev_match == 0 and step == 0:
step = 1
prev_match = step
if pos + prev_match > middle_ref:
l_new += pos + prev_match - middle_ref
middle_ref = pos + prev_match
pos += prev_match
if prev_match == 0:
prev_error += "0"
return l_new, middle_ref, e_dict
def mutate_read(read, read_name, error_log, e_dict, k, aligned=True):
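    # Apply the errors recorded in e_dict to the read, iterating positions in reverse
    # so earlier indices stay valid; substituted/inserted bases are re-drawn until no
    # homopolymer of length >= k is created (when k-mer bias is enabled).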
search_pattern = "A" * k + "+|" + "T" * k + "+|" + "C" * k + "+|" + "G" * k
for key in sorted(e_dict.keys(), reverse=True):
val = e_dict[key]
key = int(round(key))
if val[0] == "mis":
ref_base = read[key: key + val[1]]
while True:
new_bases = ""
for i in xrange(val[1]):
tmp_bases = list(BASES)
tmp_bases.remove(read[key + i])
new_base = random.choice(tmp_bases)
new_bases += new_base
check_kmer = read[max(key - k + 1, 0): key] + new_bases + read[key + val[1]: key + val[1] + k - 1]
if not k or not re.search(search_pattern, check_kmer):
break
new_read = read[:key] + new_bases + read[key + val[1]:]
elif val[0] == "del":
new_bases = val[1] * "-"
ref_base = read[key: key + val[1]]
new_read = read[: key] + read[key + val[1]:]
elif val[0] == "ins":
ref_base = val[1] * "-"
while True:
new_bases = ""
for i in xrange(val[1]):
new_base = random.choice(BASES)
new_bases += new_base
check_kmer = read[max(key - k + 1, 0): key] + new_bases + read[key: key + k - 1]
if not k or not re.search(search_pattern, check_kmer):
break
new_read = read[:key] + new_bases + read[key:]
read = new_read
if aligned and val[0] != "match":
error_log.write(read_name + "\t" + str(key) + "\t" + val[0] + "\t" + str(val[1]) +
"\t" + ref_base + "\t" + new_bases + "\n")
    # If k-mer bias is enabled, compress homopolymers of length >= k down to k - 1 bases
if k:
read = collapse_homo(read, k)
return read
def case_convert(seq):
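    # Uppercase the sequence and replace IUPAC ambiguity codes (N, Y, R, W, ...) with
    # a random concrete base from the corresponding set.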
base_code = {'Y': ['C', 'T'], 'R': ['A', 'G'], 'W': ['A', 'T'], 'S': ['G', 'C'], 'K': ['T', 'G'], 'M': ['C', 'A'],
'D': ['A', 'G', 'T'], 'V': ['A', 'C', 'G'], 'H': ['A', 'C', 'T'], 'B': ['C', 'G', 'T'],
'N': ['A', 'T', 'C', 'G'], 'X': ['A', 'T', 'C', 'G']}
up_string = seq.upper()
up_list = list(up_string)
for i in xrange(len(up_list)):
if up_list[i] in base_code:
up_list[i] = random.choice(base_code[up_list[i]])
out_seq = ''.join(up_list)
return out_seq
def main():
ref = ""
model_prefix = "training"
out = "simulated"
number = 20000
perfect = False
# ins, del, mis rate represent the weight tuning in mix model
ins_rate = 1
del_rate = 1
mis_rate = 1
max_readlength = float("inf")
min_readlength = 50
kmer_bias = 0
# Parse options and parameters
if len(sys.argv) < 4:
usage()
sys.exit(1)
else:
dna_type = sys.argv[1]
if dna_type not in ["circular", "linear"]:
usage()
sys.exit(1)
try:
opts, args = getopt.getopt(sys.argv[2:], "hr:c:o:n:i:d:m:",
["max_len=", "min_len=", "perfect", "KmerBias="])
except getopt.GetoptError:
usage()
sys.exit(1)
for opt, arg in opts:
if opt == "-r":
ref = arg
elif opt == "-c":
model_prefix = arg
elif opt == "-o":
out = arg
elif opt == "-n":
number = int(arg)
elif opt == "-i":
ins_rate = float(arg)
elif opt == "-d":
del_rate = float(arg)
elif opt == "-m":
mis_rate = float(arg)
elif opt == "--max_len":
max_readlength = int(arg)
elif opt == "--min_len":
min_readlength = int(arg)
elif opt == "--perfect":
perfect = True
elif opt == "--KmerBias":
kmer_bias = int(arg)
elif opt == "-h":
usage()
sys.exit(0)
else:
usage()
sys.exit(1)
# Generate log file
sys.stdout = open(out + ".log", 'w')
# Record the command typed to log file
sys.stdout.write(strftime("%Y-%m-%d %H:%M:%S") + ': ' + ' '.join(sys.argv) + '\n')
sys.stdout.flush()
if ref == "":
print("must provide a reference genome!")
usage()
sys.exit(1)
if max_readlength < min_readlength:
print("maximum read length must be longer than minimum read length!")
sys.exit(1)
# Read in reference genome and generate simulated reads
read_profile(number, model_prefix, perfect, max_readlength, min_readlength)
simulation(ref, out, dna_type, perfect, kmer_bias, max_readlength, min_readlength)
sys.stdout.write(strftime("%Y-%m-%d %H:%M:%S") + ": Finished!")
sys.stdout.close()
if __name__ == "__main__":
main()
|
import pygame
from spellingquiz.apilogin import *
import random
import time
import cv2
import sys
import datetime
from pygame.locals import *
import os
import json
import pathlib
pygame.init()
FPS = 30
FramePerSec = pygame.time.Clock()
cap0 = cv2.VideoCapture(0)
#cap1 = cv2.VideoCapture(1)
# create a fresh, numbered data directory for this test run
completedTests = 0
newpath = str(pathlib.Path(__file__).parent.absolute()) + r'\data\testing'
while os.path.exists(newpath + str(completedTests) ):
completedTests += 1
newpath = newpath + str(completedTests)
os.makedirs(newpath)
WHITE = (255, 255, 255)
GREEN = (0, 255, 0)
RED = (255, 0, 0)
BLACK = (0, 0, 0)
color = pygame.Color('dodgerblue2')
fontSize = 32
font = pygame.font.Font(None, fontSize)
DisplayWidth = 600
DisplayHeight = 600
DISPLAYSURF = pygame.display.set_mode((DisplayWidth, DisplayHeight), pygame.RESIZABLE)
DISPLAYSURF.fill(WHITE)
pygame.display.set_caption('Spelling Game')
def reddit_scrapper(reddit):
subreddit = reddit.subreddit('python')
reddittitles = []
hot_python = subreddit.hot()
for submission in hot_python:
if not submission.stickied:
removeNonString = ''.join([i if ord(i) < 128 else ' ' for i in submission.title])
reddittitles.append(removeNonString)
return reddittitles
allRedditTexts = reddit_scrapper(reddit)
def fresh_game(completedTests, reddit, allRedditTexts):
#set all variable back to normal
newpath = str(pathlib.Path(__file__).parent.absolute()) + r'\data\typingtest'
i = 0
while os.path.exists(newpath + str(completedTests)):
completedTests += 1
newpath = newpath + str(completedTests)
os.makedirs(newpath)
currentUserLocation = 0
logger = ''
longtext = random.choice(allRedditTexts)
jsonData = {
'text': longtext,
'datetime': datetime.datetime.now(),
'typeData': []
}
start = datetime.datetime.now()
text = ''
typeDataWord = []
filename = newpath + '/spellingQuiz-' + datetime.datetime.now().strftime("%Y-%m-%d@%H#%M#%S") + '.txt'
freshgame = {
'newpath' : newpath,
'currentUserLocation': currentUserLocation,
'logger' : logger,
'jsonData' : jsonData,
'typeDataWord' : typeDataWord,
'start' : start,
'filename' : filename,
'longtext' : longtext,
'i' : i,
'text' : text
}
return freshgame
class gameClass():
#class to hold all of the game data and allow you to refresh the game
pass
def blit_text(surface, text, pos, font, currentuserlocation, color=pygame.Color('green')):
#https://stackoverflow.com/questions/42014195/rendering-text-with-multiple-lines-in-pygame
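    # Word-wraps `text` onto `surface`; words the user has already typed
    # (index <= currentuserlocation) stay green, the remaining words are drawn
    # black, and the first not-yet-typed word is returned for input checking.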
words = [word.split(' ') for word in text.splitlines()] # 2D array where each row is a list of words.
space = font.size(' ')[0] # The width of a space.
max_width, max_height = surface.get_size()
x, y = pos
upto = 0
currentword = []
for line in words:
for word in line:
upto += 1
if upto > currentuserlocation:
currentword.append(word)
color = pygame.Color('black')
word_surface = font.render(word, 0, color)
word_width, word_height = word_surface.get_size()
if x + word_width >= max_width:
x = pos[0] # Reset the x.
y += word_height # Start on new row.
surface.blit(word_surface, (x, y))
x += word_width + space
x = pos[0] # Reset the x.
y += word_height # Start on new row.
if currentword:
return currentword[0]
else:
return ''
currentUserLocation = 0
logger = ''
typeDataWord = []
filename = newpath + '/spellingQuiz-'+ datetime.datetime.now().strftime("%Y-%m-%d@%H#%M#%S") + '.txt'
startgame = True
rectangle = [[50, 50], [200, 200]]
while startgame:
ret, frame = cap0.read()
cv2.rectangle(frame, (rectangle[0][0], rectangle[0][1]), (rectangle[1][0], rectangle[1][1]), (255, 0, 0), 4)
cv2.imshow('setup', frame)
for event in pygame.event.get():
if event.type == QUIT:
pygame.quit()
sys.exit()
if event.type == pygame.KEYDOWN:
if event.key in (pygame.K_RETURN, pygame.K_SPACE):
cv2.destroyAllWindows()
startgame = False
if event.key == pygame.K_w:
#move box up
rectangle[0][1] -= 10
rectangle[1][1] -= 10
if event.key == pygame.K_s:
# move box down
rectangle[0][1] += 10
rectangle[1][1] += 10
if event.key == pygame.K_a:
# move box left
rectangle[0][0] -= 10
rectangle[1][0] -= 10
if event.key == pygame.K_d:
# move box right
rectangle[0][0] += 10
rectangle[1][0] += 10
            if event.key == pygame.K_q:
                # move the left edge out (box gets wider)
                rectangle[0][0] -= 10
            if event.key == pygame.K_z:
                # move the left edge in (box gets narrower)
                rectangle[0][0] += 10
            if event.key == pygame.K_e:
                # move the bottom edge up (box gets shorter)
                rectangle[1][1] -= 10
            if event.key == pygame.K_c:
                # move the bottom edge down (box gets taller)
                rectangle[1][1] += 10
start = datetime.datetime.now()
text = ''
refreshgame = fresh_game(completedTests, reddit, allRedditTexts)
jsonData = {
'text' : refreshgame['longtext'],
'datetime' : datetime.datetime.now(),
'keyboard_location' : rectangle,
'typeData' : []
}
def build():
'''this function is currently a stopgap,
eventually to be turned into building spellingquiz'''
pass
while True: # making a loop
ret, frame = cap0.read()
#ret, frame = cap1.read()
for event in pygame.event.get():
if event.type == QUIT:
pygame.quit()
sys.exit()
elif event.type == pygame.KEYDOWN:
if event.key in (pygame.K_RETURN, pygame.K_SPACE):
print(text , currentWord)
if text == currentWord:
jsonData['typeData'].append(typeDataWord)
refreshgame['currentUserLocation'] += 1
text = ''
logger = ''
typeDataWord = []
elif event.key == pygame.K_BACKSPACE:
if len(text)>0:
start = refreshgame['start']
logger += f'[{datetime.datetime.now()-start},DELETE]\t'
typeDataWord.append([datetime.datetime.now() - start, 'DELETE'])
text = text[0:-1]
else:
ret0, frame0 = cap0.read()
#ret1, frame1 = cap1.read()
if ret0 == True:
newpath = refreshgame['newpath']
img_name = f"{newpath}/{int(round(time.time() * 1000))}{event.unicode}"
img_name0 = img_name +'.png'
#img_name1 = img_name + 'b.png'
cv2.imwrite(img_name0, frame0)
#scv2.imwrite(img_name1, frame1)
logger += f'[{datetime.datetime.now()-start}, {event.unicode}]\t'
typeDataWord.append([datetime.datetime.now()-start, event.unicode])
text += event.unicode
DISPLAYSURF.fill((WHITE))
currentWord = blit_text(DISPLAYSURF, refreshgame['longtext'], (20,20),font,refreshgame['currentUserLocation'])
txt_current = font.render(text, True, color)
txt_spell_word = font.render(currentWord,True,color)
DISPLAYSURF.blit(txt_current,(DisplayWidth//2,5*DisplayHeight//6))
DISPLAYSURF.blit(txt_spell_word, (DisplayWidth // 2, 2*DisplayHeight // 3))
pygame.display.flip()
FramePerSec.tick(FPS)
if currentWord == '':
print(f'total time: {datetime.datetime.now()-start}')
with open(newpath+r'\dataSet' + '.json','w+', encoding='utf-8' ) as f:
jsonData['text'] = refreshgame['longtext']
jsonData['datetime'] = datetime.datetime.now()
json.dump(jsonData, f, ensure_ascii=False, indent=4 , default=str)
jsonData['typeData'] = []
refreshgame = fresh_game(completedTests, reddit, allRedditTexts)
pygame.quit()
sys.exit()
cap0.release()
cv2.destroyAllWindows() |
from bs4 import BeautifulSoup
from collections import namedtuple
import hashlib
class Provider(object):
def __init__(self, name, link):
self.name = name
self.link = link
def serialize(self):
return {'name': self.name, 'link': self.link}
def __str__(self):
return '<Provider {}>'.format(self.name)
class Version(object):
def __init__(self, quality, providers):
self.quality = quality
self.providers = providers
def serialize(self):
return {'quality': self.quality, 'providers': map(lambda p: p.serialize(), self.providers)}
def __str__(self):
return '<Version {}>'.format(self.quality)
class Anime(object):
def __init__(self, title, release_date, episode):
self.title = title
self.release_date = release_date
self.episode = episode
self.id = hashlib.new("md5",
"{}{}{}"\
.format(release_date, title, episode)).hexdigest()
def serialize(self):
if not hasattr(self, 'versions'):
raise RuntimeError('Versions are not specified! Cannot serialize!')
return {
'title': self.title,
'release_date': self.release_date,
'episode': self.episode,
'id': self.id,
'versions': map(lambda v: v.serialize(), self.versions)
}
def __str__(self):
return '<Anime {} - {}>'.format(self.title, self.episode)
def parse_anime_info(entry):
release_date = entry[0]
title = ' '.join(entry[1:-2])
episode_id = int(entry[-1])
unique_id = hashlib.new("md5",
"{}{}{}"\
.format(release_date, title, episode_id)).hexdigest()
return Anime(title,
release_date,
episode_id)
def parse_providers(providers):
for provider_block in providers:
provider = provider_block.a['title']
link = provider_block.a['href']
yield Provider(name=provider, link=link)
def parse_versions(versions):
for version in versions:
quality = version.text.split(' ')[-1]
providers_nodes = version.parent.find_all(attrs={'class': 'dl-type'})
yield Version(quality=quality,
providers=set(parse_providers(providers_nodes)))
def parse_animes(pageContent):
try:
soup = BeautifulSoup(pageContent, 'html.parser')
episodes = soup.find_all(attrs={'class': 'release-info'})
for episode in episodes:
try:
entry = episode.td.text.split(' ')
anime = parse_anime_info(entry)
matching_versions_nodes = filter(lambda n: n.text.startswith(anime.title), soup.find_all(attrs={'class': 'dl-label'}))
anime.versions = set(parse_versions(matching_versions_nodes))
yield anime
except Exception as e:
print ('Failed while parsing an anime, skipping it ({})'.format(e))
except Exception as e:
print ('Fatal failure while parsing animes, aborting the parsing. ({})'.format(e))
if __name__ == '__main__':
import requests
animes = list(parse_animes(requests.get('http://horriblesubs.info/lib/latest.php').text))
print (animes)
import pdb;
pdb.set_trace()
|
import json
import socket
import time
import os
import subprocess
dir_path = os.path.dirname(os.path.realpath(__file__))
def send_json(host, filename):
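    # Load the JSON file, send it to `host` on TCP port 11211, then print
    # whatever the server streams back until the connection is closed.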
print(host, filename)
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
with open(filename, 'r') as json_file:
data = json.load(json_file)
sock.connect((host, 11211))
sock.sendall(bytes(json.dumps(data), 'utf-8'))
while True:
response = str(sock.recv(4096), 'utf-8')
print('Received: {}'.format(response))
if response:
print('Received: {}'.format(response))
else:
break
sock.close()
hosts = {
'program': '34.242.140.18',
'database1': '34.242.206.68',
'database2': '52.211.63.195'
}
print('Recording test:')
send_json(hosts['program'], os.path.join(dir_path, 'samples', 'showcase', 'sample_start_record-program.json'))
send_json(hosts['database1'], os.path.join(dir_path, 'samples', 'showcase', 'sample_start_record-database.json'))
send_json(hosts['database2'], os.path.join(dir_path, 'samples', 'showcase', 'sample_start_record-database.json'))
input('Continue... ')
for host in hosts:
send_json(hosts[host], os.path.join(dir_path, 'samples', 'sample_finish_record.json'))
# time.sleep(3)
#
# print('\n\nInstructions test:')
# send_json(os.path.join(dir_path, 'samples', 'udp', 'sample_instructions0.json'))
# time.sleep(3)
# send_json(os.path.join(dir_path, 'samples', 'udp', 'sample_instructions1.json'))
# time.sleep(3)
# send_json(os.path.join(dir_path, 'samples', 'udp', 'sample_instructions2.json'))
# time.sleep(3)
#
# print('\n\nExperiment test:')
# send_json(os.path.join(dir_path, 'samples', 'udp', 'sample_start_experiment.json'))
# time.sleep(3)
# time.sleep(3)
# send_json(os.path.join(dir_path, 'samples', 'sample_finish_experiment.json'))
# time.sleep(3)
#
# print('\n\nReset test:')
# send_json(os.path.join(dir_path, 'samples', 'sample_reset.json'))
#
# print('\n\nRecording test:')
# send_json(os.path.join(dir_path, 'samples', 'tcp', 'sample_start_record.json'))
# time.sleep(10)
# data = subprocess.Popen(["python", os.path.join(dir_path, 'tcp', 'client.py')])
# time.sleep(5)
# send_json(os.path.join(dir_path, 'samples', 'sample_finish_record.json'))
# time.sleep(3)
|
# coding: utf-8
import unittest
import os
import sys
import requests_mock
from unittest import mock
sys.path.append("../yyetsbot")
from fansub import BaseFansub, YYeTsOnline
class TestBaseFunsub(unittest.TestCase):
@classmethod
def setUpClass(cls) -> None:
cls.ins = BaseFansub()
cls.cookie_jar = dict(name="hello")
cls.ins.cookie_file = "test_cookies.dump" # generate on tests/test_cookies.dump
@classmethod
def tearDownClass(cls) -> None:
cls().ins.redis.flushall()
os.unlink(cls().ins.cookie_file)
def test_save_cookies(self):
self.ins.__save_cookies__(self.cookie_jar)
exists = os.path.exists(self.ins.cookie_file)
self.assertTrue(exists)
def test_load_cookies(self):
self.test_save_cookies()
cookie = self.ins.__load_cookies__()
self.assertEqual(cookie, self.cookie_jar)
def test_get_from_cache(self):
value = self.ins.__get_from_cache__("http://test.url", "__hash__")
self.assertEqual(value, self.ins.__hash__())
def test_save_to_cache(self):
# never expire
url = "http://test2.url"
self.ins.__save_to_cache__(url, self.cookie_jar)
cache_copy = self.ins.__get_from_cache__(url, "never mind method")
self.assertEqual(cache_copy, self.cookie_jar)
class YYeTsTest(unittest.TestCase):
@classmethod
def setUpClass(cls) -> None:
cls.ins = YYeTsOnline()
cls.cookie_jar = dict(name="hello yyets")
cls.ins.cookie_file = "test_cookies.dump" # generate on tests/test_cookies.dump
cls.ins.url = "http://www.rrys2020.com/resource/1988"
@classmethod
def tearDownClass(cls) -> None:
cls().ins.redis.flushall()
# os.unlink(cls().ins.cookie_file)
def test_get_id(self):
self.assertEqual(self.ins.id, "1988")
@requests_mock.mock()
def test_get_search_html(self, m):
with open("yyets_search.html") as f:
html = f.read()
m.get('http://www.rrys2020.com/search?keyword=abc&type=resource', text=html)
response = self.ins.__get_search_html__("abc")
self.assertEqual(html, response)
if __name__ == '__main__':
unittest.main()
|
import logging
import os
import sys
from collections import OrderedDict
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import pyfmi
from mshoot import SimModel
class SimFMU(SimModel):
def __init__(self, fmupath, outputs=[], states=[], parameters={},
verbose=False):
"""
:param fmupath: str, path to FMU
:param outputs: list(str), monitored outputs names
:param states: list(str), monitored states names
:param parameters: dict, parameters names and values
:param verbose: bool, whether to suppress pyfmi prints
"""
print(fmupath)
self.fmu = pyfmi.load_fmu(fmupath)
self.outputs = outputs
self.states = states
self.verbose = verbose
# Get initial state
# Comment:
# The model has to be initialized to read the state variables.
# The easiest way to initialize is to run a short simulation.
dummy_result = self.fmu.simulate(start_time=0, final_time=1)
self.x0 = self._get_state()
# Reset the FMU
self.fmu.reset()
# Set parameters
for n in parameters:
self.fmu.set(n, parameters[n])
def _get_state(self):
"""
Return an ordered dictionary with state names as keys
and state values as values.
"""
# Return dictionary, keys - state names, values - state values
x = OrderedDict()
# Get dictionary, keys - state names,
# values - ScalarViariable instances (svi)
x_svi = self.fmu.get_states_list()
for s in x_svi:
vr = x_svi[s]._get_value_reference()
x[s] = self.fmu.get_real(vr)[0] # [0] because 1-element array
return x
def simulate(self, udf, x0, save_state=False):
"""
Simulate the model using the provided inputs `udf`
and initial state `x0`.
The DataFrame should have the following content:
- index - time in seconds and equal steps, named 'time',
- columns - input data,
- column names - input variable names.
The order of `x0` should reflect the one used in `states`.
Return two DataFrames, `ydf` and `xdf`, with
outputs and states, respectively, and with the same
structure as `udf`.
:param udf: DataFrame, shape (n_steps, n_variables)
:param x0: vector, size (n_states, )
:return: ydf, xdf
"""
assert udf.index.name == 'time'
start = udf.index[0] # Start time
stop = udf.index[-1] # Final time
ncp = udf.index.size - 1 # Number of communication points
# Prepare inputs for pyfmi:
# From pyfmi documentation:
# "The input should be a 2-tuple consisting of first the names
# of the input variable(s) and then the data matrix"
# df = udf.reset_index()
inp = (udf.columns, udf.reset_index().values)
# FMI options
opts = self.fmu.simulate_options()
opts['ncp'] = ncp
opts['result_handling'] = 'memory' # Prevents saving result file
opts['result_handler'] = 'ResultHandlerMemory' # Prevents saving result file
# if 'solver' in opts:
# # Model Exchange
# opts['solver'] = 'CVode'
# opts['CVode_options'] = {'rtol': 1e-6, 'atol': 1e-6}
# Initial states from previous FMU simulation
for n in self.x0:
self.fmu.set(n, self.x0[n])
# Initial states overriden by the user
i = 0
for n in self.states:
self.fmu.set(n, x0[i])
i += 1
# Simulate
if not self.verbose:
nullf = open(os.devnull, 'w')
sys.stdout = nullf
res = self.fmu.simulate(start_time=start, final_time=stop,
input=inp, options=opts)
if not self.verbose:
sys.stdout = sys.__stdout__
nullf.close()
# Update state (use only in emulation)
if save_state:
self.x0 = self._get_state()
# Outputs
t = res['time']
ydf = pd.DataFrame(index=pd.Index(t, name='time'))
xdf = pd.DataFrame(index=pd.Index(t, name='time'))
for n in self.outputs:
ydf[n] = res[n]
for n in self.states:
xdf[n] = res[n]
# Align time with udf
# BUG: round-off errors for large numbers! is this code even needed?
# ydf = ydf.loc[[i for i in t if i in udf.index]]
# xdf = xdf.loc[[i for i in t if i in udf.index]]
# Reset (note: won't work with E+)
self.fmu.reset()
return ydf, xdf
if __name__ == "__main__":
# DEMO: SIMULATE
# ==============
# Load FMU
fmupath = os.path.join('resources', 'fmus', 'R2C2', 'R2C2.fmu')
parameters = {'C': 1e6}
model = SimFMU(
fmupath,
outputs=['qout', 'Tr'],
states=['heatCapacitor1.T'],
parameters=parameters,
verbose=True)
# Inputs
t = np.arange(0, 86401, 3600)
udf = pd.DataFrame(index=pd.Index(t, name='time'), columns=['q', 'Tout'])
udf['q'] = np.full(t.size, 100)
udf['Tout'] = np.full(t.size, 273.15)
# Initial state
x0 = [273.15 + 20]
ydf, xdf = model.simulate(udf, x0)
ydf.plot(subplots=True, title='ydf')
xdf.plot(subplots=True, title='xdf')
plt.show()
|
import os
def pwm_dir():
"""
    Return the directory containing this module (the expression/PWM data directory).
"""
return os.path.dirname(__file__)
|
# Program to perform breadth first traversal in a graph
from collections import defaultdict, deque
class Graph:
def __init__(self, directed=False):
self.graph = defaultdict(list)
self.directed = directed
def addEdge(self, frm, to):
self.graph[frm].append(to)
if self.directed is False:
self.graph[to].append(frm)
else:
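            # touch the key so the destination vertex shows up in the adjacency
            # map even when no edge starts from it (directed case)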
self.graph[to] = self.graph[to]
def bfsUtil(self, s, visited):
queue = deque([])
queue.append(s)
visited[s] = True
while queue:
vertex = queue.popleft()
print(vertex, end=' ')
# traverse vertices adjacent to vertex
for i in self.graph[vertex]:
if not visited[i]:
visited[i] = True
queue.append(i)
print()
def bfs(self, s=None):
visited = {i: False for i in self.graph}
# do bfs from the node specified
if s is not None:
self.bfsUtil(s, visited)
# traverse for all the vertices in other components of graph
for v in self.graph:
if not visited[v]:
self.bfsUtil(v, visited)
if __name__ == '__main__':
graph = Graph()
# component 1 of the graph
graph.addEdge(0, 1)
graph.addEdge(0, 2)
graph.addEdge(1, 2)
graph.addEdge(2, 3)
graph.addEdge(3, 3)
graph.addEdge(1, 4)
graph.addEdge(1, 5)
graph.addEdge(3, 6)
# component 2 of the graph
graph.addEdge(7, 8)
graph.addEdge(8, 9)
graph.addEdge(7, 10)
# call bfs from 2 vertex
print("Breadth First Traversal:")
graph.bfs(2)
|
"""
Kaan Altan - Resume.py
Author: Kaan Altan
Date: 2019-11-26
Description
===========
This script is a Python version of my resume featuring a command line interface
to interact with the information
History
=======
2019-11-25 Initial prototype of the script
Information is entered in the form of dictionaries
Command line interface allows to access specific information:
-i : Prints all content in the info dictionary
-n : Prints "Name" value in the info dictionary
-p : Prints "Phone" value in the info dictionary
-e : Prints "Email" value in the info dictionary
-l : Prints "Linkedin" value in the info dictionary
-f : Prints all content in the features dictionary
-lsf : Lists keys in the features dictionary (Feature titles)
-fid : Prints specified feature when a feature title is passed in
-s : Prints all content in the skills dictionary
-lss : Lists keys in the skills dictionary (Skill titles)
-sid : Prints specified skills when a skill title is passed in
"""
import argparse
"""Dictionary containing identifying information"""
kaan_info = { "Name" : "Kaan Altan",
"Phone" : "+1 (415) 707-9854",
"Email" : "me@kaanaltan.com",
"Linkedin" : "linkedin.com/in/kaanaltan/"}
"""Dictionary containing feature information"""
kaan_features = { "OBJECTIVE":
"""Data Analyst with a mechanical engineering background;
currently seeking a position in an innovative, fast-paced, growing organization
to leverage my creative problem-solving skills to achieve key initiatives""",
"PROFESSIONAL EXPERIENCE":
"""Luminex Corporation Chicago, IL
Systems Engineer June 2019 – Present
• Performed structured troubleshooting on multiple aspects of the Verigene II platform – including hardware, software, consumables, and reagents – to ultimately improve assay reliability
• Contributed to a root-cause analysis that ultimately mitigated a prominent failure mode by 90%
• Delivered production-ready Python scripts to augment the R&D organization’s ETL pipeline
• Designed and published Tableau dashboards for various stakeholders in Systems R&D, Assay Development, and Manufacturing
• Performed ad hoc data aggregation, cleaning, and analysis from diverse sources (flat files, relational databases, equipment logs, spreadsheets, etc.)
• Developed a Python app to streamline multiple scientist and technician workflows
• Served as point-of-contact for equipment and software issues
• Facilitated communication between cross-functional teams with clear, concise data visualizations
• Led Python educational sessions to introduce coworkers to the fundamentals of coding""",
"RESEARCH":
"""Energy Transport Research Laboratory | British Petroleum PLC Champaign, IL
Researcher, Prof. Nenad Miljkovic May 2018 - February 2019
• Performed applied research on nano-engineered superhydrophobic surfaces that substantially enhance heat exchange efficiency of two-phase systems by utilizing coalescence-induced droplet jumping phenomena
• Designed, conducted, and analyzed the results of abrasion experiments using the principle of contact angle hysteresis to successfully quantify the durability of the fabricated surfaces
• Presented research results at the 2018 Materials Research Society Fall Meeting & Exhibit held between Nov 25-30, 2018 in Boston, Massachusetts""",
"INTERNSHIP EXPERIENCE":
"""Ford Motor Company Istanbul, Turkey
Vehicle Safety Intern July 2017 - August 2017
• Utilized Altair HyperWorks CAE Software Suite to run CAE models in conformity with the Euro NCAP standards on RADIOSS finite element solver for the safety analysis of the Ford Truck, Transit, Transit Custom PHEV (Plug-In Hybrid Electrical Vehicle) and Courier projects
• Prepared 15 includes (CAE models that are subsets of an assembly) for assembly (editing meshing, defining material properties, environmental boundaries and assembly parameters) for the 2018 Ford Truck project
• Assisted occupant safety engineers in the Ford Courier project and side impact engineers in the Ford Transit project by preparing individual parts to go into the includes
• Contributed to the Ford Transit Custom PHEV project by meshing and assembling numerous parts for individual simulations""",
"EDUCATION":
"""University of Illinois at Urbana-Champaign December 2018
Bachelor of Science in Mechanical Engineering
Technical GPA (Engineering and fundamental courses): 3.0/4.0""",
"AWARDS & ACCOMPLISHMENTS":
"""Layered Fuse, Senior Design Project August 2018 - December 2018
Littelfuse, Inc., Team Lead
• Led a team of 4 mechanical engineering seniors to successfully complete an outsourced engineering project assigned by Littelfuse Inc. to design and manufacture attachment tabs for the assembly of high voltage multi-layered fuses
• Structured team operations and served as a liaison between the University and Littelfuse
• Contributed to the design and CAD phases of the project and took ownership of failure analysis by dynamically modeling failure modes in Abaqus
Mechanical Design Project Competition 1st Place May 2017
• As a team of 4 mechanical engineering students, designed and fabricated a DC powered robot that utilizes Chebyshev’s Lambda Mechanism to climb a cable for a competition held by the department of Mechanical Science and Engineering, UIUC
• Contributed to the design and CAD phases of the project and simulated mechanism motion in MATLAB
• Received first place award for building the fastest mechanism in the competition with 40 participant teams""",
"PROFESSIONAL ORGANIZATIONS & ACTIVITIES":
"""American Society of Mechanical Engineers, Member April 2017 - Present
Society of Automotive Engineers International, Member May 2017 - Present
AKL – Alpha Kappa Lambda Fraternity September 2013 - May 2015""",
}
"""Dictionary containing skills information"""
kaan_skills = { "Languages" : ["Python",
"SQL",
"MATLAB",
"R",
"HTML/CSS"],
"Tools, Frameworks & Libraries" : ["Git & Github",
"Pandas",
"Flask",
"Regular Expressions",
"Requests",
"Beautiful Soup",
"Tkinter",
"Matplotlib",
"Seaborn",],
"Software" : ["Windows",
"Linux",
"VS Code",
"Tableau",],
"Engineering" : ["CAE",
"CAD",
"FEA",
"FMEA",
"G-Code",
"GD&T",
"Photolithography",],
"Engineering Software" : ["Abaqus",
"PTC Creo",
"Altair HyperWorks",
"Simulink",
"FAMAS",],
}
def parse_arguments():
"""Sets up and configures argparse object
Parameters
==========
None
Returns
=======
arguments
Dictionary containing parsed arguments
"""
parser = argparse.ArgumentParser()
parser._actions[0].help = """============== Kaan Altan - Resume.py ==============
====================================================
This script is a version of my resume written in Python. It
provides a command line interface for interaction. You can
use the following flags to display various items of my resume
or simply run it without flags to display the full resume.
===================================================="""
parser.add_argument("-i", "--info",
help = "Displays applicant information",
action = "store_true",)
parser.add_argument("-n", "--name",
help = "Display applicant name",
action = "store_true",)
parser.add_argument("-p", "--phone",
help = "Display applicant phone number",
action = "store_true",)
parser.add_argument("-e", "--email",
help = "Display applicant email address",
action = "store_true",)
parser.add_argument("-l", "--linkedin",
help = "Display applicant linkedin address",
action = "store_true",)
parser.add_argument("-f", "--features",
help = "Display all applicant features",
action = "store_true",)
parser.add_argument("-fid", "--featureid",
help="Pass in title of the feature you wish to view (copy & paste from feature titles)",
nargs='+')
parser.add_argument("-lsf", "--listFeatureTitles",
help = "Display applicant feature titles",
action = "store_true",)
parser.add_argument("-s", "--skills",
help = "Display all applicant skills",
action = "store_true",)
parser.add_argument("-sid", "--skillsid",
help = "Pass in title of the skills you wish to view (copy & paste from skills titles)",
nargs='+')
parser.add_argument("-lss", "--listSkillTitles",
help = "Display applicant skills titles",
action = "store_true",)
args = parser.parse_args()
info = args.info
name = args.name
phone = args.phone
email = args.email
linkedin = args.linkedin
features = args.features
feature_titles = args.listFeatureTitles
skills = args.skills
skill_titles = args.listSkillTitles
if args.featureid:
feature_id = ' '.join(args.featureid)
else:
feature_id = None
if args.skillsid:
skills_id = ' '.join(args.skillsid)
else:
skills_id = None
arguments = { "Info":info,
"Name":name,
"Phone":phone,
"Email":email,
"Linkedin":linkedin,
"Features":features,
"Feature_Titles":feature_titles,
"Skills":skills,
"Skill_Titles":skill_titles,
"Feature_ID":feature_id,
"Skills_ID":skills_id,
}
return arguments
def print_resume(arguments):
"""Function that handles printing logic according to the flags passed into the command line
Parameters
==========
arguments : dictionary
Dictionary containing parsed arguments
Returns
=======
None
"""
if any(arguments.values()):
if arguments["Feature_Titles"]:
for key in kaan_features.keys():
print(key)
if arguments["Skill_Titles"]:
for key in kaan_skills.keys():
print(key)
if arguments["Info"]:
for key in kaan_info.keys():
print(key + ' : ' + kaan_info[key])
else:
for key in list(arguments.keys())[1:5]:
if arguments[key]:
print(key + ' : ' + kaan_info[key])
if arguments["Feature_ID"]:
if arguments["Feature_ID"] in kaan_features.keys():
print(arguments["Feature_ID"] + ':\n')
print(kaan_features[arguments["Feature_ID"]])
print('\n')
else:
print('Invalid title entered')
elif arguments["Features"]:
for key in kaan_features.keys():
print(key + '\n')
print(kaan_features[key])
print('\n')
if arguments["Skills_ID"]:
if arguments["Skills_ID"] in kaan_skills.keys():
print(arguments["Skills_ID"] + ':\n')
print(kaan_skills[arguments["Skills_ID"]])
print('\n')
else:
print('Invalid title entered')
elif arguments["Skills"]:
for key in kaan_skills.keys():
print(key + '\n')
print(kaan_skills[key])
print('\n')
else:
for key in kaan_info.keys():
print(key + ' : ' + kaan_info[key])
print('\n')
for key in kaan_features.keys():
print(key + '\n')
print(kaan_features[key])
print('\n')
for key in kaan_skills.keys():
print(key + '\n')
print(kaan_skills[key])
print('\n')
def main():
arguments = parse_arguments()
print_resume(arguments)
if __name__ == '__main__':
main() |
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^mobile_captcha/$', views.get_mobile_captcha, name="mobile_captcha")
] |
"""ASCII table generator"""
class ASCIITableRenderer(object):
def render(self, table):
total_width = 0
for col in table.cols:
total_width += col.width
# Header
out = self._format_separator(total_width)
cols_widths = [col.width for col in table.cols]
out += self._format_row([col.label for col in table.cols], cols_widths)
out += self._format_separator(total_width)
# Body
for row in table.rows:
if isinstance(row, ASCIITableRowComment):
out += self._format_row_comment(row.text, total_width)
else:
out += self._format_row(
[row.dict[key] for key in table.col_keys],
cols_widths
)
# Footer
out += self._format_separator(total_width)
return out
def _format_separator(self, width):
return '-' * width + '\n'
def _format_row_comment(self, text, width):
actual_width = width-4
out = ''
out += '| ' + ('-' * actual_width) + ' |\n'
form_str = '| {:' + '{:d}'.format(actual_width) + '} |\n'
out += form_str.format(text)
out += '| ' + ('-' * actual_width) + ' |\n'
return out
def _format_row(self, values, widths):
out = ''
for i, width in enumerate(widths):
actual_width = width - 3
# last column loses one more space for the trailing `|`
if i == len(widths) - 1:
actual_width -= 1
val = self._trunc(values[i], actual_width)
form_str = '| {:' + '{:d}'.format(actual_width) + 's} '
out += form_str.format(val)
out += '|\n'
return out
def _trunc(self, contents, width):
if contents is None:
return ''
if len(contents) <= width:
return contents
return contents[:width]
class ASCIITableColumn(object):
def __init__(self, label, width):
self.label = label
self.width = width
class ASCIITableRow(object):
def __init__(self, dict):
self.dict = dict
class ASCIITableRowComment(ASCIITableRow):
def __init__(self, text):
ASCIITableRow.__init__(self, {})
self.text = text
class ASCIITable(object):
def __init__(self):
# list of strings
self.col_keys = []
# list of ASCIITableColumn
self.cols = []
# list of ASCIITableRow
self.rows = []
def add_column(self, key, col):
if len(self.rows) != 0:
            raise RuntimeError(
'cannot add columns after rows have been added'
)
if not isinstance(col, ASCIITableColumn):
raise TypeError()
self.col_keys.append(key)
self.cols.append(col)
def add_comment_row(self, text):
row = ASCIITableRowComment(text)
self.rows.append(row)
def add_row(self, dict):
dict_keys = dict.keys()
expected_keys = self.col_keys
if set(dict_keys) != set(expected_keys):
self._find_missing_key(expected_keys, dict_keys)
self._find_unknown_key(expected_keys, dict_keys)
row = ASCIITableRow(dict)
self.rows.append(row)
def _find_missing_key(self, expected_keys, row_keys):
for expected_key in expected_keys:
if expected_key in row_keys:
continue
raise ValueError('key `{}` is missing'.format(expected_key))
def _find_unknown_key(self, expected_keys, row_keys):
for row_key in row_keys:
if row_key in expected_keys:
continue
raise ValueError('key `{}` is not defined'.format(row_key))
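# A minimal usage sketch (hypothetical column/row data; each column width includes its cell padding):
if __name__ == '__main__':
    table = ASCIITable()
    table.add_column('name', ASCIITableColumn('Name', 20))
    table.add_column('role', ASCIITableColumn('Role', 20))
    table.add_row({'name': 'Ada', 'role': 'Engineer'})
    table.add_comment_row('contractors below')
    table.add_row({'name': 'Grace', 'role': 'Analyst'})
    print(ASCIITableRenderer().render(table))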
|
from django import forms
from accounts.models import UserCred, AnnotatorProfile
from django_countries.data import COUNTRIES
from captcha.fields import CaptchaField
class PasswordResetRequestForm(forms.Form):
email_or_username = forms.CharField(label=("Enter registered email"), max_length=254)
class SetPasswordForm(forms.Form):
"""
    A form that lets a user set their password without entering the old
    password.
"""
error_messages = {
'password_mismatch': ("The two password fields didn't match."),
}
new_password1 = forms.CharField(label=("New password"),
widget=forms.PasswordInput)
new_password2 = forms.CharField(label=("New password confirmation"),
widget=forms.PasswordInput)
def clean_new_password2(self):
password1 = self.cleaned_data.get('new_password1')
password2 = self.cleaned_data.get('new_password2')
if password1 and password2:
if password1 != password2:
raise forms.ValidationError(
self.error_messages['password_mismatch'],
code='password_mismatch',
)
return password2
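# Validation sketch (hypothetical data): SetPasswordForm({'new_password1': 's3cret',
# 'new_password2': 's3cret'}).is_valid() returns True; mismatched values fail with the
# 'password_mismatch' error attached to new_password2.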
class AccountRetrieveForm(forms.Form):
"""
Form for sending known information about account upon lost credentials.
"""
contact_email = forms.EmailField(widget=forms.widgets.TextInput(), label="Contact email", required=True)
nickname = forms.CharField( widget=forms.widgets.TextInput(), label="Annotator pseudonym", required=False)
first_name = forms.CharField( widget=forms.widgets.TextInput(), label="First name", required=False)
last_name = forms.CharField( widget=forms.widgets.TextInput(), label="Last name", required=True)
username = forms.EmailField(widget=forms.widgets.TextInput(), label="Registered email", required=False)
job_title = forms.CharField( widget=forms.widgets.TextInput(), label="Job title", required=False)
organization = forms.CharField( widget=forms.widgets.TextInput(), label="Organization", required=False)
captcha = CaptchaField()
def clean(self):
"""
        Runs the default form cleaning; any cross-field errors raised here would
        appear in ``non_field_errors()`` because they apply to more than one field.
"""
cleaned_data = super(AccountRetrieveForm, self).clean()
return self.cleaned_data |
def add_letters(*letters):
return chr( (sum(ord(c)-96 for c in letters)-1)%26 + 97)
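# Treats each letter as a 1-indexed position (a=1 ... z=26) and adds them modulo 26, e.g.
#   add_letters('a', 'b')       -> 'c'
#   add_letters('a', 'b', 'c')  -> 'f'
#   add_letters('z', 'a')       -> 'a'   (wraps around)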
|
import os
from pymongo import MongoClient
from random import randint
from pprint import pprint
from models import Users, Roles
import testdata
# Step 2: Connect to MongoDB - Note: change the connection string as needed
client = MongoClient(
os.environ['DB_PORT_27017_TCP_ADDR'],
27017)
# Get a handle to the 'tododb' database
db = client.tododb
def add_users():
    for user in Users().generate(10):  # let's say we only want 10 users
db.users.insert_one(user)
print(user)
print('finished creating users')
def add_roles():
    for role in Roles().generate(4):  # let's say we only want 4 roles
db.roles.insert_one(role)
print(role)
print('finished creating roles')
if __name__ == '__main__':
add_users()
add_roles()
|
# -*- coding: utf-8 -*-
import unittest
import time
from pprint import pprint
from flask.json import loads as json_load
from flask.json import dumps as json_dump
try:
from .test_resource_base import ActiniaResourceTestCaseBase, URL_PREFIX
except ImportError:
from test_resource_base import ActiniaResourceTestCaseBase, URL_PREFIX
__license__ = "GPLv3"
__author__ = "Sören Gebbert"
__copyright__ = "Copyright 2016, Sören Gebbert"
__maintainer__ = "Soeren Gebbert"
__email__ = "soerengebbert@googlemail.com"
JSON = {
"type": "FeatureCollection",
"crs": {"type": "name", "properties": {"name": "urn:ogc:def:crs:EPSG::3358"}},
"features": [
{"type": "Feature", "properties": {"fid": "swwake_10m.0"}, "geometry": {"type": "Polygon", "coordinates": [
[[630000.0, 215000.0], [630000.0, 228500.0], [645000.0, 228500.0], [645000.0, 215000.0],
[630000.0, 215000.0]]]}}
]
}
class RasterAreaStatsTestCase(ActiniaResourceTestCaseBase):
def test_async_raster_area_stats_json(self):
rv = self.server.post(URL_PREFIX + '/locations/nc_spm_08/mapsets/PERMANENT/raster_layers/landuse96_28m/'
'area_stats_async',
headers=self.admin_auth_header,
data=json_dump(JSON),
content_type="application/json")
rv = self.waitAsyncStatusAssertHTTP(rv, headers=self.admin_auth_header)
self.assertEqual(len(rv["process_results"]), 16)
time.sleep(1)
def test_sync_raster_area_stats_1(self):
rv = self.server.post(URL_PREFIX + '/locations/nc_spm_08/mapsets/PERMANENT/raster_layers/landuse96_28m/'
'area_stats_sync',
headers=self.admin_auth_header,
data=json_dump(JSON),
content_type="application/json")
pprint(json_load(rv.data))
        self.assertEqual(rv.status_code, 200, "HTTP status code is wrong %i" % rv.status_code)
self.assertEqual(rv.mimetype, "application/json", "Wrong mimetype %s" % rv.mimetype)
value_list = json_load(rv.data)["process_results"]
self.assertEqual(len(value_list), 16)
def test_sync_raster_area_stats_2(self):
rv = self.server.post(URL_PREFIX + '/locations/nc_spm_08/mapsets/PERMANENT/raster_layers/towns/'
'area_stats_sync',
headers=self.admin_auth_header,
data=json_dump(JSON),
content_type="application/json")
pprint(json_load(rv.data))
        self.assertEqual(rv.status_code, 200, "HTTP status code is wrong %i" % rv.status_code)
self.assertEqual(rv.mimetype, "application/json", "Wrong mimetype %s" % rv.mimetype)
value_list = json_load(rv.data)["process_results"]
self.assertEqual(len(value_list), 6)
#################### ERRORS ###############################################
def test_sync_raster_area_stats_error_wrong_content_type(self):
rv = self.server.post(URL_PREFIX + '/locations/nc_spm_08/mapsets/PERMANENT/raster_layers/towns/'
'area_stats_sync',
headers=self.admin_auth_header,
data="{}",
content_type="application/json")
pprint(json_load(rv.data))
        self.assertEqual(rv.status_code, 400, "HTTP status code is wrong %i" % rv.status_code)
self.assertEqual(rv.mimetype, "application/json", "Wrong mimetype %s" % rv.mimetype)
def test_sync_raster_area_stats_error_wrong_request_missing_json(self):
rv = self.server.post(URL_PREFIX + '/locations/nc_spm_08/mapsets/PERMANENT/raster_layers/towns/'
'area_stats_sync',
headers=self.admin_auth_header,
data=None,
content_type="application/json")
pprint(json_load(rv.data))
        self.assertEqual(rv.status_code, 400, "HTTP status code is wrong %i" % rv.status_code)
self.assertEqual(rv.mimetype, "application/json", "Wrong mimetype %s" % rv.mimetype)
if __name__ == '__main__':
unittest.main()
|
# -*- coding: utf-8 -*-
from odoo import api, fields, models, tools, _
class Company(models.Model):
_inherit = "res.company"
# material_agentid = fields.Char("Material Agent Id", default="0000000",)
# material_secret = fields.Char(
# "Material Secret", default="xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx",
# )
    # Material library
material_app_id = fields.Many2one(
"wecom.apps",
string="Material Application",
# required=True,
# default=lambda self: self.env.company,
# domain="[('company_id', '=', current_company_id)]",
domain="[('company_id', '=', current_company_id)]",
)
|