text (stringlengths 0 to 1.05M) | meta (dict) |
---|---|
from __future__ import absolute_import, division, print_function
from _pytest.main import EXIT_NOTESTSCOLLECTED
import pytest
def test_version(testdir, pytestconfig):
result = testdir.runpytest("--version")
assert result.ret == 0
#p = py.path.local(py.__file__).dirpath()
result.stderr.fnmatch_lines([
'*pytest*%s*imported from*' % (pytest.__version__, )
])
if pytestconfig.pluginmanager.list_plugin_distinfo():
result.stderr.fnmatch_lines([
"*setuptools registered plugins:",
"*at*",
])
def test_help(testdir):
result = testdir.runpytest("--help")
assert result.ret == 0
result.stdout.fnmatch_lines("""
*-v*verbose*
*setup.cfg*
*minversion*
*to see*markers*pytest --markers*
*to see*fixtures*pytest --fixtures*
""")
def test_hookvalidation_unknown(testdir):
testdir.makeconftest("""
def pytest_hello(xyz):
pass
""")
result = testdir.runpytest()
assert result.ret != 0
result.stdout.fnmatch_lines([
'*unknown hook*pytest_hello*'
])
def test_hookvalidation_optional(testdir):
testdir.makeconftest("""
import pytest
@pytest.hookimpl(optionalhook=True)
def pytest_hello(xyz):
pass
""")
result = testdir.runpytest()
assert result.ret == EXIT_NOTESTSCOLLECTED
def test_traceconfig(testdir):
result = testdir.runpytest("--traceconfig")
result.stdout.fnmatch_lines([
"*using*pytest*py*",
"*active plugins*",
])
def test_debug(testdir, monkeypatch):
result = testdir.runpytest_subprocess("--debug")
assert result.ret == EXIT_NOTESTSCOLLECTED
p = testdir.tmpdir.join("pytestdebug.log")
assert "pytest_sessionstart" in p.read()
def test_PYTEST_DEBUG(testdir, monkeypatch):
monkeypatch.setenv("PYTEST_DEBUG", "1")
result = testdir.runpytest_subprocess()
assert result.ret == EXIT_NOTESTSCOLLECTED
result.stderr.fnmatch_lines([
"*pytest_plugin_registered*",
"*manager*PluginManager*"
])
| {
"repo_name": "flub/pytest",
"path": "testing/test_helpconfig.py",
"copies": "1",
"size": "2094",
"license": "mit",
"hash": 1256378373410094600,
"line_mean": 28.9142857143,
"line_max": 64,
"alpha_frac": 0.6337153773,
"autogenerated": false,
"ratio": 3.950943396226415,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0030808207854133704,
"num_lines": 70
} |
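The tests above match captured output against glob-style patterns through fnmatch_lines. Below is a minimal standalone sketch of the same ordered glob matching using only the standard library; the helper name and the sample output lines are illustrative, not pytest API.

import fnmatch

def lines_match(output_lines, patterns):
    # every pattern must match some line, and the matches must appear in order
    remaining = iter(output_lines)
    for pattern in patterns:
        if not any(fnmatch.fnmatch(line, pattern) for line in remaining):
            return False
    return True

output = [
    "pytest 3.0.0 imported from /tmp/site-packages/pytest.py",
    "setuptools registered plugins:",
    "  pytest-cov at /tmp/site-packages/pytest_cov",
]
assert lines_match(output, ["*pytest*imported from*", "*registered plugins:*", "*at*"])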
from __future__ import absolute_import, division, print_function
from _pytest.main import EXIT_NOTESTSCOLLECTED
import pytest
def test_version(testdir, pytestconfig):
result = testdir.runpytest("--version")
assert result.ret == 0
# p = py.path.local(py.__file__).dirpath()
result.stderr.fnmatch_lines([
'*pytest*%s*imported from*' % (pytest.__version__, )
])
if pytestconfig.pluginmanager.list_plugin_distinfo():
result.stderr.fnmatch_lines([
"*setuptools registered plugins:",
"*at*",
])
def test_help(testdir):
result = testdir.runpytest("--help")
assert result.ret == 0
result.stdout.fnmatch_lines("""
*-v*verbose*
*setup.cfg*
*minversion*
*to see*markers*pytest --markers*
*to see*fixtures*pytest --fixtures*
""")
def test_hookvalidation_unknown(testdir):
testdir.makeconftest("""
def pytest_hello(xyz):
pass
""")
result = testdir.runpytest()
assert result.ret != 0
result.stdout.fnmatch_lines([
'*unknown hook*pytest_hello*'
])
def test_hookvalidation_optional(testdir):
testdir.makeconftest("""
import pytest
@pytest.hookimpl(optionalhook=True)
def pytest_hello(xyz):
pass
""")
result = testdir.runpytest()
assert result.ret == EXIT_NOTESTSCOLLECTED
def test_traceconfig(testdir):
result = testdir.runpytest("--traceconfig")
result.stdout.fnmatch_lines([
"*using*pytest*py*",
"*active plugins*",
])
def test_debug(testdir, monkeypatch):
result = testdir.runpytest_subprocess("--debug")
assert result.ret == EXIT_NOTESTSCOLLECTED
p = testdir.tmpdir.join("pytestdebug.log")
assert "pytest_sessionstart" in p.read()
def test_PYTEST_DEBUG(testdir, monkeypatch):
monkeypatch.setenv("PYTEST_DEBUG", "1")
result = testdir.runpytest_subprocess()
assert result.ret == EXIT_NOTESTSCOLLECTED
result.stderr.fnmatch_lines([
"*pytest_plugin_registered*",
"*manager*PluginManager*"
])
| {
"repo_name": "fiji-flo/servo",
"path": "tests/wpt/web-platform-tests/tools/third_party/pytest/testing/test_helpconfig.py",
"copies": "15",
"size": "2102",
"license": "mpl-2.0",
"hash": 5549260522175880000,
"line_mean": 26.2987012987,
"line_max": 64,
"alpha_frac": 0.6313035205,
"autogenerated": false,
"ratio": 3.9660377358490564,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": null,
"num_lines": null
} |
from __future__ import (absolute_import, division, print_function)
from qtpy import QtCore
from qtpy.QtWidgets import QApplication
from addie.processing.mantid.master_table.import_from_database.conflicts_solver import ConflictsSolverHandler
from addie.processing.mantid.master_table.table_row_handler import TableRowHandler
from addie.utilities.gui_handler import TableHandler
class LoadIntoMasterTable:
def __init__(self, parent=None, json=None, with_conflict=False, ignore_conflicts=False):
self.parent = parent
self.json = json
self.with_conflict = with_conflict
self.table_ui = parent.processing_ui.h3_table
if ignore_conflicts:
self.load()
else:
if with_conflict:
ConflictsSolverHandler(parent=self.parent,
json_conflicts=self.json)
else:
self.load()
def load(self):
QApplication.setOverrideCursor(QtCore.Qt.WaitCursor)
o_table = TableRowHandler(main_window=self.parent)
if self.parent.clear_master_table_before_loading:
TableHandler.clear_table(self.table_ui)
for _row, _key in enumerate(self.json.keys()):
_entry = self.json[_key]
run_number = _key
title = _entry['title']
chemical_formula = _entry['resolved_conflict']['chemical_formula']
# geometry = _entry['resolved_conflict']['geometry']
mass_density = _entry['resolved_conflict']['mass_density']
# sample_env_device = _entry['resolved_conflict']['sample_env_device']
o_table.insert_row(row=_row,
title=title,
sample_runs=run_number,
sample_mass_density=mass_density,
sample_chemical_formula=chemical_formula)
QApplication.restoreOverrideCursor()
| {
"repo_name": "neutrons/FastGR",
"path": "addie/processing/mantid/master_table/import_from_database/load_into_master_table.py",
"copies": "1",
"size": "1958",
"license": "mit",
"hash": -2789050570318356500,
"line_mean": 35.9433962264,
"line_max": 109,
"alpha_frac": 0.6098059244,
"autogenerated": false,
"ratio": 4.312775330396476,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0008290562793419015,
"num_lines": 53
} |
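LoadIntoMasterTable.load() walks a dict keyed by run number and reads 'title' plus a 'resolved_conflict' sub-dict from each entry. A hypothetical sketch of that shape follows; the run number and values are made up, only the keys mirror the ones accessed above.

json_conflicts = {
    "12345": {
        "title": "quartz sample",
        "resolved_conflict": {
            "chemical_formula": "Si O2",
            "mass_density": "2.65",
        },
    },
}

for _row, _key in enumerate(json_conflicts.keys()):
    _entry = json_conflicts[_key]
    print(_row, _key, _entry["title"],
          _entry["resolved_conflict"]["chemical_formula"],
          _entry["resolved_conflict"]["mass_density"])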
from __future__ import (absolute_import, division, print_function)
from qtpy.QtCore import (QThread)
from mantid.simpleapi import *
from addie.processing.idl.mantid_script_handler import MantidScriptHandler
class MantidThread(QThread):
def setup(self, runs=None, parameters=None):
self.runs = runs
self.parameters = parameters
def run(self):
parameters = self.parameters
print("[LOG] Running Mantid script:")
o_mantid_script = MantidScriptHandler(parameters=parameters)
script = o_mantid_script.script
print(script)
SNSPowderReduction(Filename=self.runs,
MaxChunkSize=parameters['max_chunk_size'],
PreserveEvents=parameters['preserve_events'],
PushDataPositive=parameters['push_data_positive'],
CalibrationFile=parameters['calibration_file'],
CharacterizationRunsFile=parameters['characterization_file'],
BackgroundNumber=parameters['background_number'],
VanadiumNumber=parameters['vanadium_number'],
VanadiumBackgroundNumber=parameters['vanadium_background_number'],
ExpIniFileName=parameters['exp_ini_filename'],
RemovePromptPulseWidth=int(parameters['remove_prompt_pulse_width']),
ResampleX=int(parameters['resamplex']),
BinInDSpace=parameters['bin_in_d_space'],
FilterBadPulses=int(parameters['filter_bad_pulses']),
CropWavelengthMin=float(parameters['crop_wavelength_min']),
CropWavelengthMax=float(parameters['crop_wavelength_max']),
SaveAs=parameters['save_as'],
OutputDirectory=parameters['output_directory'],
StripVanadiumPeaks=parameters['strip_vanadium_peaks'],
VanadiumRadius=parameters['vanadium_radius'],
NormalizeByCurrent=parameters['normalize_by_current'],
FinalDataUnits=parameters['final_data_units'])
| {
"repo_name": "neutrons/FastGR",
"path": "addie/mantid_handler/mantid_thread.py",
"copies": "1",
"size": "2268",
"license": "mit",
"hash": 1155608298769764000,
"line_mean": 50.5454545455,
"line_max": 95,
"alpha_frac": 0.5824514991,
"autogenerated": false,
"ratio": 4.856531049250536,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0020912579149340456,
"num_lines": 44
} |
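MantidThread.run() pulls every reduction option from a flat parameters dict. A partial, hypothetical sketch of that dict is shown below; the keys are the ones dereferenced above, while the file names and values are placeholders rather than real inputs.

parameters = {
    'max_chunk_size': 8,
    'preserve_events': True,
    'push_data_positive': 'None',
    'calibration_file': 'calibration.h5',              # placeholder path
    'characterization_file': 'characterization.txt',   # placeholder path
    'remove_prompt_pulse_width': '50',
    'resamplex': '-6000',
    'crop_wavelength_min': '0.1',
    'crop_wavelength_max': '2.9',
    'save_as': 'gsas',
    'output_directory': '/tmp/reduction',
    # ... the remaining keys read in run() follow the same pattern
}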
from __future__ import (absolute_import, division, print_function)
from qtpy.QtCore import (QThread) # noqa
from qtpy.QtWidgets import (QTableWidgetItem) # noqa
import time
import psutil
class JobMonitorThread(QThread):
def __init__(self):
QThread.__init__(self)
    def setup(self, parent=None, job_monitor_interface=None, refresh_rate_s=2):
        self.parent = parent
        # use the job monitor interface passed in, falling back to the parent's
        self.job_monitor_interface = job_monitor_interface or self.parent.job_monitor_interface
        self.refresh_rate_s = refresh_rate_s
def run(self):
while(True):
time.sleep(self.refresh_rate_s)
self._checking_status_of_jobs()
def stop(self):
self.terminate()
    def _checking_status_of_jobs(self):
        _job_list = self.parent.job_list
        for _row, _job in enumerate(_job_list):
            _pid = _job['pid']
            # psutil.Process raises NoSuchProcess (it does not return None) once the
            # process has exited, so that case is also treated as "Done!"
            try:
                _job_done = psutil.Process(_pid).status() != 'sleeping'
            except psutil.NoSuchProcess:
                _job_done = True
            if _job_done:
                self.job_monitor_interface.ui.tableWidget.removeCellWidget(_row, 2)
                _item = QTableWidgetItem("Done!")
                self.job_monitor_interface.ui.tableWidget.setItem(_row, 2, _item)
| {
"repo_name": "neutrons/FastGR",
"path": "addie/utilities/job_monitor_thread.py",
"copies": "1",
"size": "1493",
"license": "mit",
"hash": 4149053003785855500,
"line_mean": 36.325,
"line_max": 87,
"alpha_frac": 0.6068318821,
"autogenerated": false,
"ratio": 3.68641975308642,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.47932516351864196,
"avg_score": null,
"num_lines": null
} |
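The polling loop above treats a job as finished once its process is gone or is no longer sleeping. The same check is distilled into a standalone helper below; psutil.Process raises NoSuchProcess for an exited pid, which is why the exception is treated as finished.

import psutil

def job_is_done(pid):
    # a job counts as done when its process has exited or is no longer sleeping
    try:
        return psutil.Process(pid).status() != psutil.STATUS_SLEEPING
    except psutil.NoSuchProcess:
        return True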
from __future__ import (absolute_import, division, print_function)
from qtpy.QtCore import Qt
from qtpy.QtWidgets import (QCheckBox, QComboBox, QHBoxLayout, QTableWidgetItem, QWidget) # noqa
import os
from addie.utilities.file_handler import FileHandler
class ImportTable(object):
file_contain = []
table_contain = []
contain_parsed = []
full_contain_parsed = []
def __init__(self, parent=None, filename=''):
self.parent = parent
self.filename = filename
self.table = self.parent.postprocessing_ui.table
def run(self):
self.load_ascii()
self.parse_contain()
self.change_path()
self.populate_gui()
def load_ascii(self):
_filename = self.filename
o_file = FileHandler(filename=_filename)
o_file.retrieve_contain()
self.file_contain = o_file.file_contain
def parse_config_table(self):
self._list_row = eval(self.table_contain)
self.parser()
def parse_contain(self):
_contain = self.file_contain
self._list_row = _contain.split("\n")
self.parser()
def parser(self):
_list_row = self._list_row
_contain_parsed = []
for _row in _list_row:
_row_split = _row.split('|')
_contain_parsed.append(_row_split)
self.contain_parsed = _contain_parsed[2:]
self.full_contain_parsed = _contain_parsed
    def change_path(self):
        full_contain_parsed = self.full_contain_parsed
        try:
            _path_string_list = full_contain_parsed[0][0].split(':')
            self.parent.current_folder = _path_string_list[1].strip()
            os.chdir(self.parent.current_folder)
        except (IndexError, OSError):
            # no folder recorded in the header line, or it no longer exists
            pass
def populate_gui(self):
_contain_parsed = self.contain_parsed
for _row, _entry in enumerate(_contain_parsed):
if _entry == ['']:
continue
self.table.insertRow(_row)
# select
_layout = QHBoxLayout()
_widget = QCheckBox()
_widget.setEnabled(True)
_layout.addWidget(_widget)
_layout.addStretch()
_new_widget = QWidget()
_new_widget.setLayout(_layout)
_widget.stateChanged.connect(lambda state=0, row=_row:
self.parent.table_select_state_changed(state, row))
self.table.setCellWidget(_row, 0, _new_widget)
# name
_item = QTableWidgetItem(_entry[1])
self.table.setItem(_row, 1, _item)
# runs
_item = QTableWidgetItem(_entry[2])
self.table.setItem(_row, 2, _item)
# Sample formula
if _entry[3]:
_item = QTableWidgetItem(_entry[3])
else:
_item = QTableWidgetItem("")
self.table.setItem(_row, 3, _item)
# mass density
if _entry[4]:
_item = QTableWidgetItem(_entry[4])
else:
_item = QTableWidgetItem("")
self.table.setItem(_row, 4, _item)
# radius
if _entry[5]:
_item = QTableWidgetItem(_entry[5])
else:
_item = QTableWidgetItem("")
self.table.setItem(_row, 5, _item)
# packing fraction
if _entry[6]:
_item = QTableWidgetItem(_entry[6])
else:
_item = QTableWidgetItem("")
self.table.setItem(_row, 6, _item)
# sample shape
_widget = QComboBox()
_widget.addItem("Cylinder")
_widget.addItem("Sphere")
if _entry[7] == "Sphere":
_widget.setCurrentIndex(1)
self.table.setCellWidget(_row, 7, _widget)
# do abs corr
_layout = QHBoxLayout()
_widget = QCheckBox()
if _entry[8] == "True":
_widget.setCheckState(Qt.Checked)
                _widget.setStyleSheet("border: 2px solid black")
_widget.setEnabled(True)
_layout.addStretch()
_layout.addWidget(_widget)
_layout.addStretch()
_new_widget = QWidget()
_new_widget.setLayout(_layout)
self.table.setCellWidget(_row, 8, _new_widget)
for _row, _entry in enumerate(_contain_parsed):
if _entry == ['']:
continue
# select
_widget = self.table.cellWidget(_row, 0).children()[1]
if _entry[0] == "True":
_widget.setChecked(True)
| {
"repo_name": "neutrons/FastGR",
"path": "addie/processing/idl/import_table.py",
"copies": "1",
"size": "4649",
"license": "mit",
"hash": -3972940144027832000,
"line_mean": 29.7880794702,
"line_max": 97,
"alpha_frac": 0.5246289525,
"autogenerated": false,
"ratio": 4.1032656663724625,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0004851171402122054,
"num_lines": 151
} |
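ImportTable.parser() splits each saved row on '|' and populate_gui() then indexes the fields positionally. A minimal sketch of one such row; the sample values are made up, and the field order matches the indices used above.

raw_row = "True|quartz|12345|Si O2|2.65|0.3|1.0|Cylinder|True"
(select, name, runs, sample_formula, mass_density,
 radius, packing_fraction, sample_shape, do_abs_corr) = raw_row.split('|')
print(name, runs, sample_shape)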
from __future__ import (absolute_import, division, print_function)
from qtpy.QtCore import (Qt)
from qtpy.QtWidgets import (QFileDialog)
import os
import addie.processing.idl.table_handler
from addie.utilities.math_tools import is_int, is_float
from addie.help_handler.help_gui import check_status
class Step2GuiHandler(object):
hidrogen_range = [1, 50]
no_hidrogen_range = [10, 50]
current_folder = ""
default_q_range = [0.2, 31.4]
default_ndabs_output_file_name = "sample_name"
user_canceled = False
def __init__(self, main_window=None):
self.main_window = main_window
self.current_folder = main_window.current_folder
def move_to_folder(self):
_new_folder = QFileDialog.getExistingDirectory(parent=self.main_window,
caption="Select working directory",
directory=self.current_folder)
if not _new_folder:
self.user_canceled = True
else:
if isinstance(_new_folder, tuple):
_new_folder = _new_folder[0]
os.chdir(_new_folder)
self.main_window.current_folder = _new_folder
self.main_window.setWindowTitle(_new_folder)
def is_hidrogen_clicked(self):
return self.main_window.postprocessing_ui.hydrogen_yes.isChecked()
def hidrogen_clicked(self):
_range = self.hidrogen_range
self.populate_hidrogen_range(_range)
def no_hidrogen_clicked(self):
_range = self.no_hidrogen_range
self.populate_hidrogen_range(_range)
def populate_hidrogen_range(self, fit_range):
min_value, max_value = fit_range
self.main_window.postprocessing_ui.plazcek_fit_range_min.setText("%d" % min_value)
self.main_window.postprocessing_ui.plazcek_fit_range_max.setText("%d" % max_value)
def get_plazcek_range(self):
fit_range_min = self.main_window.postprocessing_ui.plazcek_fit_range_min.text().strip()
fit_range_max = self.main_window.postprocessing_ui.plazcek_fit_range_max.text().strip()
return [fit_range_min, fit_range_max]
def get_q_range(self):
q_range_min = self.main_window.postprocessing_ui.q_range_min.text().strip()
q_range_max = self.main_window.postprocessing_ui.q_range_max.text().strip()
return [q_range_min, q_range_max]
def step2_background_flag(self):
if self.main_window.postprocessing_ui.background_no.isChecked():
self.no_background_clicked()
else:
self.yes_background_clicked()
def yes_background_clicked(self):
self.main_window.postprocessing_ui.background_line_edit.setEnabled(True)
self.main_window.postprocessing_ui.background_comboBox.setEnabled(True)
def no_background_clicked(self):
self.main_window.postprocessing_ui.background_line_edit.setEnabled(False)
self.main_window.postprocessing_ui.background_comboBox.setEnabled(False)
def background_index_changed(self, row_index=-1):
if row_index == -1:
return
if self.main_window.postprocessing_ui.table.item(row_index, 2) is None:
return
_item = self.main_window.postprocessing_ui.table.item(row_index, 2)
self.main_window.postprocessing_ui.background_line_edit.setText(_item.text())
def step2_update_background_dropdown(self):
row_index = self.main_window.postprocessing_ui.background_comboBox.currentIndex()
self.background_index_changed(row_index=row_index)
def check_gui(self):
self.check_run_ndabs_button()
self.check_run_sum_scans_button()
self.check_import_export_buttons()
def define_new_ndabs_output_file_name(self):
"""retrieve name of first row selected and use it to define output file name"""
_output_file_name = self.define_new_output_file_name()
self.main_window.postprocessing_ui.run_ndabs_output_file_name.setText(_output_file_name)
def define_new_sum_scans_output_file_name(self):
"""retrieve name of first row selected and use it to define output file name"""
_output_file_name = self.define_new_output_file_name()
self.main_window.postprocessing_ui.sum_scans_output_file_name.setText(_output_file_name)
def define_new_output_file_name(self):
"""retrieve name of first row selected and use it to define output file name"""
o_table_handler = addie.processing.idl.table_handler.TableHandler(parent=self.main_window)
o_table_handler.retrieve_list_of_selected_rows()
list_of_selected_row = o_table_handler.list_selected_row
if len(list_of_selected_row) > 0:
_metadata_selected = o_table_handler.list_selected_row[0]
_output_file_name = _metadata_selected['name']
else:
_output_file_name = self.default_ndabs_output_file_name
return _output_file_name
def check_import_export_buttons(self):
_export_status = False
if self.main_window.postprocessing_ui.table.rowCount() > 0:
_export_status = True
self.main_window.postprocessing_ui.export_button.setEnabled(_export_status)
def check_run_mantid_reduction_button(self):
_status = True
if not self.main_window.postprocessing_ui.table.rowCount() > 0:
_status = False
if _status and (not self.at_least_one_row_checked()):
_status = False
if _status and (self.main_window.postprocessing_ui.mantid_calibration_value.text() == 'N/A'):
_status = False
if _status and (self.main_window.postprocessing_ui.mantid_characterization_value.text() == 'N/A'):
_status = False
if _status and (self.main_window.postprocessing_ui.vanadium.text() == ""):
_status = False
if _status and (self.main_window.postprocessing_ui.vanadium_background.text() == ""):
_status = False
if _status and (not is_int(self.main_window.postprocessing_ui.mantid_number_of_bins.text())):
_status = False
if _status and (not is_float(self.main_window.postprocessing_ui.mantid_min_crop_wavelength.text())):
_status = False
if _status and (not is_float(self.main_window.postprocessing_ui.mantid_max_crop_wavelength.text())):
_status = False
if _status and (not is_float(self.main_window.postprocessing_ui.mantid_vanadium_radius.text())):
_status = False
if _status and (self.main_window.postprocessing_ui.mantid_output_directory_value.text() == "N/A"):
_status = False
self.main_window.postprocessing_ui.mantid_run_reduction.setEnabled(_status)
check_status(parent=self.main_window, button_name='mantid')
def check_run_sum_scans_button(self):
_status = True
if not self.main_window.postprocessing_ui.table.rowCount() > 0:
_status = False
if _status and (not self.at_least_one_row_checked()):
_status = False
if _status and self.main_window.postprocessing_ui.sum_scans_output_file_name.text() == "":
_status = False
self.main_window.postprocessing_ui.run_sum_scans_button.setEnabled(_status)
check_status(parent=self.main_window, button_name='scans')
def check_run_ndabs_button(self):
_status = True
if not self.main_window.postprocessing_ui.table.rowCount() > 0:
_status = False
if not self.at_least_one_row_checked():
_status = False
if self.any_fourier_filter_widgets_empty():
_status = False
if self.any_plazcek_widgets_empty():
_status = False
if self.any_q_range_widgets_empty():
_status = False
        # make sure the checked rows have no empty metadata fields
if _status:
for _row in range(self.main_window.postprocessing_ui.table.rowCount()):
_this_row_status_ok = self.check_if_this_row_is_ok(_row)
if not _this_row_status_ok:
_status = False
break
if self.main_window.postprocessing_ui.run_ndabs_output_file_name.text() == '':
_status = False
self.main_window.postprocessing_ui.run_ndabs_button.setEnabled(_status)
check_status(parent=self.main_window, button_name='ndabs')
def at_least_one_row_checked(self):
o_table_handler = addie.processing.idl.table_handler.TableHandler(parent=self.main_window)
o_table_handler.retrieve_list_of_selected_rows()
list_of_selected_row = o_table_handler.list_selected_row
if len(list_of_selected_row) > 0:
return True
else:
return False
def check_if_this_row_is_ok(self, row):
_status_ok = True
_selected_widget = self.main_window.postprocessing_ui.table.cellWidget(row, 0).children()
if len(_selected_widget) > 0:
if (_selected_widget[1].checkState() == Qt.Checked):
_table_handler = addie.processing.idl.table_handler.TableHandler(parent=self.main_window)
for _column in range(1, 7):
if _table_handler.retrieve_item_text(row, _column) == '':
_status_ok = False
break
return _status_ok
def any_plazcek_widgets_empty(self):
_min = str(self.main_window.postprocessing_ui.plazcek_fit_range_min.text()).strip()
if _min == "":
return True
_max = str(self.main_window.postprocessing_ui.plazcek_fit_range_max.text()).strip()
if _max == "":
return True
return False
def any_q_range_widgets_empty(self):
_min = str(self.main_window.postprocessing_ui.q_range_min.text()).strip()
if _min == "":
return True
_max = str(self.main_window.postprocessing_ui.q_range_max.text()).strip()
if _max == "":
return True
return False
def any_fourier_filter_widgets_empty(self):
_from = str(self.main_window.postprocessing_ui.fourier_filter_from.text()).strip()
if _from == "":
return True
_to = str(self.main_window.postprocessing_ui.fourier_filter_to.text()).strip()
if _to == "":
return True
return False
def reset_q_range(self):
_q_min = "%s" % str(self.default_q_range[0])
_q_max = "%s" % str(self.default_q_range[1])
self.main_window.postprocessing_ui.q_range_min.setText(_q_min)
self.main_window.postprocessing_ui.q_range_max.setText(_q_max)
| {
"repo_name": "neutrons/FastGR",
"path": "addie/processing/idl/step2_gui_handler.py",
"copies": "1",
"size": "10703",
"license": "mit",
"hash": 1598554625238822400,
"line_mean": 39.0861423221,
"line_max": 108,
"alpha_frac": 0.627581052,
"autogenerated": false,
"ratio": 3.5428666004634226,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9661644316699802,
"avg_score": 0.0017606671527240767,
"num_lines": 267
} |
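Several of the checks above rely on is_int and is_float from addie.utilities.math_tools. Below is a hedged sketch of what such helpers typically look like; this is an assumption about their behavior, not the library's actual code.

def is_int(value):
    try:
        int(str(value).strip())
        return True
    except ValueError:
        return False

def is_float(value):
    try:
        float(str(value).strip())
        return True
    except ValueError:
        return False

assert is_int("6000") and not is_int("2.9")
assert is_float("2.9") and not is_float("N/A")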
from __future__ import (absolute_import, division, print_function)
from qtpy.QtCore import Qt
import os
import numpy as np
import glob
import matplotlib.pyplot as plt
import matplotlib.colors as colors
import matplotlib.cm as cm
from addie.processing.idl.sample_environment_handler import SampleEnvironmentHandler
class TablePlotHandler:
def __init__(self, parent=None):
self.parent = parent
def plot_sofq(self):
sofq_datasets = self._plot_fetch_data()
self._plot_datasets(sorted(sofq_datasets,
key=lambda k: int(k['run'])),
title='S(Q)')
def plot_sofq_diff_first_run_row(self):
sofq_datasets = self._plot_fetch_data()
sofq_base = dict(sofq_datasets[0])
for sofq in sorted(sofq_datasets,
key=lambda k: int(k['run'])):
sofq['y'] = sofq['y'] - sofq_base['y']
self._plot_datasets(sofq_datasets,
shift_value=0.2,
title='S(Q) - S(Q) for run '+sofq_base['run'])
def plot_sofq_diff_average_row(self):
sofq_datasets = self._plot_fetch_data()
sofq_data = [ sofq['y'] for sofq in sofq_datasets ]
sofq_avg = np.average(sofq_data,axis=0)
for sofq in sorted(sofq_datasets, key=lambda k: int(k['run'])):
sofq['y'] = sofq['y'] - sofq_avg
self._plot_datasets(sofq_datasets,
shift_value=0.2,
title='S(Q) - <S(Q)>')
def _plot_temperature(self, samp_env_choice=None):
file_list = self._plot_fetch_files(file_type='nexus')
samp_env = SampleEnvironmentHandler(samp_env_choice)
datasets = list()
for data in file_list:
samp_x, samp_y = samp_env.getDataFromFile(data['file'], 'samp')
envi_x, envi_y = samp_env.getDataFromFile(data['file'], 'envi')
datasets.append({'run': data['run'] + '_samp',
'x': samp_x,
'y': samp_y,
'linestyle': '-'})
datasets.append({'run': None,
'x': envi_x,
'y': envi_y,
'linestyle': '--'})
self._plot_datasets(sorted(datasets,
key=lambda k: k['run']),
shift_value=0.0,
title='Temperature: ' + samp_env_choice)
# utilities functions
def _plot_fetch_data(self):
file_list = self._plot_fetch_files(file_type='SofQ')
for data in file_list:
with open(data['file'], 'r') as handle:
x, y, e = np.loadtxt(handle, unpack=True)
data['x'] = x
data['y'] = y
return file_list
def _plot_fetch_files(self, file_type='SofQ'):
if file_type == 'SofQ':
search_dir = './SofQ'
prefix = 'NOM_'
suffix = 'SQ.dat'
elif file_type == 'nexus':
cwd = os.getcwd()
search_dir = cwd[:cwd.find('shared')] + '/nexus'
prefix = 'NOM_'
suffix = '.nxs.h5'
# ipts = int(re.search(r"IPTS-(\d*)\/", os.getcwd()).group(1))
_row = self.current_row
_row_runs = self._collect_metadata(row_index=_row)['runs'].split(',')
output_list = list()
file_list = [a_file for a_file in glob.glob(search_dir + '/' + prefix + '*')]
for run in _row_runs:
the_file = search_dir + '/' + prefix + str(run) + suffix
if the_file in file_list:
output_list.append({'file': the_file, 'run': run})
return output_list
def _plot_datasets(self,datasets,shift_value=1.0,cmap_choice='inferno',title=None):
fig = plt.figure()
ax = fig.add_subplot(1,1,1)
# configure plot
cmap = plt.get_cmap(cmap_choice)
cNorm = colors.Normalize(vmin=0, vmax=len(datasets) )
scalarMap = cm.ScalarMappable(norm=cNorm, cmap=cmap)
mrks=[0,-1]
# plot data
shifter = 0.0
for idx, data in enumerate(datasets):
data['y'] += shifter
colorVal = scalarMap.to_rgba(idx)
if 'linestyle' in data:
ax.plot(data['x'],data['y'],data['linestyle']+'o',label=data['run'],color=colorVal,markevery=mrks,)
else:
ax.plot(data['x'],data['y'],label=data['run'],color=colorVal,markevery=mrks)
shifter += shift_value
box = ax.get_position()
ax.set_position([box.x0, box.y0, box.width * 0.8, box.height])
handles, labels = ax.get_legend_handles_labels()
ax.legend(handles[::-1], labels[::-1], title='Runs', loc='center left',bbox_to_anchor=(1,0.5))
if title:
fig.suptitle(title)
plt.show()
def _collect_metadata(self, row_index=-1):
if row_index == -1:
return []
_name = self.retrieve_item_text(row_index, 1)
_runs = self.retrieve_item_text(row_index, 2)
_sample_formula = self.retrieve_item_text(row_index, 3)
_mass_density = self.retrieve_item_text(row_index, 4)
_radius = self.retrieve_item_text(row_index, 5)
_packing_fraction = self.retrieve_item_text(row_index, 6)
_sample_shape = self._retrieve_sample_shape(row_index)
_do_abs_correction = self._retrieve_do_abs_correction(row_index)
_metadata = {'name': _name,
'runs': _runs,
'sample_formula': _sample_formula,
'mass_density': _mass_density,
'radius': _radius,
'packing_fraction': _packing_fraction,
'sample_shape': _sample_shape,
'do_abs_correction': _do_abs_correction}
return _metadata
def retrieve_item_text(self, row, column):
_item = self.parent.table.item(row, column)
if _item is None:
return ''
else:
return str(_item.text())
def _retrieve_sample_shape(self, row_index):
_widget = self.parent.table.cellWidget(row_index, 7)
_selected_index = _widget.currentIndex()
_sample_shape = _widget.itemText(_selected_index)
return _sample_shape
def _retrieve_do_abs_correction(self, row_index):
_widget = self.parent.table.cellWidget(row_index, 8).children()[1]
if (_widget.checkState() == Qt.Checked):
return 'go'
else:
return 'nogo'
| {
"repo_name": "neutrons/FastGR",
"path": "addie/processing/mantid/master_table/table_plot_handler.py",
"copies": "1",
"size": "6631",
"license": "mit",
"hash": -6224542620616821000,
"line_mean": 35.8388888889,
"line_max": 115,
"alpha_frac": 0.5248077213,
"autogenerated": false,
"ratio": 3.6195414847161573,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.46443492060161573,
"avg_score": null,
"num_lines": null
} |
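_plot_datasets offsets each curve by a running shift and colors it from a normalized colormap. A self-contained version of that idea with synthetic data; the array shapes and the shift value are illustrative.

import numpy as np
import matplotlib.pyplot as plt
import matplotlib.colors as colors
import matplotlib.cm as cm

x = np.linspace(0, 10, 200)
datasets = [{'run': str(run), 'x': x, 'y': np.sin(x + run)} for run in range(4)]

cmap = plt.get_cmap('inferno')
c_norm = colors.Normalize(vmin=0, vmax=len(datasets))
scalar_map = cm.ScalarMappable(norm=c_norm, cmap=cmap)

fig, ax = plt.subplots()
shifter = 0.0
for idx, data in enumerate(datasets):
    ax.plot(data['x'], data['y'] + shifter, label=data['run'],
            color=scalar_map.to_rgba(idx))
    shifter += 0.5
ax.legend(title='Runs')
plt.show()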
from __future__ import (absolute_import, division, print_function)
from qtpy.QtCore import (Qt)
from addie.utilities.math_tools import is_int, is_float
class Step2Utilities(object):
def __init__(self, parent=None):
self.parent = parent.ui.postprocessing_ui
def is_table_empty(self):
if not self.parent.table.rowCount() > 0:
return True
else:
return False
def at_least_one_row_checked(self):
o_table_handler = TableHandler(parent=self.parent)
o_table_handler.retrieve_list_of_selected_rows()
list_of_selected_row = o_table_handler.list_selected_row
if len(list_of_selected_row) > 0:
return True
else:
return False
def is_fourier_filter_from_empty(self):
_from = str(self.parent.fourier_filter_from.text()).strip()
if _from == "":
return True
else:
return False
def is_fourier_filter_to_empty(self):
_to = str(self.parent.fourier_filter_to.text()).strip()
if _to == "":
return True
else:
return False
def any_fourier_filter_widgets_empty(self):
_from = str(self.parent.fourier_filter_from.text()).strip()
if _from == "":
return True
_to = str(self.parent.fourier_filter_to.text()).strip()
if _to == "":
return True
return False
def is_plazcek_from_empty(self):
_min = str(self.parent.plazcek_fit_range_min.text()).strip()
if _min == "":
return True
else:
return False
def is_plazcek_to_empty(self):
_max = str(self.parent.plazcek_fit_range_max.text()).strip()
if _max == "":
return True
else:
return False
def any_plazcek_widgets_empty(self):
_min = str(self.parent.plazcek_fit_range_min.text()).strip()
if _min == "":
return True
_max = str(self.parent.plazcek_fit_range_max.text()).strip()
if _max == "":
return True
return False
def is_q_min_empty(self):
_min = str(self.parent.q_range_min.text()).strip()
if _min == "":
return True
else:
return False
def is_q_max_empty(self):
_max = str(self.parent.q_range_max.text()).strip()
if _max == "":
return True
else:
return False
def any_q_range_widgets_empty(self):
_min = str(self.parent.q_range_min.text()).strip()
if _min == "":
return True
_max = str(self.parent.q_range_max.text()).strip()
if _max == "":
return True
return False
def are_row_checked_have_missing_fields(self):
for _row in range(self.parent.table.rowCount()):
_this_row_status_ok = self.check_if_this_row_is_ok(_row)
if not _this_row_status_ok:
return True
return False
def check_if_this_row_is_ok(self, row):
_status_ok = True
_selected_widget = self.parent.table.cellWidget(row, 0).children()
if len(_selected_widget) > 0:
if (_selected_widget[1].checkState() == Qt.Checked):
_table_handler = TableHandler(parent=self.parent)
for _column in range(1, 7):
if _table_handler.retrieve_item_text(row, _column) == '':
_status_ok = False
break
return _status_ok
def is_ndabs_output_empty(self):
if self.parent.run_ndabs_output_file_name.text().strip() == "":
return True
else:
return False
def is_scans_output_file_name_empty(self):
if self.parent.sum_scans_output_file_name.text().strip() == "":
return True
else:
return False
# mantid
def is_mantid_calibration_empty(self):
if (self.parent.mantid_calibration_value.text() == 'N/A'):
return True
else:
return False
def is_mantid_characterization_empty(self):
if (self.parent.mantid_characterization_value.text() == 'N/A'):
return True
else:
return False
def is_mantid_number_of_bins_no_int(self):
if not is_int(self.parent.mantid_number_of_bins.text()):
return True
else:
return False
def is_mantid_min_crop_wavelength_no_float(self):
if not is_float(self.parent.mantid_min_crop_wavelength.text()):
return True
else:
return False
def is_mantid_max_crop_wavelength_no_float(self):
if not is_float(self.parent.mantid_max_crop_wavelength.text()):
return True
else:
return False
def is_mantid_vanadium_radius_not_float(self):
if not is_float(self.parent.mantid_vanadium_radius.text()):
return True
else:
return False
def is_mantid_output_directory_empty(self):
if self.parent.mantid_output_directory_value.text() == "N/A":
return True
else:
return False
class TableHandler(object):
def __init__(self, parent=None):
self.parent = parent
def retrieve_list_of_selected_rows(self):
self.list_selected_row = []
for _row_index in range(self.parent.table.rowCount()):
_widgets = self.parent.table.cellWidget(_row_index, 0).children()
if len(_widgets) > 0:
_selected_widget = self.parent.table.cellWidget(_row_index, 0).children()[1]
if (_selected_widget.checkState() == Qt.Checked):
_entry = self._collect_metadata(row_index=_row_index)
self.list_selected_row.append(_entry)
def _collect_metadata(self, row_index=-1):
if row_index == -1:
return []
_name = self.retrieve_item_text(row_index, 1)
_runs = self.retrieve_item_text(row_index, 2)
_sample_formula = self.retrieve_item_text(row_index, 3)
_mass_density = self.retrieve_item_text(row_index, 4)
_radius = self.retrieve_item_text(row_index, 5)
_packing_fraction = self.retrieve_item_text(row_index, 6)
_sample_shape = self._retrieve_sample_shape(row_index)
_do_abs_correction = self._retrieve_do_abs_correction(row_index)
_metadata = {'name': _name,
'runs': _runs,
'sample_formula': _sample_formula,
'mass_density': _mass_density,
'radius': _radius,
'packing_fraction': _packing_fraction,
'sample_shape': _sample_shape,
'do_abs_correction': _do_abs_correction}
return _metadata
def retrieve_item_text(self, row, column):
_item = self.parent.table.item(row, column)
if _item is None:
return ''
else:
return str(_item.text())
def _retrieve_sample_shape(self, row_index):
_widget = self.parent.table.cellWidget(row_index, 7)
_selected_index = _widget.currentIndex()
_sample_shape = _widget.itemText(_selected_index)
return _sample_shape
def _retrieve_do_abs_correction(self, row_index):
_widget = self.parent.table.cellWidget(row_index, 8).children()[1]
if (_widget.checkState() == Qt.Checked):
return 'go'
else:
return 'nogo'
| {
"repo_name": "neutrons/FastGR",
"path": "addie/processing/idl/step2_utilities.py",
"copies": "1",
"size": "7542",
"license": "mit",
"hash": -6835943565423899000,
"line_mean": 31.0936170213,
"line_max": 92,
"alpha_frac": 0.5534341024,
"autogenerated": false,
"ratio": 3.698871996076508,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4752306098476508,
"avg_score": null,
"num_lines": null
} |
from __future__ import (absolute_import, division, print_function)
from qtpy.QtWidgets import (QApplication) # noqa
import subprocess
import time
from addie.utilities.job_monitor_interface import JobMonitorInterface
class JobStatusHandler(object):
def __init__(self, parent=None, job_name='', script_to_run=None, thread_index=-1):
self.parent = parent
if self.parent.job_monitor_interface is None:
job_ui = JobMonitorInterface(parent=self.parent)
job_ui.show()
QApplication.processEvents()
self.parent.job_monitor_interface = job_ui
job_ui.launch_logbook_thread()
else:
self.parent.job_monitor_interface.activateWindow()
job_ui = self.parent.job_monitor_interface
if job_name == '':
return
job_list = self.parent.job_list
p = subprocess.Popen(script_to_run.split())
new_job = {'job_name': job_name,
'time': self.get_launch_time(),
'status': 'processing',
'pid': p.pid,
'subprocess': p}
job_list.append(new_job)
self.parent.job_list = job_list
job_ui.refresh_table(job_list)
def update_logbook_text(self, text):
print(text)
    def get_local_time(self):
        # time.localtime() already applies the timezone (and DST), unlike subtracting
        # time.timezone from gmtime(), which can produce negative or out-of-range hours
        _t = time.localtime()
        return [_t.tm_year, _t.tm_mon, _t.tm_mday, _t.tm_hour, _t.tm_min, _t.tm_sec]
def get_launch_time(self):
local_time = self.get_local_time()
return "%d %d %d %d:%d:%d" % (local_time[0], local_time[1], local_time[2],
local_time[3], local_time[4], local_time[5])
def start(self):
pass
| {
"repo_name": "neutrons/FastGR",
"path": "addie/utilities/job_status_handler.py",
"copies": "1",
"size": "1853",
"license": "mit",
"hash": 2416959355040520700,
"line_mean": 32.0892857143,
"line_max": 87,
"alpha_frac": 0.5796006476,
"autogenerated": false,
"ratio": 3.6693069306930695,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4748907578293069,
"avg_score": null,
"num_lines": null
} |
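JobStatusHandler launches the script with subprocess.Popen and keeps just enough bookkeeping (pid, launch time, the Popen handle) to let the job monitor poll it later. A standalone sketch of that launch-and-track step; the command is a placeholder.

import subprocess
import time

p = subprocess.Popen("python --version".split())
new_job = {
    'job_name': 'example job',
    'time': time.strftime("%Y %m %d %H:%M:%S"),
    'status': 'processing',
    'pid': p.pid,
    'subprocess': p,
}
print(new_job['pid'], new_job['time'])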
from __future__ import (absolute_import, division, print_function)
from qtpy.QtWidgets import (QDialog) # noqa
from addie.plot.constants import BASIC_COLORS, LINE_MARKERS
from addie.utilities import load_ui
class PlotStyleDialog(QDialog):
"""
    Dialog class for the user to specify the color and marker of a certain line
"""
def __init__(self, parent=None):
"""
Parameters
----------
parent
"""
super(PlotStyleDialog, self).__init__(parent)
self.ui = load_ui('colorStyleSetup.ui', baseinstance=self)
# init widgets
self._init_widgets()
# define event handlers
self.ui.pushButton_apply.clicked.connect(self.do_accept_quit)
self.ui.pushButton_quit.clicked.connect(self.do_cancel_quit)
# class variable
self._acceptSelection = False
# plot ID list
self._plotIDList = list()
return
def _init_widgets(self):
"""
Init the color and marker
Returns:
"""
color_list = BASIC_COLORS[:]
color_list.insert(0, 'No Change')
marker_list = LINE_MARKERS[:]
marker_list.insert(0, 'No Change')
for color in color_list:
self.ui.comboBox_color.addItem(color)
for marker in marker_list:
self.ui.comboBox_style.addItem(marker)
self.ui.pushButton_quit.setText('Cancel')
return
def do_accept_quit(self):
"""
Returns
-------
"""
self._acceptSelection = True
self.close()
return
def do_cancel_quit(self):
"""
Returns
-------
"""
self._acceptSelection = False
self.close()
return
def get_color_marker(self):
"""
Returns: 3-tuple. Line ID (None for all lines),
color (string, None for no change),
mark (string, None for no change)
"""
# plot IDs
plot_index = self.ui.comboBox_lines.currentIndex()
plot_id = self._plotIDList[plot_index]
if plot_id == -1:
return_list = self._plotIDList[1:]
else:
return_list = [plot_id]
# color
color = str(self.ui.comboBox_color.currentText())
if color == 'No Change':
color = None
# marker
mark = str(self.ui.comboBox_style.currentText())
if mark == 'No Change':
mark = None
else:
mark = mark.split('(')[0].strip()
return return_list, color, mark
def is_to_apply(self):
"""
Check whether the user wants to apply the changes to the canvas or not
Returns: boolean to apply the change or not.
"""
return self._acceptSelection
def set_plot_labels(self, plot_labels):
"""
Add the plots
Args:
plot_labels:
Returns:
"""
# check
assert isinstance(plot_labels, list), 'Plot lines\' labels must be list but not %s.' % type(plot_labels)
assert len(plot_labels) > 0, 'Input plot lines cannot be an empty list.'
# clear combo box and plot ID list
self.ui.comboBox_lines.clear()
self._plotIDList = list()
# add lines
plot_labels.insert(0, (-1, 'All'))
for line_info in plot_labels:
plot_id, label = line_info
self.ui.comboBox_lines.addItem(label)
self._plotIDList.append(plot_id)
return
def get_plots_color_marker(parent_window, plot_label_list):
"""
Launch a dialog to get the new color and marker for all or a specific plot.
Note:
        1. the input is a list of 2-tuples so that two lines sharing the same label stay distinguishable
Args:
parent_window: parent window (main UI)
plot_label_list: list of 2-tuples: line ID (integer) and line label (string)
Returns:
3-tuples: line ID (None for all lines, -1 for canceling), color (string) and marker (string)
"""
# check input
assert isinstance(plot_label_list, list), 'List of plots\' labels must be a list but not a %s.' \
'' % plot_label_list.__class__.__name__
# Launch window
child_window = PlotStyleDialog(parent_window)
# init set up
child_window.set_plot_labels(plot_labels=plot_label_list)
# launch window
child_window.exec_()
# get result
if child_window.is_to_apply():
# set the close one
plot_id_list, color, marker = child_window.get_color_marker()
else:
# not accept
plot_id_list, color, marker = None, None, None
return plot_id_list, color, marker
| {
"repo_name": "neutrons/FastGR",
"path": "addie/utilities/specify_plots_style.py",
"copies": "1",
"size": "4752",
"license": "mit",
"hash": 4822843388173947000,
"line_mean": 25.2541436464,
"line_max": 112,
"alpha_frac": 0.5606060606,
"autogenerated": false,
"ratio": 4.071979434447301,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.51325854950473,
"avg_score": null,
"num_lines": null
} |
from __future__ import (absolute_import, division, print_function)
from qtpy.QtWidgets import (QDialog, QFileDialog) # noqa
from addie.utilities import load_ui
from addie.utilities.job_status_handler import JobStatusHandler
import os
class IptsFileTransferDialog(QDialog):
ipts_folder = '/SNS/NOM'
script = '/SNS/NOM/shared/autoNOM/stable/copystuff.py'
def __init__(self, parent=None):
self.parent = parent
QDialog.__init__(self, parent=parent)
self.ui = load_ui('iptsFileTransfer.ui', baseinstance=self)
def transfer_clicked(self):
_input_ipts = str(self.ui.source_ipts_value.text())
_input_autonom = str(self.ui.source_autonom_value.text())
_target_autonom = str(self.ui.target_autonom_value.text())
_full_script = self.script + " -i " + _input_ipts
_full_script += " -f " + _input_autonom
_full_script += " -t " + _target_autonom
JobStatusHandler(parent=self.parent,
script_to_run=_full_script,
job_name='IPTS File Transfer')
self.close()
def cancel_clicked(self):
self.close()
def source_ipts_clicked(self):
_ipts_folder = QFileDialog.getExistingDirectory(caption="Select Input IPTS Folder ...",
directory=self.ipts_folder,
options=QFileDialog.ShowDirsOnly)
if not _ipts_folder:
return
if isinstance(_ipts_folder, tuple):
_ipts_folder = _ipts_folder[0]
self.ipts_folder = _ipts_folder
_ipts_folder = os.path.basename(_ipts_folder)
_ipts_number = _ipts_folder.split('-')[1]
self.ui.source_ipts_value.setText(str(_ipts_number))
self.check_status_transfer_button()
def source_autonom_clicked(self):
_autonom_folder = QFileDialog.getExistingDirectory(caption="Select Input autoNOM Folder ...",
directory=self.ipts_folder,
options=QFileDialog.ShowDirsOnly)
if not _autonom_folder:
return
if isinstance(_autonom_folder, tuple):
_autonom_folder = _autonom_folder[0]
_autonom_folder = os.path.basename(_autonom_folder)
self.ui.source_autonom_value.setText(str(_autonom_folder))
self.check_status_transfer_button()
def target_autonom_clicked(self):
_autonom_folder = QFileDialog.getExistingDirectory(caption="Select Output autoNOM Folder ...",
directory=self.parent.current_folder,
options=QFileDialog.ShowDirsOnly)
if not _autonom_folder:
return
if isinstance(_autonom_folder, tuple):
_autonom_folder = _autonom_folder[0]
_autonom_folder = os.path.basename(_autonom_folder)
self.ui.target_autonom_value.setText(str(_autonom_folder))
self.check_status_transfer_button()
def check_status_transfer_button(self):
_source_ipts = str(self.ui.source_ipts_value.text())
if _source_ipts == 'N/A':
self.set_transfer_button_status(status='disable')
return
_source_autonom = str(self.ui.source_autonom_value.text())
if _source_autonom == 'N/A':
self.set_transfer_button_status(status='disable')
return
_target_autonom = str(self.ui.target_autonom_value.text())
if _target_autonom == 'N/A':
self.set_transfer_button_status(status='disable')
return
self.set_transfer_button_status(status='enable')
def set_transfer_button_status(self, status='enable'):
if status == 'enable':
self.ui.transfer_button.setEnabled(True)
else:
self.ui.transfer_button.setEnabled(False)
| {
"repo_name": "neutrons/FastGR",
"path": "addie/utilities/ipts_file_transfer_dialog.py",
"copies": "1",
"size": "3999",
"license": "mit",
"hash": 3109639780148489700,
"line_mean": 40.65625,
"line_max": 102,
"alpha_frac": 0.5811452863,
"autogenerated": false,
"ratio": 3.8938656280428434,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.49750109143428434,
"avg_score": null,
"num_lines": null
} |
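transfer_clicked() assembles a copystuff.py command line from the three dialog fields and hands it to JobStatusHandler, which splits it for subprocess.Popen. A sketch of the resulting command string; the IPTS number and folder names are made up.

script = '/SNS/NOM/shared/autoNOM/stable/copystuff.py'
full_script = script + " -i " + "12345" + " -f " + "autoNOM_00" + " -t " + "autoNOM_01"
# JobStatusHandler ultimately runs subprocess.Popen(full_script.split())
print(full_script)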
from __future__ import (absolute_import, division, print_function)
from qtpy.QtWidgets import (QFileDialog)
import os
from addie.autoNOM.auto_populate_widgets import AutoPopulateWidgets
from addie.help_handler.help_gui import check_status
class Step1GuiHandler(object):
def __init__(self, main_window=None):
self.main_window = main_window
#self.main_window = parent
def new_autonom_group_box(self, status=True):
self.main_window.autonom_ui.name_of_output_folder.setEnabled(status)
def set_main_window_title(self):
self.main_window.setWindowTitle("working folder: " + self.main_window.current_folder)
def check_go_button(self):
if self.all_mandatory_fields_non_empty():
self.main_window.autonom_ui.run_autonom_script.setEnabled(True)
self.main_window.autonom_ui.create_exp_ini_button.setEnabled(True)
else:
self.main_window.autonom_ui.run_autonom_script.setEnabled(False)
self.main_window.autonom_ui.create_exp_ini_button.setEnabled(False)
check_status(parent=self.main_window, button_name='autonom')
def all_mandatory_fields_non_empty(self):
_diamond_field = str(self.main_window.autonom_ui.diamond.text()).strip().replace(" ", "")
if _diamond_field == "":
return False
_diamond_background_field = str(self.main_window.autonom_ui.diamond_background.text()).strip().replace(" ", "")
if _diamond_background_field == "":
return False
_vanadium_field = str(self.main_window.autonom_ui.vanadium.text()).strip().replace(" ", "")
if _vanadium_field == "":
return False
_vanadium_background_field = str(self.main_window.autonom_ui.vanadium_background.text()).strip().replace(" ", "")
if _vanadium_background_field == "":
return False
_sample_background_field = str(self.main_window.autonom_ui.sample_background.text()).strip().replace(" ", "")
if _sample_background_field == "":
return False
if self.main_window.autonom_ui.create_folder_button.isChecked():
if self.main_window.autonom_ui.manual_output_folder.isChecked() \
and (str(self.main_window.autonom_ui.manual_output_folder_field.text()).strip() == ""):
return False
return True
def manual_output_folder_button_handler(self):
if self.main_window.autonom_ui.manual_output_folder.isChecked():
status = True
else:
status = False
self.main_window.autonom_ui.manual_output_folder_field.setEnabled(status)
self.main_window.autonom_ui.manual_output_folder_button.setEnabled(status)
def select_working_folder(self):
_current_folder = self.main_window.current_folder
_new_folder = QFileDialog.getExistingDirectory(parent=self.main_window,
caption="Select working directory",
directory=_current_folder)
if not _new_folder:
return
if isinstance(_new_folder, tuple):
_new_folder = _new_folder[0]
self.main_window.current_folder = _new_folder
# o_gui = Step1GuiHandler(parent=self.main_window)
# o_gui.set_main_window_title()
self.set_main_window_title()
        # move to the newly specified folder
os.chdir(_new_folder)
o_auto_populate = AutoPopulateWidgets(main_window=self.main_window)
o_auto_populate.run()
def select_manual_output_folder(self):
# _current_folder = self.main_window.current_folder
dlg = QFileDialog(parent=self.main_window,
caption="Select or Define Output Directory")
dlg.setFileMode(QFileDialog.Directory)
if dlg.exec_():
output_folder_name = str(dlg.selectedFiles()[0])
self.main_window.autonom_ui.manual_output_folder_field.setText(output_folder_name)
| {
"repo_name": "neutrons/FastGR",
"path": "addie/autoNOM/step1_gui_handler.py",
"copies": "1",
"size": "4021",
"license": "mit",
"hash": -8819001373649031000,
"line_mean": 42.2365591398,
"line_max": 121,
"alpha_frac": 0.6321810495,
"autogenerated": false,
"ratio": 3.779135338345865,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4911316387845865,
"avg_score": null,
"num_lines": null
} |
from __future__ import (absolute_import, division, print_function)
from qtpy.QtWidgets import (QFileDialog)
import os
from addie.utilities.file_handler import FileHandler
from addie.widgets.filedialog import get_save_file
class ConfigFileNameHandler(object):
filename = ''
def __init__(self, parent=None):
self.parent = parent
def request_config_file_name(self, open_flag=True):
_caption = 'Select or Define a Configuration File Name'
_current_folder = self.parent.configuration_folder
if open_flag:
_file = QFileDialog.getOpenFileName(parent=self.parent,
filter='config (*.cfg)',
caption=_caption,
directory=_current_folder)
if isinstance(_file, tuple):
_file = _file[0]
else:
_file, _ = get_save_file(parent=self.parent,
filter={'config (*.cfg)':'cfg'},
caption=_caption,
directory=_current_folder)
if not _file:
self.filename = ''
return
_new_path = os.path.dirname(_file)
self.parent.configuration_folder = _new_path
o_file_handler = FileHandler(filename=_file)
o_file_handler.check_file_extension(ext_requested='cfg')
self.filename = o_file_handler.filename
| {
"repo_name": "neutrons/FastGR",
"path": "addie/menu/file/configuration/config_file_name_handler.py",
"copies": "1",
"size": "1502",
"license": "mit",
"hash": -6436196004707812000,
"line_mean": 36.55,
"line_max": 74,
"alpha_frac": 0.5386151798,
"autogenerated": false,
"ratio": 4.635802469135802,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00035714285714285714,
"num_lines": 40
} |
from __future__ import (absolute_import, division, print_function)
from qtpy.QtWidgets import (QFileDialog)
import os
class BrowseFileFolderHandler(object):
_output_ext = '/rietveld'
def __init__(self, parent=None):
self.parent = parent
self.current_folder = parent.file_path
def browse_file(self, type='calibration'):
if type == 'calibration':
_current_folder = self.parent.calibration_folder
_filter = "calib (*.h5);;all (*.*)"
_caption = "Select Calibration File"
_output_ui = self.parent.ui.mantid_calibration_value
else:
_current_folder = self.parent.characterization_folder
_filter = "characterization (*-rietveld.txt);;all (*.*)"
_caption = "Select Characterization File"
_output_ui = self.parent.ui.mantid_characterization_value
_file = QFileDialog.getOpenFileName(parent=self.parent,
filter=_filter,
caption=_caption,
directory=_current_folder)
if not _file:
return
if isinstance(_file, tuple):
_file = _file[0]
_output_ui.setText(str(_file))
_path = os.path.dirname(str(_file))
if type == 'calibration':
self.parent.calibration_current_folder = _path
else:
self.parent.characterization_current_folder = _path
def browse_folder(self):
_current_folder = self.current_folder
_caption = "Select Output Folder"
_folder = QFileDialog.getExistingDirectory(parent=self.parent,
caption=_caption,
directory=_current_folder)
if not _folder:
return
if isinstance(_folder, tuple):
_folder = _folder[0]
self.parent.ui.mantid_output_directory_value.setText(str(_folder) + self._output_ext)
| {
"repo_name": "neutrons/FastGR",
"path": "addie/mantid_handler/browse_file_folder_handler.py",
"copies": "1",
"size": "2051",
"license": "mit",
"hash": 3057414565389344300,
"line_mean": 35.625,
"line_max": 93,
"alpha_frac": 0.5470502194,
"autogenerated": false,
"ratio": 4.410752688172043,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5457802907572044,
"avg_score": null,
"num_lines": null
} |
from __future__ import (absolute_import, division, print_function)
from qtpy.QtWidgets import (QFileDialog)
from addie.utilities.file_handler import FileHandler
from addie.menu.preview_ascii.preview_ascii_window import PreviewAsciiWindow
class Step3GuiHandler(object):
def __init__(self, parent=None):
self.parent_no_ui = parent
self.parent = parent.ui
self.current_folder = parent.current_folder
def browse_file(self):
_ascii_file = QFileDialog.getOpenFileName(parent=self.parent_no_ui,
caption='Select file to display',
directory=self.current_folder)
if not _ascii_file:
return
if isinstance(_ascii_file, tuple):
_ascii_file = _ascii_file[0]
_ascii_file = str(_ascii_file)
o_file_handler = FileHandler(filename=_ascii_file)
o_file_handler.retrieve_contain()
text_contain = o_file_handler.file_contain
o_preview = PreviewAsciiWindow(parent=self.parent_no_ui, text=text_contain, filename=_ascii_file)
o_preview.show()
| {
"repo_name": "neutrons/FastGR",
"path": "addie/menu/preview_ascii/step3_gui_handler.py",
"copies": "1",
"size": "1153",
"license": "mit",
"hash": -8685937461161785000,
"line_mean": 37.4333333333,
"line_max": 105,
"alpha_frac": 0.6244579358,
"autogenerated": false,
"ratio": 4.017421602787456,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.001122813439375284,
"num_lines": 30
} |
from __future__ import (absolute_import, division, print_function)
from qtpy.QtWidgets import (QMainWindow)
from addie.utilities import load_ui
from addie.help_handler.help_gui_table_initialization import HelpGuiTableInitialization
class HelpGui(QMainWindow):
'''
button_name = ['autonom', 'ndabs', 'scans', 'mantid']
'''
column_widths = [330, 40]
def __init__(self, parent=None, button_name=''):
QMainWindow.__init__(self, parent)
self.parent = parent
self.button_name = button_name
self.ui = load_ui('helpGui.ui', baseinstance=self)
self.init_global_gui()
self.init_table()
def init_table(self):
self.o_table = HelpGuiTableInitialization(parent=self, button_name=self.button_name)
self.o_table.fill()
def init_global_gui(self):
for index, col_width in enumerate(self.column_widths):
self.ui.table_status.setColumnWidth(index, col_width)
self.setWindowTitle("Button Status: {}".format(self.button_name))
def refresh(self):
self.o_table.refill()
def closeEvent(self, event=None):
if self.button_name == 'autonom':
self.parent.o_help_autonom = None
elif self.button_name == 'ndabs':
self.parent.o_help_ndabs = None
elif self.button_name == 'scans':
self.parent.o_help_scans = None
elif self.button_name == 'mantid':
self.parent.o_help_mantid = None
def hide_button_clicked(self):
self.closeEvent(event=None)
self.close()
def help_button_activator(parent=None, button_name='autonom'):
if button_name == 'autonom':
if parent.o_help_autonom is None:
o_help = HelpGui(parent=parent, button_name=button_name)
o_help.show()
parent.o_help_autonom = o_help
else:
parent.o_help_autonom.refresh()
parent.o_help_autonom.activateWindow()
elif button_name == 'ndabs':
if parent.o_help_ndabs is None:
o_help = HelpGui(parent=parent, button_name=button_name)
o_help.show()
parent.o_help_ndabs = o_help
else:
parent.o_help_ndabs.refresh()
parent.o_help_ndabs.activateWindow()
elif button_name == 'scans':
if parent.o_help_scans is None:
o_help = HelpGui(parent=parent, button_name=button_name)
o_help.show()
parent.o_help_scans = o_help
else:
parent.o_help_scans.refresh()
parent.o_help_scans.activateWindow()
elif button_name == 'mantid':
if parent.o_help_mantid is None:
o_help = HelpGui(parent=parent, button_name=button_name)
o_help.show()
parent.o_help_mantid = o_help
else:
parent.o_help_mantid.refresh()
parent.o_help_mantid.activateWindow()
def check_status(parent=None, button_name='autonom'):
if (button_name == 'autonom') and parent.o_help_autonom:
parent.o_help_autonom.refresh()
if (button_name == 'ndabs') and parent.o_help_ndabs:
parent.o_help_ndabs.refresh()
if (button_name == 'scans') and parent.o_help_scans:
parent.o_help_scans.refresh()
if (button_name == 'mantid') and parent.o_help_mantid:
parent.o_help_mantid.refresh()
| {
"repo_name": "neutrons/FastGR",
"path": "addie/help_handler/help_gui.py",
"copies": "1",
"size": "3347",
"license": "mit",
"hash": 2865293408460738000,
"line_mean": 34.2315789474,
"line_max": 92,
"alpha_frac": 0.6077083956,
"autogenerated": false,
"ratio": 3.4363449691991788,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4544053364799179,
"avg_score": null,
"num_lines": null
} |
from __future__ import (absolute_import, division, print_function)
from qtpy.QtWidgets import QMainWindow
from addie.utilities import load_ui
from addie.processing.idl.mantid_script_handler import MantidScriptHandler
from addie.utilities.file_handler import FileHandler
from addie.widgets.filedialog import get_save_file
class MantidReductionView(QMainWindow):
def __init__(self, parent=None, father=None):
        self.parent = parent
self.father = father
QMainWindow.__init__(self, parent=parent)
self.ui = load_ui('previewMantid.ui', baseinstance=self)
self.populate_view()
def populate_view(self):
_parameters = self.father.parameters
_script = ''
runs = _parameters['runs']
for _run in runs:
o_mantid_script = MantidScriptHandler(parameters=_parameters, run=_run)
_script += o_mantid_script.script
_script += "\n\n"
_script = "from mantid.simpleapi import *\nimport mantid\n\n" + _script
self.ui.preview_mantid_script_textedit.setText(_script)
def save_as_clicked(self):
_current_folder = self.parent.current_folder
_python_file, _ = get_save_file(parent=self.parent,
caption='Output File Name',
directory=_current_folder,
filter={'python (*.py)':'py',
'All Files (*.*)':''})
if not _python_file:
return
_script = str(self.ui.preview_mantid_script_textedit.toPlainText())
o_file_handler = FileHandler(filename=_python_file)
o_file_handler.create_ascii(contain=_script, carriage_return=False)
| {
"repo_name": "neutrons/FastGR",
"path": "addie/processing/idl/mantid_reduction_view.py",
"copies": "1",
"size": "1765",
"license": "mit",
"hash": 631986975396051000,
"line_mean": 37.3695652174,
"line_max": 83,
"alpha_frac": 0.6016997167,
"autogenerated": false,
"ratio": 3.993212669683258,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0008755431137549937,
"num_lines": 46
} |
from __future__ import (absolute_import, division, print_function)
from qtpy.QtWidgets import QMainWindow
from addie.utilities import load_ui
from addie.processing.mantid.master_table.tree_definition import INDEX_OF_INELASTIC_CORRECTION
class PlaczekHandler:
def __init__(self, parent=None, key=None, data_type='sample'):
if parent.placzek_ui is None:
parent.placzek_ui = PlaczekWindow(parent=parent, key=key, data_type=data_type)
parent.placzek_ui.show()
else:
parent.placzek_ui.activateWindow()
parent.placzek_ui.setFocus()
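# Window used to view and edit the Placzek inelastic-correction settings of a single
# master-table row, for either the 'sample' or the 'normalization' columns.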
class PlaczekWindow(QMainWindow):
parent = None
data_type = None
key = None
column = -1
def __init__(self, parent=None, key=None, data_type='sample'):
self.parent = parent
self.data_type = data_type
self.key = key
QMainWindow.__init__(self, parent=parent)
self.ui = load_ui('placzek.ui', baseinstance=self)
self.init_widgets()
self.set_column()
def set_column(self):
        self.column = INDEX_OF_INELASTIC_CORRECTION[0] if self.data_type == 'sample' else INDEX_OF_INELASTIC_CORRECTION[1]
def init_widgets(self):
'''initialize the widgets in the state we left them last time (for the same row)'''
master_table_list_ui = self.parent.master_table_list_ui[self.key]
if master_table_list_ui[self.data_type]['placzek_infos'] is None:
return
# initialize the widgets using previous values set
info_dict = master_table_list_ui[self.data_type]['placzek_infos']
order_index = info_dict['order_index']
self.ui.order_comboBox.setCurrentIndex(order_index)
is_self = info_dict['is_self']
self.ui.self_checkBox.setChecked(is_self)
is_interference = info_dict['is_interference']
self.ui.interference_checkBox.setChecked(is_interference)
fit_spectrum_index = info_dict['fit_spectrum_index']
self.ui.fit_spectrum_comboBox.setCurrentIndex(fit_spectrum_index)
lambda_fit_min = str(info_dict['lambda_fit_min'])
self.ui.lambda_fit_min.setText(lambda_fit_min)
lambda_fit_max = str(info_dict['lambda_fit_max'])
self.ui.lambda_fit_max.setText(lambda_fit_max)
lambda_fit_delta = str(info_dict['lambda_fit_delta'])
self.ui.lambda_fit_delta.setText(lambda_fit_delta)
lambda_calc_min = str(info_dict['lambda_calc_min'])
self.ui.lambda_calc_min.setText(lambda_calc_min)
lambda_calc_max = str(info_dict['lambda_calc_max'])
self.ui.lambda_calc_max.setText(lambda_calc_max)
lambda_calc_delta = str(info_dict['lambda_calc_delta'])
self.ui.lambda_calc_delta.setText(lambda_calc_delta)
def save_widgets(self):
master_table_list_ui = self.parent.master_table_list_ui[self.key]
order_combobox_text = self.ui.order_comboBox.currentText()
order_combobox_index = self.ui.order_comboBox.currentIndex()
is_self_checked = self.ui.self_checkBox.isChecked()
is_interference_checked = self.ui.interference_checkBox.isChecked()
fit_spectrum_combobox_text = self.ui.fit_spectrum_comboBox.currentText()
fit_spectrum_combobox_index = self.ui.fit_spectrum_comboBox.currentIndex()
lambda_fit_min = self.ui.lambda_fit_min.text()
lambda_fit_max = self.ui.lambda_fit_max.text()
lambda_fit_delta = self.ui.lambda_fit_delta.text()
lambda_calc_min = self.ui.lambda_calc_min.text()
lambda_calc_max = self.ui.lambda_calc_max.text()
lambda_calc_delta = self.ui.lambda_calc_delta.text()
info_dict = {'order_text': order_combobox_text,
'order_index': order_combobox_index,
'is_self': is_self_checked,
'is_interference': is_interference_checked,
'fit_spectrum_text': fit_spectrum_combobox_text,
'fit_spectrum_index': fit_spectrum_combobox_index,
'lambda_fit_min': lambda_fit_min,
'lambda_fit_max': lambda_fit_max,
'lambda_fit_delta': lambda_fit_delta,
'lambda_calc_min': lambda_calc_min,
'lambda_calc_max': lambda_calc_max,
'lambda_calc_delta': lambda_calc_delta}
master_table_list_ui[self.data_type]['placzek_infos'] = info_dict
self.parent.master_table_list_ui[self.key] = master_table_list_ui
def ok_pressed(self):
self.save_widgets()
self.parent.check_master_table_column_highlighting(column=self.column)
self.close()
def cancel_pressed(self):
self.close()
def closeEvent(self, c):
self.parent.placzek_ui = None
| {
"repo_name": "neutrons/FastGR",
"path": "addie/processing/mantid/master_table/placzek_handler.py",
"copies": "1",
"size": "4787",
"license": "mit",
"hash": -7999110216305752000,
"line_mean": 38.5619834711,
"line_max": 120,
"alpha_frac": 0.6342176729,
"autogenerated": false,
"ratio": 3.390226628895184,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9521415058431584,
"avg_score": 0.0006058486727201711,
"num_lines": 121
} |
from __future__ import (absolute_import, division, print_function)
from qtpy.QtWidgets import QMainWindow, QFileDialog
from addie.utilities import load_ui
from addie.initialization.widgets import main_tab as main_tab_initialization
class AdvancedWindowLauncher(object):
def __init__(self, parent=None):
self.parent = parent
if self.parent.advanced_window_ui is None:
_advanced = AdvancedWindow(parent=self.parent)
_advanced.show()
self.parent.advanced_window_ui = _advanced
else:
self.parent.advanced_window_ui.setFocus()
self.parent.advanced_window_ui.activateWindow()
class AdvancedWindow(QMainWindow):
def __init__(self, parent=None):
self.parent = parent
QMainWindow.__init__(self, parent=parent)
self.ui = load_ui('advanced_window.ui', baseinstance=self)
self.setWindowTitle("Advanced Window for Super User Only !")
self.init_widgets()
def init_widgets(self):
_idl_status = False
_mantid_status = False
if self.parent.post_processing == 'idl':
_idl_status = True
else:
_mantid_status = True
self.ui.idl_groupbox.setVisible(self.parent.advanced_window_idl_groupbox_visible)
self.ui.idl_post_processing_button.setChecked(_idl_status)
self.ui.mantid_post_processing_button.setChecked(_mantid_status)
instrument = self.parent.instrument["full_name"]
list_instrument_full_name = self.parent.list_instrument["full_name"]
self.list_instrument_full_name = list_instrument_full_name
list_instrument_short_name = self.parent.list_instrument["short_name"]
self.list_instrument_short_name = list_instrument_short_name
self.ui.instrument_comboBox.addItems(list_instrument_full_name)
index_instrument = self.ui.instrument_comboBox.findText(instrument)
self.ui.instrument_comboBox.setCurrentIndex(index_instrument)
self.parent.instrument["short_name"] = list_instrument_short_name[index_instrument]
self.parent.instrument["full_name"] = list_instrument_full_name[index_instrument]
self.ui.cache_dir_label.setText(self.parent.cache_folder)
self.ui.output_dir_label.setText(self.parent.output_folder)
self.ui.centralwidget.setContentsMargins(10, 10, 10, 10)
def is_idl_selected(self):
return self.ui.idl_post_processing_button.isChecked()
def post_processing_clicked(self):
if self.is_idl_selected():
_post = 'idl'
_idl_groupbox_visible = True
else:
_post = 'mantid'
_idl_groupbox_visible = False
self.ui.idl_groupbox.setVisible(_idl_groupbox_visible)
self.parent.post_processing = _post
self.parent.activate_reduction_tabs() # hide or show right tabs
self.parent.advanced_window_idl_groupbox_visible = _idl_groupbox_visible
def instrument_changed(self, index):
self.parent.instrument["short_name"] = self.list_instrument_short_name[index]
self.parent.instrument["full_name"] = self.list_instrument_full_name[index]
main_tab_initialization.set_default_folder_path(self.parent)
def cache_dir_button_clicked(self):
_cache_folder = QFileDialog.getExistingDirectory(caption="Select Cache Folder ...",
directory=self.parent.cache_folder,
options=QFileDialog.ShowDirsOnly)
if _cache_folder:
self.ui.cache_dir_label.setText(str(_cache_folder))
self.parent.cache_folder = str(_cache_folder)
def output_dir_button_clicked(self):
_output_folder = QFileDialog.getExistingDirectory(caption="Select Output Folder ...",
directory=self.parent.output_folder,
options=QFileDialog.ShowDirsOnly)
if _output_folder:
self.ui.output_dir_label.setText(str(_output_folder))
self.parent.output_folder = str(_output_folder)
def closeEvent(self, c):
self.parent.advanced_window_ui = None
| {
"repo_name": "neutrons/FastGR",
"path": "addie/menu/file/settings/advanced_file_window.py",
"copies": "1",
"size": "4263",
"license": "mit",
"hash": -3609811916335244300,
"line_mean": 41.2079207921,
"line_max": 94,
"alpha_frac": 0.6403940887,
"autogenerated": false,
"ratio": 4.025495750708215,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0014604402720792566,
"num_lines": 101
} |
from __future__ import (absolute_import, division, print_function)
from qtpy.QtWidgets import (QMainWindow, QPushButton, QTableWidgetItem) # noqa
import psutil
from addie.utilities import load_ui
class JobMonitorInterface(QMainWindow):
column_width = [200, 250]
def __init__(self, parent=None):
self.parent = parent
QMainWindow.__init__(self, parent=parent)
self.ui = load_ui('jobStatus.ui', baseinstance=self)
self.init_table()
self.launch_table_update_thread()
def launch_table_update_thread(self):
_run_thread = self.parent.job_monitor_thread
_run_thread.setup(parent=self.parent, job_monitor_interface=self)
_run_thread.start()
def init_table(self):
for _index, _width in enumerate(self.column_width):
self.ui.tableWidget.setColumnWidth(_index, _width)
def closeEvent(self, event=None):
self.parent.job_monitor_thread.stop()
self.parent.job_monitor_interface = None
def clear_table_clicked(self):
self.parent.job_list = []
for _row in range(self.ui.tableWidget.rowCount()):
self.ui.tableWidget.removeRow(0)
def launch_logbook_thread(self):
self.parent.start_refresh_text_thread()
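    # Rebuild the job table from scratch: one row per job showing its name, its time stamp
    # and either an "Abort!" button (still processing) or its final status.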
def refresh_table(self, job_list):
for _row in range(self.ui.tableWidget.rowCount()):
self.ui.tableWidget.removeRow(0)
nbr_row = len(job_list)
for _row in range(nbr_row):
_row_job = job_list[_row]
self.ui.tableWidget.insertRow(_row)
# job name
_item = QTableWidgetItem(_row_job['job_name'])
self.ui.tableWidget.setItem(_row, 0, _item)
# time
_item = QTableWidgetItem(_row_job['time'])
self.ui.tableWidget.setItem(_row, 1, _item)
# action
_pid = _row_job['pid']
            # psutil.Process raises NoSuchProcess once the job has exited,
            # so treat that case the same as a process that is no longer running
            try:
                _is_running = psutil.Process(_pid).is_running()
            except psutil.NoSuchProcess:
                _is_running = False
            if not _is_running:
_item = QTableWidgetItem("Done!")
self.ui.tableWidget.setItem(_row, 2, _item)
else:
if _row_job['status'] == 'processing':
_widget = QPushButton()
_widget.setText("Abort!")
                    # 'checked' absorbs the bool emitted by clicked() so that
                    # 'row' keeps the row index captured at connect time
                    _widget.clicked.connect(lambda checked=False, row=_row:
                                            self.parent.kill_job(row))
self.ui.tableWidget.setCellWidget(_row, 2, _widget)
else:
_item = QTableWidgetItem("Killed!")
self.ui.tableWidget.setItem(_row, 2, _item)
| {
"repo_name": "neutrons/FastGR",
"path": "addie/utilities/job_monitor_interface.py",
"copies": "1",
"size": "2618",
"license": "mit",
"hash": 3065337109207150600,
"line_mean": 34.3783783784,
"line_max": 79,
"alpha_frac": 0.5741023682,
"autogenerated": false,
"ratio": 3.8785185185185185,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9952620886718518,
"avg_score": 0,
"num_lines": 74
} |
from __future__ import (absolute_import, division, print_function)
from qtpy.QtWidgets import (QMessageBox)
from qtpy import PYQT_VERSION, PYQT4, PYQT5
from numpy.version import version as numpy_version_str
from matplotlib import __version__ as matplotlib_version_str
import sys
import mantid
from addie import __version__ as addie_version
# qtpy does not provide an abstraction for this
if PYQT5:
from PyQt5.QtCore import QT_VERSION_STR # noqa
elif PYQT4:
from PyQt4.QtCore import QT_VERSION_STR # noqa
else:
raise RuntimeError('unknown qt version')
try:
from pyoncat import __version__ as pyoncat_version # noqa
except ImportError:
pyoncat_version = 'disabled'
class AboutDialog(object):
def __init__(self, parent=None):
self.parent = parent
def display(self):
addie_version = self.get_appli_version()
python_version = self.get_python_version()
numpy_version = numpy_version_str
mantid_version = mantid.__version__
matplotlib_version = matplotlib_version_str
qt_version = QT_VERSION_STR
pyqt_version = PYQT_VERSION
message = ''' Addie
version %s
Library versions:
- Python: %s
- Numpy: %s
- Mantid: %s
- Matplotlib: %s
- pyoncat: %s
- Qt: %s
- PyQt: %s
''' % (addie_version,
python_version,
numpy_version,
mantid_version,
matplotlib_version,
pyoncat_version,
qt_version,
pyqt_version)
QMessageBox.about(self.parent, "About Addie", message)
def get_appli_version(self):
if '+' in addie_version:
return addie_version.split('+')[0] + ' dev'
else:
return addie_version
def get_python_version(self):
str_version = sys.version_info
str_array = []
for value in str_version:
str_array.append(str(value))
return ".".join(str_array[0:3])
| {
"repo_name": "neutrons/FastGR",
"path": "addie/about.py",
"copies": "1",
"size": "2050",
"license": "mit",
"hash": -5234525687835011000,
"line_mean": 27.8732394366,
"line_max": 66,
"alpha_frac": 0.5965853659,
"autogenerated": false,
"ratio": 3.8679245283018866,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4964509894201886,
"avg_score": null,
"num_lines": null
} |
from __future__ import (absolute_import, division, print_function)
from qtpy.QtWidgets import (QPushButton, QTextEdit) # noqa
from addie.autoNOM.step1_utilities import Step1Utilities
from addie.processing.idl.step2_utilities import Step2Utilities
class HelpGuiTableInitialization(object):
row_height = 50
widget_bad = "color: rgb(255, 0, 0)"
widget_ok = "color: rgb(33, 118, 0)"
jump_message = "Jump There!"
def __init__(self, parent=None, button_name='autonom'):
self.parent = parent
self.ui = self.parent.parent.postprocessing_ui
self.button_name = button_name
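    # Populate the status table according to which button this help window belongs to.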
def fill(self):
if self.button_name == 'autonom':
self.fill_autonom()
elif self.button_name == 'ndabs':
self.fill_ndabs()
elif self.button_name == 'scans':
self.fill_scans()
elif self.button_name == 'mantid':
self.fill_mantid()
def refill(self):
nbr_row = self.parent.ui.table_status.rowCount()
for _row in range(nbr_row):
self.parent.ui.table_status.removeRow(0)
self.fill()
# STEP 1
def jump_to_step1_diamond(self):
self.ui.tabWidget_2.setCurrentIndex(0)
self.parent.parent.autonom_ui.diamond.setFocus()
self.parent.parent.activateWindow()
def jump_to_step1_diamond_background(self):
self.ui.tabWidget_2.setCurrentIndex(0)
self.parent.parent.autonom_ui.diamond_background.setFocus()
self.parent.parent.activateWindow()
def jump_to_step1_vanadium(self):
self.ui.tabWidget_2.setCurrentIndex(0)
self.parent.parent.autonom_ui.vanadium.setFocus()
self.parent.parent.activateWindow()
def jump_to_step1_vanadium_background(self):
self.ui.tabWidget_2.setCurrentIndex(0)
self.parent.parent.autonom_ui.vanadium_background.setFocus()
self.parent.parent.activateWindow()
def jump_to_step1_sample_background(self):
self.ui.tabWidget_2.setCurrentIndex(0)
self.parent.parent.autonom_ui.sample_background.setFocus()
self.parent.parent.activateWindow()
def jump_to_step1_create_folder(self):
self.ui.tabWidget_2.setCurrentIndex(0)
self.parent.parent.autonom_ui.manual_output_folder_field.setFocus()
self.parent.parent.activateWindow()
def fill_autonom(self):
o_step1_handler = Step1Utilities(main_window=self.parent.parent)
# diamond
self.parent.ui.table_status.insertRow(0)
self.parent.ui.table_status.setRowHeight(0, self.row_height)
_widget = QTextEdit()
_text = "Diamond Field Empty?<br/><b>AutoNom>Diamond</b>"
_widget.setHtml(_text)
if o_step1_handler.is_diamond_text_empty():
_widget.setStyleSheet(self.widget_bad)
else:
_widget.setStyleSheet(self.widget_ok)
self.parent.ui.table_status.setCellWidget(0, 0, _widget)
_widget_2 = QPushButton()
_widget_2.setEnabled(o_step1_handler.is_diamond_text_empty())
_widget_2.setText(self.jump_message)
_widget_2.clicked.connect(self.jump_to_step1_diamond)
self.parent.ui.table_status.setCellWidget(0, 1, _widget_2)
# diamond background
self.parent.ui.table_status.insertRow(1)
self.parent.ui.table_status.setRowHeight(1, self.row_height)
_widget = QTextEdit()
_text = "Diamond Background Field Empty?<br/><b>AutoNom>Diamond Background</b>"
_widget.setHtml(_text)
if o_step1_handler.is_diamond_background_text_empty():
_widget.setStyleSheet(self.widget_bad)
else:
_widget.setStyleSheet(self.widget_ok)
self.parent.ui.table_status.setCellWidget(1, 0, _widget)
_widget_2 = QPushButton()
_widget_2.setEnabled(o_step1_handler.is_diamond_background_text_empty())
_widget_2.setText(self.jump_message)
_widget_2.clicked.connect(self.jump_to_step1_diamond_background)
self.parent.ui.table_status.setCellWidget(1, 1, _widget_2)
# vanadium
self.parent.ui.table_status.insertRow(2)
self.parent.ui.table_status.setRowHeight(2, self.row_height)
_widget = QTextEdit()
_text = "Vanadium Field Empty?<br/><b>AutoNom>Vanadium</b>"
_widget.setHtml(_text)
if o_step1_handler.is_vanadium_text_empty():
_widget.setStyleSheet(self.widget_bad)
else:
_widget.setStyleSheet(self.widget_ok)
self.parent.ui.table_status.setCellWidget(2, 0, _widget)
_widget_2 = QPushButton()
_widget_2.setEnabled(o_step1_handler.is_vanadium_text_empty())
_widget_2.setText(self.jump_message)
_widget_2.clicked.connect(self.jump_to_step1_vanadium)
self.parent.ui.table_status.setCellWidget(2, 1, _widget_2)
# vanadium background
self.parent.ui.table_status.insertRow(3)
self.parent.ui.table_status.setRowHeight(3, self.row_height)
_widget = QTextEdit()
_text = "Vanadium Background Field Empty?<br/><b>AutoNom>Vanadium Background</b>"
_widget.setHtml(_text)
if o_step1_handler.is_vanadium_background_text_empty():
_widget.setStyleSheet(self.widget_bad)
else:
_widget.setStyleSheet(self.widget_ok)
self.parent.ui.table_status.setCellWidget(3, 0, _widget)
_widget_2 = QPushButton()
_widget_2.setEnabled(o_step1_handler.is_vanadium_background_text_empty())
_widget_2.setText(self.jump_message)
_widget_2.clicked.connect(self.jump_to_step1_vanadium_background)
self.parent.ui.table_status.setCellWidget(3, 1, _widget_2)
# sample background
self.parent.ui.table_status.insertRow(4)
self.parent.ui.table_status.setRowHeight(4, self.row_height)
_widget = QTextEdit()
_text = "Sample Background Field Empty?<br/><b>AutoNom>Sample Background</b>"
_widget.setHtml(_text)
if o_step1_handler.is_sample_background_text_empty():
_widget.setStyleSheet(self.widget_bad)
else:
_widget.setStyleSheet(self.widget_ok)
self.parent.ui.table_status.setCellWidget(4, 0, _widget)
_widget_2 = QPushButton()
_widget_2.setEnabled(o_step1_handler.is_sample_background_text_empty())
_widget_2.setText(self.jump_message)
_widget_2.clicked.connect(self.jump_to_step1_sample_background)
self.parent.ui.table_status.setCellWidget(4, 1, _widget_2)
# create folder button
self.parent.ui.table_status.insertRow(5)
self.parent.ui.table_status.setRowHeight(5, self.row_height + 20)
_widget = QTextEdit()
_text = "Create Folder Button Status?<br/><b>AutoNom>Create New AutoNom Folder</b>"
_widget.setHtml(_text)
if o_step1_handler.is_create_folder_button_status_ok():
_widget.setStyleSheet(self.widget_ok)
else:
_widget.setStyleSheet(self.widget_bad)
self.parent.ui.table_status.setCellWidget(5, 0, _widget)
_widget_2 = QPushButton()
_widget_2.setEnabled(not o_step1_handler.is_create_folder_button_status_ok())
_widget_2.setText(self.jump_message)
_widget_2.clicked.connect(self.jump_to_step1_create_folder)
self.parent.ui.table_status.setCellWidget(5, 1, _widget_2)
# STEP 2
def jump_to_step2_table(self):
self.ui.tabWidget_2.setCurrentIndex(1)
self.ui.tabWidget.setCurrentIndex(0)
self.ui.table.setFocus()
self.parent.parent.activateWindow()
def jump_to_step2_fourier_from(self):
self.ui.tabWidget_2.setCurrentIndex(1)
self.ui.tabWidget.setCurrentIndex(0)
self.ui.fourier_filter_from.setFocus()
self.parent.parent.activateWindow()
def jump_to_step2_fourier_to(self):
self.ui.tabWidget_2.setCurrentIndex(1)
self.ui.tabWidget.setCurrentIndex(0)
self.ui.fourier_filter_to.setFocus()
self.parent.parent.activateWindow()
def jump_to_step2_plazcek_from(self):
self.ui.tabWidget_2.setCurrentIndex(1)
self.ui.tabWidget.setCurrentIndex(0)
self.ui.plazcek_fit_range_min.setFocus()
self.parent.parent.activateWindow()
def jump_to_step2_plazcek_to(self):
self.ui.tabWidget_2.setCurrentIndex(1)
self.ui.tabWidget.setCurrentIndex(0)
self.ui.plazcek_fit_range_max.setFocus()
self.parent.parent.activateWindow()
def jump_to_step2_q_min(self):
self.ui.tabWidget_2.setCurrentIndex(1)
self.ui.tabWidget.setCurrentIndex(0)
self.ui.q_range_min.setFocus()
self.parent.parent.activateWindow()
def jump_to_step2_q_max(self):
self.ui.tabWidget_2.setCurrentIndex(1)
self.ui.tabWidget.setCurrentIndex(0)
self.ui.q_range_max.setFocus()
self.parent.parent.activateWindow()
def jump_to_step2_output_empty(self):
self.ui.tabWidget_2.setCurrentIndex(1)
self.ui.tabWidget.setCurrentIndex(0)
self.ui.run_ndabs_output_file_name.setFocus()
self.parent.parent.activateWindow()
def jump_to_step2_scans_output_file_name(self):
self.ui.tabWidget_2.setCurrentIndex(1)
self.ui.tabWidget.setCurrentIndex(0)
self.ui.sum_scans_output_file_name.setFocus()
self.parent.parent.activateWindow()
def jump_to_step2_mantid_browse_calibration(self):
self.ui.tabWidget_2.setCurrentIndex(1)
self.ui.tabWidget.setCurrentIndex(1)
self.ui.mantid_browse_calibration_button.setFocus()
self.parent.parent.activateWindow()
def jump_to_step2_mantid_browse_characterization(self):
self.ui.tabWidget_2.setCurrentIndex(1)
self.ui.tabWidget.setCurrentIndex(1)
self.ui.mantid_browse_characterization_button.setFocus()
self.parent.parent.activateWindow()
def jump_to_step2_mantid_number_of_bins(self):
self.ui.tabWidget_2.setCurrentIndex(1)
self.ui.tabWidget.setCurrentIndex(1)
self.ui.mantid_number_of_bins.setFocus()
self.parent.parent.activateWindow()
def jump_to_step2_mantid_min_crop_wavelength(self):
self.ui.tabWidget_2.setCurrentIndex(1)
self.ui.tabWidget.setCurrentIndex(1)
self.ui.mantid_min_crop_wavelength.setFocus()
self.parent.parent.activateWindow()
def jump_to_step2_mantid_max_crop_wavelength(self):
self.ui.tabWidget_2.setCurrentIndex(1)
self.ui.tabWidget.setCurrentIndex(1)
self.ui.mantid_max_crop_wavelength.setFocus()
self.parent.parent.activateWindow()
def jump_to_step2_mantid_vanadium_radius(self):
self.ui.tabWidget_2.setCurrentIndex(1)
self.ui.tabWidget.setCurrentIndex(1)
self.ui.mantid_vanadium_radius.setFocus()
self.parent.parent.activateWindow()
def jump_to_step2_mantid_output_directory_button(self):
self.ui.tabWidget_2.setCurrentIndex(1)
self.ui.tabWidget.setCurrentIndex(1)
self.ui.mantid_output_directoy_button.setFocus()
self.parent.parent.activateWindow()
def fill_scans(self):
o_step2_handler = Step2Utilities(parent=self.parent.parent)
# table status
self.parent.ui.table_status.insertRow(0)
self.parent.ui.table_status.setRowHeight(0, self.row_height)
_widget = QTextEdit()
_text = "Main Table Empty?<br/><b>Post Processing>Table</b>"
_widget.setHtml(_text)
if o_step2_handler.is_table_empty():
_widget.setStyleSheet(self.widget_bad)
else:
_widget.setStyleSheet(self.widget_ok)
self.parent.ui.table_status.setCellWidget(0, 0, _widget)
_widget_2 = QPushButton()
_widget_2.setEnabled(o_step2_handler.is_table_empty())
_widget_2.setText(self.jump_message)
_widget_2.clicked.connect(self.jump_to_step2_table)
self.parent.ui.table_status.setCellWidget(0, 1, _widget_2)
# at least one row checked
self.parent.ui.table_status.insertRow(1)
self.parent.ui.table_status.setRowHeight(1, self.row_height)
_widget = QTextEdit()
_text = "Main Table Row Selected?<br/><b>Post Processing>Table</b>"
_widget.setHtml(_text)
if o_step2_handler.at_least_one_row_checked():
_widget.setStyleSheet(self.widget_ok)
else:
_widget.setStyleSheet(self.widget_bad)
self.parent.ui.table_status.setCellWidget(1, 0, _widget)
_widget_2 = QPushButton()
_widget_2.setEnabled(not o_step2_handler.at_least_one_row_checked())
_widget_2.setText(self.jump_message)
_widget_2.clicked.connect(self.jump_to_step2_table)
self.parent.ui.table_status.setCellWidget(1, 1, _widget_2)
# output file name
self.parent.ui.table_status.insertRow(2)
self.parent.ui.table_status.setRowHeight(2, self.row_height)
_widget = QTextEdit()
_text = "Output File Name?<br/><b>Post Processing>Output File Name</b>"
_widget.setHtml(_text)
if not o_step2_handler.is_scans_output_file_name_empty():
_widget.setStyleSheet(self.widget_ok)
else:
_widget.setStyleSheet(self.widget_bad)
self.parent.ui.table_status.setCellWidget(2, 0, _widget)
_widget_2 = QPushButton()
_widget_2.setEnabled(o_step2_handler.is_scans_output_file_name_empty())
_widget_2.setText(self.jump_message)
_widget_2.clicked.connect(self.jump_to_step2_scans_output_file_name)
self.parent.ui.table_status.setCellWidget(2, 1, _widget_2)
def fill_ndabs(self):
o_step2_handler = Step2Utilities(parent=self.parent.parent)
# table status
self.parent.ui.table_status.insertRow(0)
self.parent.ui.table_status.setRowHeight(0, self.row_height)
_widget = QTextEdit()
_text = "Main Table Empty?<br/><b>Post Processing>Table</b>"
_widget.setHtml(_text)
if o_step2_handler.is_table_empty():
_widget.setStyleSheet(self.widget_bad)
else:
_widget.setStyleSheet(self.widget_ok)
self.parent.ui.table_status.setCellWidget(0, 0, _widget)
_widget_2 = QPushButton()
_widget_2.setEnabled(o_step2_handler.is_table_empty())
_widget_2.setText(self.jump_message)
_widget_2.clicked.connect(self.jump_to_step2_table)
self.parent.ui.table_status.setCellWidget(0, 1, _widget_2)
# at least one row checked
self.parent.ui.table_status.insertRow(1)
self.parent.ui.table_status.setRowHeight(1, self.row_height)
_widget = QTextEdit()
_text = "Main Table Row Selected?<br/><b>Post Processing>Table</b>"
_widget.setHtml(_text)
if o_step2_handler.at_least_one_row_checked():
_widget.setStyleSheet(self.widget_ok)
else:
_widget.setStyleSheet(self.widget_bad)
self.parent.ui.table_status.setCellWidget(1, 0, _widget)
_widget_2 = QPushButton()
_widget_2.setEnabled(not o_step2_handler.at_least_one_row_checked())
_widget_2.setText(self.jump_message)
_widget_2.clicked.connect(self.jump_to_step2_table)
self.parent.ui.table_status.setCellWidget(1, 1, _widget_2)
# missing fields in row checked
self.parent.ui.table_status.insertRow(2)
self.parent.ui.table_status.setRowHeight(2, self.row_height)
_widget = QTextEdit()
_text = "Is missing metadata in row checked?<br/><b>Post Processing>Table</b>"
_widget.setHtml(_text)
if o_step2_handler.are_row_checked_have_missing_fields():
_widget.setStyleSheet(self.widget_bad)
else:
_widget.setStyleSheet(self.widget_ok)
self.parent.ui.table_status.setCellWidget(2, 0, _widget)
_widget_2 = QPushButton()
_widget_2.setEnabled(o_step2_handler.are_row_checked_have_missing_fields())
_widget_2.setText(self.jump_message)
_widget_2.clicked.connect(self.jump_to_step2_table)
self.parent.ui.table_status.setCellWidget(2, 1, _widget_2)
# fourier filter from
self.parent.ui.table_status.insertRow(3)
self.parent.ui.table_status.setRowHeight(3, self.row_height)
_widget = QTextEdit()
_text = "Is Fourier From Widgets Empty?<br/><b>Post Processing>Fourier Filter From</b>"
_widget.setHtml(_text)
if o_step2_handler.is_fourier_filter_from_empty():
_widget.setStyleSheet(self.widget_bad)
else:
_widget.setStyleSheet(self.widget_ok)
self.parent.ui.table_status.setCellWidget(3, 0, _widget)
_widget_2 = QPushButton()
_widget_2.setEnabled(o_step2_handler.is_fourier_filter_from_empty())
_widget_2.setText(self.jump_message)
_widget_2.clicked.connect(self.jump_to_step2_fourier_from)
self.parent.ui.table_status.setCellWidget(3, 1, _widget_2)
# fourier filter to
self.parent.ui.table_status.insertRow(4)
self.parent.ui.table_status.setRowHeight(4, self.row_height)
_widget = QTextEdit()
_text = "Is Fourier To Widgets Empty?<br/><b>Post Processing>Fourier Filter To</b>"
_widget.setHtml(_text)
if o_step2_handler.is_fourier_filter_to_empty():
_widget.setStyleSheet(self.widget_bad)
else:
_widget.setStyleSheet(self.widget_ok)
self.parent.ui.table_status.setCellWidget(4, 0, _widget)
_widget_2 = QPushButton()
_widget_2.setEnabled(o_step2_handler.is_fourier_filter_to_empty())
_widget_2.setText(self.jump_message)
_widget_2.clicked.connect(self.jump_to_step2_fourier_to)
self.parent.ui.table_status.setCellWidget(4, 1, _widget_2)
# plazcek filter from
self.parent.ui.table_status.insertRow(5)
self.parent.ui.table_status.setRowHeight(5, self.row_height)
_widget = QTextEdit()
_text = "Is Plazcek From Widgets Empty?<br/><b>Post Processing>Plazcek Filter From</b>"
_widget.setHtml(_text)
if o_step2_handler.is_plazcek_from_empty():
_widget.setStyleSheet(self.widget_bad)
else:
_widget.setStyleSheet(self.widget_ok)
self.parent.ui.table_status.setCellWidget(5, 0, _widget)
_widget_2 = QPushButton()
_widget_2.setEnabled(o_step2_handler.is_plazcek_from_empty())
_widget_2.setText(self.jump_message)
_widget_2.clicked.connect(self.jump_to_step2_plazcek_from)
self.parent.ui.table_status.setCellWidget(5, 1, _widget_2)
# plazcek filter to
self.parent.ui.table_status.insertRow(6)
self.parent.ui.table_status.setRowHeight(6, self.row_height)
_widget = QTextEdit()
_text = "Is Plazcek To Widgets Empty?<br/><b>Post Processing>Plazcek Filter To</b>"
_widget.setHtml(_text)
if o_step2_handler.is_plazcek_to_empty():
_widget.setStyleSheet(self.widget_bad)
else:
_widget.setStyleSheet(self.widget_ok)
self.parent.ui.table_status.setCellWidget(6, 0, _widget)
_widget_2 = QPushButton()
_widget_2.setEnabled(o_step2_handler.is_plazcek_to_empty())
_widget_2.setText(self.jump_message)
_widget_2.clicked.connect(self.jump_to_step2_plazcek_to)
self.parent.ui.table_status.setCellWidget(6, 1, _widget_2)
# q min
self.parent.ui.table_status.insertRow(7)
self.parent.ui.table_status.setRowHeight(7, self.row_height)
_widget = QTextEdit()
_text = "Is Q min Widgets Empty?<br/><b>Post Processing>Q min</b>"
_widget.setHtml(_text)
if o_step2_handler.is_q_min_empty():
_widget.setStyleSheet(self.widget_bad)
else:
_widget.setStyleSheet(self.widget_ok)
self.parent.ui.table_status.setCellWidget(7, 0, _widget)
_widget_2 = QPushButton()
_widget_2.setEnabled(o_step2_handler.is_q_min_empty())
_widget_2.setText(self.jump_message)
_widget_2.clicked.connect(self.jump_to_step2_q_min)
self.parent.ui.table_status.setCellWidget(7, 1, _widget_2)
# q max
self.parent.ui.table_status.insertRow(8)
self.parent.ui.table_status.setRowHeight(8, self.row_height)
_widget = QTextEdit()
_text = "Is Q max Widgets Empty?<br/><b>Post Processing>Q max</b>"
_widget.setHtml(_text)
if o_step2_handler.is_q_max_empty():
_widget.setStyleSheet(self.widget_bad)
else:
_widget.setStyleSheet(self.widget_ok)
self.parent.ui.table_status.setCellWidget(8, 0, _widget)
_widget_2 = QPushButton()
_widget_2.setEnabled(o_step2_handler.is_q_max_empty())
_widget_2.setText(self.jump_message)
_widget_2.clicked.connect(self.jump_to_step2_q_max)
self.parent.ui.table_status.setCellWidget(8, 1, _widget_2)
# output file name
self.parent.ui.table_status.insertRow(9)
self.parent.ui.table_status.setRowHeight(9, self.row_height)
_widget = QTextEdit()
_text = "Is Output File Name Empty?<br/><b>Post Processing>Output File Name</b>"
_widget.setHtml(_text)
if o_step2_handler.is_ndabs_output_empty():
_widget.setStyleSheet(self.widget_bad)
else:
_widget.setStyleSheet(self.widget_ok)
self.parent.ui.table_status.setCellWidget(9, 0, _widget)
_widget_2 = QPushButton()
_widget_2.setEnabled(o_step2_handler.is_ndabs_output_empty())
_widget_2.setText(self.jump_message)
_widget_2.clicked.connect(self.jump_to_step2_output_empty)
self.parent.ui.table_status.setCellWidget(9, 1, _widget_2)
def fill_mantid(self):
self.row_height = 62
o_step1_handler = Step1Utilities(main_window=self.parent.parent)
o_step2_handler = Step2Utilities(parent=self.parent.parent)
# vanadium
_row = 0
self.parent.ui.table_status.insertRow(_row)
self.parent.ui.table_status.setRowHeight(_row, self.row_height)
_widget = QTextEdit()
_text = "Vanadium Field Empty?<br/><b>AutoNom>Vanadium</b>"
_widget.setHtml(_text)
if o_step1_handler.is_vanadium_text_empty():
_widget.setStyleSheet(self.widget_bad)
else:
_widget.setStyleSheet(self.widget_ok)
self.parent.ui.table_status.setCellWidget(_row, 0, _widget)
_widget_2 = QPushButton()
_widget_2.setEnabled(o_step1_handler.is_vanadium_text_empty())
_widget_2.setText(self.jump_message)
_widget_2.clicked.connect(self.jump_to_step1_vanadium)
self.parent.ui.table_status.setCellWidget(_row, 1, _widget_2)
# vanadium background
_row = 1
self.parent.ui.table_status.insertRow(_row)
self.parent.ui.table_status.setRowHeight(_row, self.row_height)
_widget = QTextEdit()
_text = "Vanadium Background Field Empty?<br/><b>AutoNom>Vanadium Background</b>"
_widget.setHtml(_text)
if o_step1_handler.is_vanadium_background_text_empty():
_widget.setStyleSheet(self.widget_bad)
else:
_widget.setStyleSheet(self.widget_ok)
self.parent.ui.table_status.setCellWidget(_row, 0, _widget)
_widget_2 = QPushButton()
_widget_2.setEnabled(o_step1_handler.is_vanadium_background_text_empty())
_widget_2.setText(self.jump_message)
_widget_2.clicked.connect(self.jump_to_step1_vanadium_background)
self.parent.ui.table_status.setCellWidget(_row, 1, _widget_2)
# table status
_row = 2
self.parent.ui.table_status.insertRow(_row)
self.parent.ui.table_status.setRowHeight(_row, self.row_height)
_widget = QTextEdit()
_text = "Main Table Empty?<br/><b>Post Processing>Table</b>"
_widget.setHtml(_text)
if o_step2_handler.is_table_empty():
_widget.setStyleSheet(self.widget_bad)
else:
_widget.setStyleSheet(self.widget_ok)
self.parent.ui.table_status.setCellWidget(_row, 0, _widget)
_widget_2 = QPushButton()
_widget_2.setEnabled(o_step2_handler.is_table_empty())
_widget_2.setText(self.jump_message)
_widget_2.clicked.connect(self.jump_to_step2_table)
self.parent.ui.table_status.setCellWidget(_row, 1, _widget_2)
# at least one row checked
_row = 3
self.parent.ui.table_status.insertRow(_row)
self.parent.ui.table_status.setRowHeight(_row, self.row_height)
_widget = QTextEdit()
_text = "Main Table Row Selected?<br/><b>Post Processing>Table</b>"
_widget.setHtml(_text)
if o_step2_handler.at_least_one_row_checked():
_widget.setStyleSheet(self.widget_ok)
else:
_widget.setStyleSheet(self.widget_bad)
self.parent.ui.table_status.setCellWidget(_row, 0, _widget)
_widget_2 = QPushButton()
_widget_2.setEnabled(not o_step2_handler.at_least_one_row_checked())
_widget_2.setText(self.jump_message)
_widget_2.clicked.connect(self.jump_to_step2_table)
self.parent.ui.table_status.setCellWidget(_row, 1, _widget_2)
# calibration
_row = 4
self.parent.ui.table_status.insertRow(_row)
self.parent.ui.table_status.setRowHeight(_row, self.row_height)
_widget = QTextEdit()
_text = "Calibration File Selected?<br/><b>Post Processing>Rietveld>Calibration</b>"
_widget.setHtml(_text)
if o_step2_handler.is_mantid_calibration_empty():
_widget.setStyleSheet(self.widget_bad)
else:
_widget.setStyleSheet(self.widget_ok)
self.parent.ui.table_status.setCellWidget(_row, 0, _widget)
_widget_2 = QPushButton()
_widget_2.setEnabled(o_step2_handler.is_mantid_calibration_empty())
_widget_2.setText(self.jump_message)
_widget_2.clicked.connect(self.jump_to_step2_mantid_browse_calibration)
self.parent.ui.table_status.setCellWidget(_row, 1, _widget_2)
# characterization
_row += 1
self.parent.ui.table_status.insertRow(_row)
self.parent.ui.table_status.setRowHeight(_row, self.row_height)
_widget = QTextEdit()
_text = "Characterization File Selected?<br/><b>Post Processing>Rietveld>Characterization</b>"
_widget.setHtml(_text)
if o_step2_handler.is_mantid_characterization_empty():
_widget.setStyleSheet(self.widget_bad)
else:
_widget.setStyleSheet(self.widget_ok)
self.parent.ui.table_status.setCellWidget(_row, 0, _widget)
_widget_2 = QPushButton()
_widget_2.setEnabled(o_step2_handler.is_mantid_characterization_empty())
_widget_2.setText(self.jump_message)
_widget_2.clicked.connect(self.jump_to_step2_mantid_browse_characterization)
self.parent.ui.table_status.setCellWidget(_row, 1, _widget_2)
# number of bins int
_row += 1
self.parent.ui.table_status.insertRow(_row)
self.parent.ui.table_status.setRowHeight(_row, self.row_height)
_widget = QTextEdit()
_text = "Is Number of Bins an Int?<br/><b>Post Processing>Rietveld>Number of Bins</b>"
_widget.setHtml(_text)
if o_step2_handler.is_mantid_number_of_bins_no_int():
_widget.setStyleSheet(self.widget_bad)
else:
_widget.setStyleSheet(self.widget_ok)
self.parent.ui.table_status.setCellWidget(_row, 0, _widget)
_widget_2 = QPushButton()
_widget_2.setEnabled(o_step2_handler.is_mantid_number_of_bins_no_int())
_widget_2.setText(self.jump_message)
_widget_2.clicked.connect(self.jump_to_step2_mantid_number_of_bins)
self.parent.ui.table_status.setCellWidget(_row, 1, _widget_2)
        # min crop wavelength
_row += 1
self.parent.ui.table_status.insertRow(_row)
self.parent.ui.table_status.setRowHeight(_row, self.row_height)
_widget = QTextEdit()
_text = "Is min Crop Wavelength a float?<br/><b>Post Processing>Rietveld>Crop Wavelength Min</b>"
_widget.setHtml(_text)
if o_step2_handler.is_mantid_min_crop_wavelength_no_float():
_widget.setStyleSheet(self.widget_bad)
else:
_widget.setStyleSheet(self.widget_ok)
self.parent.ui.table_status.setCellWidget(_row, 0, _widget)
_widget_2 = QPushButton()
_widget_2.setEnabled(o_step2_handler.is_mantid_min_crop_wavelength_no_float())
_widget_2.setText(self.jump_message)
_widget_2.clicked.connect(self.jump_to_step2_mantid_min_crop_wavelength)
self.parent.ui.table_status.setCellWidget(_row, 1, _widget_2)
        # max crop wavelength
_row += 1
self.parent.ui.table_status.insertRow(_row)
self.parent.ui.table_status.setRowHeight(_row, self.row_height)
_widget = QTextEdit()
_text = "Is max Crop Wavelength a float?<br/><b>Post Processing>Rietveld>Crop Wavelength Max</b>"
_widget.setHtml(_text)
if o_step2_handler.is_mantid_max_crop_wavelength_no_float():
_widget.setStyleSheet(self.widget_bad)
else:
_widget.setStyleSheet(self.widget_ok)
self.parent.ui.table_status.setCellWidget(_row, 0, _widget)
_widget_2 = QPushButton()
_widget_2.setEnabled(o_step2_handler.is_mantid_max_crop_wavelength_no_float())
_widget_2.setText(self.jump_message)
_widget_2.clicked.connect(self.jump_to_step2_mantid_max_crop_wavelength)
self.parent.ui.table_status.setCellWidget(_row, 1, _widget_2)
# vanadium radius
_row += 1
self.parent.ui.table_status.insertRow(_row)
self.parent.ui.table_status.setRowHeight(_row, self.row_height)
_widget = QTextEdit()
_text = "Is Vanadium Radius a float?<br/><b>Post Processing>Rietveld>Vanadium Radius</b>"
_widget.setHtml(_text)
if o_step2_handler.is_mantid_vanadium_radius_not_float():
_widget.setStyleSheet(self.widget_bad)
else:
_widget.setStyleSheet(self.widget_ok)
self.parent.ui.table_status.setCellWidget(_row, 0, _widget)
_widget_2 = QPushButton()
_widget_2.setEnabled(o_step2_handler.is_mantid_vanadium_radius_not_float())
_widget_2.setText(self.jump_message)
_widget_2.clicked.connect(self.jump_to_step2_mantid_vanadium_radius)
self.parent.ui.table_status.setCellWidget(_row, 1, _widget_2)
# output directory
_row += 1
self.parent.ui.table_status.insertRow(_row)
self.parent.ui.table_status.setRowHeight(_row, self.row_height)
_widget = QTextEdit()
_text = "Is Output Directory Empty?<br/><b>Post Processing>Rietveld>Output Directory</b>"
_widget.setHtml(_text)
if o_step2_handler.is_mantid_output_directory_empty():
_widget.setStyleSheet(self.widget_bad)
else:
_widget.setStyleSheet(self.widget_ok)
self.parent.ui.table_status.setCellWidget(_row, 0, _widget)
_widget_2 = QPushButton()
_widget_2.setEnabled(o_step2_handler.is_mantid_output_directory_empty())
_widget_2.setText(self.jump_message)
_widget_2.clicked.connect(self.jump_to_step2_mantid_output_directory_button)
self.parent.ui.table_status.setCellWidget(_row, 1, _widget_2)
| {
"repo_name": "neutrons/FastGR",
"path": "addie/help_handler/help_gui_table_initialization.py",
"copies": "1",
"size": "31524",
"license": "mit",
"hash": 5201004384156470000,
"line_mean": 44.0342857143,
"line_max": 105,
"alpha_frac": 0.6453495749,
"autogenerated": false,
"ratio": 3.231242312423124,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4376591887323124,
"avg_score": null,
"num_lines": null
} |
from __future__ import (absolute_import, division, print_function)
from qtpy.QtWidgets import QTableWidgetSelectionRange
from qtpy.QtCore import Qt
import numpy as np
from addie.processing.mantid.master_table.tree_definition import INDEX_OF_COLUMNS_SEARCHABLE
from addie.processing.mantid.master_table.tree_definition import INDEX_OF_COLUMNS_WITH_COMBOBOX
from addie.processing.mantid.master_table.tree_definition import INDEX_OF_SPECIAL_COLUMNS_SEARCHABLE
from addie.processing.mantid.master_table.tree_definition import INDEX_OF_COLUMNS_WITH_GEOMETRY_INFOS
from addie.processing.mantid.master_table.tree_definition import INDEX_OF_COLUMNS_WITH_CHEMICAL_FORMULA
from addie.processing.mantid.master_table.tree_definition import INDEX_OF_COLUMNS_WITH_MASS_DENSITY
from addie.processing.mantid.master_table.tree_definition import INDEX_OF_COLUMNS_WITH_ALIGN_AND_FOCUS_ARGS
from addie.processing.mantid.master_table.tree_definition import INDEX_OF_COLUMNS_WITH_ITEMS
from addie.processing.mantid.master_table.tree_definition import INDEX_OF_COLUMNS_WITH_CHECKBOX
from addie.processing.mantid.master_table.utilities import Utilities
class SelectionHandler:
right_column = -1
left_column = -2
top_row = -1
bottom_row = -2
def __init__(self, selection_range):
if len(selection_range) == 0:
return
# only considering the first selection in this class
selection_range = selection_range[0]
self.selection_range = selection_range
self.left_column = self.selection_range.leftColumn()
self.right_column = self.selection_range.rightColumn()
self.top_row = self.selection_range.topRow()
self.bottom_row = self.selection_range.bottomRow()
def nbr_column_selected(self):
return (self.right_column - self.left_column) + 1
def nbr_row_selected(self):
        return (self.bottom_row - self.top_row) + 1
def first_column_selected(self):
return self.left_column
def first_row_selected(self):
return self.top_row
def get_list_column(self):
return np.arange(self.left_column, self.right_column + 1)
def get_list_row(self):
return np.arange(self.top_row, self.bottom_row + 1)
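# Base class of the selection helpers below: holds the master table widget and helpers
# to map a table row to its entry in the parent's master_table_list_ui dictionary.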
class SelectionHandlerMaster:
def __init__(self, parent=None):
self.parent = parent
self.table_ui = self.parent.processing_ui.h3_table
def get_ui_key_for_this_row(self, row=-1):
_check_box_ui_of_this_row = self.table_ui.cellWidget(row, 0).children()[
1]
_table_list_ui = self.parent.master_table_list_ui
for _key in _table_list_ui.keys():
if _table_list_ui[_key]['active'] == _check_box_ui_of_this_row:
return _key
return None
def get_list_ui_for_this_row(self, row=-1):
_check_box_ui_of_this_row = self.table_ui.cellWidget(row, 0).children()[
1]
_table_list_ui = self.parent.master_table_list_ui
for _key in _table_list_ui.keys():
if _table_list_ui[_key]['active'] == _check_box_ui_of_this_row:
list_ui = [
_check_box_ui_of_this_row,
_table_list_ui[_key]['sample']['shape'],
_table_list_ui[_key]['sample']['abs_correction'],
_table_list_ui[_key]['sample']['mult_scat_correction'],
_table_list_ui[_key]['sample']['inelastic_correction'],
_table_list_ui[_key]['normalization']['abs_correction'],
_table_list_ui[_key]['normalization']['mult_scat_correction'],
_table_list_ui[_key]['normalization']['inelastic_correction'],
]
self.parent.master_table_list_ui[_key] = {}
return list_ui
return []
def lock_signals(self, list_ui=[], lock=True):
if list_ui == []:
return
for _ui in list_ui:
if _ui is not None:
_ui.blockSignals(lock)
def check_right_click_buttons(self):
nbr_row = self.table_ui.rowCount()
if nbr_row == 0:
status_button = False
else:
status_button = True
self.parent.master_table_right_click_buttons['activate_check_all']['status'] = status_button
self.parent.master_table_right_click_buttons['activate_uncheck_all']['status'] = status_button
self.parent.master_table_right_click_buttons['activate_inverse']['status'] = status_button
self.parent.master_table_right_click_buttons['cells_copy']['status'] = status_button
self.parent.master_table_right_click_buttons['cells_clear']['status'] = status_button
self.parent.master_table_right_click_buttons['rows_copy']['status'] = status_button
self.parent.master_table_right_click_buttons['rows_duplicate']['status'] = status_button
class TransferH3TableWidgetState(SelectionHandlerMaster):
def __init__(self, parent=None):
SelectionHandlerMaster.__init__(self, parent=parent)
def transfer_states(self, from_key=None, data_type='sample'):
selection = self.table_ui.selectedRanges()
o_selection = SelectionHandler(selection)
master_table_row_ui = self.parent.master_table_list_ui
# enable or disable all other selected rows (if only first column
# selected)
if (o_selection.nbr_column_selected() == 1):
range_row = o_selection.get_list_row()
column_selected = o_selection.first_column_selected()
o_utilities = Utilities(parent=self.parent)
from_row = o_utilities.get_row_index_from_row_key(row_key=from_key)
# activate row widget (first column)
if (column_selected == 0):
#state = self.table_ui.cellWidget(from_row, 0).children()[1].checkState()
state = master_table_row_ui[from_key]['active'].checkState()
# apply state to all the widgets
for _row in range_row:
_to_key = o_utilities.get_row_key_from_row_index(row=_row)
ui = master_table_row_ui[_to_key]['active']
ui.blockSignals(True)
ui.setCheckState(state)
ui.blockSignals(False)
# sample or normalization, shape, abs. corr., mult. scat. corr or
# inelastic corr.
elif (column_selected in INDEX_OF_COLUMNS_WITH_COMBOBOX):
ui = self.table_ui.cellWidget(
from_row, column_selected).children()[1]
index = ui.currentIndex()
for _row in range_row:
if _row == from_row:
continue
ui = self.table_ui.cellWidget(
_row, column_selected).children()[1]
if index > -1:
# ui.blockSignals(True)
ui.setCurrentIndex(index)
# ui.blockSignals(False)
elif (column_selected in INDEX_OF_COLUMNS_WITH_CHEMICAL_FORMULA):
o_utilities = Utilities(parent=self.parent)
_from_key = o_utilities.get_row_key_from_row_index(
row=from_row)
chemical_formula = str(
master_table_row_ui[_from_key][data_type]['material']['text'].text())
for _row in range_row:
if _row == from_row:
continue
_to_key = o_utilities.get_row_key_from_row_index(row=_row)
master_table_row_ui[_to_key][data_type]['material']['text'].setText(
chemical_formula)
elif (column_selected in INDEX_OF_COLUMNS_WITH_MASS_DENSITY):
o_utilities = Utilities(parent=self.parent)
_from_key = o_utilities.get_row_key_from_row_index(
row=from_row)
mass_density_info = master_table_row_ui[_from_key][data_type]['mass_density_infos']
mass_density_value = str(
master_table_row_ui[_from_key][data_type]['mass_density']['text'].text())
for _row in range_row:
if _row == from_row:
continue
_to_key = o_utilities.get_row_key_from_row_index(row=_row)
master_table_row_ui[_to_key][data_type]['mass_density_infos'] = mass_density_info
master_table_row_ui[_to_key][data_type]['mass_density']['text'].setText(
mass_density_value)
elif (column_selected in INDEX_OF_COLUMNS_WITH_GEOMETRY_INFOS):
o_utilities = Utilities(parent=self.parent)
_from_key = o_utilities.get_row_key_from_row_index(
row=from_row)
radius = str(
master_table_row_ui[_from_key][data_type]['geometry']['radius']['value'].text())
radius2 = str(
master_table_row_ui[_from_key][data_type]['geometry']['radius2']['value'].text())
height = str(
master_table_row_ui[_from_key][data_type]['geometry']['height']['value'].text())
for _row in range_row:
if _row == from_row:
continue
_to_key = o_utilities.get_row_key_from_row_index(row=_row)
master_table_row_ui[_to_key][data_type]['geometry']['radius']['value'].setText(
radius)
master_table_row_ui[_to_key][data_type]['geometry']['radius2']['value'].setText(
radius2)
master_table_row_ui[_to_key][data_type]['geometry']['height']['value'].setText(
height)
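# Copy, paste and remove operations acting on entire rows of the master table.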
class RowsHandler(SelectionHandlerMaster):
def __init__(self, parent=None):
SelectionHandlerMaster.__init__(self, parent=parent)
selection = self.table_ui.selectedRanges()
self.selection = selection
self.o_selection = SelectionHandler(selection)
def copy(self, row=None):
# select entire row
if row is None:
list_row = self.o_selection.get_list_row()
if len(list_row) > 1:
self.parent.ui.statusbar.setStyleSheet("color: red")
self.parent.ui.statusbar.showMessage(
"Please select only 1 row!", self.parent.statusbar_display_time)
return
row = list_row[0]
_table_ui = self.table_ui
nbr_col = _table_ui.columnCount()
_row_selection = QTableWidgetSelectionRange(row, 0, row, nbr_col - 1)
_table_ui.setRangeSelected(_row_selection, True)
self.parent.ui.statusbar.setStyleSheet("color: green")
self.parent.ui.statusbar.showMessage(
"Select another row to copy the current selected row!",
self.parent.statusbar_display_time)
self.parent.master_table_cells_copy['temp'] = []
self.parent.master_table_cells_copy['list_column'] = []
self.parent.master_table_cells_copy['row'] = row
#self.parent.master_table_right_click_buttons['rows_paste']['status'] = True
def paste(self, row=None):
if row is None:
list_to_row = self.o_selection.get_list_row()
nbr_col = self.table_ui.columnCount()
_row_selection = QTableWidgetSelectionRange(list_to_row[0],
0,
list_to_row[-1],
nbr_col - 1)
self.table_ui.setRangeSelected(_row_selection, True)
list_to_row = self.o_selection.get_list_row()
else:
list_to_row = [row]
from_row = self.parent.master_table_cells_copy['row']
nbr_col = self.table_ui.columnCount()
o_copy = CopyCells(parent=self.parent)
list_column_copy = np.arange(0, nbr_col)
for _row in list_to_row:
for _column in list_column_copy:
o_copy.copy_from_to(from_row=from_row,
from_col=_column,
to_row=_row)
def remove(self, row=None):
if row is None:
list_to_row = self.o_selection.get_list_row()
_first_row = list_to_row[0]
for _ in list_to_row:
self.remove(row=_first_row)
else:
# self.table_ui.blockSignals(True)
self.table_ui.setRangeSelected(self.selection[0], False)
self.table_ui.removeRow(row)
# self.table_ui.blockSignals(False)
self.check_right_click_buttons()
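# Clear, copy and paste operations acting on the currently selected block of cells.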
class CellsHandler(SelectionHandlerMaster):
def __init__(self, parent=None):
SelectionHandlerMaster.__init__(self, parent=parent)
selection = self.table_ui.selectedRanges()
self.o_selection = SelectionHandler(selection)
def clear(self):
list_row = self.o_selection.get_list_row()
list_column = self.o_selection.get_list_column()
for _row in list_row:
for _column in list_column:
if _column in INDEX_OF_COLUMNS_WITH_ITEMS:
self.table_ui.item(_row, _column).setText("")
elif _column in INDEX_OF_COLUMNS_WITH_COMBOBOX:
self.table_ui.cellWidget(_row, _column).children()[
1].setCurrentIndex(0)
elif _column in INDEX_OF_COLUMNS_WITH_CHECKBOX:
_disable_state = Qt.Unchecked
self.table_ui.cellWidget(_row, _column).children()[
1].setCheckState(_disable_state)
elif _column in INDEX_OF_COLUMNS_WITH_GEOMETRY_INFOS:
o_utilities = Utilities(parent=self.parent)
_key = o_utilities.get_row_key_from_row_index(row=_row)
# sample
if _column == INDEX_OF_COLUMNS_WITH_GEOMETRY_INFOS[0]:
data_type = 'sample'
else:
data_type = 'normalization'
geometry = self.parent.master_table_list_ui[_key][data_type]['geometry']
geometry['radius']['value'].setText("N/A")
geometry['radius2']['value'].setText("N/A")
geometry['height']['value'].setText("N/A")
elif _column in INDEX_OF_COLUMNS_WITH_CHEMICAL_FORMULA:
o_utilities = Utilities(parent=self.parent)
_key = o_utilities.get_row_key_from_row_index(row=_row)
# sample
if _column == INDEX_OF_COLUMNS_WITH_CHEMICAL_FORMULA[0]:
data_type = 'sample'
else:
data_type = 'normalization'
material = self.parent.master_table_list_ui[_key][data_type]['material']
material['text'].setText("")
elif _column in INDEX_OF_COLUMNS_WITH_MASS_DENSITY:
o_utilities = Utilities(parent=self.parent)
_key = o_utilities.get_row_key_from_row_index(row=_row)
# sample
if _column == INDEX_OF_COLUMNS_WITH_MASS_DENSITY[0]:
data_type = 'sample'
else:
data_type = 'normalization'
data_type_entry = self.parent.master_table_list_ui[_key][data_type]
mass_density = data_type_entry['mass_density']
mass_density['text'].setText("N/A")
mass_density_infos = data_type_entry['mass_density_infos']
mass_density_infos['number_density']['value'] = "N/A"
mass_density_infos['number_density']['selected'] = False
mass_density_infos['mass_density']['value'] = "N/A"
mass_density_infos['mass_density']['selected'] = True
mass_density_infos['mass']['value'] = "N/A"
mass_density_infos['mass']['selected'] = False
def copy(self):
        '''Only one row at a time is allowed in the copy.'''
list_row = self.o_selection.get_list_row()
nbr_row = len(list_row)
if nbr_row > 1:
self.parent.ui.statusbar.setStyleSheet("color: red")
self.parent.ui.statusbar.showMessage(
"Selection of columns must be in the same row!",
self.parent.statusbar_display_time)
return
list_column = self.o_selection.get_list_column()
#nbr_column = len(list_column)
# row_column_items = [['' for x in np.arange(nbr_column)]
# for y in np.arange(nbr_row)]
# for _row in np.arange(nbr_row):
# for _column in np.arange(nbr_column):
# _item = self.table_ui.item(_row, _column)
# if _item:
# row_column_items[_row][_column] = _item.text()
#self.parent.master_table_cells_copy['temp'] = row_column_items
self.parent.master_table_cells_copy['list_column'] = list_column
self.parent.master_table_cells_copy['row'] = list_row[0]
self.parent.master_table_right_click_buttons['cells_paste']['status'] = True
def paste(self):
list_column_copy = self.parent.master_table_cells_copy['list_column']
row_copy = self.parent.master_table_cells_copy['row']
list_row_paste = self.o_selection.get_list_row()
list_column_paste = self.o_selection.get_list_column()
# nbr_row_paste = len(list_row_paste)
# nbr_column_paste = len(list_column_paste)
#
#row_columns_items_to_copy = self.parent.master_table_cells_copy['temp']
#[nbr_row_copy, nbr_column_copy] = np.shape(row_columns_items_to_copy)
# if we don't select the same amount of columns, we stop here (and inform
# user of issue in statusbar
if list_column_copy[0] != list_column_paste[0]:
self.parent.ui.statusbar.setStyleSheet("color: red")
self.parent.ui.statusbar.showMessage(
"Copy and Paste first column selected do not match!",
self.parent.statusbar_display_time)
return
        # we only clicked one cell before using PASTE, so we can copy since the
        # first columns are the same
if len(list_column_paste) == 1:
o_copy = CopyCells(parent=self.parent)
for _row_paste in list_row_paste:
for _column in list_column_copy:
o_copy.copy_from_to(from_row=row_copy,
from_col=_column,
to_row=_row_paste)
else: # we clicked several columns before clicking PASTE
# in this case, the COPY and PASTE number of columns have to match
# perfectly
# not the same number of copy and paste columns selected
if len(list_column_copy) != len(list_column_paste):
self.parent.ui.statusbar.setStyleSheet("color: red")
self.parent.ui.statusbar.showMessage(
"Copy and Paste do not cover the same number of columns!",
self.parent.statusbar_display_time)
return
else:
                # check whether the copy and paste selections cover the same columns
list_intersection = set(
list_column_copy).intersection(list_column_paste)
if len(list_intersection) != len(list_column_copy):
self.parent.ui.statusbar.setStyleSheet("color: red")
self.parent.ui.statusbar.showMessage(
"Copy and Paste do not cover the same columns!",
self.parent.statusbar_display_time)
return
else:
                    # we selected the same number of columns, the same ones, and
                    # now we can copy the content
o_copy = CopyCells(parent=self.parent)
for _row_paste in list_row_paste:
for _column in list_column_copy:
o_copy.copy_from_to(from_row=row_copy,
from_col=_column,
to_row=_row_paste)
_row_paste += 1
class CopyCells:
def __init__(self, parent=None):
self.parent = parent
self.table_ui = self.parent.processing_ui.h3_table
def _copy_from_to_for_dict(self, from_row, to_row, data_type):
""" Utility function that copies a dictionary of values
from one row to another given the master table key as `data_type`
:param from_row: Row index we will be copying from
:type from_row: int
:param to_row: Row index we will be copying to
:type to_row: int
:param data_type: Key in master table for column to copy dict value
:type data_type: str
"""
o_utilities = Utilities(parent=self.parent)
_from_key = o_utilities.get_row_key_from_row_index(row=from_row)
_to_key = o_utilities.get_row_key_from_row_index(row=to_row)
_dict = self.parent.master_table_list_ui[_from_key][data_type]
self.parent.master_table_list_ui[_to_key][data_type] = _dict
def _copy_from_to_for_density(self, from_info, to_info, density_type):
""" Utility function that copies the density using
a density-type key.
:param from_info: Mass density info dictionary to copy from
:type from_info: dict
:param to_info: Mass density info dictionary to copy to
:type to_info: dict
:param density_type: Density-type key to use for copy-paste
:type density_type: str from ['number_density', 'mass_density', 'mass']
"""
from_density = from_info[density_type]
to_density = to_info[density_type]
to_density['value'] = from_density['value']
to_density['selected'] = from_density['selected']
def copy_from_to(self, from_row=-1, from_col=-1, to_row=-1):
if from_col in INDEX_OF_COLUMNS_WITH_ITEMS:
_from_cell_value = self.table_ui.item(from_row, from_col).text()
self.table_ui.item(to_row, from_col).setText(_from_cell_value)
elif from_col in INDEX_OF_COLUMNS_WITH_COMBOBOX:
ui = self.table_ui.cellWidget(from_row, from_col).children()[1]
_from_index = ui.currentIndex()
self.table_ui.cellWidget(to_row, from_col).children()[
1].setCurrentIndex(_from_index)
elif from_col in INDEX_OF_COLUMNS_WITH_CHECKBOX:
ui = self.table_ui.cellWidget(from_row, from_col).children()[1]
_state = ui.checkState()
self.table_ui.cellWidget(to_row, from_col).children()[
1].setCheckState(_state)
elif from_col in INDEX_OF_COLUMNS_WITH_GEOMETRY_INFOS:
o_utilities = Utilities(parent=self.parent)
_from_key = o_utilities.get_row_key_from_row_index(row=from_row)
_to_key = o_utilities.get_row_key_from_row_index(row=to_row)
if from_col == INDEX_OF_COLUMNS_WITH_GEOMETRY_INFOS[0]: # sample
data_type = 'sample'
else:
data_type = 'normalization'
from_geometry = self.parent.master_table_list_ui[_from_key][data_type]['geometry']
_radius = str(from_geometry['radius']['value'].text())
_radius2 = str(from_geometry['radius2']['value'].text())
_height = str(from_geometry['height']['value'].text())
to_geometry = self.parent.master_table_list_ui[_to_key][data_type]['geometry']
to_geometry['radius']['value'].setText(_radius)
to_geometry['radius2']['value'].setText(_radius2)
to_geometry['height']['value'].setText(_height)
elif from_col in INDEX_OF_COLUMNS_WITH_CHEMICAL_FORMULA:
o_utilities = Utilities(parent=self.parent)
_from_key = o_utilities.get_row_key_from_row_index(row=from_row)
_to_key = o_utilities.get_row_key_from_row_index(row=to_row)
if from_col == INDEX_OF_COLUMNS_WITH_CHEMICAL_FORMULA[0]: # sample
data_type = 'sample'
else:
data_type = 'normalization'
from_material = self.parent.master_table_list_ui[_from_key][data_type]['material']
to_material = self.parent.master_table_list_ui[_to_key][data_type]['material']
_chemical_formula = str(from_material['text'].text())
to_material['text'].setText(_chemical_formula)
elif from_col in INDEX_OF_COLUMNS_WITH_MASS_DENSITY:
o_utilities = Utilities(parent=self.parent)
_from_key = o_utilities.get_row_key_from_row_index(row=from_row)
_to_key = o_utilities.get_row_key_from_row_index(row=to_row)
if from_col == INDEX_OF_COLUMNS_WITH_MASS_DENSITY[0]: # sample
data_type = 'sample'
else:
data_type = 'normalization'
# Get the to and from dictionaries for either sample or
# normalization
from_data_type = self.parent.master_table_list_ui[_from_key][data_type]
to_data_type = self.parent.master_table_list_ui[_to_key][data_type]
# Convenience variables for the "from" variables
from_mass_density = from_data_type['mass_density']
from_info = from_data_type['mass_density_infos']
# Convenience variables for the "to" variables
to_mass_density = to_data_type['mass_density']
to_info = to_data_type['mass_density_infos']
# Copy-paste the "top-level" MassDensity (display in table)
_mass_density = str(from_mass_density['text'].text())
to_mass_density['text'].setText(_mass_density)
# Copy-paste the NumberDensity in Widget
self._copy_from_to_for_density(
from_info, to_info, 'number_density')
# Copy-paste the MassDensity in Widget
self._copy_from_to_for_density(from_info, to_info, 'mass_density')
# Copy-paste the Mass in Widget
self._copy_from_to_for_density(from_info, to_info, 'mass')
elif from_col in INDEX_OF_COLUMNS_WITH_ALIGN_AND_FOCUS_ARGS:
data_type = 'align_and_focus_args_infos'
self._copy_from_to_for_dict(from_row, to_row, data_type)
else:
self.parent.ui.statusbar.setStyleSheet("color: red")
            msg_string = "Don't know how to copy/paste the cell from row #{} to row #{} at column #{}"
msg = msg_string.format(from_row, to_row, from_col)
time = self.parent.statusbar_display_time * 2
self.parent.ui.statusbar.showMessage(msg, time)
class TableHandler(SelectionHandlerMaster):
list_of_columns_to_search_for = []
def __init__(self, parent=None):
SelectionHandlerMaster.__init__(self, parent=parent)
def search(self, text=""):
nbr_row = self.table_ui.rowCount()
if text == "":
# show everything
for _row in np.arange(nbr_row):
self.table_ui.setRowHidden(_row, False)
else:
# look in all the searchable columns, row by row
for _row in np.arange(nbr_row):
hide_it = True
for _col in INDEX_OF_COLUMNS_SEARCHABLE:
_text_cell = str(self.table_ui.item(
_row, _col).text()).lower()
if text.lower() in _text_cell:
hide_it = False
for _col in INDEX_OF_SPECIAL_COLUMNS_SEARCHABLE:
if (_col == 6) or (
_col == 17): # layout inside a layout for these cells
_text_widget = str(
self.table_ui.cellWidget(
_row, _col).children()[1].children()[1].text()).lower()
else:
_text_widget = str(self.table_ui.cellWidget(
_row, _col).children()[1].text()).lower()
if text.lower() in _text_widget:
hide_it = False
self.table_ui.setRowHidden(_row, hide_it)
def clear_search(self):
self.parent.processing_ui.name_search_3.setText("")
self.search("")
| {
"repo_name": "neutrons/FastGR",
"path": "addie/processing/mantid/master_table/selection_handler.py",
"copies": "1",
"size": "28800",
"license": "mit",
"hash": 3035553151537052700,
"line_mean": 42.4389140271,
"line_max": 107,
"alpha_frac": 0.5599652778,
"autogenerated": false,
"ratio": 3.8699274388605214,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.49298927166605216,
"avg_score": null,
"num_lines": null
} |
from __future__ import (absolute_import, division, print_function)
from ranger.api.commands import Command
def select_file_by_command(self, command):
import subprocess
import os.path
comm = self.fm.execute_command(command, universal_newlines=True, stdout=subprocess.PIPE)
stdout, _stderr = comm.communicate()
if comm.returncode != 0:
return
fzf_file = os.path.abspath(stdout.rstrip('\n'))
if os.path.isdir(fzf_file):
self.fm.cd(fzf_file)
else:
self.fm.select_file(fzf_file)
class fzf(Command):
"""
:fzf
Find a file using fzf.
With a prefix argument select only directories.
See: https://github.com/junegunn/fzf
"""
def execute(self):
if self.quantifier:
# match only directories
command="find -L . \( -path '*/\.*' -o -fstype 'dev' -o -fstype 'proc' \) -prune \
-o -type d -print 2> /dev/null | sed 1d | cut -b3- | fzf +m"
else:
# match files and directories
command="find -L . \( -path '*/\.*' -o -fstype 'dev' -o -fstype 'proc' \) -prune \
-o -print 2> /dev/null | sed 1d | cut -b3- | fzf +m"
select_file_by_command(self, command)
class frece_dir(Command):
"""
:frece_dir
Find a folder using fzf and frece.
"""
def execute(self):
command = '''
DB_FILE="$HOME/.frece_dir.db"
item=$(frece print "$DB_FILE" | fzf)
[[ -z $item ]] && exit 1
frece increment "$DB_FILE" "$item"
echo "$item"
'''
select_file_by_command(self, command)
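# Illustrative sketch, not part of the original file: another picker built on
# the same select_file_by_command helper. The class name and the shell
# pipeline (GNU find sorted by modification time) are assumptions made for
# demonstration only.
class fzf_recent(Command):
    """
    :fzf_recent
    Find a recently modified file using fzf (newest first).
    """
    def execute(self):
        command = ("find -L . -type f -printf '%T@ %p\\n' 2> /dev/null "
                   "| sort -rn | cut -d' ' -f2- | fzf +m")
        select_file_by_command(self, command)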
| {
"repo_name": "SicariusNoctis/dotfiles",
"path": "ranger/.config/ranger/commands.py",
"copies": "1",
"size": "1601",
"license": "mit",
"hash": -6121576963498280000,
"line_mean": 28.6481481481,
"line_max": 94,
"alpha_frac": 0.5683947533,
"autogenerated": false,
"ratio": 3.2807377049180326,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.43491324582180324,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division, print_function
from setuptools import setup
import ast, io, re
from os.path import join, dirname, abspath
# determine __version__ from pw/__init__.py source (adapted from mitsuhiko)
VERSION_RE = re.compile(r'__version__\s+=\s+(.*)')
with io.open('pw/__init__.py', encoding='utf-8') as fp:
version_code = VERSION_RE.search(fp.read()).group(1)
version = str(ast.literal_eval(version_code))
# read long description and convert to RST
long_description = io.open(join(dirname(abspath(__file__)), 'README.md'),
encoding='utf-8').read()
try:
import pypandoc
long_description = pypandoc.convert(long_description, 'rst', format='md')
except ImportError:
pass
# package metadata
setup(
name='pw',
version=version,
description='Grep GPG-encrypted YAML password safe.',
author='Michael Walter',
author_email='michael.walter@gmail.com',
url='https://github.com/catch22/pw',
license='MIT',
packages=['pw'],
entry_points={
'console_scripts': ['pw = pw.__main__:pw']
},
classifiers=[
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Development Status :: 4 - Beta',
'Environment :: Console',
'License :: OSI Approved :: MIT License',
],
long_description=long_description,
install_requires=['click>=5.1', 'colorama', 'pyperclip>=1.5.11', 'ushlex'],
tests_require=['pytest', 'PyYAML'])
| {
"repo_name": "mbr/pw",
"path": "setup.py",
"copies": "1",
"size": "1596",
"license": "mit",
"hash": -2633259036663957500,
"line_mean": 33.6956521739,
"line_max": 79,
"alpha_frac": 0.6303258145,
"autogenerated": false,
"ratio": 3.668965517241379,
"config_test": false,
"has_no_keywords": true,
"few_assignments": false,
"quality_score": 0.47992913317413793,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division, print_function
from simdna.simdnautil import dinuc_shuffle, util
from simdna.synthetic.substringgen import AbstractSubstringGenerator
from simdna.synthetic.quantitygen import FixedQuantityGenerator, AbstractQuantityGenerator
from collections import OrderedDict
import csv
class AbstractBackgroundGenerator(object):
"""Returns the sequence that :class:`.AbstractEmbeddable` objects
are to be embedded into.
"""
def generate_background(self):
        return self.generateBackground()
def generateBackground(self):
"""Returns a sequence that is the background.
"""
raise NotImplementedError()
def get_jsonable_object(self):
        return self.getJsonableObject()
def getJsonableObject(self):
"""Get JSON object representation.
Returns:
A json-friendly object (built of dictionaries, lists and
python primitives), which can be converted to json to
            record the exact details of what was simulated.
"""
raise NotImplementedError()
class RepeatedSubstringBackgroundGenerator(AbstractBackgroundGenerator):
"""Repeatedly call a substring generator and concatenate the result.
Can be used to generate variable-length sequences.
Arguments:
substringGenerator: instance of :class:`.AbstractSubstringGenerator`
repetitions: instance of :class:`.AbstractQuantityGenerator`.\
If pass an int, will create a\
:class:`.FixedQuantityGenerator` from the int. This will be called\
to determine the number of times to generate a substring from\
``self.substringGenerator``
Returns:
The concatenation of all the calls to ``self.substringGenerator``
"""
def __init__(self, substringGenerator, repetitions):
self.substringGenerator = substringGenerator
if isinstance(repetitions, int):
self.repetitions = FixedQuantityGenerator(repetitions)
else:
assert isinstance(repetitions, AbstractQuantityGenerator)
self.repetitions = repetitions
def generateBackground(self):
toReturn = []
for i in range(self.repetitions.generateQuantity()):
# first pos is substring, second pos is the name
toReturn.append(self.substringGenerator.generateSubstring()[0])
return "".join(toReturn)
def getJsonableObject(self):
"""See superclass.
"""
return OrderedDict([
("class", "RepeatedSubstringBackgroundGenerator"),
("substringGenerator", self.substringGenerator.getJsonableObject()),
("repetitions", self.repetitions.getJsonableObject())])
class SampleFromDiscreteDistributionSubstringGenerator(AbstractSubstringGenerator):
"""Generate a substring by sampling from a distribution.
If the "substrings" are single characters (A/C/G/T), can be used
in conjunction with :class:`.RepeatedSubstringBackgroundGenerator` to
generate sequences with a certain GC content.
Arguments:
discreteDistribution: instance of ``util.DiscreteDistribution``
"""
def __init__(self, discreteDistribution):
self.discreteDistribution = discreteDistribution
def generateSubstring(self):
return self.discreteDistribution.sample()
def getJsonableObject(self):
"""See superclass.
"""
return OrderedDict([
("class", "SampleFromDiscreteDistributionSubstringGenerator"),
("discreteDistribution", self.discreteDistribution.valToFreq)])
class ZeroOrderBackgroundGenerator(RepeatedSubstringBackgroundGenerator):
"""Returns a sequence with a certain GC content.
Each base is sampled independently.
Arguments:
seqLength: int, length of the background
discreteDistribution: either an instance of\
``util.DiscreteDistribution` or a dict mapping values to frequency.\
defaults to ``util.DEFAULT_BASE_DISCRETE_DISTRIBUTION``
"""
def __init__(self, seqLength,
discreteDistribution=util.DEFAULT_BASE_DISCRETE_DISTRIBUTION):
        if isinstance(discreteDistribution, dict):
            discreteDistribution = util.DiscreteDistribution(
discreteDistribution)
super(ZeroOrderBackgroundGenerator, self).__init__(
SampleFromDiscreteDistributionSubstringGenerator(discreteDistribution),
seqLength)
class FirstOrderBackgroundGenerator(AbstractBackgroundGenerator):
"""Returns a sequence from a first order markov chain with defined
    GC content.
    Each base after the first is sampled conditioned on the previous base.
Arguments:
seqLength: int, length of the background
priorFrequencies: ordered dictionary with freqs of starting base
dinucFrequencies: dictionary with the frequences of the dinucs
"""
def __init__(self,
seqLength,
priorFrequencies=util.DEFAULT_BACKGROUND_FREQ,
dinucFrequencies=util.DEFAULT_DINUC_FREQ):
self.seqLength = seqLength
self._priorFrequencies = priorFrequencies
self._dinucFrequencies = dinucFrequencies
assert self.seqLength > 0
#do some sanity checks on dinucFrequencies
assert abs(sum(dinucFrequencies.values())-1.0) < 10**-7,\
sum(dinucFrequencies.values())
assert all(len(key) == 2 for key in dinucFrequencies.keys())
#build a transition matrix and priors matrix
chars = set([key[0] for key in dinucFrequencies])
transitionMatrix = {}
for char in chars:
probOnSecondChar = OrderedDict()
totalProb = 0.0
for key in dinucFrequencies:
if key[0]==char:
probOnSecondChar[key[1]] = dinucFrequencies[key]
totalProb += probOnSecondChar[key[1]]
probOnSecondChar = util.DiscreteDistribution(
OrderedDict([(key,val/totalProb) for key,val
in probOnSecondChar.items()]))
transitionMatrix[char] = probOnSecondChar
self.transitionMatrix = transitionMatrix
self.priorFrequencies = util.DiscreteDistribution(priorFrequencies)
def generateBackground(self):
generatedCharacters = [self.priorFrequencies.sample()]
for i in range(self.seqLength-1):
generatedCharacters.append(
self.transitionMatrix[generatedCharacters[-1]].sample())
return "".join(generatedCharacters)
def getJsonableObject(self):
return OrderedDict([('class', 'FirstOrderBackgroundGenerator'),
('priorFrequencies', self._priorFrequencies),
('dinucFrequencies', self._dinucFrequencies)]
)
class ShuffledBackgroundGenerator(AbstractBackgroundGenerator):
"""Shuffles a given sequence
Arguments:
string: the string to shuffle
shuffler: instance of :class:`.AbstractShuffler`.
Returns:
The shuffled string
"""
def __init__(self, string, shuffler):
self.string = string
self.shuffler = shuffler
def generateBackground(self):
return self.shuffler.shuffle(self.string)
def getJsonableObject(self):
"""See superclass.
"""
raise NotImplementedError()
class AbstractShuffler(object):
"""Implements a method to shuffle a supplied sequence"""
def shuffle(self, string):
raise NotImplementedError()
def getJsonableObject(self):
return OrderedDict([('class', type(self).__name__)])
class DinucleotideShuffler(AbstractShuffler):
def shuffle(self, string):
return dinuc_shuffle.dinuc_shuffle(string)
class BackgroundArrayFromGenerator(AbstractBackgroundGenerator):
def __init__(self, backgroundGenerator, num_seqs=100):
"""Returns a sequence array from a generator
Each sequence is sampled independently.
These sequence arrays can be used to embed motifs in the
same position in a number of background sequences, useful
for setting up randomized-background experiments.
Arguments:
backgroundGenerator: AbstractBackgroundGenerator, to sample from
num_seqs: int, number of sequences to be in the returned array
"""
self.backgroundGenerator = backgroundGenerator
self.num_seqs = num_seqs
def generateBackground(self):
return ["".join(self.backgroundGenerator.generateBackground()) for _ in range(self.num_seqs)]
def getJsonableObject(self):
return OrderedDict([('class', 'BackgroundArrayFromGenerator'),
('backgroundGenerator', self.backgroundGenerator.getJsonableObject()),
('num_seqs', self.num_seqs)]
)
class BackgroundFromSimData(AbstractBackgroundGenerator):
def __init__(self, simdata="data/backgrounds.simdata"):
"""
        Cyclically return backgrounds from a simdata file; on
reaching the end of the file simply start at the beginning.
:param simdata: path to simdata file to load
"""
self.simdata = simdata
with open(self.simdata) as tsvfile:
reader = csv.reader(tsvfile, delimiter="\t")
next(reader)
self.seqs = [seq[1] for seq in reader]
self.idx = 0
def __len__(self):
return len(self.seqs)
def generateBackground(self):
seq = self.seqs[self.idx]
self.idx = (self.idx + 1) % len(self)
return seq
def getJsonableObject(self):
"""See superclass.
"""
return OrderedDict([
("class", "BackgroundFromSimData"),
('simdata', self.simdata)]
)
class BackgroundArrayFromSimData(AbstractBackgroundGenerator):
def __init__(self, simdata="data/backgrounds.simdata"):
"""
Returns a sequence array read from a SimData file
:param simdata: path to simdata file to load
"""
self.simdata = simdata
def generateBackground(self):
"""
        This returns the full array of sequences read from the file.
:return: a list of sequences which can be manipulated in other SimDNA functions
"""
with open(self.simdata) as tsvfile:
reader = csv.reader(tsvfile, delimiter="\t")
next(reader)
seqs = [seq[1] for seq in reader]
return seqs
def getJsonableObject(self):
return OrderedDict([('class', 'BackgroundArrayFromSimData'),
('simdata', self.simdata)]
)
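# Illustrative usage sketch, not part of the original module: draws one
# background from a zero-order generator and one from a first-order Markov
# generator, both using the package's default base/dinucleotide frequencies.
# The 50 bp length is an arbitrary demonstration value.
if __name__ == "__main__":
    for generator in (ZeroOrderBackgroundGenerator(seqLength=50),
                      FirstOrderBackgroundGenerator(seqLength=50)):
        print(type(generator).__name__, generator.generateBackground())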
| {
"repo_name": "kundajelab/simdna",
"path": "simdna/synthetic/backgroundgen.py",
"copies": "1",
"size": "10653",
"license": "mit",
"hash": 7553237182249515000,
"line_mean": 34.2748344371,
"line_max": 101,
"alpha_frac": 0.6587815639,
"autogenerated": false,
"ratio": 4.66622864651774,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0018282762982620411,
"num_lines": 302
} |
from __future__ import absolute_import, division, print_function
from simdna.simdnautil import util
from collections import OrderedDict
import numpy as np
import re
import itertools
class DefaultNameMixin(object):
"""Basic functionality for classes that have a self.name attribute.
The self.name attribute is typically used to leave a trace in
an instance of :class:`.AdditionalInfo`
Arguments:
name: string
"""
def __init__(self, name):
if (name == None):
name = self.getDefaultName()
self.name = name
def get_default_name(self):
        return self.getDefaultName()
def getDefaultName(self):
return type(self).__name__
class GeneratedSequence(object):
"""An object representing a sequence that has been generated.
Arguments:
seqName: string representing the name/id of the sequence
seq: string representing the final generated sequence
embeddings: an array of :class:`.Embedding` objects.
additionalInfo: an instance of :class:`.AdditionalInfo`
"""
def __init__(self, seqName, seq, embeddings, additionalInfo):
self.seqName = seqName
self.seq = seq
self.embeddings = embeddings
self.additionalInfo = additionalInfo
class Embedding(object):
"""Represents something that has been embedded in a sequence.
Think of this as a combination of an embeddable + a start position.
Arguments:
what: object representing the thing that has been embedded.\
Should have`` __str__`` and ``__len__`` defined.\
Often is an instance of :class:`.AbstractEmbeddable`
startPos: int, the position relative to the start of the parent\
sequence at which seq has been embedded
"""
def __init__(self, what, startPos):
self.what = what
self.startPos = startPos
def __str__(self):
return "pos-" + str(self.startPos) + "_" + str(self.what)
@classmethod
def from_string(cls, theString, whatClass=None):
        return cls.fromString(theString, whatClass=whatClass)
@classmethod
def fromString(cls, string, whatClass=None):
"""Recreate an :class:`.Embedding` object from a string.
Arguments:
string: assumed to have format:\
``description[-|_]startPos[-|_]whatString``, where
``whatString`` will be provided to ``whatClass``
whatClass: the class (usually a :class:`.AbstractEmbeddable`) that\
will be used to instantiate the what from the whatString
Returns:
The Embedding class called with
``what=whatClass.fromString(whatString)`` and
``startPos=int(startPos)``
"""
if (whatClass is None):
from simdna.synthetic.embeddables import StringEmbeddable
whatClass = StringEmbeddable
# was printed out as pos-[startPos]_[what], but the
# [what] may contain underscores, hence the maxsplit
# to avoid splitting on them.
p = re.compile(r"pos\-(\d+)_(.*)$")
m = p.search(string)
startPos = m.group(1)
whatString = m.group(2)
return cls(what=whatClass.fromString(whatString),
startPos=int(startPos))
def get_embeddings_from_string(string):
return getEmbeddingsFromString(string)
def getEmbeddingsFromString(string):
"""Get a series of :class:`.Embedding` objects from a string.
Splits the string on commas, and then passes the comma-separated vals
to :func:`.Embedding.fromString`
Arguments:
string: The string to turn into an array of Embedding objects
Returns:
an array of :class:`.Embedding` objects
"""
if len(string) == 0:
return []
else:
embeddingStrings = string.split(",")
return [Embedding.fromString(x) for x in embeddingStrings]
class AbstractSequenceSetGenerator(object):
"""A generator for a collection of generated sequences.
"""
def generate_sequences(self):
        return self.generateSequences()
def generateSequences(self):
"""The generator; implementation should have a yield.
Called as
``generatedSequences = sequenceSetGenerator.generateSequences()``
``generateSequences`` can then be iterated over.
Returns:
A generator of GeneratedSequence objects
"""
raise NotImplementedError()
def get_jsonable_object(self):
        return self.getJsonableObject()
def getJsonableObject(self):
"""Get JSON object representation.
Returns:
A json-friendly object (built of dictionaries, lists and
python primitives), which can be converted to json to
            record the exact details of what was simulated.
"""
raise NotImplementedError()
class ChainSequenceSetGenerators(AbstractSequenceSetGenerator):
"""Chains several generators together.
Arguments:
generators: instances of :class:`.AbstractSequenceSetGenerator`.
"""
def __init__(self, *generators):
self.generators = generators
def generateSequences(self):
"""A chain of generators
Returns:
A chain of generators
"""
for item in itertools.chain(*[generator.generateSequences()
for generator in self.generators]):
yield item
def getJsonableObject(self):
"""See superclass
"""
return OrderedDict([('generators',
[x.getJsonableObject() for x
in self.generators])])
class GenerateSequenceNTimes(AbstractSequenceSetGenerator):
"""Call a :class:`.AbstractSingleSequenceGenerator` N times.
Arguments:
singleSetGenerator: an instance of
:class:`.AbstractSequenceSetGenerator`
N: integer, the number of times to call singleSetGenerator
"""
def __init__(self, singleSetGenerator, N):
self.singleSetGenerator = singleSetGenerator
self.N = N
def generateSequences(self):
"""A generator that calls self.singleSetGenerator N times.
Returns:
a generator that will call self.singleSetGenerator N times.
"""
for i in range(self.N):
out = self.singleSetGenerator.generateSequence()
if isinstance(out, list):
for seq in out:
yield seq
else:
yield out
def getJsonableObject(self):
"""See superclass.
"""
return OrderedDict([("numSeq", self.N),
("singleSetGenerator", self.singleSetGenerator.getJsonableObject())])
class AbstractSingleSequenceGenerator(object):
"""Generate a single sequence.
Arguments:
namePrefix: the GeneratedSequence object has a field
for the object's name; this is the prefix associated
with that name. The suffix is the value of a counter that
is incremented every time
"""
def __init__(self, namePrefix=None):
self.namePrefix = namePrefix if namePrefix is not None else "synth"
def generateSequence(self):
"""Generate the sequence.
Returns:
An instance of :class:`.GeneratedSequence`
"""
raise NotImplementedError()
def getJsonableObject(self):
"""Get JSON object representation.
Returns:
A json-friendly object (built of dictionaries, lists and
python primitives), which can be converted to json to
            record the exact details of what was simulated.
"""
raise NotImplementedError()
class EmbedInABackground(AbstractSingleSequenceGenerator):
"""Generate a background sequence and embed smaller sequences in it.
Takes a backgroundGenerator and a series of embedders. Will
generate the background and then call each of the embedders in
succession. Then returns the result.
Arguments:
backgroundGenerator: instance of
:class:`.AbstractBackgroundGenerator`
embedders: array of instances of :class:`.AbstractEmbedder`
namePrefix: see parent
"""
def __init__(self, backgroundGenerator, embedders, namePrefix=None):
super(EmbedInABackground, self).__init__(namePrefix)
self.backgroundGenerator = backgroundGenerator
self.embedders = embedders
self.sequenceCounter = 0
@staticmethod
def generateSequenceGivenBackgroundGeneratorAndEmbedders(
backgroundGenerator, embedders, sequenceName):
additionalInfo = AdditionalInfo()
backgroundString = backgroundGenerator.generateBackground()
backgroundStringArr = [list(x) for x in backgroundString] if isinstance(backgroundString,
list) else list(backgroundString)
# priorEmbeddedThings keeps track of what has already been embedded
len_bsa = len(backgroundStringArr[0]) if isinstance(backgroundStringArr[0], list) else len(
backgroundStringArr)
priorEmbeddedThings = PriorEmbeddedThings_numpyArrayBacked(len_bsa)
for embedder in embedders:
embedder.embed(backgroundStringArr,
priorEmbeddedThings, additionalInfo)
# deal w fact that can be an array or a string
if isinstance(backgroundStringArr[0], list):
gen_seq = [GeneratedSequence(sequenceName,
"".join(bs),
priorEmbeddedThings.getEmbeddings(),
additionalInfo)
for bs in backgroundStringArr]
else:
gen_seq = GeneratedSequence(sequenceName,
"".join(backgroundStringArr),
priorEmbeddedThings.getEmbeddings(),
additionalInfo)
return gen_seq
def generateSequence(self):
"""Produce the sequence.
Generates a background using self.backgroundGenerator,
splits it into an array, and passes it to each of
self.embedders in turn for embedding things.
Returns:
An instance of :class:`.GeneratedSequence`
"""
toReturn = EmbedInABackground. \
generateSequenceGivenBackgroundGeneratorAndEmbedders(
backgroundGenerator=self.backgroundGenerator,
embedders=self.embedders,
sequenceName=self.namePrefix + str(self.sequenceCounter))
self.sequenceCounter += 1 # len(toReturn) if isinstance(toReturn, list) else 1
return toReturn
def getJsonableObject(self):
"""See superclass.
"""
return OrderedDict([("class", "EmbedInABackground"),
("namePrefix", self.namePrefix),
("backgroundGenerator",
self.backgroundGenerator.getJsonableObject()),
("embedders",
[x.getJsonableObject() for x in self.embedders])
])
class AdditionalInfo(object):
"""Used to keep track of which embedders/ops were
called and how many times.
An instance of AdditionalInfo is meant to be an attribute of
a :class:`.GeneratedSequence` object. It keeps track of things
like embedders, position generators, etc.
Has self.trace which is a dictionary from operatorName->int
and which records operations that were called in the
process of embedding things in the sequence. At the time
of writing, operatorName is typically just the name of the
embedder.
"""
def __init__(self):
self.trace = OrderedDict() # a trace of everything that was called.
self.additionalInfo = OrderedDict() # for more ad-hoc messages
def is_in_trace(self, operatorName):
        return self.isInTrace(operatorName)
def isInTrace(self, operatorName):
"""Return True if operatorName has been called on the sequence.
"""
return operatorName in self.trace
def update_trace(self, operatorName):
self.updateTrace(operatorName)
def updateTrace(self, operatorName):
"""Increment count for the number of times operatorName was called.
"""
if (operatorName not in self.trace):
self.trace[operatorName] = 0
self.trace[operatorName] += 1
def update_additional_info(self, operatorName, value):
self.updateAdditionalInfo(operatorName, value)
def updateAdditionalInfo(self, operatorName, value):
"""Can be used to store any additional information on operatorName.
"""
        self.additionalInfo[operatorName] = value
class AbstractPriorEmbeddedThings(object):
"""Keeps track of what has already been embedded in a sequence.
"""
def can_embed(self, startPos, endPos):
return self.canEmbed(startPos, endPos)
def canEmbed(self, startPos, endPos):
"""Test whether startPos-endPos is available for embedding.
Arguments:
startPos: int, starting index
endPos: int, ending index+1 (same semantics as array-slicing)
Returns:
True if startPos:endPos is available for embedding
"""
raise NotImplementedError()
def add_embedding(self, startPos, what):
self.addEmbedding(startPos, what)
def addEmbedding(self, startPos, what):
"""Records the embedding of a :class:`AbstractEmbeddable`.
Embeds ``what`` from ``startPos`` to ``startPos+len(what)``.
Creates an :class:`Embedding` object.
Arguments:
startPos: int, the starting position at which to embed.
what: instance of :class:`AbstractEmbeddable`
"""
raise NotImplementedError()
def get_num_occupied_pos(self):
return self.getNumOccupiedPos()
def getNumOccupiedPos(self):
"""
Returns:
            Number of positions that are filled with some kind of embedding
"""
raise NotImplementedError()
def get_total_pos(self):
return self.getTotalPos()
def getTotalPos(self):
"""
Returns:
            Total number of positions (occupied and unoccupied) available
to embed things in.
"""
raise NotImplementedError()
def get_embeddings(self):
return self.getEmbeddings()
def getEmbeddings(self):
"""
Returns:
A collection of Embedding objects
"""
raise NotImplementedError()
class PriorEmbeddedThings_numpyArrayBacked(AbstractPriorEmbeddedThings):
"""A numpy-array based implementation of
:class:`.AbstractPriorEmbeddedThings`.
Uses a numpy array where positions are set to 1 if they are occupied,
to determine which positions are occupied and which are not.
See superclass for more documentation.
Arguments:
seqLen: integer indicating length of the sequence you are embedding in
"""
def __init__(self, seqLen):
self.seqLen = seqLen
self.arr = np.zeros(seqLen)
self.embeddings = []
def canEmbed(self, startPos, endPos):
"""See superclass.
"""
return np.sum(self.arr[startPos:endPos]) == 0
def addEmbedding(self, startPos, what):
"""See superclass.
"""
self.arr[startPos:startPos + len(what)] = 1
self.embeddings.append(Embedding(what=what, startPos=startPos))
def getNumOccupiedPos(self):
"""See superclass.
"""
return np.sum(self.arr)
def getTotalPos(self):
"""See superclass.
"""
return len(self.arr)
def getEmbeddings(self):
"""See superclass.
"""
return self.embeddings
class LabelGenerator(object):
"""Generate labels for a generated sequence.
Arguments:
labelNames: an array of strings that are the names of the labels
labelsFromGeneratedSequenceFunction: function that accepts
an instance of :class:`.GeneratedSequence` and returns an array
of the labels (eg: an array of ones and zeros indicating if
the criteria for various labels are met)
"""
def __init__(self, labelNames, labelsFromGeneratedSequenceFunction):
self.labelNames = labelNames
self.labelsFromGeneratedSequenceFunction = \
labelsFromGeneratedSequenceFunction
def generate_labels(self, generatedSequence):
return self.generateLabels(generatedSequence)
def generateLabels(self, generatedSequence):
"""calls self.labelsFromGeneratedSequenceFunction.
Arguments:
generatedSequence: an instance of :class:`.GeneratedSequence`
"""
return self.labelsFromGeneratedSequenceFunction(
self, generatedSequence)
class IsInTraceLabelGenerator(LabelGenerator):
"""LabelGenerator where labels match which embedders are called.
A special kind of LabelGenerator where the names of the labels
are the names of embedders, and the label is 1 if a particular
embedder has been called on the sequence and 0 otherwise.
"""
def __init__(self, labelNames):
def labelsFromGeneratedSequenceFunction(self, generatedSequence):
return [(1 if generatedSequence.additionalInfo.isInTrace(x) else 0)
for x in self.labelNames]
super(IsInTraceLabelGenerator, self).__init__(
labelNames, labelsFromGeneratedSequenceFunction)
def print_sequences(outputFileName, sequenceSetGenerator,
includeEmbeddings=False, labelGenerator=None,
includeFasta=False, prefix=None):
printSequences(outputFileName, sequenceSetGenerator,
includeEmbeddings=includeEmbeddings, labelGenerator=labelGenerator,
includeFasta=includeFasta, prefix=prefix)
def printSequences(outputFileName, sequenceSetGenerator,
includeEmbeddings=False, labelGenerator=None,
includeFasta=False, prefix=None):
"""Print a series of synthetic sequences.
Given an output filename, and an instance of
:class:`.AbstractSequenceSetGenerator`, will call the
sequenceSetGenerator and print the generated sequences
    to the output file. Will also create a companion info file (the output
    file name with "_info" appended before a .txt extension) in the same
    directory as outputFileName that contains all the information about
    sequenceSetGenerator.
Arguments:
outputFileName: string
sequenceSetGenerator: instance of
:class:`.AbstractSequenceSetGenerator`
includeEmbeddings: a boolean indicating whether to print a
column that lists the embeddings
labelGenerator: optional instance of :class:`.LabelGenerator`
includeFasta: optional boolean indicating whether to also
print out the generated sequences in fasta format
(the file will be produced with a .fa extension)
prefix: string - this will be prefixed in front of the generated
sequence ids, followed by a hyphen
"""
ofh = util.get_file_handle(outputFileName, 'w')
if (includeFasta):
fastaOfh = util.get_file_handle(util.get_file_name_parts(
outputFileName).get_transformed_file_path(
lambda x: x, extension=".fa"), 'w')
ofh.write("seqName\tsequence"
+ ("\tembeddings" if includeEmbeddings else "")
+ ("\t" +
"\t".join(labelGenerator.labelNames)
if labelGenerator is not None else "") + "\n")
generatedSequences = sequenceSetGenerator.generateSequences() # returns a generator
for generatedSequence in generatedSequences:
ofh.write((prefix + "-" if prefix is not None else "")
+ generatedSequence.seqName + "\t" + generatedSequence.seq
+ ("\t" + ",".join(str(x)
for x in generatedSequence.embeddings)
if includeEmbeddings else "")
+ ("\t" + "\t".join(str(x) for x in labelGenerator.generateLabels(
generatedSequence)) if labelGenerator is not None else "")
+ "\n")
if includeFasta:
fastaOfh.write(">" + (prefix + "-" if prefix is not None else "")
+ generatedSequence.seqName + "\n")
fastaOfh.write(generatedSequence.seq + "\n")
ofh.close()
if (includeFasta):
fastaOfh.close()
infoFilePath = (util.get_file_name_parts(outputFileName)
.get_transformed_file_path(
lambda x: x + "_info", extension=".txt"))
ofh = util.get_file_handle(infoFilePath, 'w')
ofh.write(util.format_as_json(sequenceSetGenerator.getJsonableObject()))
ofh.close()
def read_simdata_file(simdata_file, ids_to_load=None):
"""
Read a simdata file and extract all simdata info as an enum
:param simdata_file: str, path to file
:param ids_to_load: list of ints, ids of sequences to load
:return: enum of sequences, the embedded elements in each
sequence, and any labels for those sequences
"""
ids = []
sequences = []
embeddings = []
labels = []
if (ids_to_load is not None):
ids_to_load = set(ids_to_load)
def action(inp, line_number):
if (line_number > 1):
if (ids_to_load is None or (inp[0] in ids_to_load)):
ids.append(inp[0])
sequences.append(inp[1])
embeddings.append(getEmbeddingsFromString(inp[2]))
labels.append([int(x) for x in inp[3:]])
util.perform_action_on_each_line_of_file(
file_handle=util.get_file_handle(simdata_file),
action=action,
transformation=util.default_tab_seppd)
return util.enum(
ids=ids,
sequences=sequences,
embeddings=embeddings,
labels=np.array(labels))
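# Illustrative usage sketch, not part of the original module: generates ten
# 100 bp background sequences (with no embedded motifs, for brevity) and
# writes them out with printSequences. The import path mirrors the sibling
# backgroundgen module in this package; the output file name and the counts
# are arbitrary demonstration values.
if __name__ == "__main__":
    from simdna.synthetic.backgroundgen import ZeroOrderBackgroundGenerator
    singleSeqGenerator = EmbedInABackground(
        backgroundGenerator=ZeroOrderBackgroundGenerator(seqLength=100),
        embedders=[])
    sequenceSetGenerator = GenerateSequenceNTimes(singleSeqGenerator, N=10)
    printSequences("demo_sequences.simdata", sequenceSetGenerator,
                   includeEmbeddings=True, includeFasta=False)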
| {
"repo_name": "kundajelab/simdna",
"path": "simdna/synthetic/core.py",
"copies": "1",
"size": "22058",
"license": "mit",
"hash": -7445139196125766000,
"line_mean": 33.1984496124,
"line_max": 99,
"alpha_frac": 0.6324236105,
"autogenerated": false,
"ratio": 4.632087358252835,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5764510968752835,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division, print_function
from simdna.synthetic.core import DefaultNameMixin
from simdna import random
from collections import OrderedDict
import math
class AbstractPositionGenerator(DefaultNameMixin):
"""Generate a start position at which to embed something
Given the length of the background sequence and the length
of the substring you are trying to embed, will return a start position
to embed the substring at.
"""
def generate_pos(self, lenBackground, lenSubstring, additionalInfo=None):
        return self.generatePos(lenBackground, lenSubstring, additionalInfo=additionalInfo)
def generatePos(self, lenBackground, lenSubstring, additionalInfo=None):
"""Generate the position to embed in.
Arguments:
lenBackground: int, length of background sequence
            lenSubstring: int, length of substring to embed
additionalInfo: optional, instance of :class:`.AdditionalInfo`. Is
used to leave a trace that this positionGenerator was called
Returns:
An integer which is the start index to embed in.
"""
if (additionalInfo is not None):
additionalInfo.updateTrace(self.name)
return self._generatePos(lenBackground, lenSubstring, additionalInfo)
def _generatePos(self, lenBackground, lenSubstring, additionalInfo):
"""Generate the position to embed in - this method should be
        overridden by the subclass. See
        :func:`.AbstractPositionGenerator.generatePos` for documentation
on the arguments.
"""
raise NotImplementedError()
def get_jsonable_object(self):
        return self.getJsonableObject()
def getJsonableObject(self):
"""Get JSON object representation.
Returns:
A json-friendly object (built of dictionaries, lists and
python primitives), which can be converted to json to
            record the exact details of what was simulated.
"""
raise NotImplementedError()
class NormalDistributionPositionGenerator(AbstractPositionGenerator):
"""Generate position according to normal distribution with mean at
offsetFromCenter
"""
def __init__(self, stdInBp, offsetFromCenter=0, name=None):
super(NormalDistributionPositionGenerator, self).__init__(name)
self.stdInBp = stdInBp
self.offsetFromCenter = offsetFromCenter
def _generatePos(self, lenBackground, lenSubstring, additionalInfo):
from scipy.stats import norm
center = (lenBackground-lenSubstring)/2.0
validPos = False
totalTries = 0
while (validPos == False):
sampledPos = int(norm.rvs(loc=center+self.offsetFromCenter,
scale=self.stdInBp))
totalTries += 1
if (sampledPos > 0 and sampledPos < (lenBackground-lenSubstring)):
validPos = True
if (totalTries%10 == 0 and totalTries > 0):
print("Warning: made "+str(totalTries)+" attempts at sampling"
+" a position with lenBackground "+str(lenBackground)
+" and center "+str(center)+" and offset "
+str(self.offsetFromCenter))
return sampledPos
def getJsonableObject(self):
"""Get JSON object representation.
Returns:
A json-friendly object (built of dictionaries, lists and
python primitives), which can be converted to json to
            record the exact details of what was simulated.
"""
return OrderedDict([("class", "NormalDistributionPositionGenerator"),
("stdInBp", self.stdInBp),
("offsetFromCenter", self.offsetFromCenter)])
class UniformPositionGenerator(AbstractPositionGenerator):
"""Sample position uniformly at random.
Samples a start position to embed the substring in uniformly at random;
does not return positions that are too close to the end of the
background sequence to embed the full substring.
Arguments:
name: string, see :class:`.DefaultNameMixin`
"""
def __init__(self, name=None):
super(UniformPositionGenerator, self).__init__(name)
def _generatePos(self, lenBackground, lenSubstring, additionalInfo):
return sampleIndexWithinRegionOfLength(lenBackground, lenSubstring)
def getJsonableObject(self):
"""See superclass.
"""
return "uniform"
class FixedPositionGenerator(AbstractPositionGenerator):
"""Return a constant position value.
Takes a position to return; returns that position on calling _generatePos.
Will error if position is outside the sequence bounds.
Arguments:
name: string, see :class:`.DefaultNameMixin`
"""
def __init__(self, pos, name=None):
super(FixedPositionGenerator, self).__init__(name)
self.pos = abs(pos)
def _generatePos(self, lenBackground, lenSubstring, additionalInfo):
assert (self.pos < lenBackground - lenSubstring)
return self.pos
def getJsonableObject(self):
"""See superclass.
"""
return "fixed" + str(self.pos)
#instantiate a UniformPositionGenerator for general use
uniformPositionGenerator = UniformPositionGenerator()
class InsideCentralBp(AbstractPositionGenerator):
"""For embedding within only the central region of a background.
Returns a position within the central region of a background
sequence, sampled uniformly at random
Arguments:
centralBp: int, the number of bp, centered in the
middle of the background, from which to sample the position.
Is NOT +/- centralBp around the middle
(is +/- centralBp/2 around the middle). If the background
sequence is even and centralBp is odd, the shorter region
will go on the left.
name: string - see :class:`.DefaultNameMixin`
"""
def __init__(self, centralBp, name=None):
"""
"""
self.centralBp = centralBp
super(InsideCentralBp, self).__init__(name)
def _generatePos(self, lenBackground, lenSubstring, additionalInfo):
if (lenBackground < self.centralBp):
            raise RuntimeError("The background length should be at least as long as self.centralBp; is " +
str(lenBackground) + " and " + str(self.centralBp) + " respectively")
startIndexForRegionToEmbedIn = int(
lenBackground / 2) - int(self.centralBp / 2)
indexToSample = startIndexForRegionToEmbedIn + \
sampleIndexWithinRegionOfLength(self.centralBp, lenSubstring)
return int(indexToSample)
def getJsonableObject(self):
"""See superclass.
"""
return "insideCentral-" + str(self.centralBp)
class OutsideCentralBp(AbstractPositionGenerator):
"""For embedding only OUTSIDE a central region of a background seq.
Returns a position OUTSIDE the central region of a background sequence,
sampled uniformly at random. Complement of InsideCentralBp.
Arguments:
centralBp: int, the centralBp to avoid embedding in. See the docs
for :class:`.InsideCentralBp` for more details (this is the
complement).
"""
def __init__(self, centralBp, name=None):
self.centralBp = centralBp
super(OutsideCentralBp, self).__init__(name)
def _generatePos(self, lenBackground, lenSubstring, additionalInfo):
# choose whether to embed in the left or the right
if random.random() > 0.5:
left = True
else:
left = False
# embeddableLength is the length of the region we are considering
# embedding in
embeddableLength = 0.5 * (lenBackground - self.centralBp)
# if lenBackground-self.centralBp is odd, the longer region
# goes on the left (inverse of the shorter embeddable region going on the left in
# the centralBpToEmbedIn case
if (left):
embeddableLength = math.ceil(embeddableLength)
startIndexForRegionToEmbedIn = 0
else:
embeddableLength = math.floor(embeddableLength)
startIndexForRegionToEmbedIn = math.ceil(
(lenBackground - self.centralBp) / 2) + self.centralBp
indexToSample = startIndexForRegionToEmbedIn + \
sampleIndexWithinRegionOfLength(embeddableLength, lenSubstring)
return int(indexToSample)
def getJsonableObject(self):
"""See superclass.
"""
return "outsideCentral-" + str(self.centralBp)
def sampleIndexWithinRegionOfLength(length, lengthOfThingToEmbed):
"""Uniformly at random samples integers from 0 to
    ``length``-``lengthOfThingToEmbed``.
Arguments:
length: length of full region that could be embedded in
lengthOfThingToEmbed: length of thing being embedded in larger region
"""
assert lengthOfThingToEmbed <= length
indexToSample = int(
random.random() * ((length - lengthOfThingToEmbed) + 1))
return indexToSample
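# Illustrative usage sketch, not part of the original module: samples start
# positions for a 10 bp substring inside a 100 bp background, first uniformly
# over the whole sequence and then restricted to the central 20 bp. The
# lengths are arbitrary demonstration values.
if __name__ == "__main__":
    for generator in (UniformPositionGenerator(), InsideCentralBp(20)):
        positions = [generator.generatePos(100, 10) for _ in range(5)]
        print(type(generator).__name__, positions)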
| {
"repo_name": "kundajelab/simdna",
"path": "simdna/synthetic/positiongen.py",
"copies": "1",
"size": "9205",
"license": "mit",
"hash": 6166709725128877000,
"line_mean": 36.5714285714,
"line_max": 105,
"alpha_frac": 0.6598587724,
"autogenerated": false,
"ratio": 4.49243533430942,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0014600540035668848,
"num_lines": 245
} |
from __future__ import absolute_import, division, print_function
from simdna.synthetic.core import DefaultNameMixin
from simdna import random
import numpy as np
from collections import OrderedDict
class AbstractQuantityGenerator(DefaultNameMixin):
"""Class for sampling values from a distribution.
"""
def generate_quantity(self):
        return self.generateQuantity()
def generateQuantity(self):
"""Sample a quantity from a distribution.
Returns:
The sampled value.
"""
raise NotImplementedError()
def get_jsonable_object(self):
        return self.getJsonableObject()
def getJsonableObject(self):
"""Get JSON object representation.
Returns:
A json-friendly object (built of dictionaries, lists and
python primitives), which can be converted to json to
            record the exact details of what was simulated.
"""
raise NotImplementedError()
class ChooseValueFromASet(AbstractQuantityGenerator):
"""Randomly samples a particular value from a set of values.
Arguments:
setOfPossibleValues: array of values that will be randomly sampled
from.
name: see :class:`.DefaultNameMixin`.
"""
def __init__(self, setOfPossibleValues, name=None):
self.setOfPossibleValues = setOfPossibleValues
super(ChooseValueFromASet, self).__init__(name)
def generateQuantity(self):
"""See superclass.
"""
return self.setOfPossibleValues[int(random.random() * (len(self.setOfPossibleValues)))]
def getJsonableObject(self):
"""See superclass.
"""
return OrderedDict([("class", "ChooseValueFromASet"),
("possibleValues", self.setOfPossibleValues)])
class UniformIntegerGenerator(AbstractQuantityGenerator):
"""Randomly samples an integer from minVal to maxVal, inclusive.
Arguments:
minVal: minimum integer that can be sampled
maxVal: maximum integers that can be sampled
name: See superclass.
"""
def __init__(self, minVal, maxVal, name=None):
self.minVal = minVal
self.maxVal = maxVal
super(UniformIntegerGenerator, self).__init__(name)
def generateQuantity(self):
"""See superclass.
"""
# the 1+ makes the max val inclusive
return self.minVal + int(random.random() * (1 + self.maxVal - self.minVal))
def getJsonableObject(self):
"""See superclass.
"""
return OrderedDict([("class", "UniformIntegerGenerator"), ("minVal", self.minVal), ("maxVal", self.maxVal)])
class FixedQuantityGenerator(AbstractQuantityGenerator):
"""Returns a fixed number every time generateQuantity is called.
Arguments:
quantity: the value to return when generateQuantity is called.
"""
def __init__(self, quantity, name=None):
self.quantity = quantity
super(FixedQuantityGenerator, self).__init__(name)
def generateQuantity(self):
"""See superclass.
"""
return self.quantity
def getJsonableObject(self):
"""See superclass.
"""
return "fixedQuantity-" + str(self.quantity)
class PoissonQuantityGenerator(AbstractQuantityGenerator):
"""Generates values according to a poisson distribution.
Arguments:
mean: the mean of the poisson distribution
"""
def __init__(self, mean, name=None):
self.mean = mean
super(PoissonQuantityGenerator, self).__init__(name)
def generateQuantity(self):
"""See superclass.
"""
return random.poisson(self.mean)
def getJsonableObject(self):
"""See superclass.
"""
return "poisson-" + str(self.mean)
class BernoulliQuantityGenerator(AbstractQuantityGenerator):
"""Generates 1 or 0 according to a bernoulli distribution.
Arguments:
prob: probability of 1
"""
def __init__(self, prob, name=None):
self.prob = prob
super(BernoulliQuantityGenerator, self).__init__(name)
def generateQuantity(self):
"""See superclass.
"""
return 1 if (random.random() <= self.prob) else 0
def getJsonableObject(self):
"""See superclass.
"""
return "bernoulli-" + str(self.prob)
class MinMaxWrapper(AbstractQuantityGenerator):
"""Compress a distribution to lie within a min and a max.
Wrapper that restricts a distribution to only return values between
the min and the max. If a value outside the range is returned,
resamples until it obtains a value within the range.
Warns every time it tries to resample 10 times without successfully
finding a value in the correct range.
Arguments:
quantityGenerator: instance of :class:`.AbstractQuantityGenerator`.
Used to draw samples from the distribution to truncate
theMin: can be None; if so will be ignored.
theMax: can be None; if so will be ignored.
"""
def __init__(self, quantityGenerator, theMin=None, theMax=None, name=None):
self.quantityGenerator = quantityGenerator
self.theMin = theMin
self.theMax = theMax
assert self.quantityGenerator is not None
super(MinMaxWrapper, self).__init__(name)
def generateQuantity(self):
"""See superclass.
"""
tries = 0
while (True):
tries += 1
quantity = self.quantityGenerator.generateQuantity()
if (self.theMin is None or quantity >= self.theMin) and (self.theMax is None or quantity <= self.theMax):
return quantity
if tries % 10 == 0:
print("warning: made " + str(tries) +
" tries at trying to sample from distribution with min/max limits")
def getJsonableObject(self):
"""See superclass.
"""
return OrderedDict([("min", self.theMin),
("max", self.theMax),
("quantityGenerator", self.quantityGenerator.getJsonableObject())])
class ZeroInflater(AbstractQuantityGenerator):
"""Inflate a particular distribution with zeros.
Wrapper that inflates the number of zeros returned.
Flips a coin; if positive, will return zero - otherwise will
sample from the wrapped distribution (which may still return 0)
Arguments:
quantityGenerator: an instance of :class:`.AbstractQuantityGenerator`;\
represents the distribution to sample from with probability ``1-zeroProb``
zeroProb: the probability of just returning 0 without sampling\
from ``quantityGenerator``
name: see :class:`.DefaultNameMixin`.
"""
def __init__(self, quantityGenerator, zeroProb, name=None):
self.quantityGenerator = quantityGenerator
self.zeroProb = zeroProb
super(ZeroInflater, self).__init__(name)
def generateQuantity(self):
"""See superclass.
"""
val = random.random()
if (val < self.zeroProb):
return 0
else:
return self.quantityGenerator.generateQuantity()
def getJsonableObject(self):
"""See superclass.
"""
return OrderedDict([("class", "ZeroInflater"),
("zeroProb", self.zeroProb),
("quantityGenerator", self.quantityGenerator.getJsonableObject())])
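# Illustrative usage sketch, not part of the original module: composes the
# generators above - a Poisson count with mean 2 is clipped to [1, 5] and
# then zero-inflated so that about 30% of draws are forced to zero. The
# numbers are arbitrary demonstration values.
if __name__ == "__main__":
    quantityGenerator = ZeroInflater(
        MinMaxWrapper(PoissonQuantityGenerator(2), theMin=1, theMax=5),
        zeroProb=0.3)
    print([quantityGenerator.generateQuantity() for _ in range(10)])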
| {
"repo_name": "kundajelab/simdna",
"path": "simdna/synthetic/quantitygen.py",
"copies": "1",
"size": "7460",
"license": "mit",
"hash": 4614350987594060000,
"line_mean": 30.0833333333,
"line_max": 117,
"alpha_frac": 0.6344504021,
"autogenerated": false,
"ratio": 4.6625,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.57969504021,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division, print_function
from simdna.synthetic.core import DefaultNameMixin
from simdna.simdnautil import util, pwm
from collections import OrderedDict
import numpy as np
class AbstractLoadedMotifs(object):
"""Class representing loaded PWMs.
A class that contains instances of ``pwm.PWM`` loaded from a file.
The pwms can be accessed by name.
Arguments:
loadedMotifs: dictionary mapping names of motifs
to instances of ``pwm.PWM``
"""
def __init__(self, loadedMotifs):
self.loadedMotifs = loadedMotifs
    def get_pwm(self, name):
        return self.getPwm(name)
def getPwm(self, name):
"""Get a specific PWM.
Returns:
The ``pwm.PWM`` instance with the specified name.
"""
return self.loadedMotifs[name]
def add_motifs(self, abstractLoadedMotifs):
        return self.addMotifs(abstractLoadedMotifs)
def addMotifs(self, abstractLoadedMotifs):
"""Adds the motifs in abstractLoadedMotifs to this.
Arguments:
abstractLoadedMotifs: instance of :class:`.AbstractLoadedMotifs`
Returns:
self, as a convenience
"""
self.loadedMotifs.update(abstractLoadedMotifs.loadedMotifs)
return self #convenience return
class AbstractLoadedMotifsFromFile(AbstractLoadedMotifs):
"""Class representing loaded PWMs.
A class that contains instances of ``pwm.PWM`` loaded from a file.
The pwms can be accessed by name.
Arguments:
fileName: string, the path to the file to load
pseudocountProb: if some of the pwms have 0 probability for\
some of the positions, will add the specified ``pseudocountProb``\
to the rows of the pwm and renormalise.
"""
def __init__(self, fileName,
pseudocountProb=0.0):
self.fileName = fileName
fileHandle = util.get_file_handle(fileName)
self.pseudocountProb = pseudocountProb
self.loadedMotifs = OrderedDict()
action = self.getReadPwmAction(self.loadedMotifs)
util.perform_action_on_each_line_of_file(
file_handle=fileHandle,
action=action,
transformation=util.trim_newline
)
        for loadedPwm in self.loadedMotifs.values():
            loadedPwm.finalise(pseudocountProb=self.pseudocountProb)
super(AbstractLoadedMotifsFromFile, self).__init__(self.loadedMotifs)
def getReadPwmAction(self, loadedMotifs):
"""Action performed when each line of the pwm text file is read in.
This function is to be overridden by a specific implementation.
It is executed on each line of the file when it is read in, and
when PWMs are ready they will get inserted into ``loadedMotifs``.
Arguments:
loadedMotifs: an ``OrderedDict`` that will be filled with PWMs.
The keys will be the names of the PWMs and the
values will be instances of ``pwm.PWM``
"""
raise NotImplementedError()
class LoadedEncodeMotifs(AbstractLoadedMotifsFromFile):
"""A class for reading in a motifs file in the ENCODE motifs format.
This class is specifically for reading files in the encode motif
format - specifically the motifs.txt file that contains Pouya's motifs
(http://compbio.mit.edu/encode-motifs/motifs.txt)
Basically, the motif declarations start with a >, the first
characters after > until the first space are taken as the motif name,
the lines after the line with a > have the format:
"<ignored character> <prob of A> <prob of C> <prob of G> <prob of T>"
"""
def getReadPwmAction(self, loadedMotifs):
"""See superclass.
"""
currentPwm = util.VariableWrapper(None)
def action(inp, lineNumber):
if (inp.startswith(">")):
inp = inp.lstrip(">")
inpArr = inp.split()
motifName = inpArr[0]
currentPwm.var = pwm.PWM(motifName)
loadedMotifs[currentPwm.var.name] = currentPwm.var
else:
# assume that it's a line of the pwm
assert currentPwm.var is not None
inpArr = inp.split()
summaryLetter = inpArr[0]
currentPwm.var.addRow([float(x) for x in inpArr[1:]])
return action
class LoadedHomerMotifs(AbstractLoadedMotifsFromFile):
"""A class for reading in a motifs file in the Homer motifs format.
Eg: HOCOMOCOv10_HUMAN_mono_homer_format_0.001.motif in resources
"""
def getReadPwmAction(self, loadedMotifs):
"""See superclass.
"""
currentPwm = util.VariableWrapper(None)
def action(inp, lineNumber):
if (inp.startswith(">")):
inp = inp.lstrip(">")
inpArr = inp.split()
motifName = inpArr[1]
currentPwm.var = pwm.PWM(motifName)
loadedMotifs[currentPwm.var.name] = currentPwm.var
else:
# assume that it's a line of the pwm
assert currentPwm.var is not None
inpArr = inp.split()
currentPwm.var.addRow([float(x) for x in inpArr[0:]])
return action
class LoadedJasparRawPFMMotifs(AbstractLoadedMotifsFromFile):
"""A class for reading in a motifs file in the Jaspar Raw motifs format.
This class is specifically for reading files in the Jaspar motif
format.
Basically, the motif declarations start with a >, the first
characters after > until the first space are taken as the motif name,
the lines after the line with a > have the format:
    P(A) "<nuc 1> <nuc 2> ... <nuc n>"
    P(C) "<nuc 1> <nuc 2> ... <nuc n>"
    P(G) "<nuc 1> <nuc 2> ... <nuc n>"
    P(T) "<nuc 1> <nuc 2> ... <nuc n>"
Likely if you need to load your own motifs you will use this import
script.
"""
def getReadPwmAction(self, loadedMotifs):
"""See superclass.
"""
currentPwm = util.VariableWrapper(None)
_pwm = []
def action(inp, lineNumber):
if (inp.startswith(">")):
inp = inp.lstrip(">")
inpArr = inp.split()
motifName = inpArr[1]
while len(_pwm) > 0:
_pwm.pop()
currentPwm.var = pwm.PWM(motifName)
loadedMotifs[currentPwm.var.name] = currentPwm.var
else:
# assume that it's a line of the pwm
assert currentPwm.var is not None
inpArr = inp.split()
_pwm.append([float(x) for x in inpArr])
if len(_pwm) == 4:
arr = np.array(_pwm).T
arr /= arr.sum(axis=1, keepdims=True)
currentPwm.var.addRows(arr.tolist())
return action
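# Illustrative usage sketch for the loaders above (an addition for
# demonstration, not part of the original module). The file path, pseudocount
# and motif name are assumed example values; substitute a motifs file you
# actually have. getPwm is assumed from the AbstractLoadedMotifs base class,
# as used elsewhere in this package.
def _example_load_encode_motifs():
    loaded_motifs = LoadedEncodeMotifs("motifs.txt", pseudocountProb=0.001)
    return loaded_motifs.getPwm("CTCF_known1")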
| {
"repo_name": "kundajelab/simdna",
"path": "simdna/synthetic/loadedmotifs.py",
"copies": "1",
"size": "6919",
"license": "mit",
"hash": -9013671443212253000,
"line_mean": 34.8497409326,
"line_max": 77,
"alpha_frac": 0.6077467842,
"autogenerated": false,
"ratio": 3.876190476190476,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.49839372603904764,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division, print_function
from simdna.synthetic.core import DefaultNameMixin
from simdna.simdnautil import util, pwm
from simdna import random
from collections import OrderedDict
import numpy as np
import sys
class AbstractSubstringGenerator(DefaultNameMixin):
"""
Generates a substring, usually for embedding in a background sequence.
"""
def generate_substring(self):
        return self.generateSubstring()
def generateSubstring(self):
"""
Return:
A tuple of ``(string, stringDescription)``; the result can be
wrapped in an instance of :class:`.StringEmbeddable`.
``stringDescription`` is a short descriptor that does not contain
spaces and may be prefixed in front of string when generating
the __str__ representation for :class:`.StringEmbeddable`.
"""
raise NotImplementedError()
def get_jsonable_object(self):
        return self.getJsonableObject()
def getJsonableObject(self):
"""Get JSON object representation.
Returns:
A json-friendly object (built of dictionaries, lists and
python primitives), which can be converted to json to
            record the exact details of what was simulated.
"""
raise NotImplementedError()
class FixedSubstringGenerator(AbstractSubstringGenerator):
"""Generates the same string every time.
When generateSubstring() is called, always returns the same string.
The string also serves as its own description
Arguments:
fixedSubstring: the string to be generated
name: see :class:`.DefaultNameMixin`
"""
def __init__(self, fixedSubstring, name=None):
self.fixedSubstring = fixedSubstring
super(FixedSubstringGenerator, self).__init__(name)
def generateSubstring(self):
"""See superclass.
"""
return self.fixedSubstring, self.fixedSubstring
def getJsonableObject(self):
"""See superclass.
"""
return "fixedSubstring-" + self.fixedSubstring
class ReverseComplementWrapper(AbstractSubstringGenerator):
"""Reverse complements a string with a specified probability.
Wrapper around an instance of
:class:`.AbstractSubstringGenerator` that reverse complements the
generated string with a specified probability.
Arguments:
substringGenerator: instance of `.AbstractSubstringGenerator`
reverseComplementProb: probability of reverse complementation.
Defaults to 0.5.
name: see :class:`.DefaultNameMixin`.
"""
def __init__(self, substringGenerator, reverseComplementProb=0.5, name=None):
self.reverseComplementProb = reverseComplementProb
self.substringGenerator = substringGenerator
super(ReverseComplementWrapper, self).__init__(name)
def generateSubstring(self):
seq, seqDescription = self.substringGenerator.generateSubstring()
if (random.random() < self.reverseComplementProb):
seq = util.reverseComplement(seq)
seqDescription = "revComp-" + seqDescription
return seq, seqDescription
def getJsonableObject(self):
"""See superclass.
"""
return OrderedDict([
("class", "ReverseComplementWrapper"),
("reverseComplementProb", self.reverseComplementProb),
("substringGenerator", self.substringGenerator.getJsonableObject())])
class PwmSampler(AbstractSubstringGenerator):
"""Samples from a pwm by calling ``self.pwm.sampleFromPwm``
Arguments:
pwm: an instance of ``pwm.PWM``
name: see :class:`.DefaultNameMixin`
"""
def __init__(self, pwm, name=None, bg=None, minScore=None):
self.pwm = pwm
assert ((bg is None and minScore is None)
or (bg is not None and minScore is not None)),\
"bg should be specified iff minScore is specified"
self.bg = bg
self.minScore = minScore
super(PwmSampler, self).__init__(name)
def generateSubstring(self):
"""See superclass.
"""
if (self.minScore is not None):
tries = 0
sampled_pwm_score = -np.inf
while sampled_pwm_score <= self.minScore:
sampled_pwm, sampled_pwm_score =\
self.pwm.sampleFromPwmAndScore(bg=self.bg)
sys.stdout.flush()
tries += 1
if tries % 10 == 0:
print("Warning: spent " + str(tries) + " tries trying to " +
" sample a pwm " + str(self.pwm.name) +
" with min score " + str(self.minScore))
sys.stdout.flush()
if tries >= 50:
raise RuntimeError("Terminated loop due to too many tries")
return sampled_pwm, (self.pwm.name+"-score_"
+str(round(sampled_pwm_score,2)))
else:
return self.pwm.sampleFromPwm(), self.pwm.name
def getJsonableObject(self):
"""See superclass.
"""
return OrderedDict([
("class", "PwmSampler"),
("motifName", self.pwm.name),
("bg", self.bg),
("minScore", self.minScore)])
class PwmSamplerFromLoadedMotifs(PwmSampler):
"""Instantiates a :class:`.PwmSampler` from an
:class:`.AbstractLoadedMotifs` object.
Convenience wrapper class for instantiating :class:`.PwmSampler`
by pulling the pwm.PWM object using the provided name
from an :class:`.AbstractLoadedMotifs` object
Arguments:
loadedMotifs: instance of :class:`.AbstractLoadedMotifs`
motifName: string, name of a motif in :class:`.AbstractLoadedMotifs`
name: see :class:`.DefaultNameMixin`
"""
def __init__(self, loadedMotifs, motifName, name=None,
bg=None, minScore=None):
self.loadedMotifs = loadedMotifs
super(PwmSamplerFromLoadedMotifs, self).__init__(
loadedMotifs.getPwm(motifName), name, bg=bg, minScore=minScore)
def getJsonableObject(self):
"""See superclass.
"""
obj = super(PwmSamplerFromLoadedMotifs, self).getJsonableObject()
return obj
class BestHitPwm(AbstractSubstringGenerator):
"""Always return the best possible match to a ``pwm.PWM`` when called.
Arguments:
pwm: an instance of ``pwm.PWM``
name: see :class:`.DefaultNameMixin`
"""
def __init__(self, pwm, name=None):
self.pwm = pwm
super(BestHitPwm, self).__init__(name)
def generateSubstring(self):
"""See superclass.
"""
return self.pwm.getBestHit(), self.pwm.name
def getJsonableObject(self):
"""See superclass.
"""
return OrderedDict([("class", "BestHitPwm"), ("pwm", self.pwm.name)])
class BestHitPwmFromLoadedMotifs(BestHitPwm):
"""Instantiates :class:`BestHitPwm` using a :class:`.LoadedMotifs` file.
Analogous to :class:`.PwmSamplerFromLoadedMotifs`.
"""
def __init__(self, loadedMotifs, motifName, name=None):
self.loadedMotifs = loadedMotifs
super(BestHitPwmFromLoadedMotifs, self).__init__(
loadedMotifs.getPwm(motifName), name)
def getJsonableObject(self):
"""See superclass.
"""
obj = super(BestHitPwmFromLoadedMotifs, self).getJsonableObject()
return obj
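# Illustrative sketch combining the generators above (demonstration only):
# sample a named motif from an AbstractLoadedMotifs collection and reverse
# complement the sample half of the time. ``loaded_motifs`` and the default
# motif name are assumptions supplied by the caller.
def _example_revcomp_motif_sampler(loaded_motifs, motif_name="CTCF_known1"):
    sampler = PwmSamplerFromLoadedMotifs(loaded_motifs, motif_name)
    rc_sampler = ReverseComplementWrapper(sampler, reverseComplementProb=0.5)
    return rc_sampler.generateSubstring()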
| {
"repo_name": "kundajelab/simdna",
"path": "simdna/synthetic/substringgen.py",
"copies": "1",
"size": "7492",
"license": "mit",
"hash": -3446624707638457300,
"line_mean": 32.2977777778,
"line_max": 83,
"alpha_frac": 0.6280032034,
"autogenerated": false,
"ratio": 4.232768361581921,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5360771564981921,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division, print_function
from simdna.synthetic.core import DefaultNameMixin
from simdna.synthetic.embeddables import StringEmbeddable
from simdna.synthetic.substringgen import AbstractSubstringGenerator
from simdna.synthetic.embeddables import PairEmbeddable
from collections import OrderedDict
class AbstractEmbeddableGenerator(DefaultNameMixin):
"""Generates an embeddable, usually for embedding in a background sequence.
"""
def generate_embeddable(self):
        return self.generateEmbeddable()
def generateEmbeddable(self):
"""Generate an embeddable object.
Returns:
An instance of :class:`AbstractEmbeddable`
"""
raise NotImplementedError()
def get_jsonable_object(self):
        return self.getJsonableObject()
def getJsonableObject(self):
"""Get JSON object representation.
Returns:
A json-friendly object (built of dictionaries, lists and
python primitives), which can be converted to json to
            record the exact details of what was simulated.
"""
raise NotImplementedError()
class PairEmbeddableGenerator(AbstractEmbeddableGenerator):
"""Embed a pair of embeddables with some separation.
Arguments:
        embeddableGenerator1: instance of\
:class:`.AbstractEmbeddableGenerator`. If an
:class:`.AbstractSubstringGenerator` is provided, will be wrapped in\
an instance of :class:`.SubstringEmbeddableGenerator`.
embeddableGenerator2: same type information as for\
``embeddableGenerator1``
separationGenerator: instance of\
:class:`.AbstractQuantityGenerator`
name: string, see :class:`DefaultNameMixin`
"""
def __init__(self, embeddableGenerator1,
embeddableGenerator2, separationGenerator, name=None):
if isinstance(embeddableGenerator1, AbstractSubstringGenerator):
embeddableGenerator1 =\
SubstringEmbeddableGenerator(embeddableGenerator1)
if (isinstance(embeddableGenerator2, AbstractSubstringGenerator)):
embeddableGenerator2 =\
SubstringEmbeddableGenerator(embeddableGenerator2)
self.embeddableGenerator1 = embeddableGenerator1
self.embeddableGenerator2 = embeddableGenerator2
self.separationGenerator = separationGenerator
super(PairEmbeddableGenerator, self).__init__(name)
def generateEmbeddable(self):
"""See superclass.
"""
embeddable1 = self.embeddableGenerator1.generateEmbeddable()
embeddable2 = self.embeddableGenerator2.generateEmbeddable()
return PairEmbeddable(
embeddable1=embeddable1, embeddable2=embeddable2,
separation=self.separationGenerator.generateQuantity()
)
def getJsonableObject(self):
"""See superclass.
"""
return OrderedDict([
("class", "PairEmbeddableGenerator"),
("embeddableGenerator1", self.embeddableGenerator1.getJsonableObject()),
("embeddableenerator2", self.embeddableGenerator2.getJsonableObject()),
("separationGenerator", self.separationGenerator.getJsonableObject())])
class SubstringEmbeddableGenerator(AbstractEmbeddableGenerator):
"""Generates a :class:`.StringEmbeddable`
Calls ``substringGenerator``, wraps the result in
a :class:`.StringEmbeddable` and returns it.
Arguments:
substringGenerator: instance of :class:`.AbstractSubstringGenerator`
"""
def __init__(self, substringGenerator, name=None):
self.substringGenerator = substringGenerator
super(SubstringEmbeddableGenerator, self).__init__(name)
def generateEmbeddable(self):
substring, substringDescription =\
self.substringGenerator.generateSubstring()
return StringEmbeddable(substring, substringDescription)
def getJsonableObject(self):
"""See superclass.
"""
return OrderedDict([("class", "SubstringEmbeddableGenerator"),
("substringGenerator", self.substringGenerator.getJsonableObject())])
| {
"repo_name": "kundajelab/simdna",
"path": "simdna/synthetic/embeddablegen.py",
"copies": "1",
"size": "4129",
"license": "mit",
"hash": -5976434584300355000,
"line_mean": 36.5363636364,
"line_max": 79,
"alpha_frac": 0.704771131,
"autogenerated": false,
"ratio": 4.439784946236559,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5644556077236559,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division, print_function
from simdna.synthetic.core import DefaultNameMixin
from simdna.synthetic.positiongen import uniformPositionGenerator
from simdna.synthetic.embeddablegen import SubstringEmbeddableGenerator
from simdna.synthetic.quantitygen import (AbstractQuantityGenerator,
                                          FixedQuantityGenerator)
from simdna.simdnautil import util
from collections import OrderedDict
from simdna import random
class AbstractEmbedder(DefaultNameMixin):
"""Produces :class:`AbstractEmbeddable` objects and
embeds them in a sequence.
"""
def embed(self, backgroundStringArr, priorEmbeddedThings, additionalInfo=None):
"""Embeds things in the provided ``backgroundStringArr``.
Modifies backgroundStringArr to include whatever has been embedded.
Arguments:
backgroundStringArr: array of characters\
representing the background string
priorEmbeddedThings: instance of\
:class:`.AbstractPriorEmbeddedThings`
additionalInfo: instance of :class:`.AdditionalInfo`;\
allows the embedder to send back info about what it did
Returns:
            The modified ``backgroundStringArr``
"""
if (additionalInfo is not None):
additionalInfo.updateTrace(self.name)
return self._embed(backgroundStringArr, priorEmbeddedThings, additionalInfo)
def _embed(self, backgroundStringArr, priorEmbeddedThings, additionalInfo):
"""The actual implementation of _embed to be overridden by
the subclass.
See docs for :func:`.AbstractEmbedder.embed`
"""
raise NotImplementedError()
def get_jsonable_object(self):
        return self.getJsonableObject()
def getJsonableObject(self):
"""Get JSON object representation.
Returns:
A json-friendly object (built of dictionaries, lists and
python primitives), which can be converted to json to
            record the exact details of what was simulated.
"""
raise NotImplementedError()
class EmbeddableEmbedder(AbstractEmbedder):
"""Embeds an instance of :class:`.AbstractEmbeddable` at a sampled pos.
Embeds instances of :class:`.AbstractEmbeddable` within the
background sequence, at a position sampled from a distribution.
Only embeds at unoccupied positions.
Arguments:
embeddableGenerator: instance of :class:`.AbstractEmbeddableGenerator`
positionGenerator: instance of :class:`.AbstractPositionGenerator`
"""
def __init__(self, embeddableGenerator,
positionGenerator=uniformPositionGenerator, name=None):
self.embeddableGenerator = embeddableGenerator
self.positionGenerator = positionGenerator
super(EmbeddableEmbedder, self).__init__(name)
def _embed(self, backgroundStringArr, priorEmbeddedThings, additionalInfo):
"""See superclass.
Calls self.embeddableGenerator to determine the
embeddable to embed. Then calls self.positionGenerator to
determine the start position at which to embed it.
If the position is occupied, will resample from
        ``self.positionGenerator``. Will warn if it has to
        resample too many times.
"""
embeddable = self.embeddableGenerator.generateEmbeddable()
canEmbed = False
tries = 0
while not canEmbed:
tries += 1
if isinstance(backgroundStringArr[0], list):
len_bsa = len(backgroundStringArr[0])
else:
len_bsa = len(backgroundStringArr)
startPos = self.positionGenerator.generatePos(
len_bsa, len(embeddable), additionalInfo)
canEmbed = embeddable.canEmbed(priorEmbeddedThings, startPos)
if tries % 10 == 0:
print("Warning: made " + str(tries) +
" attemps at trying to embed " + str(embeddable) +
" in region of length " + str(priorEmbeddedThings.getTotalPos()) +
" with " + str(priorEmbeddedThings.getNumOccupiedPos()) + " occupied sites")
embeddable.embedInBackgroundStringArr(
priorEmbeddedThings, backgroundStringArr, startPos)
def getJsonableObject(self):
"""See superclass.
"""
return OrderedDict([("embeddableGenerator", self.embeddableGenerator.getJsonableObject()),
("positionGenerator", self.positionGenerator.getJsonableObject())])
class SubstringEmbedder(EmbeddableEmbedder):
"""Used to embed substrings.
Embeds a single generated substring within the background sequence,
at a position sampled from a distribution. Only embeds at unoccupied
positions
Arguments:
substringGenerator: instance of :class:`.AbstractSubstringGenerator`
positionGenerator: instance of :class:`.AbstractPositionGenerator`
name: see :class:`.DefaultNameMixin`.
"""
def __init__(self, substringGenerator, positionGenerator=uniformPositionGenerator, name=None):
super(SubstringEmbedder, self).__init__(
SubstringEmbeddableGenerator(substringGenerator), positionGenerator, name)
class XOREmbedder(AbstractEmbedder):
"""Calls exactly one of the supplied embedders.
Arguments:
embedder1: instance of :class:`.AbstractEmbedder`
embedder2: instance of :class:`.AbstractEmbedder`
probOfFirst: probability of calling the first embedder
"""
def __init__(self, embedder1, embedder2, probOfFirst, name=None):
self.embedder1 = embedder1
self.embedder2 = embedder2
self.probOfFirst = probOfFirst
super(XOREmbedder, self).__init__(name)
def _embed(self, backgroundStringArr, priorEmbeddedThings, additionalInfo):
"""See superclass.
"""
if (random.random() < self.probOfFirst):
embedder = self.embedder1
else:
embedder = self.embedder2
return embedder.embed(backgroundStringArr, priorEmbeddedThings, additionalInfo)
def getJsonableObject(self):
"""See superclass.
"""
return OrderedDict([("class", "XOREmbedder"), ("embedder1", self.embedder1.getJsonableObject()), ("embedder2", self.embedder2.getJsonableObject()), ("probOfFirst", self.probOfFirst)])
class AllEmbedders(AbstractEmbedder):
"""Wrapper around a list of embedders that calls each one in turn.
Useful to nest under a :class:`.RandomSubsetOfEmbedders`
Arguments:
embedders: an iterable of :class:`.AbstractEmbedder` objects.
"""
def __init__(self, embedders, name=None):
self.embedders = embedders
super(AllEmbedders, self).__init__(name)
def _embed(self, backgroundStringArr, priorEmbeddedThings, additionalInfo):
"""See superclass.
"""
for embedder in self.embedders:
embedder.embed(backgroundStringArr,
priorEmbeddedThings, additionalInfo)
def getJsonableObject(self):
"""See superclass.
"""
return OrderedDict([("class", "AllEmbedders"),
("embedders",
[x.getJsonableObject() for x in self.embedders])
])
class RandomSubsetOfEmbedders(AbstractEmbedder):
"""Call some random subset of supplied embedders.
Takes a quantity generator that generates a quantity of
embedders, and executes that many embedders from a supplied set,
in sequence
Arguments:
quantityGenerator: instance of :class:`.AbstractQuantityGenerator`
embedders: a list of :class:`.AbstractEmbedder` objects
"""
def __init__(self, quantityGenerator, embedders, name=None):
if (isinstance(quantityGenerator, int)):
quantityGenerator = FixedQuantityGenerator(quantityGenerator)
assert isinstance(quantityGenerator, AbstractQuantityGenerator)
self.quantityGenerator = quantityGenerator
self.embedders = embedders
super(RandomSubsetOfEmbedders, self).__init__(name)
def _embed(self, backgroundStringArr, priorEmbeddedThings, additionalInfo):
"""See superclass.
"""
numberOfEmbeddersToSample = self.quantityGenerator.generateQuantity()
if (numberOfEmbeddersToSample > len(self.embedders)):
raise RuntimeError("numberOfEmbeddersToSample came up as " + str(
numberOfEmbeddersToSample) + " but total number of embedders is " + str(len(self.embedders)))
sampledEmbedders = util.sampleWithoutReplacement(
self.embedders, numberOfEmbeddersToSample)
for embedder in sampledEmbedders:
embedder.embed(backgroundStringArr,
priorEmbeddedThings, additionalInfo)
def getJsonableObject(self):
"""See superclass.
"""
return OrderedDict([("class", "RandomSubsetOfEmbedders"), ("setOfEmbedders", [x.getJsonableObject() for x in self.embedders])])
class RepeatedEmbedder(AbstractEmbedder):
"""Call an embedded multiple times.
Wrapper around an embedder to call it multiple times according to samples
from a distribution. First calls ``self.quantityGenerator`` to get the
quantity, then calls ``self.embedder`` a number of times equal
to the value returned.
Arguments:
embedder: instance of :class:`.AbstractEmbedder`
quantityGenerator: instance of :class:`.AbstractQuantityGenerator`
"""
def __init__(self, embedder, quantityGenerator, name=None):
self.embedder = embedder
self.quantityGenerator = quantityGenerator
super(RepeatedEmbedder, self).__init__(name)
def _embed(self, backgroundStringArr, priorEmbeddedThings, additionalInfo):
"""See superclass.
"""
quantity = self.quantityGenerator.generateQuantity()
for i in range(quantity):
self.embedder.embed(backgroundStringArr,
priorEmbeddedThings, additionalInfo)
def getJsonableObject(self):
"""See superclass.
"""
return OrderedDict([("class", "RepeatedEmbedder"), ("embedder", self.embedder.getJsonableObject()), ("quantityGenerator", self.quantityGenerator.getJsonableObject())])
| {
"repo_name": "kundajelab/simdna",
"path": "simdna/synthetic/embedders.py",
"copies": "1",
"size": "10323",
"license": "mit",
"hash": -422042575088968060,
"line_mean": 37.9547169811,
"line_max": 191,
"alpha_frac": 0.6719945752,
"autogenerated": false,
"ratio": 4.612600536193029,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.000738181493674267,
"num_lines": 265
} |
from __future__ import absolute_import, division, print_function
from simdna.synthetic.core import DefaultNameMixin
from simdna.synthetic.substringgen import AbstractSubstringGenerator
from collections import OrderedDict
import random
class TransformedSubstringGenerator(AbstractSubstringGenerator):
"""Generates a substring and applies a series of transformations.
Takes a substringGenerator and a set of AbstractTransformation objects,
applies the transformations to the generated substring
Arguments:
substringGenerator: instance of :class:`.AbstractSubstringGenerator`
transformations: an iterable of :class:`.AbstractTransformation`
transformationsDescription: a string that will be prefixed in\
front of ``substringDescription`` (generated by\
``substringGenerator.generateSubstring())`` to produce the\
``stringDescription``.
name: see :class:`.DefaultNameMixin`.
"""
def __init__(self, substringGenerator, transformations,
transformationsDescription="transformations",
name=None):
self.substringGenerator = substringGenerator
self.transformations = transformations
self.transformationsDescription = transformationsDescription
        super(TransformedSubstringGenerator, self).__init__(name)
def generateSubstring(self):
"""See superclass.
"""
substring, substringDescription = self.substringGenerator.generateSubstring()
baseSubstringArr = [x for x in substring]
for transformation in self.transformations:
baseSubstringArr = transformation.transform(baseSubstringArr)
return "".join(baseSubstringArr), self.transformationsDescription + "-" + substringDescription
def getJsonableObject(self):
"""See superclass.
"""
return OrderedDict([("class", "TransformedSubstringGenerator"),
("substringGenerator", self.substringGenerator.getJsonableObject()),
("transformations", [x.getJsonableObject() for x in self.transformations])
])
class AbstractTransformation(DefaultNameMixin):
"""Class representing a transformation applied to a character array.
Takes an array of characters, applies some transformation.
"""
def transform(self, stringArr):
"""Applies a transformation to stringArr.
Arguments:
stringArr: an array of characters.
Returns:
An array of characters that has the transformation applied.
May mutate ``stringArr``
"""
raise NotImplementedError()
def getJsonableObject(self):
"""Get JSON object representation.
Returns:
A json-friendly object (built of dictionaries, lists and
python primitives), which can be converted to json to
            record the exact details of what was simulated.
"""
raise NotImplementedError()
class RevertToReference(AbstractTransformation):
"""For a series of mutations, reverts the supplied character
to the reference ("unmutated") string.
Arguments:
setOfMutations: instance of AbstractSetOfMutations
name: see :class:`.DefaultNameMixin`.
"""
def __init__(self, setOfMutations, name=None):
self.setOfMutations = setOfMutations
super(RevertToReference, self).__init__(name)
def transform(self, stringArr):
"""See superclass.
"""
for mutation in self.setOfMutations.getMutationsArr():
mutation.revert(stringArr)
return stringArr
def getJsonableObject(self):
"""See superclass.
"""
return OrderedDict([
("class", "RevertToReference"),
("setOfMutations", self.setOfMutations.getJsonableObject())])
class AbstractApplySingleMutationFromSet(AbstractTransformation):
"""
Class for applying a single mutation from a set of mutations; used
to transform substrings generated by another method
Arguments:
setOfMutations: instance of :class:`.AbstractSetOfMutations`
name: see :class:`.DefaultNameMixin`.
"""
def __init__(self, setOfMutations, name=None):
self.setOfMutations = setOfMutations
super(AbstractApplySingleMutationFromSet, self).__init__(name)
def transform(self, stringArr):
"""See superclass.
"""
selectedMutation = self.selectMutation()
selectedMutation.applyMutation(stringArr)
return stringArr
def selectMutation(self):
"""Chooses a mutation from the set of mutations to apply.
Returns:
an instance of :class:`.Mutation`
"""
raise NotImplementedError()
def getJsonableObject(self):
"""See superclass.
"""
return OrderedDict([
("class", type(self).__name__),
("selectedMutations", self.setOfMutations.getJsonableObject())])
class Mutation(object):
"""Represent a single bp mutation in a motif sequence.
Useful for creating simulations involving SNPs.
Arguments:
index: the position idx within the motif of the mutation
previous: character, the previous base at this position
new: character, the new base at this position after the mutation
parentLength: optional; length of the motif. Used for assertion checks.
"""
def __init__(self, index, previous, new, parentLength=None):
self.index = index
assert previous != new
self.previous = previous
self.new = new
# the length of the full sequence that self.index indexes into
self.parentLength = parentLength
def parentLengthAssertionCheck(self, stringArr):
"""Checks that stringArr is consistent with parentLength if defined.
"""
assert self.parentLength is None or len(stringArr) == self.parentLength
def revert(self, stringArr):
"""Set the base at the position of the mutation to the unmutated value.
Modifies stringArr which is an array of characters.
Arguments:
stringArr: an array of characters, which gets modified.
"""
self.parentLengthAssertionCheck(stringArr)
stringArr[self.index] = self.previous
def applyMutation(self, stringArr):
"""Set the base at the position of the mutation to the mutated value.
Modifies stringArr which is an array of characters.
Arguments:
stringArr: an array of characters, which gets modified.
"""
self.parentLengthAssertionCheck(stringArr)
assert stringArr[self.index] == self.previous
stringArr[self.index] = self.new
class ChooseMutationAtRandom(AbstractApplySingleMutationFromSet):
"""Selects a mutation at random from self.setOfMutations to apply.
"""
def selectMutation(self):
mutationsArr = self.setOfMutations.getMutationsArr()
return mutationsArr[int(random.random() * len(mutationsArr))]
class AbstractSetOfMutations(object):
"""Represents a collection of :class:`.Mutation` objects.
Arguments:
mutationsArr: array of :class:`.Mutation` objects
"""
def __init__(self, mutationsArr):
self.mutationsArr = mutationsArr
def getMutationsArr(self):
"""Returns ``self.mutationsArr``
Returns:
``self.mutationsArr``
"""
return self.mutationsArr
def getJsonableObject(self):
"""Get JSON object representation.
Returns:
A json-friendly object (built of dictionaries, lists and
python primitives), which can be converted to json to
            record the exact details of what was simulated.
"""
raise NotImplementedError()
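# Illustrative sketch of the mutation classes above (demonstration only):
# define two possible single-base mutations for a 4bp string and apply one
# of them at random. The positions and bases are assumed example values.
def _example_apply_random_mutation():
    mutation_set = AbstractSetOfMutations([
        Mutation(index=1, previous="A", new="G", parentLength=4),
        Mutation(index=3, previous="T", new="C", parentLength=4)])
    transformation = ChooseMutationAtRandom(mutation_set)
    string_arr = list("TACT")
    return "".join(transformation.transform(string_arr))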
| {
"repo_name": "kundajelab/simdna",
"path": "simdna/synthetic/mutation.py",
"copies": "1",
"size": "7835",
"license": "mit",
"hash": -630949723478577700,
"line_mean": 32.3404255319,
"line_max": 102,
"alpha_frac": 0.6667517549,
"autogenerated": false,
"ratio": 4.797917942437232,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5964669697337233,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division, print_function
from simdna.synthetic.core import DefaultNameMixin
import re
class AbstractEmbeddable(object):
"""Represents a thing which can be embedded.
An :class:`.AbstractEmbeddable` + a position = an :class:`.Embedding`
"""
def __len__(self):
raise NotImplementedError()
def __str__(self):
raise NotImplementedError()
def get_description(self):
return self.getDescription()
def getDescription(self):
"""Return a concise description of the embeddable.
This should be concise and shouldn't contain spaces. It will often
        be used when generating the __str__ representation of the embeddable.
"""
raise NotImplementedError()
def can_embed(self, priorEmbeddedThings, startPos):
return self.canEmbed(priorEmbeddedThings, startPos)
def canEmbed(self, priorEmbeddedThings, startPos):
"""Checks whether embedding is possible at a given pos.
Accepts an instance of :class:`AbstractPriorEmbeddedThings` and
a ``startPos``, and checks if ``startPos`` is viable given the
contents of ``priorEmbeddedThings``.
Arguments:
priorEmbeddedThings: instance of
:class:`AbstractPriorEmbeddedThings`
startPos: int; the position you are considering embedding self at
Returns:
A boolean indicating whether self can be embedded at startPos,
given the things that have already been embedded.
"""
raise NotImplementedError()
def embed_in_background_string_arr(self, priorEmbeddedThings, backgroundStringArr, startPos):
self.embedInBackgroundStringArr(priorEmbeddedThings, backgroundStringArr, startPos)
def embedInBackgroundStringArr(self, priorEmbeddedThings, backgroundStringArr, startPos):
"""Embed self in a background string.
Will embed self at ``startPos`` in ``backgroundStringArr``,
and will update ``priorEmbeddedThings`` accordingly.
Arguments:
priorEmbeddedThings: instance of
:class:`AbstractPriorEmbeddedThings`
backgroundStringArr: an array of characters representing
the background
startPos: integer; the position to embed self at
"""
raise NotImplementedError()
@classmethod
def from_string(cls, theString):
        return cls.fromString(theString)
@classmethod
def fromString(cls, theString):
"""Generate an instance of the embeddable from the provided string.
"""
raise NotImplementedError()
class StringEmbeddable(AbstractEmbeddable):
"""A string that is to be embedded in a background.
Represents a string (such as a sampling from a pwm) that is to
be embedded in a background. See docs for superclass.
Arguments:
string: the core string to be embedded
stringDescription: a short descriptor prefixed before the\
``__str__`` representation of the embeddable.\
Should not contain a hyphen. Defaults to "".
"""
def __init__(self, string, stringDescription=""):
self.string = string
self.stringDescription = stringDescription
def __len__(self):
return len(self.string)
def __str__(self):
return self.stringDescription + ("-" if self.stringDescription != "" else "") + self.string
def getDescription(self):
"""See superclass.
"""
return self.stringDescription
def canEmbed(self, priorEmbeddedThings, startPos):
"""See superclass.
"""
return priorEmbeddedThings.canEmbed(startPos, startPos + len(self.string))
def embedInBackgroundStringArr(self, priorEmbeddedThings,
backgroundStringArr, startPos):
"""See superclass.
"""
if isinstance(backgroundStringArr[0], list):
len_bsa = len(backgroundStringArr[0])
else:
len_bsa = len(backgroundStringArr)
positions_left = len_bsa - startPos
if (positions_left < len(self.string)):
print("Warning: length of background is "
                  +str(len_bsa)
+" but was asked to embed string of length "
+str(len(self.string))+" at position "
+str(startPos)+"; truncating")
string_to_embed = self.string[:positions_left]
else:
string_to_embed = self.string
if isinstance(backgroundStringArr[0], list):
for i in range(len(backgroundStringArr)):
backgroundStringArr[i][startPos:startPos + len(string_to_embed)] = string_to_embed
else:
backgroundStringArr[startPos:startPos + len(string_to_embed)] = string_to_embed
priorEmbeddedThings.addEmbedding(startPos, self)
@classmethod
def fromString(cls, theString):
"""Generates a StringEmbeddable from the provided string.
Arguments:
theString: string of the format ``stringDescription-coreString``.\
Will then return:\
``StringEmbeddable(string=coreString, stringDescription=stringDescription)``
Returns:
An instance of :class:`.StringEmbeddable`
"""
if ("-" in theString):
p = re.compile(r"((revComp\-)?(.*))\-(.*)$")
m = p.search(theString)
stringDescription = m.group(1)
coreString = m.group(4)
return cls(string=coreString, stringDescription=stringDescription)
else:
return cls(string=theString)
class PairEmbeddable(AbstractEmbeddable):
"""Embed two embeddables with some separation.
Arguments:
embeddable1: instance of :class:`.AbstractEmbeddable`.
First embeddable to be embedded. If a string is provided, will\
be wrapped in :class:`.StringEmbeddable`
embeddable2: second embeddable to be embedded. Type information\
similar to that of ``embeddable1``
separation: int of distance separating embeddable1 and embeddable2
embeddableDescription: a concise descriptive string prefixed in\
front when generating a __str__ representation of the embeddable.\
Should not contain a hyphen.
nothingInBetween: if true, then nothing else is allowed to be\
embedded in the gap between embeddable1 and embeddable2.
"""
def __init__(self, embeddable1, embeddable2, separation,
embeddableDescription="", nothingInBetween=True):
if (isinstance(embeddable1, str)):
embeddable1 = StringEmbeddable(string=embeddable1)
if (isinstance(embeddable2, str)):
embeddable2 = StringEmbeddable(string=embeddable2)
self.embeddable1 = embeddable1
self.embeddable2 = embeddable2
self.separation = separation
self.embeddableDescription = embeddableDescription
self.nothingInBetween = nothingInBetween
def __len__(self):
return len(self.embeddable1) + self.separation + len(self.embeddable2)
def __str__(self):
return self.embeddableDescription +\
("-" if self.embeddableDescription != "" else "") +\
str(self.embeddable1) + "-Gap" + str(self.separation) +\
"-" + str(self.embeddable2)
def getDescription(self):
"""See superclass.
"""
return self.embeddableDescription
def canEmbed(self, priorEmbeddedThings, startPos):
"""See superclass.
"""
if (self.nothingInBetween):
return priorEmbeddedThings.canEmbed(startPos, startPos + len(self))
else:
return (priorEmbeddedThings.canEmbed(startPos, startPos + len(self.embeddable1))
and priorEmbeddedThings.canEmbed(startPos + len(self.embeddable1) + self.separation, startPos + len(self)))
def embedInBackgroundStringArr(self, priorEmbeddedThings,
backgroundStringArr, startPos):
"""See superclass.
If ``self.nothingInBetween``, then all the intervening positions
between the two embeddables will be marked as occupied. Otherwise,
only the positions occupied by the embeddables will be marked
as occupied.
"""
self.embeddable1.embedInBackgroundStringArr(
priorEmbeddedThings, backgroundStringArr, startPos)
self.embeddable2.embedInBackgroundStringArr(
priorEmbeddedThings, backgroundStringArr,
startPos+len(self.embeddable1)+self.separation)
if (self.nothingInBetween):
priorEmbeddedThings.addEmbedding(startPos, self)
else:
priorEmbeddedThings.addEmbedding(startPos, self.embeddable1)
priorEmbeddedThings.addEmbedding(
startPos + len(self.embeddable1) + self.separation, self.embeddable2)
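# Illustrative round-trip sketch for StringEmbeddable.fromString
# (demonstration only): the description and core string below follow the
# "<description>-<core string>" convention and are assumed example values.
def _example_string_embeddable_roundtrip():
    embeddable = StringEmbeddable("GATTACA", stringDescription="myMotif")
    parsed = StringEmbeddable.fromString(str(embeddable))
    return parsed.string, parsed.getDescription()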
| {
"repo_name": "kundajelab/simdna",
"path": "simdna/synthetic/embeddables.py",
"copies": "1",
"size": "8968",
"license": "mit",
"hash": -6975396854957450000,
"line_mean": 37,
"line_max": 127,
"alpha_frac": 0.6460749331,
"autogenerated": false,
"ratio": 4.387475538160469,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.000984326666082423,
"num_lines": 236
} |
from __future__ import absolute_import, division, print_function
from simdna.synthetic.embedders import AbstractEmbedder
from simdna.synthetic.core import AbstractSequenceSetGenerator
from simdna.simdnautil import util
from simdna.synthetic.core import EmbedInABackground
from simdna.synthetic.backgroundgen import ShuffledBackgroundGenerator
from simdna.synthetic.substringgen import (PwmSamplerFromLoadedMotifs,
ReverseComplementWrapper)
from simdna.synthetic.embeddablegen import SubstringEmbeddableGenerator
from collections import OrderedDict
from simdna import random
def parse_dnase_motif_embedder_string(embedderString, loadedMotifs):
    return parseDnaseMotifEmbedderString(embedderString, loadedMotifs)
def parseDnaseMotifEmbedderString(embedderString, loadedMotifs):
"""Parse a string representing a motif and position
Arguments:
embedderString: of format <motif name>-<position in sequence>
loadedMotifs: instance of :class:`.AbstractLoadedMotifs`
Returns:
An instance of :class:`FixedEmbeddableWithPosEmbedder`
"""
motifName,pos = embedderString.split("-")
pwmSampler = PwmSamplerFromLoadedMotifs(
motifName=motifName,
loadedMotifs=loadedMotifs)
embeddableGenerator = SubstringEmbeddableGenerator(
substringGenerator=
ReverseComplementWrapper(pwmSampler))
return FixedEmbeddableWithPosEmbedder(
embeddableGenerator=embeddableGenerator,
startPos=int(pos))
class DnaseSimulation(AbstractSequenceSetGenerator):
"""Simulation based on a file that details the sequences (which may be
shuffled) and the motifs+positions in the sequences
Arguments:
dnaseSimulationFile: file with a title, and columns:
sequenceName<tab>sequence<tab>motif1-pos1,motif2-pos2...
loadedMotifs: instance of :class:`.AbstractLoadedMotifs`
shuffler: instance of :class:`.AbstractShuffler`
"""
def __init__(self, dnaseSimulationFile, loadedMotifs, shuffler):
self.dnaseSimulationFile = dnaseSimulationFile
self.loadedMotifs = loadedMotifs
self.shuffler=shuffler
def generateSequences(self):
fileHandle = util.get_file_handle(self.dnaseSimulationFile)
for lineNumber, line in enumerate(fileHandle):
if (lineNumber > 0): #ignore title
inp = util.default_tab_seppd(line)
sequenceName = inp[0]
backgroundGenerator = ShuffledBackgroundGenerator(
string=inp[1], shuffler=self.shuffler)
embedders = [parseDnaseMotifEmbedderString(
embedderString, self.loadedMotifs)
for embedderString in inp[2].split(",")
if len(embedderString) > 0]
yield SingleDnaseSequenceGenerator(
backgroundGenerator=backgroundGenerator,
dnaseMotifEmbedders=embedders,
sequenceName=sequenceName).generateSequence()
def getJsonableObject(self):
"""See superclass
"""
return OrderedDict(
[('dnaseSimulationFile', self.dnaseSimulationFile),
('shuffler', self.shuffler.getJsonableObject())])
class SingleDnaseSequenceGenerator(object):
def __init__(self, backgroundGenerator, dnaseMotifEmbedders, sequenceName):
self.backgroundGenerator = backgroundGenerator
self.dnaseMotifEmbedders = dnaseMotifEmbedders
self.sequenceName = sequenceName
def generate_sequence(self):
        return self.generateSequence()
def generateSequence(self):
return EmbedInABackground.\
generateSequenceGivenBackgroundGeneratorAndEmbedders(
backgroundGenerator=self.backgroundGenerator,
embedders=self.dnaseMotifEmbedders,
sequenceName=self.sequenceName)
class FixedEmbeddableWithPosEmbedder(AbstractEmbedder):
"""Embeds a given :class:`.AbstractEmbeddable` at a given pos.
Embeds a given instance of :class:`.AbstractEmbeddable` within the
background sequence, at a given position. Could result in overlapping
embeddings if positions are too close.
Arguments:
        embeddableGenerator: instance of :class:`.AbstractEmbeddableGenerator`
startPos: an int
"""
def __init__(self, embeddableGenerator, startPos):
self.embeddableGenerator = embeddableGenerator
self.startPos = startPos
super(FixedEmbeddableWithPosEmbedder, self).__init__(None)
def _embed(self, backgroundStringArr, priorEmbeddedThings, additionalInfo):
"""Shoves the designated embeddable at the designated position
Skips if some of the positions are already occupied.
"""
embeddable = self.embeddableGenerator.generateEmbeddable()
canEmbed = embeddable.canEmbed(priorEmbeddedThings, self.startPos)
        # randomly pick which direction to search first
        searchLeft = random.random() < 0.5
validEmbeddingPos = self._getValidEmbeddingPos(
embeddable=embeddable,
priorEmbeddedThings=priorEmbeddedThings,
backgroundStringArr=backgroundStringArr,
startingPosToSearchFrom=self.startPos,
searchLeft=searchLeft)
#if couldn't find a valid pos, search in the other direction
if (validEmbeddingPos is None):
validEmbeddingPos = self._getValidEmbeddingPos(
embeddable=embeddable,
priorEmbeddedThings=priorEmbeddedThings,
backgroundStringArr=backgroundStringArr,
startingPosToSearchFrom=self.startPos,
                searchLeft=(not searchLeft))
if (validEmbeddingPos is None):
print("Warning: could not find a place to embed "+str(embeddable)
+"; bailing")
return
else:
embeddable.embedInBackgroundStringArr(
priorEmbeddedThings=priorEmbeddedThings,
backgroundStringArr=backgroundStringArr,
startPos=validEmbeddingPos)
def _getValidEmbeddingPos(self, embeddable,
priorEmbeddedThings,
backgroundStringArr,
startingPosToSearchFrom,
searchLeft):
posToQuery = startingPosToSearchFrom
maxLen = len(backgroundStringArr)
embeddableLen = len(embeddable)
#search left/right (according to the value of searchLeft) for
#a valid position at which to embed the embeddable
while (posToQuery > 0 and posToQuery < maxLen):
canEmbed = embeddable.canEmbed(priorEmbeddedThings, posToQuery)
if (canEmbed):
return posToQuery
if (searchLeft):
posToQuery -= 1
else:
posToQuery += 1
return None
def getJsonableObject(self):
"""See superclass.
"""
raise NotImplementedError()
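# Illustrative sketch of the "<motif name>-<position>" embedder strings parsed
# above (demonstration only). The motif name and position are assumed example
# values; ``loaded_motifs`` must contain the named motif.
def _example_parse_embedder_string(loaded_motifs):
    embedder = parseDnaseMotifEmbedderString("CTCF_known1-120", loaded_motifs)
    return embedder.startPos  # 120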
| {
"repo_name": "kundajelab/simdna",
"path": "simdna/synthetic/dnase.py",
"copies": "1",
"size": "7432",
"license": "mit",
"hash": -558084632858068350,
"line_mean": 41.4685714286,
"line_max": 79,
"alpha_frac": 0.6380516685,
"autogenerated": false,
"ratio": 4.607563546187229,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.574561521468723,
"avg_score": null,
"num_lines": null
} |
from __future__ import (absolute_import, division, print_function)
from six.moves import (filter, input, map, range, zip) # noqa
import six
from collections import Iterable, namedtuple, OrderedDict
import warnings
from weakref import WeakValueDictionary
import IPython
from IPython.display import set_matplotlib_formats
import ipywidgets
import iris
from iris.coords import Coord, DimCoord
import iris.plot as iplt
import matplotlib.pyplot as plt
# Cube-browser version.
__version__ = '1.1.0'
# Set default IPython magics if an IPython session has invoked the import.
ipynb = IPython.get_ipython()
if ipynb is not None: # pragma: no cover
ipynb.magic(u"%matplotlib nbagg")
ipynb.magic(u"%autosave 0")
# set iris Future switches
iris.FUTURE.netcdf_promote = True
iris.FUTURE.strict_grib_load = True
class _AxisAlias(namedtuple('_AxisAlias', 'dim, name, size')):
def __eq__(self, other):
result = NotImplemented
if isinstance(other, _AxisAlias):
left = (self.name, self.size)
right = (other.name, other.size)
result = left == right
elif isinstance(other, _AxisDefn):
result = False
return result
def __ne__(self, other):
result = self.__eq__(other)
if result is not NotImplemented:
result = not result
return result
class _AxisDefn(namedtuple('_AxisDefn', 'dim, name, size, coord')):
def __eq__(self, other):
result = NotImplemented
if isinstance(other, _AxisDefn):
left = (self.name, self.size, self.coord)
right = (other.name, other.size, other.coord)
result = left == right
elif isinstance(other, _AxisAlias):
result = False
return result
def __ne__(self, other):
result = self.__eq__(other)
if result is not NotImplemented:
result = not result
return result
class Plot2D(object):
def __init__(self, cube, axes, **kwargs):
"""
Args:
* cube
The :class:`~iris.cube.Cube` instance to plot.
* axes
Matplotlib axes for plotting.
Kwargs:
* coords
The cube coordinates, coordinate names or dimension indices
to plot in the order (x-axis, y-axis).
* kwargs
Matplotlib kwargs for plot customization.
"""
self.cube = cube
#: The latest rendered cube slice.
self.subcube = None
#: The associated rendered matplotlib element.
self.element = None
if cube.ndim < 2:
emsg = '{} requires at least a 2d cube, got {}d.'
raise ValueError(emsg.format(type(self).__name__, cube.ndim))
self.axes = axes
coords = kwargs.pop('coords', None)
if coords is None:
coords = self._default_coords()
#: Coordinates/dimensions to use for the plot x-axis and y-axis.
self.coords = self._check_coords(coords)
self.kwargs = kwargs
# Set of plot axis dimensions.
self._plot_dims = {c if isinstance(c, int) else
cube.coord_dims(c)[0] for c in self.coords}
# Mapping of slider dimension coordinate name to dimension.
self._slider_dim_by_name = self._sliders_dim()
# A mapping of dimension alias name to dimension.
self._dim_by_alias = {}
# A weak reference value cache for plot sub-cube sharing.
self._cache = None
def _default_coords(self):
"""
Determines the default coordinates or cube dimensions to use
for the plot x-axis and y-axis.
Firstly, will attempt to default to the 'X' and 'Y' dimension
coordinates, but only if both exist on the cube.
Otherwise, default to the last two cube dimensions, assuming
the last two cube dimensions are in y-axis, x-axis order i.e.
the x-axis is the last cube dimension.
Returns a tuple of the chosen coordinates/dimensions.
"""
xcoord = self.cube.coords(axis='x', dim_coords=True)
ycoord = self.cube.coords(axis='y', dim_coords=True)
if xcoord and ycoord:
# Default to the cube X and Y dimension coordinates.
coords = (xcoord[0], ycoord[0])
else:
# Default to the last two cube dimensions in ydim, xdim order.
ndim = self.cube.ndim
xdim, ydim = ndim - 1, ndim - 2
xcoord = self.cube.coords(dimensions=xdim, dim_coords=True)
xcoord = xcoord[0] if xcoord else xdim
ycoord = self.cube.coords(dimensions=ydim, dim_coords=True)
ycoord = ycoord[0] if ycoord else ydim
coords = (xcoord, ycoord)
return coords
def _check_coords(self, coords):
"""
Verify the two coordinates/dimensions specified to use for the plot
x-axis and y-axis.
Ensures that explicit cube dimension values are suitably translated
for use with the target 2d cube.
Returns a list of the verified coordinates/dimensions.
"""
result = []
dims = []
translate = False
if isinstance(coords, (six.string_types, int)):
coords = (coords,)
if len(coords) != 2:
emsg = '{} requires 2 coordinates, one for each plot axis, got {}.'
raise ValueError(emsg.format(type(self).__name__, len(coords)))
ndim = self.cube.ndim
for i, (coord, axis) in enumerate(zip(coords, ['x', 'y'])):
if isinstance(coord, int):
if coord < 0:
coord = ndim + coord
if coord < 0 or coord >= ndim:
emsg = ('Nominated {}-axis plot dimension for {}d cube '
'out of range, got {}.')
raise IndexError(emsg.format(axis, ndim, coords[i]))
result.append(coord)
dims.append(coord)
translate = True
else:
cube_coord = self.cube.coords(coord)
if len(cube_coord) == 0:
name = coord.name() if isinstance(coord, Coord) else coord
emsg = ('Nominated {}-axis plot coordinate {!r} not '
'found on cube.')
raise ValueError(emsg.format(axis, name))
[coord] = cube_coord
if not isinstance(coord, DimCoord):
emsg = ('Nominated {}-axis plot coordinate {!r} must be '
'a dimension coordinate.')
raise ValueError(emsg.format(axis, coord.name()))
dim = self.cube.coord_dims(coord)
if not dim:
emsg = ('Nominated {}-axis plot coordinate {!r} cannot be '
'a scalar coordinate.')
raise ValueError(emsg.format(axis, coord.name()))
result.append(coord)
dims.append(dim[0])
if dims[0] == dims[1]:
emsg = ('Nominated x-axis and y-axis reference the same cube '
'dimension, got {}.')
raise ValueError(emsg.format(dims[0]))
if translate:
# Ensure explicit dimension values are suitably translated
# for use with the target 2d cube.
dims = [0, 1] if dims[0] < dims[1] else [1, 0]
for i, (r, d) in enumerate(zip(result, dims)):
if isinstance(r, int):
result[i] = d
return tuple(result)
@property
def aliases(self):
"""
Returns the known dimension aliases for the plot's cube.
"""
result = None
if self._dim_by_alias:
result = self._dim_by_alias.copy()
return result
def remove_alias(self, name):
"""
Remove the named dimension alias associated with the plot's cube.
"""
if name not in self._dim_by_alias:
emsg = 'Unknown dimension alias {!r}.'
raise ValueError(emsg.format(name))
self._dim_by_alias.pop(name)
def alias(self, **kwargs):
"""
        Associate the named alias with the specified cube dimension of the plot.
Kwargs:
The alias name and associated cube dimension.
E.g. ::
plot.alias(time=0, latitude=2)
This associates the 'time' alias to cube dimension 0, and the
'latitude' alias to cube dimension 2.
"""
ndim = self.cube.ndim
for name, dim in kwargs.items():
if not isinstance(dim, int):
emsg = ('Alias {!r} requires an integer dimension value, '
'got {!r}.')
raise TypeError(emsg.format(name, type(dim).__name__))
original = dim
if dim < 0:
dim = ndim + dim
if dim < 0 or dim >= ndim:
emsg = ('Dimension alias {!r} value for {}d cube out of '
'range, got {}.')
raise IndexError(emsg.format(name, ndim, original))
coords = self.cube.coords(name)
if coords:
dims = self.cube.coord_dims(name)
dcount = len(dims)
if dcount != 1:
dtype = 'scalar' if dcount == 0 else '{}d'.format(dcount)
emsg = ('Dimension alias {!r} cannot cover a {} '
'coordinate.')
raise ValueError(emsg.format(name, dtype))
if dim != dims[0]:
emsg = ('Dimension alias {!r} must cover the same '
'dimension as existing cube coordinate, got '
'dimension {} expected {}.')
raise ValueError(emsg.format(name, dim, dims[0]))
            # Check that no existing alias covers the same dimension.
if dim in list(self._dim_by_alias.values()):
emsg = ('Dimension alias {!r} covers the same dimension '
'as alias {!r}.')
alias_by_dim = self._invert_mapping(self._dim_by_alias)
raise ValueError(emsg.format(name, alias_by_dim[dim]))
self._dim_by_alias[name] = dim
@property
def cache(self):
if self._cache is None:
self._cache = WeakValueDictionary()
return self._cache
@cache.setter
def cache(self, value):
if not isinstance(value, WeakValueDictionary):
emsg = "Require cache to be a {!r}, got {!r}."
raise TypeError(emsg.format(WeakValueDictionary.__name__,
type(value).__name__))
self._cache = value
def _sliders_dim(self):
"""
Determines the dimension coordinate and associated dimension for each
cube slider dimension i.e. not plot dimensions.
Returns a dictionary of slider dimension by coordinate name.
"""
mapping = {}
dims = set(range(self.cube.ndim)) - self._plot_dims
for dim in dims:
coord = self.cube.coords(dimensions=dim, dim_coords=True)
if coord:
mapping[coord[0].name()] = dim
else:
# Fill with an appropriate dimension coordinate from the
# auxiliary coordinates.
coords = self.cube.coords(dimensions=dim, dim_coords=False)
coords = [coord for coord in coords
if isinstance(coord, DimCoord)]
if coords:
                    coords.sort(key=lambda coord: coord._as_defn())
mapping[coords[0].name()] = dim
return mapping
@staticmethod
def _invert_mapping(mapping):
"""
Reverse the dictionary mapping from (key, value) to (value, key).
Returns the inverted dictionary.
"""
keys = set(mapping.keys())
values = set(mapping.values())
if len(keys) != len(values):
emsg = 'Cannot invert non 1-to-1 mapping, got {!r}.'
raise ValueError(emsg.format(mapping))
        result = {value: key for key, value in mapping.items()}
return result
@property
def sliders_axis(self):
"""
Returns a list containing either an :class:`~cube_browser._AxisAlias`
or :class:`~cube_browser._AxisDefn` for each cube slider dimension.
"""
shape = self.cube.shape
dims = set(range(self.cube.ndim)) - self._plot_dims
slider_name_by_dim = self._invert_mapping(self._slider_dim_by_name)
alias_by_dim = self._invert_mapping(self._dim_by_alias)
result = []
for dim in dims:
name = alias_by_dim.get(dim)
if name is not None:
axis = _AxisAlias(dim=dim, name=name, size=shape[dim])
else:
name = slider_name_by_dim.get(dim)
if name is None:
emsg = '{!r} cube {!r} has no meta-data for dimension {}.'
raise ValueError(emsg.format(type(self).__name__,
self.cube.name(), dim))
# Prepare the coordinate for lenient equality.
coord = self.cube.coord(name).copy()
coord.bounds = None
coord.var_name = None
coord.attributes = {}
axis = _AxisDefn(dim=dim, name=name,
size=coord.points.size, coord=coord)
result.append(axis)
return result
# XXX: Issue #24
def __call__(self, **kwargs):
"""
Renders the plot for the given named slider values.
Kwargs:
The slider name and associated dimension index value.
E.g. ::
plot(time=5, model_level_number=23)
The plot cube will be sliced on the associated 'time' and
'model_level_number' dimensions at the specified index values
before being rendered on its axes.
"""
index = [slice(None)] * self.cube.ndim
alias_by_dim = self._invert_mapping(self._dim_by_alias)
for name, value in kwargs.items():
# The alias has priority, so check this first.
dim = self._dim_by_alias.get(name)
if dim is None:
dim = self._slider_dim_by_name.get(name)
if dim is None:
emsg = '{!r} called with unknown name {!r}.'
raise ValueError(emsg.format(type(self).__name__, name))
else:
if dim in alias_by_dim:
wmsg = ('{!r} expected to be called with alias {!r} '
'for dimension {}, rather than with {!r}.')
warnings.warn(wmsg.format(type(self).__name__,
alias_by_dim[dim], dim,
name))
index[dim] = value
index = tuple(index)
key = tuple(sorted(kwargs.items()))
# A primative weak reference cache.
self.subcube = self.cache.setdefault(key, self.cube[index])
return self.draw(self.subcube)
def draw(self, cube):
"""Abstract method."""
emsg = '{!r} requires a draw method for rendering.'
raise NotImplementedError(emsg.format(type(self).__name__))
def legend(self, mappable):
fig = plt.gcf()
posn = self.axes.get_position()
extent = self.axes.get_extent()
aspect = (extent[1] - extent[0]) / (extent[3] - extent[2])
self.cb_depth = 0.02
self.cb_sep = 0.01
if aspect < 1.2:
self.cbar_ax = fig.add_axes([posn.x1 + self.cb_sep, posn.y0,
self.cb_depth, posn.height])
plt.colorbar(mappable, ax=self.axes, orientation='vertical',
cax=self.cbar_ax)
else:
self.cbar_ax = fig.add_axes([posn.x0, posn.y0 - 6*self.cb_sep,
posn.width, 2*self.cb_depth])
plt.colorbar(mappable, ax=self.axes, orientation='horizontal',
cax=self.cbar_ax)
fig.canvas.mpl_connect('resize_event', self.resize_colourbar)
self.resize_colourbar(None)
def resize_colourbar(self, event):
# Ensure axes position is up to date.
self.axes.apply_aspect()
posn = self.axes.get_position()
extent = self.axes.get_extent()
aspect = (extent[1] - extent[0]) / (extent[3] - extent[2])
if aspect < 1.2:
self.cbar_ax.set_position([posn.x1 + self.cb_sep, posn.y0,
self.cb_depth, posn.height])
else:
self.cbar_ax.set_position([posn.x0, posn.y0 - 6*self.cb_sep,
posn.width, 2*self.cb_depth])
plt.draw()
class Contourf(Plot2D):
"""
Constructs a filled contour plot instance of a cube.
An :func:`iris.plot.contourf` instance is created using coordinates
specified in the input arguments as axes coordinates.
See :func:`matplotlib.pyplot.contourf` and :func:`iris.plot.contourf`
    for details of other valid keyword arguments.
"""
def draw(self, cube):
self.element = iplt.contourf(cube, axes=self.axes, coords=self.coords,
extend='both', **self.kwargs)
if 'levels' not in self.kwargs:
self.kwargs['levels'] = self.element.levels
return self.element
# XXX: Not sure this should live here!
# Need test coverage!
def clear(self):
if self.element is not None:
for collection in self.element.collections:
collection.remove()
class Contour(Plot2D):
"""
Constructs a line contour plot instance of a cube.
An :func:`iris.plot.contour` instance is created using coordinates
specified in the input arguments as axes coordinates.
See :func:`matplotlib.pyplot.contour` and :func:`iris.plot.contour`
for details of other valid keyword arguments.
"""
def draw(self, cube):
self.element = iplt.contour(cube, axes=self.axes, coords=self.coords,
extend='both', **self.kwargs)
if 'levels' not in self.kwargs:
self.kwargs['levels'] = self.element.levels
return self.element
def clear(self):
if self.element is not None:
for collection in self.element.collections:
collection.remove()
class Pcolormesh(Plot2D):
"""
    Constructs a pseudocolour plot instance of a cube on a quadrilateral mesh.
An :func:`iris.plot.pcolormesh` instance is created using coordinates
specified in the input arguments as axes coordinates.
See :func:`matplotlib.pyplot.pcolormesh` and :func:`iris.plot.pcolormesh`
for details of other valid keyword arguments.
"""
def draw(self, cube):
for name in self.coords:
if not isinstance(name, int):
coord = cube.coord(name)
if not coord.has_bounds():
coord.guess_bounds()
self.element = iplt.pcolormesh(cube, axes=self.axes,
coords=self.coords, **self.kwargs)
if 'clim' not in self.kwargs:
self.kwargs['clim'] = self.element.get_clim()
return self.element
def clear(self):
if self.element is not None:
self.element.remove()
class Browser(object):
"""
Compiler for cube_browser plots and associated sliders.
Compiles a single cube_browser plot instance or list of instances into a
vertical arrangement of axes with shared coordinate sliders, to be
displayed in a Jupyter notebook.
"""
def __init__(self, plots):
"""
Compiles non-axis coordinates into sliders, the values from which are
        used to reconstruct the plots whenever a slider moves.
Args:
        * plots
            A cube_browser plot instance, or an iterable of instances,
            to display with sliders.
"""
if not isinstance(plots, Iterable):
plots = [plots]
self.plots = plots
# Mapping of coordinate/alias name to axis.
self._axis_by_name = {}
# Mapping of cube-id to shared cache.
self._cache_by_cube_id = {}
# Mapping of plot-id to coordinate/alias name.
self._names_by_plot_id = {}
# Mapping of coordinate/alias name to plots.
self._plots_by_name = {}
self._build_mappings()
self._slider_by_name = {}
self._name_by_slider_id = {}
if self._axis_by_name:
name_len = max([len(name) for name in self._axis_by_name])
children = []
for axis in self._axis_by_name.values():
if hasattr(axis, 'coord') and axis.coord.units.is_time_reference():
pairs = [(axis.coord.units.num2date(axis.coord.points[i]), i)
for i in range(axis.size)]
elif hasattr(axis, 'coord'):
pairs = [(axis.coord.points[i], i) for i in range(axis.size)]
else:
pairs = [(i, i) for i in range(axis.size)]
options = OrderedDict(pairs)
slider = ipywidgets.SelectionSlider(options=options)
slider.observe(self.on_change, names='value')
self._slider_by_name[axis.name] = slider
self._name_by_slider_id[id(slider)] = axis.name
# Explicitly control the slider label in order to avoid
# fixed-width widget description label wrapping issues.
# XXX: Adjust px/em scale-factor dynamically based on font-size.
scale_factor = .65
width = u'{}em'.format(int(name_len * scale_factor))
label = ipywidgets.Label(axis.name, padding=u'0.3em', width=width)
hbox = ipywidgets.HBox(children=[label, slider])
children.append(hbox)
# Lay out the sliders in a consistent order.
self.form = ipywidgets.VBox()
key = lambda hbox: hbox.children[0].value
self.form.children = sorted(children, key=key)
def display(self):
# XXX: Ideally, we might want to register an IPython display hook.
self.on_change(None)
IPython.display.display(self.form)
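# Illustrative usage sketch (not from the original source; it assumes the
# Plot2D subclasses are constructed as Contourf(cube, axes, coords) and that
# `cube` is an iris cube with a free 'time' dimension):
#
#     fig, (ax1, ax2) = plt.subplots(1, 2)
#     plot1 = Contourf(cube, ax1, ('longitude', 'latitude'))
#     plot2 = Pcolormesh(cube, ax2, ('longitude', 'latitude'))
#     Browser([plot1, plot2]).display()  # a shared 'time' slider drives both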
def _build_mappings(self):
"""
Create the cross-reference dictionaries required to manage the
orchestration of the registered plots.
In summary,
* _axis_by_name
The mapping with the meta-data required to define each
slider dimension.
* _cache_by_cube_id
The mapping used to share the weak reference cache between
plots that reference the same cube.
* _names_by_plot_id
The mapping that specifies the exact slider dimensions
required by each plot.
* _plots_by_name
The mapping that specifies all the plots to be updated when
a specific slider state changes.
"""
for plot in self.plots:
names = []
for axis in plot.sliders_axis:
if isinstance(axis, _AxisAlias):
if axis.name is None:
emsg = ('{!r} cube {!r} has no meta-data for '
'dimension {}.')
raise ValueError(emsg.format(type(plot).__name__,
plot.cube.name(),
axis.dim))
existing = self._axis_by_name.get(axis.name)
if existing is None:
self._axis_by_name[axis.name] = axis
elif existing != axis:
emsg = ('{!r} cube {!r} has an incompatible axis {!r} '
'on dimension {}.')
raise ValueError(emsg.format(type(plot).__name__,
plot.cube.name(),
axis.name, axis.dim))
plots = self._plots_by_name.setdefault(axis.name, [])
plots.append(plot)
names.append(axis.name)
if names:
# Only make an entry if the plot has at least one axis
# to slide over.
self._names_by_plot_id[id(plot)] = names
cube_id = id(plot.cube)
cache = self._cache_by_cube_id.get(cube_id)
if cache is None:
self._cache_by_cube_id[cube_id] = plot.cache
else:
plot.cache = cache
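# Illustrative shape of the mappings built above (hypothetical values) for two
# plots that share one cube with a free 'time' dimension:
#     _axis_by_name     -> {'time': <axis object for 'time'>}
#     _plots_by_name    -> {'time': [plot1, plot2]}
#     _names_by_plot_id -> {id(plot1): ['time'], id(plot2): ['time']}
#     _cache_by_cube_id -> {id(cube): plot1.cache}  # cache shared with plot2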
def on_change(self, change):
"""
Common slider widget traitlet event handler that refreshes
all appropriate plots given a slider state change.
"""
def _update(plots, force=False, legend=False):
for plot in plots:
plot.clear()
for plot in plots:
names = self._names_by_plot_id.get(id(plot))
# Check whether we need to force an invariant plot
# to render itself.
if force and names is None:
names = []
if names is not None:
kwargs = {name: slider_by_name[name].value
for name in names}
mappable = plot(**kwargs)
if legend:
plot.legend(mappable)
slider_by_name = self._slider_by_name
if change is None:
# Initial render of all the plots.
_update(self.plots, force=True, legend=True)
else:
# A widget slider state has changed, so only refresh
# the appropriate plots.
slider_id = id(change['owner'])
name = self._name_by_slider_id[slider_id]
_update(self._plots_by_name[name])
| {
"repo_name": "SciTools/cube_browser",
"path": "lib/cube_browser/__init__.py",
"copies": "1",
"size": "26278",
"license": "bsd-3-clause",
"hash": 5255510506348143000,
"line_mean": 37.084057971,
"line_max": 79,
"alpha_frac": 0.5387396301,
"autogenerated": false,
"ratio": 4.28958537381652,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.532832500391652,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division, print_function
from six.moves import map
import numpy as np
import matplotlib as mpl
import utool as ut
#(print, print_, printDBG, rrr, profile) = utool.inject(__name__, '[custom_constants]', DEBUG=False)
ut.noinject(__name__, '[custom_constants]')
# GENERAL FONTS
SMALLEST = 6
SMALLER = 8
SMALL = 10
MED = 12
LARGE = 14
LARGER = 18
#fpargs = dict(family=None, style=None, variant=None, stretch=None, fname=None)
def FontProp(*args, **kwargs):
r""" overwrite fontproperties with custom settings
Kwargs:
fname=u'',
name=u'',
style=u'normal',
variant=u'normal',
weight=u'normal',
stretch=u'normal',
size=u'medium'
"""
kwargs['family'] = 'monospace'
font_prop = mpl.font_manager.FontProperties(*args, **kwargs)
return font_prop
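# e.g. FontProp(weight='bold', size=MED) yields a monospace, bold, 12pt
# matplotlib FontProperties instance (the family is always forced to 'monospace').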
FONTS = ut.DynStruct()
FONTS.smallest = FontProp(weight='light', size=SMALLEST)
FONTS.small = FontProp(weight='light', size=SMALL)
FONTS.smaller = FontProp(weight='light', size=SMALLER)
FONTS.med = FontProp(weight='light', size=MED)
FONTS.large = FontProp(weight='light', size=LARGE)
FONTS.medbold = FontProp(weight='bold', size=MED)
FONTS.largebold = FontProp(weight='bold', size=LARGE)
# SPECIFIC FONTS
if False:
# personal
FONTS.legend = FONTS.small
FONTS.figtitle = FONTS.med
FONTS.axtitle = FONTS.small
FONTS.subtitle = FONTS.med
#FONTS.xlabel = FONTS.smaller
FONTS.xlabel = FONTS.small
FONTS.ylabel = FONTS.small
FONTS.relative = FONTS.smallest
else:
# personal
FONTS.legend = FONTS.med
FONTS.figtitle = FONTS.large
FONTS.axtitle = FONTS.med
FONTS.subtitle = FONTS.med
#FONTS.xlabel = FONTS.smaller
FONTS.xlabel = FONTS.med
FONTS.ylabel = FONTS.med
FONTS.relative = FONTS.med
# COLORS
RED = np.array((255, 0, 0, 255)) / 255.0
YELLOW = np.array((255, 255, 0, 255)) / 255.0
GREEN = np.array(( 0, 255, 0, 255)) / 255.0
CYAN = np.array(( 0, 255, 255, 255)) / 255.0
BLUE = np.array(( 0, 0, 255, 255)) / 255.0
MAGENTA = np.array((255, 0, 255, 255)) / 255.0
ORANGE = np.array((255, 127, 0, 255)) / 255.0
BLACK = np.array(( 0, 0, 0, 255)) / 255.0
WHITE = np.array((255, 255, 255, 255)) / 255.0
GRAY = np.array((127, 127, 127, 255)) / 255.0
LIGHTGRAY = np.array((220, 220, 220, 255)) / 255.0
DEEP_PINK = np.array((255, 20, 147, 255)) / 255.0
PINK = np.array((255, 100, 100, 255)) / 255.0
LIGHT_PINK = np.array((255, 200, 200, 255)) / 255.0
FALSE_RED = np.array((255, 51, 0, 255)) / 255.0
TRUE_GREEN = np.array(( 0, 255, 0, 255)) / 255.0
# TRUE_BLUE = np.array(( 0, 255, 255, 255)) / 255.0
TRUE_BLUE = np.array(( 0, 115, 207, 255)) / 255.0
DARK_GREEN = np.array(( 0, 127, 0, 255)) / 255.0
DARK_BLUE = np.array(( 0, 0, 127, 255)) / 255.0
DARK_RED = np.array((127, 0, 0, 255)) / 255.0
DARK_ORANGE = np.array((127, 63, 0, 255)) / 255.0
DARK_YELLOW = np.array((127, 127, 0, 255)) / 255.0
PURPLE = np.array((102, 0, 153, 255)) / 255.0
BRIGHT_PURPLE = np.array((255, 0, 255, 255)) / 255.0
LIGHT_PURPLE = np.array((255, 102, 255, 255)) / 255.0
BRIGHT_GREEN = np.array(( 39, 255, 20, 255)) / 255.0
PURPLE2 = np.array((150, 51, 200, 255)) / 255.0
LIGHT_BLUE = np.array((102, 100, 255, 255)) / 255.0
LIGHT_GREEN = np.array((102, 255, 102, 255)) / 255.0
NEUTRAL = np.array((225, 229, 231, 255)) / 255.0
NEUTRAL_BLUE = np.array((159, 159, 241, 255)) / 255.0
UNKNOWN_PURP = PURPLE
# GOLDEN RATIOS
PHI_numer = 1 + np.sqrt(5)
PHI_denom = 2.0
PHI = PHI_numer / PHI_denom
def golden_wh2(sz):
return (PHI * sz, sz)
def golden_wh(x):
'returns a width / height with a golden aspect ratio'
return list(map(int, list(map(round, (x * .618, x * .312)))))
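# Worked example (illustrative): golden_wh(800) -> [494, 250]
# (round(800 * .618) = 494, round(800 * .312) = 250), while
# golden_wh2(8) -> (PHI * 8, 8) ~= (12.94, 8).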
# FIGURE GEOMETRY
DPI = 96
#FIGSIZE = (24) # default windows fullscreen
FIGSIZE_MED = (12, 6)
FIGSIZE_SQUARE = (12, 12)
FIGSIZE_GOLD = golden_wh2(8)
FIGSIZE_BIGGER = (24, 12)
FIGSIZE_HUGE = (32, 16)
#FIGSIZE = FIGSIZE_MED
# Quality drawings
FIGSIZE = FIGSIZE_GOLD
| {
"repo_name": "Erotemic/plottool",
"path": "plottool_ibeis/custom_constants.py",
"copies": "1",
"size": "4213",
"license": "apache-2.0",
"hash": 1866353009740001500,
"line_mean": 31.1603053435,
"line_max": 100,
"alpha_frac": 0.5998101116,
"autogenerated": false,
"ratio": 2.561094224924012,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.3660904336524012,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division, print_function
from six.moves import range
#import matplotlib.image as mpimg
from plottool_ibeis import viz_image2
from plottool_ibeis import interact_annotations
from plottool_ibeis import draw_func2 as df2
from plottool_ibeis import plot_helpers as ph
from plottool_ibeis import interact_helpers as ih
from plottool_ibeis import abstract_interaction
from matplotlib.widgets import Button # NOQA
import matplotlib.pyplot as plt # NOQA
import matplotlib as mpl # NOQA
import six
try:
import vtool_ibeis as vt
except ImportError:
pass
#import utool
import utool as ut
ut.noinject(__name__, '[pt.interact_multiimage]')
BASE_CLASS = abstract_interaction.AbstractInteraction
#BASE_CLASS = object
@ut.reloadable_class
class MultiImageInteraction(BASE_CLASS):
"""
CommandLine:
python -m plottool_ibeis.interact_multi_image --exec-MultiImageInteraction --show
Example:
>>> # ENABLE_DOCTEST
>>> from plottool_ibeis.interact_multi_image import * # NOQA
>>> import utool as ut
>>> TEST_IMAGES_URL = 'https://cthulhu.dyn.wildme.io/public/data/testdata.zip'
>>> test_image_dir = ut.grab_zipped_url(TEST_IMAGES_URL, appname='utool')
>>> # test image paths
>>> imgpaths = ut.list_images(test_image_dir, fullpath=True, recursive=False)
>>> bboxes_list = [[]] * len(imgpaths)
>>> #bboxes_list[0] = [(-200, -100, 400, 400)]
>>> bboxes_list[0] = [(20, 10, 400, 400)]
>>> iteract_obj = MultiImageInteraction(imgpaths, nPerPage=4,
>>> bboxes_list=bboxes_list)
>>> import plottool_ibeis as pt
>>> pt.show_if_requested()
"""
def __init__(self, gpath_list, nPerPage=4, bboxes_list=None,
thetas_list=None, verts_list=None, gid_list=None, nImgs=None,
fnum=None, context_option_funcs=None, xlabel_list=None,
vizkw=None, **kwargs):
# TODO: overlay function or draw function using a metadata object
print('Creating multi-image interaction')
#def __init__(self, img_list, nImgs=None, gid_list=None, aids_list=None,
#bboxes_list=None, nPerPage=10,fnum=None):
print('[pt] maX ', nPerPage)
self.context_option_funcs = context_option_funcs
if nImgs is None:
nImgs = len(gpath_list)
if bboxes_list is None:
bboxes_list = [[]] * nImgs
if thetas_list is None:
thetas_list = [[0] * len(bboxes) for bboxes in bboxes_list]
# How many images we are showing in total and per page
self.thetas_list = thetas_list
self.bboxes_list = bboxes_list
self.xlabel_list = xlabel_list
if gid_list is None:
self.gid_list = None
else:
self.gid_list = gid_list
self.vizkw = vizkw
self.nImgs = nImgs
self.nPerPage = min(nPerPage, nImgs)
self.current_index = 0
self.page_number = -1
# Initialize iterator over the image paths
self.gpath_list = gpath_list
# Display the first page
self.first_load = True
self.scope = []
self.current_pagenum = 0
self.nPages = vt.iceil(self.nImgs / nPerPage)
#self.show_page()
super(MultiImageInteraction, self).__init__(fnum=fnum, **kwargs)
#self.start()
def dump_to_disk(self, dpath, num=None, prefix='temp_img'):
import numpy as np
import plottool_ibeis as pt
dpath = ut.ensurepath(dpath)
num_zeros = np.ceil(np.log10(len(self.gpath_list)))
total = len(self.gpath_list)
if num is None:
num = total
fmtstr = prefix + '_%0' + str(num_zeros) + 'd.jpg'
fig = pt.figure(fnum=self.fnum)
for index in ut.ProgIter(range(num), lbl='dumping images to disk'):
fig = pt.figure(fnum=self.fnum)
fig.clf()
ax = self._plot_index(index, {'fnum': self.fnum})
fig = ax.figure
axes_extents = pt.extract_axes_extents(fig)
assert len(axes_extents) == 1, 'more than one axes'
extent = axes_extents[0]
fpath = ut.unixjoin(dpath, fmtstr % (index))
fig.savefig(fpath, bbox_inches=extent)
pt.plt.close(fig)
def make_hud(self):
""" Creates heads up display """
# Button positioning
hl_slot, hr_slot = df2.make_bbox_positioners(y=.02, w=.08, h=.04,
xpad=.05, startx=0,
stopx=1)
prev_rect = hl_slot(0)
next_rect = hr_slot(0)
# Create buttons
if self.current_pagenum != 0:
self.append_button('prev', callback=self.prev_page, rect=prev_rect)
if self.current_pagenum != self.nPages - 1:
self.append_button('next', callback=self.next_page, rect=next_rect)
def next_page(self, event):
self.show_page(self.current_pagenum + 1)
def prev_page(self, event):
self.show_page(self.current_pagenum - 1)
def prepare_page(self, pagenum):
""" Gets indexes for the pagenum ready to be displayed """
# Set the start index
self.start_index = pagenum * self.nPerPage
# Clip based on nImgs
self.nDisplay = min(self.nImgs - self.start_index, self.nPerPage)
nRows, nCols = ph.get_square_row_cols(self.nDisplay)
# Create a grid to hold nPerPage
self.pnum_ = df2.get_pnum_func(nRows, nCols)
# Adjust stop index
self.stop_index = self.start_index + self.nDisplay
# Clear current figure
self.clean_scope()
self.fig = df2.figure(fnum=self.fnum, pnum=self.pnum_(0), doclf=True, docla=False)
# For some reason clf isn't working correctly in figure
self.fig.clf()
ih.disconnect_callback(self.fig, 'button_press_event')
ih.connect_callback(self.fig, 'button_press_event',
self.on_click)
def show_page(self, pagenum=None):
""" Displays a page of matches """
if pagenum is None:
pagenum = self.current_pagenum
#print('[iqr2] show page: %r' % pagenum)
self.current_pagenum = pagenum
self.prepare_page(pagenum)
# Begin showing matches
index = self.start_index
start_index = self.start_index
stop_index = self.stop_index
for px, index in enumerate(range(start_index, stop_index)):
self.plot_image(index)
self.make_hud()
self.draw()
def _plot_index(self, index, _vizkw):
gpath = self.gpath_list[index]
if ut.is_funclike(gpath):
showfunc = gpath
# HACK
# override of plot image function
showfunc(**_vizkw)
import plottool_ibeis as pt
ax = pt.gca()
else:
if isinstance(gpath, six.string_types):
img = vt.imread(gpath)
else:
img = gpath
bbox_list = self.bboxes_list[index]
#print('bbox_list %r in display for px: %r ' % (bbox_list, px))
theta_list = self.thetas_list[index]
label_list = [ix + 1 for ix in range(len(bbox_list))]
#Add true values for every bbox to display
sel_list = [True for ix in range(len(bbox_list))]
_vizkw.update({
#title should always be the image number
'title': str(index),
'bbox_list' : bbox_list,
'theta_list' : theta_list,
'sel_list' : sel_list,
'label_list' : label_list,
})
#print(utool.repr2(_vizkw))
#print('vizkw = ' + utool.repr2(_vizkw))
_, ax = viz_image2.show_image(img, **_vizkw)
if self.xlabel_list is not None:
import plottool_ibeis as pt
pt.set_xlabel(self.xlabel_list[index])
#print(index)
ph.set_plotdat(ax, 'bbox_list', bbox_list)
ph.set_plotdat(ax, 'gpath', gpath)
return ax
def plot_image(self, index):
px = index - self.start_index
if self.vizkw is None:
_vizkw = {}
else:
_vizkw = self.vizkw.copy()
_vizkw.update({
'fnum': self.fnum,
'pnum': self.pnum_(px),
})
ax = self._plot_index(index, _vizkw)
ph.set_plotdat(ax, 'px', str(px))
ph.set_plotdat(ax, 'index', index)
def update_images(self, img_ind, updated_bbox_list, updated_theta_list,
changed_annottups, new_annottups):
"""Insert code for viz_image2 redrawing here"""
#print('update called')
index = int(img_ind)
#print('index: %r' % index)
#print('Images bbox before: %r' % (self.bboxes_list[index],))
self.bboxes_list[index] = updated_bbox_list
self.thetas_list[index] = updated_theta_list
#print('Images bbox after: %r' % (self.bboxes_list[index],))
self.plot_image(index)
self.draw()
def on_click_inside(self, event, ax):
index = ph.get_plotdat(ax, 'index')
print('index = %r' % (index,))
if index is not None:
if self.MOUSE_BUTTONS[event.button] == 'right':
if self.context_option_funcs is not None:
#if event.button == 3:
options = self.context_option_funcs[index]()
self.show_popup_menu(options, event)
elif self.MOUSE_BUTTONS[event.button] == 'left':
#bbox_list = ph.get_plotdat(ax, 'bbox_list')
gpath = self.gpath_list[index]
if ut.is_funclike(gpath):
print('gpath_isfunklike')
print('gpath = %r' % (gpath,))
import plottool_ibeis as pt
fnum = pt.next_fnum()
gpath(fnum=fnum)
df2.update()
else:
bbox_list = self.bboxes_list[index]
print('Bbox of figure: %r' % (bbox_list,))
theta_list = self.thetas_list[index]
print('theta_list = %r' % (theta_list,))
#img = mpimg.imread(gpath)
if isinstance(gpath, six.string_types):
img = vt.imread(gpath)
else:
img = gpath
fnum = df2.next_fnum()
mc = interact_annotations.AnnotationInteraction(
img, index, self.update_images, bbox_list=bbox_list,
theta_list=theta_list, fnum=fnum)
mc.start()
self.mc = mc
# """wait for accept
# have a flag to tell if a bbox has been changed, on the bbox
# list that is brought it" on accept: viz_image2.show_image
# callback
# """
df2.update()
print('Clicked: ax: num=%r' % index)
def on_key_press(self, event):
if event.key == 'n':
self.display_next_page()
if event.key == 'p':
self.display_prev_page()
#def clean_scope(self):
# """ Removes any widgets saved in the interaction scope """
# #for (but, ax) in self.scope:
# # but.disconnect_events()
# # ax.set_visible(False)
# # assert len(ax.callbacks.callbacks) == 0
# self.scope = []
#def draw(self):
# self.fig.canvas.draw()
#def append_button(self, text, divider=None, rect=None, callback=None,
# **kwargs):
# """ Adds a button to the current page """
# if divider is not None:
# new_ax = divider.append_axes('bottom', size='9%', pad=.05)
# if rect is not None:
# new_ax = df2.plt.axes(rect)
# new_but = mpl.widgets.Button(new_ax, text)
# if callback is not None:
# new_but.on_clicked(callback)
# ph.set_plotdat(new_ax, 'viztype', 'button')
# ph.set_plotdat(new_ax, 'text', text)
# for key, val in six.iteritems(kwargs):
# ph.set_plotdat(new_ax, key, val)
# Keep buttons from losing scope
# self.scope.append((new_but, new_ax))
#def display_buttons(self):
# # Create the button for scrolling forwards
# self.next_ax = plt.axes([0.75, 0.025, 0.15, 0.075])
# self.next_but = Button(self.next_ax, 'next')
# self.next_but.on_clicked(self.display_next_page)
# # Create the button for scrolling backwards
# self.prev_ax = plt.axes([0.1, .025, 0.15, 0.075])
# self.prev_but = Button(self.prev_ax, 'prev')
# self.prev_but.on_clicked(self.display_prev_page)
# # Connect the callback whenever the figure is clicked
if __name__ == '__main__':
"""
CommandLine:
python -m plottool_ibeis.interact_multi_image
python -m plottool_ibeis.interact_multi_image --allexamples
python -m plottool_ibeis.interact_multi_image --allexamples --noface --nosrc
"""
import multiprocessing
multiprocessing.freeze_support() # for win32
import utool as ut # NOQA
ut.doctest_funcs()
| {
"repo_name": "Erotemic/plottool",
"path": "plottool_ibeis/interact_multi_image.py",
"copies": "1",
"size": "13467",
"license": "apache-2.0",
"hash": 4517405536232981500,
"line_mean": 38.0347826087,
"line_max": 91,
"alpha_frac": 0.5513477389,
"autogenerated": false,
"ratio": 3.517106294071559,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9555736991052746,
"avg_score": 0.0025434083837627356,
"num_lines": 345
} |
from __future__ import absolute_import, division, print_function
from six.moves import range
import vtool_ibeis.keypoint as ktool
import vtool_ibeis.linalg as ltool
import numpy as np
import utool as ut
from vtool_ibeis import demodata
from vtool_ibeis.util_math import TAU
def test_get_invR_mats_orientation():
theta1 = TAU / 8
theta2 = -TAU / 8
theta3 = 0
theta4 = 7 * TAU / 8
invV_mats = demodata.get_dummy_invV_mats()
def R_mats(theta):
return np.array([ltool.rotation_mat2x2(theta) for _ in range(len(invV_mats))])
def test_rots(theta):
invVR_mats = ltool.matrix_multiply(invV_mats, R_mats(theta))
_oris = ktool.get_invVR_mats_oris(invVR_mats)
print('________')
print('theta = %r' % (theta % TAU,))
print('b / a = %r' % (_oris,))
passed, error = ut.almost_eq(_oris, theta % TAU, ret_error=True)
try:
assert np.all(passed)
except AssertionError as ex:
ut.printex(ex, 'rotation unequal', key_list=['passed',
'error'])
test_rots(theta1)
test_rots(theta2)
test_rots(theta3)
test_rots(theta4)
if __name__ == '__main__':
test_get_invR_mats_orientation()
| {
"repo_name": "Erotemic/vtool",
"path": "tests/test_vtool.py",
"copies": "1",
"size": "1265",
"license": "apache-2.0",
"hash": -3248190640809391600,
"line_mean": 29.119047619,
"line_max": 86,
"alpha_frac": 0.5928853755,
"autogenerated": false,
"ratio": 3.0335731414868103,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9123389473277372,
"avg_score": 0.0006138087418875596,
"num_lines": 42
} |
from __future__ import absolute_import, division, print_function
from six.moves import range
import os
import sys
import pandas
from argparse import ArgumentParser
from numpy import cbrt, floor, sqrt
from mpi4py import MPI
from firedrake.petsc import PETSc
from firedrake import COMM_WORLD, ExtrudedMesh, UnitCubeMesh, UnitSquareMesh, assemble
import form
parser = ArgumentParser(description="""Profile assembly""", add_help=False)
parser.add_argument("--mode", action="store", default="spectral",
help="Which TSFC mode to use?")
parser.add_argument("--help", action="store_true",
help="Show help")
args, _ = parser.parse_known_args()
if args.help:
help = parser.format_help()
PETSc.Sys.Print("%s\n" % help)
sys.exit(0)
assembly_path = os.path.abspath("assembly.csv")
matvec_path = os.path.abspath("matvec.csv")
num_matvecs = 40
PETSc.Log.begin()
parloop_event = PETSc.Log.Event("ParLoopExecute")
assemble_event = PETSc.Log.Event("AssembleMat")
matmult_event = PETSc.Log.Event("MatMult")
simplex_range = list(range(1, 8))
cube_range = list(range(1, 13))
test_cases = [
(form.stokes_momentum, False, 0.5),
# (form.elasticity, False, 0.1),
(form.poisson, True, 1),
# (form.mass_gll, True, 2),
(form.poisson_gll, True, 1),
(form.hyperelasticity, True, 0.1),
(form.curl_curl, True, 0.1),
]
def run(problem, tensor, size_factor, degree):
formname = problem.__name__
cellname = 'cube' if tensor else 'simplex'
PETSc.Sys.Print("%s: %s, degree=%d" % (formname, cellname, degree))
num_cells = COMM_WORLD.size * max(1, 1e8 * size_factor / (degree + 1)**7)
h = int(floor(cbrt(num_cells / COMM_WORLD.size)))
w = int(floor(sqrt(num_cells / h)))
d = int(round(num_cells / (w * h)))
num_cells = w * d * h
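# Illustrative sizing (not from the source): with size_factor=1, degree=1 and a
# single MPI rank, num_cells = max(1, 1e8 / 2**7) = 781250, so
# h = floor(cbrt(781250)) = 92, w = floor(sqrt(781250 / 92)) = 92 and
# d = round(781250 / (92 * 92)) = 92, giving num_cells = 92**3 = 778688.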
if tensor:
mesh = ExtrudedMesh(UnitSquareMesh(w, d, quadrilateral=True), h)
else:
mesh = UnitCubeMesh(w, d, h)
comm = mesh.comm
J = problem(mesh, int(degree))
# Warmup and allocate
A = assemble(J, form_compiler_parameters={'mode': args.mode})
A.force_evaluation()
Ap = A.petscmat
x, y = Ap.createVecs()
assert x.size == y.size
num_dofs = x.size
Ap.mult(x, y)
stage = PETSc.Log.Stage("%s(%d) %s" % (formname, degree, cellname))
with stage:
with assemble_event:
assemble(J, form_compiler_parameters={'mode': args.mode}, tensor=A)
A.force_evaluation()
Ap = A.petscmat
for _ in range(num_matvecs):
Ap.mult(x, y)
parloop = parloop_event.getPerfInfo()
assembly = assemble_event.getPerfInfo()
matmult = matmult_event.getPerfInfo()
assert parloop["count"] == 1
assert assembly["count"] == 1
assert matmult["count"] == num_matvecs
parloop_time = comm.allreduce(parloop["time"], op=MPI.SUM) / comm.size
assemble_time = comm.allreduce(assembly["time"], op=MPI.SUM) / comm.size
matmult_time = comm.allreduce(matmult["time"], op=MPI.SUM) / (comm.size * num_matvecs)
assembly_overhead = (1 - parloop_time / assemble_time)
PETSc.Sys.Print("Assembly overhead: %.1f%%" % (assembly_overhead * 100,))
if COMM_WORLD.rank == 0:
header = not os.path.exists(assembly_path)
data = {"num_cells": num_cells,
"num_dofs": num_dofs,
"num_procs": comm.size,
"tsfc_mode": args.mode,
"problem": formname,
"cell_type": cellname,
"degree": degree,
"assemble_time": assemble_time,
"parloop_time": parloop_time}
df = pandas.DataFrame(data, index=[0])
df.to_csv(assembly_path, index=False, mode='a', header=header)
header = not os.path.exists(matvec_path)
data = {"num_cells": num_cells,
"num_dofs": num_dofs,
"num_procs": comm.size,
"tsfc_mode": args.mode,
"problem": formname,
"cell_type": cellname,
"degree": degree,
"matvec_time": matmult_time}
df = pandas.DataFrame(data, index=[0])
df.to_csv(matvec_path, index=False, mode='a', header=header)
for problem, tensor, size_factor in test_cases:
for degree in (cube_range if tensor else simplex_range):
run(problem, tensor, size_factor, degree)
| {
"repo_name": "miklos1/fayette",
"path": "measure-assembly.py",
"copies": "1",
"size": "4435",
"license": "mit",
"hash": -4356232541451189000,
"line_mean": 31.1376811594,
"line_max": 94,
"alpha_frac": 0.601578354,
"autogenerated": false,
"ratio": 3.2706489675516224,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4372227321551623,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division, print_function
from six.moves import zip, map
import utool
import plottool_ibeis.draw_func2 as df2
from plottool_ibeis import custom_constants
#(print, print_, printDBG, rrr, profile) = utool.inject(__name__, '[viz_img2]', DEBUG=False)
utool.noinject(__name__, '[viz_img2]')
def draw_chip_overlay(ax, bbox, theta, text, is_sel):
""" Draw an annotation around a chip in the image """
lbl_alpha = .75 if is_sel else .6
bbox_alpha = .95 if is_sel else .6
lbl_color = custom_constants.BLACK * lbl_alpha
bbox_color = (custom_constants.ORANGE if is_sel else custom_constants.DARK_ORANGE) * bbox_alpha
df2.draw_bbox(bbox, text, bbox_color, lbl_color, theta=theta, ax=ax)
def draw_image_overlay(ax, bbox_list=[], theta_list=None, text_list=None,
sel_list=None, draw_lbls=True):
if not draw_lbls:
text_list = [''] * len(bbox_list)
if theta_list is None:
theta_list = [0] * len(bbox_list)
if text_list is None:
text_list = list(map(str, range(len(bbox_list))))
if sel_list is None:
sel_list = [False] * len(bbox_list)
# Draw all bboxes on top on image
annotation_iter = zip(bbox_list, theta_list, text_list, sel_list)
for bbox, theta, text, is_sel in annotation_iter:
draw_chip_overlay(ax, bbox, theta, text, is_sel)
#@utool.indent_func
def show_image(img, bbox_list=[], title='', theta_list=None,
text_list=None, sel_list=None, draw_lbls=True,
fnum=None, annote=True, **kwargs):
""" Driver function to show images """
# Shows an image with annotations
if fnum is None:
fnum = df2.next_fnum()
fig, ax = df2.imshow(img, title=title, fnum=fnum, docla=True, **kwargs)
df2.remove_patches(ax)
if annote:
draw_image_overlay(ax, bbox_list, theta_list, text_list, sel_list,
draw_lbls)
return fig, ax
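# Illustrative usage (hypothetical values): draw an image with one selected,
# labelled annotation box:
#     fig, ax = show_image(img, bbox_list=[(10, 10, 100, 80)], text_list=['1'],
#                          sel_list=[True], title='demo')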
| {
"repo_name": "Erotemic/plottool",
"path": "plottool_ibeis/viz_image2.py",
"copies": "1",
"size": "1957",
"license": "apache-2.0",
"hash": -102065711924282270,
"line_mean": 39.7708333333,
"line_max": 99,
"alpha_frac": 0.6351558508,
"autogenerated": false,
"ratio": 3.126198083067093,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9248189645198772,
"avg_score": 0.0026328577336641856,
"num_lines": 48
} |
from __future__ import absolute_import, division, print_function
from .store import Entry
import sys, warnings
if sys.version_info[0] < 3:
str = unicode
EXTENSIONS = ['.yaml', '.yml']
def make_entry(key, dict):
notes = ' | '.join(str(dict[key]) for key in ['L', 'N'] if key in dict)
return Entry(
key=key,
user=str(dict.get('U', '')),
password=str(dict.get('P', '')),
notes=notes)
def parse_entries(src):
warnings.warn(
"YAML support is deprecated and will be removed in the next version",
DeprecationWarning)
import yaml
try:
from yaml import CLoader as Loader
except ImportError:
from yaml import Loader as Loader
root_node = yaml.load(src, Loader=Loader)
return list(_collect_entries(root_node, ''))
def _collect_entries(current_node, current_key):
# list of accounts?
if isinstance(current_node, list):
for child_node in current_node:
assert isinstance(child_node, dict), "expected list of accounts"
yield make_entry(current_key, child_node)
return
# single account?
if isinstance(current_node, dict) and 'P' in current_node:
yield make_entry(current_key, current_node)
return
# single password?
if not isinstance(current_node, dict):
yield Entry(key=current_key,
user='',
password=str(current_node),
notes='')
return
# otherwise: subtree!
for key, child_node in current_node.items():
# ignore entries in parentheses
if key.startswith('('):
continue
# recurse
for entry in _collect_entries(child_node, current_key + '.' + key if
current_key else key):
yield entry
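# Illustrative (hypothetical) YAML input accepted by parse_entries:
#
#     work:
#       mail:                      # single account -> Entry keyed 'work.mail'
#         U: alice
#         P: hunter2
#         N: IMAP only
#       servers:                   # list of accounts -> two entries for 'work.servers'
#         - {U: root, P: s3cret}
#         - {U: deploy, P: t0ps3cret}
#       (scratch): ignored         # keys in parentheses are skipped
#     pin: 1234                    # bare value -> Entry with empty user and notes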
| {
"repo_name": "mbr/pw",
"path": "pw/_yaml.py",
"copies": "1",
"size": "1839",
"license": "mit",
"hash": 3080971542630483000,
"line_mean": 28.1904761905,
"line_max": 77,
"alpha_frac": 0.5889070147,
"autogenerated": false,
"ratio": 4.123318385650224,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5212225400350224,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division, print_function
from textwrap import dedent
import _pytest._code
import py
import pytest
from _pytest.config import PytestPluginManager
from _pytest.main import EXIT_NOTESTSCOLLECTED, EXIT_USAGEERROR
@pytest.fixture(scope="module", params=["global", "inpackage"])
def basedir(request, tmpdir_factory):
from _pytest.tmpdir import tmpdir
tmpdir = tmpdir(request, tmpdir_factory)
tmpdir.ensure("adir/conftest.py").write("a=1 ; Directory = 3")
tmpdir.ensure("adir/b/conftest.py").write("b=2 ; a = 1.5")
if request.param == "inpackage":
tmpdir.ensure("adir/__init__.py")
tmpdir.ensure("adir/b/__init__.py")
return tmpdir
def ConftestWithSetinitial(path):
conftest = PytestPluginManager()
conftest_setinitial(conftest, [path])
return conftest
def conftest_setinitial(conftest, args, confcutdir=None):
class Namespace(object):
def __init__(self):
self.file_or_dir = args
self.confcutdir = str(confcutdir)
self.noconftest = False
conftest._set_initial_conftests(Namespace())
class TestConftestValueAccessGlobal(object):
def test_basic_init(self, basedir):
conftest = PytestPluginManager()
p = basedir.join("adir")
assert conftest._rget_with_confmod("a", p)[1] == 1
def test_immediate_initialiation_and_incremental_are_the_same(self, basedir):
conftest = PytestPluginManager()
len(conftest._path2confmods)
conftest._getconftestmodules(basedir)
snap1 = len(conftest._path2confmods)
# assert len(conftest._path2confmods) == snap1 + 1
conftest._getconftestmodules(basedir.join('adir'))
assert len(conftest._path2confmods) == snap1 + 1
conftest._getconftestmodules(basedir.join('b'))
assert len(conftest._path2confmods) == snap1 + 2
def test_value_access_not_existing(self, basedir):
conftest = ConftestWithSetinitial(basedir)
with pytest.raises(KeyError):
conftest._rget_with_confmod('a', basedir)
def test_value_access_by_path(self, basedir):
conftest = ConftestWithSetinitial(basedir)
adir = basedir.join("adir")
assert conftest._rget_with_confmod("a", adir)[1] == 1
assert conftest._rget_with_confmod("a", adir.join("b"))[1] == 1.5
def test_value_access_with_confmod(self, basedir):
startdir = basedir.join("adir", "b")
startdir.ensure("xx", dir=True)
conftest = ConftestWithSetinitial(startdir)
mod, value = conftest._rget_with_confmod("a", startdir)
assert value == 1.5
path = py.path.local(mod.__file__)
assert path.dirpath() == basedir.join("adir", "b")
assert path.purebasename.startswith("conftest")
def test_conftest_in_nonpkg_with_init(tmpdir):
tmpdir.ensure("adir-1.0/conftest.py").write("a=1 ; Directory = 3")
tmpdir.ensure("adir-1.0/b/conftest.py").write("b=2 ; a = 1.5")
tmpdir.ensure("adir-1.0/b/__init__.py")
tmpdir.ensure("adir-1.0/__init__.py")
ConftestWithSetinitial(tmpdir.join("adir-1.0", "b"))
def test_doubledash_considered(testdir):
conf = testdir.mkdir("--option")
conf.ensure("conftest.py")
conftest = PytestPluginManager()
conftest_setinitial(conftest, [conf.basename, conf.basename])
values = conftest._getconftestmodules(conf)
assert len(values) == 1
def test_issue151_load_all_conftests(testdir):
names = "code proj src".split()
for name in names:
p = testdir.mkdir(name)
p.ensure("conftest.py")
conftest = PytestPluginManager()
conftest_setinitial(conftest, names)
d = list(conftest._conftestpath2mod.values())
assert len(d) == len(names)
def test_conftest_global_import(testdir):
testdir.makeconftest("x=3")
p = testdir.makepyfile("""
import py, pytest
from _pytest.config import PytestPluginManager
conf = PytestPluginManager()
mod = conf._importconftest(py.path.local("conftest.py"))
assert mod.x == 3
import conftest
assert conftest is mod, (conftest, mod)
subconf = py.path.local().ensure("sub", "conftest.py")
subconf.write("y=4")
mod2 = conf._importconftest(subconf)
assert mod != mod2
assert mod2.y == 4
import conftest
assert conftest is mod2, (conftest, mod)
""")
res = testdir.runpython(p)
assert res.ret == 0
def test_conftestcutdir(testdir):
conf = testdir.makeconftest("")
p = testdir.mkdir("x")
conftest = PytestPluginManager()
conftest_setinitial(conftest, [testdir.tmpdir], confcutdir=p)
values = conftest._getconftestmodules(p)
assert len(values) == 0
values = conftest._getconftestmodules(conf.dirpath())
assert len(values) == 0
assert conf not in conftest._conftestpath2mod
# but we can still import a conftest directly
conftest._importconftest(conf)
values = conftest._getconftestmodules(conf.dirpath())
assert values[0].__file__.startswith(str(conf))
# and all sub paths get updated properly
values = conftest._getconftestmodules(p)
assert len(values) == 1
assert values[0].__file__.startswith(str(conf))
def test_conftestcutdir_inplace_considered(testdir):
conf = testdir.makeconftest("")
conftest = PytestPluginManager()
conftest_setinitial(conftest, [conf.dirpath()], confcutdir=conf.dirpath())
values = conftest._getconftestmodules(conf.dirpath())
assert len(values) == 1
assert values[0].__file__.startswith(str(conf))
@pytest.mark.parametrize("name", 'test tests whatever .dotdir'.split())
def test_setinitial_conftest_subdirs(testdir, name):
sub = testdir.mkdir(name)
subconftest = sub.ensure("conftest.py")
conftest = PytestPluginManager()
conftest_setinitial(conftest, [sub.dirpath()], confcutdir=testdir.tmpdir)
if name not in ('whatever', '.dotdir'):
assert subconftest in conftest._conftestpath2mod
assert len(conftest._conftestpath2mod) == 1
else:
assert subconftest not in conftest._conftestpath2mod
assert len(conftest._conftestpath2mod) == 0
def test_conftest_confcutdir(testdir):
testdir.makeconftest("assert 0")
x = testdir.mkdir("x")
x.join("conftest.py").write(_pytest._code.Source("""
def pytest_addoption(parser):
parser.addoption("--xyz", action="store_true")
"""))
result = testdir.runpytest("-h", "--confcutdir=%s" % x, x)
result.stdout.fnmatch_lines(["*--xyz*"])
assert 'warning: could not load initial' not in result.stdout.str()
def test_no_conftest(testdir):
testdir.makeconftest("assert 0")
result = testdir.runpytest("--noconftest")
assert result.ret == EXIT_NOTESTSCOLLECTED
result = testdir.runpytest()
assert result.ret == EXIT_USAGEERROR
def test_conftest_existing_resultlog(testdir):
x = testdir.mkdir("tests")
x.join("conftest.py").write(_pytest._code.Source("""
def pytest_addoption(parser):
parser.addoption("--xyz", action="store_true")
"""))
testdir.makefile(ext=".log", result="") # Writes result.log
result = testdir.runpytest("-h", "--resultlog", "result.log")
result.stdout.fnmatch_lines(["*--xyz*"])
def test_conftest_existing_junitxml(testdir):
x = testdir.mkdir("tests")
x.join("conftest.py").write(_pytest._code.Source("""
def pytest_addoption(parser):
parser.addoption("--xyz", action="store_true")
"""))
testdir.makefile(ext=".xml", junit="") # Writes junit.xml
result = testdir.runpytest("-h", "--junitxml", "junit.xml")
result.stdout.fnmatch_lines(["*--xyz*"])
def test_conftest_import_order(testdir, monkeypatch):
ct1 = testdir.makeconftest("")
sub = testdir.mkdir("sub")
ct2 = sub.join("conftest.py")
ct2.write("")
def impct(p):
return p
conftest = PytestPluginManager()
conftest._confcutdir = testdir.tmpdir
monkeypatch.setattr(conftest, '_importconftest', impct)
assert conftest._getconftestmodules(sub) == [ct1, ct2]
def test_fixture_dependency(testdir, monkeypatch):
ct1 = testdir.makeconftest("")
ct1 = testdir.makepyfile("__init__.py")
ct1.write("")
sub = testdir.mkdir("sub")
sub.join("__init__.py").write("")
sub.join("conftest.py").write(dedent("""
import pytest
@pytest.fixture
def not_needed():
assert False, "Should not be called!"
@pytest.fixture
def foo():
assert False, "Should not be called!"
@pytest.fixture
def bar(foo):
return 'bar'
"""))
subsub = sub.mkdir("subsub")
subsub.join("__init__.py").write("")
subsub.join("test_bar.py").write(dedent("""
import pytest
@pytest.fixture
def bar():
return 'sub bar'
def test_event_fixture(bar):
assert bar == 'sub bar'
"""))
result = testdir.runpytest("sub")
result.stdout.fnmatch_lines(["*1 passed*"])
def test_conftest_found_with_double_dash(testdir):
sub = testdir.mkdir("sub")
sub.join("conftest.py").write(dedent("""
def pytest_addoption(parser):
parser.addoption("--hello-world", action="store_true")
"""))
p = sub.join("test_hello.py")
p.write("def test_hello(): pass")
result = testdir.runpytest(str(p) + "::test_hello", "-h")
result.stdout.fnmatch_lines("""
*--hello-world*
""")
class TestConftestVisibility(object):
def _setup_tree(self, testdir): # for issue616
# example mostly taken from:
# https://mail.python.org/pipermail/pytest-dev/2014-September/002617.html
runner = testdir.mkdir("empty")
package = testdir.mkdir("package")
package.join("conftest.py").write(dedent("""\
import pytest
@pytest.fixture
def fxtr():
return "from-package"
"""))
package.join("test_pkgroot.py").write(dedent("""\
def test_pkgroot(fxtr):
assert fxtr == "from-package"
"""))
swc = package.mkdir("swc")
swc.join("__init__.py").ensure()
swc.join("conftest.py").write(dedent("""\
import pytest
@pytest.fixture
def fxtr():
return "from-swc"
"""))
swc.join("test_with_conftest.py").write(dedent("""\
def test_with_conftest(fxtr):
assert fxtr == "from-swc"
"""))
snc = package.mkdir("snc")
snc.join("__init__.py").ensure()
snc.join("test_no_conftest.py").write(dedent("""\
def test_no_conftest(fxtr):
assert fxtr == "from-package" # No local conftest.py, so should
# use value from parent dir's
"""))
print("created directory structure:")
for x in testdir.tmpdir.visit():
print(" " + x.relto(testdir.tmpdir))
return {
"runner": runner,
"package": package,
"swc": swc,
"snc": snc}
# N.B.: "swc" stands for "subdir with conftest.py"
# "snc" stands for "subdir no [i.e. without] conftest.py"
@pytest.mark.parametrize("chdir,testarg,expect_ntests_passed", [
# Effective target: package/..
("runner", "..", 3),
("package", "..", 3),
("swc", "../..", 3),
("snc", "../..", 3),
# Effective target: package
("runner", "../package", 3),
("package", ".", 3),
("swc", "..", 3),
("snc", "..", 3),
# Effective target: package/swc
("runner", "../package/swc", 1),
("package", "./swc", 1),
("swc", ".", 1),
("snc", "../swc", 1),
# Effective target: package/snc
("runner", "../package/snc", 1),
("package", "./snc", 1),
("swc", "../snc", 1),
("snc", ".", 1),
])
@pytest.mark.issue616
def test_parsefactories_relative_node_ids(
self, testdir, chdir, testarg, expect_ntests_passed):
dirs = self._setup_tree(testdir)
print("pytest run in cwd: %s" % (
dirs[chdir].relto(testdir.tmpdir)))
print("pytestarg : %s" % (testarg))
print("expected pass : %s" % (expect_ntests_passed))
with dirs[chdir].as_cwd():
reprec = testdir.inline_run(testarg, "-q", "--traceconfig")
reprec.assertoutcome(passed=expect_ntests_passed)
@pytest.mark.parametrize('confcutdir,passed,error', [
('.', 2, 0),
('src', 1, 1),
(None, 1, 1),
])
def test_search_conftest_up_to_inifile(testdir, confcutdir, passed, error):
"""Test that conftest files are detected only up to a ini file, unless
an explicit --confcutdir option is given.
"""
root = testdir.tmpdir
src = root.join('src').ensure(dir=1)
src.join('pytest.ini').write('[pytest]')
src.join('conftest.py').write(_pytest._code.Source("""
import pytest
@pytest.fixture
def fix1(): pass
"""))
src.join('test_foo.py').write(_pytest._code.Source("""
def test_1(fix1):
pass
def test_2(out_of_reach):
pass
"""))
root.join('conftest.py').write(_pytest._code.Source("""
import pytest
@pytest.fixture
def out_of_reach(): pass
"""))
args = [str(src)]
if confcutdir:
args = ['--confcutdir=%s' % root.join(confcutdir)]
result = testdir.runpytest(*args)
match = ''
if passed:
match += '*%d passed*' % passed
if error:
match += '*%d error*' % error
result.stdout.fnmatch_lines(match)
def test_issue1073_conftest_special_objects(testdir):
testdir.makeconftest("""
class DontTouchMe(object):
def __getattr__(self, x):
raise Exception('cant touch me')
x = DontTouchMe()
""")
testdir.makepyfile("""
def test_some():
pass
""")
res = testdir.runpytest()
assert res.ret == 0
def test_conftest_exception_handling(testdir):
testdir.makeconftest('''
raise ValueError()
''')
testdir.makepyfile("""
def test_some():
pass
""")
res = testdir.runpytest()
assert res.ret == 4
assert 'raise ValueError()' in [line.strip() for line in res.errlines]
def test_hook_proxy(testdir):
"""Session's gethookproxy() would cache conftests incorrectly (#2016).
It was decided to remove the cache altogether.
"""
testdir.makepyfile(**{
'root/demo-0/test_foo1.py': "def test1(): pass",
'root/demo-a/test_foo2.py': "def test1(): pass",
'root/demo-a/conftest.py': """
def pytest_ignore_collect(path, config):
return True
""",
'root/demo-b/test_foo3.py': "def test1(): pass",
'root/demo-c/test_foo4.py': "def test1(): pass",
})
result = testdir.runpytest()
result.stdout.fnmatch_lines([
'*test_foo1.py*',
'*test_foo3.py*',
'*test_foo4.py*',
'*3 passed*',
])
def test_required_option_help(testdir):
testdir.makeconftest("assert 0")
x = testdir.mkdir("x")
x.join("conftest.py").write(_pytest._code.Source("""
def pytest_addoption(parser):
parser.addoption("--xyz", action="store_true", required=True)
"""))
result = testdir.runpytest("-h", x)
assert 'argument --xyz is required' not in result.stdout.str()
assert 'general:' in result.stdout.str()
| {
"repo_name": "tareqalayan/pytest",
"path": "testing/test_conftest.py",
"copies": "1",
"size": "15638",
"license": "mit",
"hash": 3734634019592094700,
"line_mean": 31.9915611814,
"line_max": 81,
"alpha_frac": 0.598861747,
"autogenerated": false,
"ratio": 3.6494749124854144,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9748079378431591,
"avg_score": 0.00005145621076463929,
"num_lines": 474
} |
from __future__ import absolute_import, division, print_function
from tornado.concurrent import Future
from tornado import gen
from tornado.escape import json_decode, utf8, to_unicode, recursive_unicode, native_str, to_basestring
from tornado.httputil import format_timestamp
from tornado.ioloop import IOLoop
from tornado.iostream import IOStream
from tornado import locale
from tornado.log import app_log, gen_log
from tornado.simple_httpclient import SimpleAsyncHTTPClient
from tornado.template import DictLoader
from tornado.testing import AsyncHTTPTestCase, AsyncTestCase, ExpectLog, gen_test
from tornado.test.util import unittest, skipBefore35, exec_test
from tornado.util import ObjectDict, unicode_type, timedelta_to_seconds, PY3
from tornado.web import RequestHandler, authenticated, Application, asynchronous, url, HTTPError, StaticFileHandler, _create_signature_v1, create_signed_value, decode_signed_value, ErrorHandler, UIModule, MissingArgumentError, stream_request_body, Finish, removeslash, addslash, RedirectHandler as WebRedirectHandler, get_signature_key_version, GZipContentEncoding
import binascii
import contextlib
import copy
import datetime
import email.utils
import gzip
from io import BytesIO
import itertools
import logging
import os
import re
import socket
if PY3:
import urllib.parse as urllib_parse # py3
else:
import urllib as urllib_parse # py2
wsgi_safe_tests = []
def relpath(*a):
return os.path.join(os.path.dirname(__file__), *a)
def wsgi_safe(cls):
wsgi_safe_tests.append(cls)
return cls
class WebTestCase(AsyncHTTPTestCase):
"""Base class for web tests that also supports WSGI mode.
Override get_handlers and get_app_kwargs instead of get_app.
Append to wsgi_safe to have it run in wsgi_test as well.
"""
def get_app(self):
self.app = Application(self.get_handlers(), **self.get_app_kwargs())
return self.app
def get_handlers(self):
raise NotImplementedError()
def get_app_kwargs(self):
return {}
class SimpleHandlerTestCase(WebTestCase):
"""Simplified base class for tests that work with a single handler class.
To use, define a nested class named ``Handler``.
"""
def get_handlers(self):
return [('/', self.Handler)]
class HelloHandler(RequestHandler):
def get(self):
self.write('hello')
class CookieTestRequestHandler(RequestHandler):
# stub out enough methods to make the secure_cookie functions work
def __init__(self, cookie_secret='0123456789', key_version=None):
# don't call super.__init__
self._cookies = {}
if key_version is None:
self.application = ObjectDict(settings=dict(cookie_secret=cookie_secret))
else:
self.application = ObjectDict(settings=dict(cookie_secret=cookie_secret,
key_version=key_version))
def get_cookie(self, name):
return self._cookies.get(name)
def set_cookie(self, name, value, expires_days=None):
self._cookies[name] = value
# See SignedValueTest below for more.
class SecureCookieV1Test(unittest.TestCase):
def test_round_trip(self):
handler = CookieTestRequestHandler()
handler.set_secure_cookie('foo', b'bar', version=1)
self.assertEqual(handler.get_secure_cookie('foo', min_version=1),
b'bar')
def test_cookie_tampering_future_timestamp(self):
handler = CookieTestRequestHandler()
# this string base64-encodes to '12345678'
handler.set_secure_cookie('foo', binascii.a2b_hex(b'd76df8e7aefc'),
version=1)
cookie = handler._cookies['foo']
match = re.match(br'12345678\|([0-9]+)\|([0-9a-f]+)', cookie)
self.assertTrue(match)
timestamp = match.group(1)
sig = match.group(2)
self.assertEqual(
_create_signature_v1(handler.application.settings["cookie_secret"],
'foo', '12345678', timestamp),
sig)
# shifting digits from payload to timestamp doesn't alter signature
# (this is not desirable behavior, just confirming that that's how it
# works)
self.assertEqual(
_create_signature_v1(handler.application.settings["cookie_secret"],
'foo', '1234', b'5678' + timestamp),
sig)
# tamper with the cookie
handler._cookies['foo'] = utf8('1234|5678%s|%s' % (
to_basestring(timestamp), to_basestring(sig)))
# it gets rejected
with ExpectLog(gen_log, "Cookie timestamp in future"):
self.assertTrue(
handler.get_secure_cookie('foo', min_version=1) is None)
def test_arbitrary_bytes(self):
# Secure cookies accept arbitrary data (which is base64 encoded).
# Note that normal cookies accept only a subset of ascii.
handler = CookieTestRequestHandler()
handler.set_secure_cookie('foo', b'\xe9', version=1)
self.assertEqual(handler.get_secure_cookie('foo', min_version=1), b'\xe9')
# See SignedValueTest below for more.
class SecureCookieV2Test(unittest.TestCase):
KEY_VERSIONS = {
0: 'ajklasdf0ojaisdf',
1: 'aslkjasaolwkjsdf'
}
def test_round_trip(self):
handler = CookieTestRequestHandler()
handler.set_secure_cookie('foo', b'bar', version=2)
self.assertEqual(handler.get_secure_cookie('foo', min_version=2), b'bar')
def test_key_version_roundtrip(self):
handler = CookieTestRequestHandler(cookie_secret=self.KEY_VERSIONS,
key_version=0)
handler.set_secure_cookie('foo', b'bar')
self.assertEqual(handler.get_secure_cookie('foo'), b'bar')
def test_key_version_roundtrip_differing_version(self):
handler = CookieTestRequestHandler(cookie_secret=self.KEY_VERSIONS,
key_version=1)
handler.set_secure_cookie('foo', b'bar')
self.assertEqual(handler.get_secure_cookie('foo'), b'bar')
def test_key_version_increment_version(self):
handler = CookieTestRequestHandler(cookie_secret=self.KEY_VERSIONS,
key_version=0)
handler.set_secure_cookie('foo', b'bar')
new_handler = CookieTestRequestHandler(cookie_secret=self.KEY_VERSIONS,
key_version=1)
new_handler._cookies = handler._cookies
self.assertEqual(new_handler.get_secure_cookie('foo'), b'bar')
def test_key_version_invalidate_version(self):
handler = CookieTestRequestHandler(cookie_secret=self.KEY_VERSIONS,
key_version=0)
handler.set_secure_cookie('foo', b'bar')
new_key_versions = self.KEY_VERSIONS.copy()
new_key_versions.pop(0)
new_handler = CookieTestRequestHandler(cookie_secret=new_key_versions,
key_version=1)
new_handler._cookies = handler._cookies
self.assertEqual(new_handler.get_secure_cookie('foo'), None)
class CookieTest(WebTestCase):
def get_handlers(self):
class SetCookieHandler(RequestHandler):
def get(self):
# Try setting cookies with different argument types
# to ensure that everything gets encoded correctly
self.set_cookie("str", "asdf")
self.set_cookie("unicode", u"qwer")
self.set_cookie("bytes", b"zxcv")
class GetCookieHandler(RequestHandler):
def get(self):
self.write(self.get_cookie("foo", "default"))
class SetCookieDomainHandler(RequestHandler):
def get(self):
# unicode domain and path arguments shouldn't break things
# either (see bug #285)
self.set_cookie("unicode_args", "blah", domain=u"foo.com",
path=u"/foo")
class SetCookieSpecialCharHandler(RequestHandler):
def get(self):
self.set_cookie("equals", "a=b")
self.set_cookie("semicolon", "a;b")
self.set_cookie("quote", 'a"b')
class SetCookieOverwriteHandler(RequestHandler):
def get(self):
self.set_cookie("a", "b", domain="example.com")
self.set_cookie("c", "d", domain="example.com")
# A second call with the same name clobbers the first.
# Attributes from the first call are not carried over.
self.set_cookie("a", "e")
class SetCookieMaxAgeHandler(RequestHandler):
def get(self):
self.set_cookie("foo", "bar", max_age=10)
class SetCookieExpiresDaysHandler(RequestHandler):
def get(self):
self.set_cookie("foo", "bar", expires_days=10)
class SetCookieFalsyFlags(RequestHandler):
def get(self):
self.set_cookie("a", "1", secure=True)
self.set_cookie("b", "1", secure=False)
self.set_cookie("c", "1", httponly=True)
self.set_cookie("d", "1", httponly=False)
return [("/set", SetCookieHandler),
("/get", GetCookieHandler),
("/set_domain", SetCookieDomainHandler),
("/special_char", SetCookieSpecialCharHandler),
("/set_overwrite", SetCookieOverwriteHandler),
("/set_max_age", SetCookieMaxAgeHandler),
("/set_expires_days", SetCookieExpiresDaysHandler),
("/set_falsy_flags", SetCookieFalsyFlags)
]
def test_set_cookie(self):
response = self.fetch("/set")
self.assertEqual(sorted(response.headers.get_list("Set-Cookie")),
["bytes=zxcv; Path=/",
"str=asdf; Path=/",
"unicode=qwer; Path=/",
])
def test_get_cookie(self):
response = self.fetch("/get", headers={"Cookie": "foo=bar"})
self.assertEqual(response.body, b"bar")
response = self.fetch("/get", headers={"Cookie": 'foo="bar"'})
self.assertEqual(response.body, b"bar")
response = self.fetch("/get", headers={"Cookie": "/=exception;"})
self.assertEqual(response.body, b"default")
def test_set_cookie_domain(self):
response = self.fetch("/set_domain")
self.assertEqual(response.headers.get_list("Set-Cookie"),
["unicode_args=blah; Domain=foo.com; Path=/foo"])
def test_cookie_special_char(self):
response = self.fetch("/special_char")
headers = sorted(response.headers.get_list("Set-Cookie"))
self.assertEqual(len(headers), 3)
self.assertEqual(headers[0], 'equals="a=b"; Path=/')
self.assertEqual(headers[1], 'quote="a\\"b"; Path=/')
# python 2.7 octal-escapes the semicolon; older versions leave it alone
self.assertTrue(headers[2] in ('semicolon="a;b"; Path=/',
'semicolon="a\\073b"; Path=/'),
headers[2])
data = [('foo=a=b', 'a=b'),
('foo="a=b"', 'a=b'),
('foo="a;b"', '"a'), # even quoted, ";" is a delimiter
('foo=a\\073b', 'a\\073b'), # escapes only decoded in quotes
('foo="a\\073b"', 'a;b'),
('foo="a\\"b"', 'a"b'),
]
for header, expected in data:
logging.debug("trying %r", header)
response = self.fetch("/get", headers={"Cookie": header})
self.assertEqual(response.body, utf8(expected))
def test_set_cookie_overwrite(self):
response = self.fetch("/set_overwrite")
headers = response.headers.get_list("Set-Cookie")
self.assertEqual(sorted(headers),
["a=e; Path=/", "c=d; Domain=example.com; Path=/"])
def test_set_cookie_max_age(self):
response = self.fetch("/set_max_age")
headers = response.headers.get_list("Set-Cookie")
self.assertEqual(sorted(headers),
["foo=bar; Max-Age=10; Path=/"])
def test_set_cookie_expires_days(self):
response = self.fetch("/set_expires_days")
header = response.headers.get("Set-Cookie")
match = re.match("foo=bar; expires=(?P<expires>.+); Path=/", header)
self.assertIsNotNone(match)
expires = datetime.datetime.utcnow() + datetime.timedelta(days=10)
header_expires = datetime.datetime(
*email.utils.parsedate(match.groupdict()["expires"])[:6])
self.assertTrue(abs(timedelta_to_seconds(expires - header_expires)) < 10)
def test_set_cookie_false_flags(self):
response = self.fetch("/set_falsy_flags")
headers = sorted(response.headers.get_list("Set-Cookie"))
# The secure and httponly headers are capitalized in py35 and
# lowercase in older versions.
self.assertEqual(headers[0].lower(), 'a=1; path=/; secure')
self.assertEqual(headers[1].lower(), 'b=1; path=/')
self.assertEqual(headers[2].lower(), 'c=1; httponly; path=/')
self.assertEqual(headers[3].lower(), 'd=1; path=/')
class AuthRedirectRequestHandler(RequestHandler):
def initialize(self, login_url):
self.login_url = login_url
def get_login_url(self):
return self.login_url
@authenticated
def get(self):
# we'll never actually get here because the test doesn't follow redirects
self.send_error(500)
class AuthRedirectTest(WebTestCase):
def get_handlers(self):
return [('/relative', AuthRedirectRequestHandler,
dict(login_url='/login')),
('/absolute', AuthRedirectRequestHandler,
dict(login_url='http://example.com/login'))]
def test_relative_auth_redirect(self):
self.http_client.fetch(self.get_url('/relative'), self.stop,
follow_redirects=False)
response = self.wait()
self.assertEqual(response.code, 302)
self.assertEqual(response.headers['Location'], '/login?next=%2Frelative')
def test_absolute_auth_redirect(self):
self.http_client.fetch(self.get_url('/absolute'), self.stop,
follow_redirects=False)
response = self.wait()
self.assertEqual(response.code, 302)
self.assertTrue(re.match(
'http://example.com/login\?next=http%3A%2F%2Flocalhost%3A[0-9]+%2Fabsolute',
response.headers['Location']), response.headers['Location'])
class ConnectionCloseHandler(RequestHandler):
def initialize(self, test):
self.test = test
@asynchronous
def get(self):
self.test.on_handler_waiting()
def on_connection_close(self):
self.test.on_connection_close()
class ConnectionCloseTest(WebTestCase):
def get_handlers(self):
return [('/', ConnectionCloseHandler, dict(test=self))]
def test_connection_close(self):
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
s.connect(("127.0.0.1", self.get_http_port()))
self.stream = IOStream(s, io_loop=self.io_loop)
self.stream.write(b"GET / HTTP/1.0\r\n\r\n")
self.wait()
def on_handler_waiting(self):
logging.debug('handler waiting')
self.stream.close()
def on_connection_close(self):
logging.debug('connection closed')
self.stop()
class EchoHandler(RequestHandler):
def get(self, *path_args):
# Type checks: web.py interfaces convert argument values to
# unicode strings (by default, but see also decode_argument).
# In httpserver.py (i.e. self.request.arguments), they're left
# as bytes. Keys are always native strings.
for key in self.request.arguments:
if type(key) != str:
raise Exception("incorrect type for key: %r" % type(key))
for value in self.request.arguments[key]:
if type(value) != bytes:
raise Exception("incorrect type for value: %r" %
type(value))
for value in self.get_arguments(key):
if type(value) != unicode_type:
raise Exception("incorrect type for value: %r" %
type(value))
for arg in path_args:
if type(arg) != unicode_type:
raise Exception("incorrect type for path arg: %r" % type(arg))
self.write(dict(path=self.request.path,
path_args=path_args,
args=recursive_unicode(self.request.arguments)))
class RequestEncodingTest(WebTestCase):
def get_handlers(self):
return [("/group/(.*)", EchoHandler),
("/slashes/([^/]*)/([^/]*)", EchoHandler),
]
def fetch_json(self, path):
return json_decode(self.fetch(path).body)
def test_group_question_mark(self):
# Ensure that url-encoded question marks are handled properly
self.assertEqual(self.fetch_json('/group/%3F'),
dict(path='/group/%3F', path_args=['?'], args={}))
self.assertEqual(self.fetch_json('/group/%3F?%3F=%3F'),
dict(path='/group/%3F', path_args=['?'], args={'?': ['?']}))
def test_group_encoding(self):
# Path components and query arguments should be decoded the same way
self.assertEqual(self.fetch_json('/group/%C3%A9?arg=%C3%A9'),
{u"path": u"/group/%C3%A9",
u"path_args": [u"\u00e9"],
u"args": {u"arg": [u"\u00e9"]}})
def test_slashes(self):
# Slashes may be escaped to appear as a single "directory" in the path,
# but they are then unescaped when passed to the get() method.
self.assertEqual(self.fetch_json('/slashes/foo/bar'),
dict(path="/slashes/foo/bar",
path_args=["foo", "bar"],
args={}))
self.assertEqual(self.fetch_json('/slashes/a%2Fb/c%2Fd'),
dict(path="/slashes/a%2Fb/c%2Fd",
path_args=["a/b", "c/d"],
args={}))
def test_error(self):
# Percent signs (encoded as %25) should not mess up printf-style
# messages in logs
with ExpectLog(gen_log, ".*Invalid unicode"):
self.fetch("/group/?arg=%25%e9")
class TypeCheckHandler(RequestHandler):
def prepare(self):
self.errors = {}
self.check_type('status', self.get_status(), int)
# get_argument is an exception from the general rule of using
# type str for non-body data mainly for historical reasons.
self.check_type('argument', self.get_argument('foo'), unicode_type)
self.check_type('cookie_key', list(self.cookies.keys())[0], str)
self.check_type('cookie_value', list(self.cookies.values())[0].value, str)
# Secure cookies return bytes because they can contain arbitrary
# data, but regular cookies are native strings.
if list(self.cookies.keys()) != ['asdf']:
raise Exception("unexpected values for cookie keys: %r" %
self.cookies.keys())
self.check_type('get_secure_cookie', self.get_secure_cookie('asdf'), bytes)
self.check_type('get_cookie', self.get_cookie('asdf'), str)
self.check_type('xsrf_token', self.xsrf_token, bytes)
self.check_type('xsrf_form_html', self.xsrf_form_html(), str)
self.check_type('reverse_url', self.reverse_url('typecheck', 'foo'), str)
self.check_type('request_summary', self._request_summary(), str)
def get(self, path_component):
# path_component uses type unicode instead of str for consistency
# with get_argument()
self.check_type('path_component', path_component, unicode_type)
self.write(self.errors)
def post(self, path_component):
self.check_type('path_component', path_component, unicode_type)
self.write(self.errors)
def check_type(self, name, obj, expected_type):
actual_type = type(obj)
if expected_type != actual_type:
self.errors[name] = "expected %s, got %s" % (expected_type,
actual_type)
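# Overrides decode_argument to decode raw bytes using the charset named in the
# 'encoding' query argument (or to pass the bytes through unchanged), then
# reports how the path and query arguments were decoded.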
class DecodeArgHandler(RequestHandler):
def decode_argument(self, value, name=None):
if type(value) != bytes:
raise Exception("unexpected type for value: %r" % type(value))
# use self.request.arguments directly to avoid recursion
if 'encoding' in self.request.arguments:
return value.decode(to_unicode(self.request.arguments['encoding'][0]))
else:
return value
def get(self, arg):
def describe(s):
if type(s) == bytes:
return ["bytes", native_str(binascii.b2a_hex(s))]
elif type(s) == unicode_type:
return ["unicode", s]
raise Exception("unknown type")
self.write({'path': describe(arg),
'query': describe(self.get_argument("foo")),
})
class LinkifyHandler(RequestHandler):
def get(self):
self.render("linkify.html", message="http://example.com")
class UIModuleResourceHandler(RequestHandler):
def get(self):
self.render("page.html", entries=[1, 2])
class OptionalPathHandler(RequestHandler):
def get(self, path):
self.write({"path": path})
class FlowControlHandler(RequestHandler):
# These writes are too small to demonstrate real flow control,
# but at least it shows that the callbacks get run.
@asynchronous
def get(self):
self.write("1")
self.flush(callback=self.step2)
def step2(self):
self.write("2")
self.flush(callback=self.step3)
def step3(self):
self.write("3")
self.finish()
class MultiHeaderHandler(RequestHandler):
def get(self):
self.set_header("x-overwrite", "1")
self.set_header("X-Overwrite", 2)
self.add_header("x-multi", 3)
self.add_header("X-Multi", "4")
class RedirectHandler(RequestHandler):
def get(self):
if self.get_argument('permanent', None) is not None:
self.redirect('/', permanent=int(self.get_argument('permanent')))
elif self.get_argument('status', None) is not None:
self.redirect('/', status=int(self.get_argument('status')))
else:
raise Exception("didn't get permanent or status arguments")
class EmptyFlushCallbackHandler(RequestHandler):
@asynchronous
@gen.engine
def get(self):
# Ensure that the flush callback is run whether or not there
# was any output. The gen.Task and direct yield forms are
# equivalent.
yield gen.Task(self.flush) # "empty" flush, but writes headers
yield gen.Task(self.flush) # empty flush
self.write("o")
yield self.flush() # flushes the "o"
yield self.flush() # empty flush
self.finish("k")
class HeaderInjectionHandler(RequestHandler):
def get(self):
try:
self.set_header("X-Foo", "foo\r\nX-Bar: baz")
raise Exception("Didn't get expected exception")
except ValueError as e:
if "Unsafe header value" in str(e):
self.finish(b"ok")
else:
raise
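# Returns the 'foo' argument via get_argument, get_query_argument, or
# get_body_argument depending on the 'source' query argument.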
class GetArgumentHandler(RequestHandler):
def prepare(self):
if self.get_argument('source', None) == 'query':
method = self.get_query_argument
elif self.get_argument('source', None) == 'body':
method = self.get_body_argument
else:
method = self.get_argument
self.finish(method("foo", "default"))
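# Returns the plural argument accessors for 'foo' (merged, query-only, and
# body-only) as a JSON dict.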
class GetArgumentsHandler(RequestHandler):
def prepare(self):
self.finish(dict(default=self.get_arguments("foo"),
query=self.get_query_arguments("foo"),
body=self.get_body_arguments("foo")))
# This test is shared with wsgi_test.py
@wsgi_safe
class WSGISafeWebTest(WebTestCase):
COOKIE_SECRET = "WebTest.COOKIE_SECRET"
def get_app_kwargs(self):
loader = DictLoader({
"linkify.html": "{% module linkify(message) %}",
"page.html": """\
<html><head></head><body>
{% for e in entries %}
{% module Template("entry.html", entry=e) %}
{% end %}
</body></html>""",
"entry.html": """\
{{ set_resources(embedded_css=".entry { margin-bottom: 1em; }", embedded_javascript="js_embed()", css_files=["/base.css", "/foo.css"], javascript_files="/common.js", html_head="<meta>", html_body='<script src="/analytics.js"/>') }}
<div class="entry">...</div>""",
})
return dict(template_loader=loader,
autoescape="xhtml_escape",
cookie_secret=self.COOKIE_SECRET)
def tearDown(self):
super(WSGISafeWebTest, self).tearDown()
RequestHandler._template_loaders.clear()
def get_handlers(self):
urls = [
url("/typecheck/(.*)", TypeCheckHandler, name='typecheck'),
url("/decode_arg/(.*)", DecodeArgHandler, name='decode_arg'),
url("/decode_arg_kw/(?P<arg>.*)", DecodeArgHandler),
url("/linkify", LinkifyHandler),
url("/uimodule_resources", UIModuleResourceHandler),
url("/optional_path/(.+)?", OptionalPathHandler),
url("/multi_header", MultiHeaderHandler),
url("/redirect", RedirectHandler),
url("/web_redirect_permanent", WebRedirectHandler, {"url": "/web_redirect_newpath"}),
url("/web_redirect", WebRedirectHandler, {"url": "/web_redirect_newpath", "permanent": False}),
url("//web_redirect_double_slash", WebRedirectHandler, {"url": '/web_redirect_newpath'}),
url("/header_injection", HeaderInjectionHandler),
url("/get_argument", GetArgumentHandler),
url("/get_arguments", GetArgumentsHandler),
]
return urls
def fetch_json(self, *args, **kwargs):
response = self.fetch(*args, **kwargs)
response.rethrow()
return json_decode(response.body)
def test_types(self):
cookie_value = to_unicode(create_signed_value(self.COOKIE_SECRET,
"asdf", "qwer"))
response = self.fetch("/typecheck/asdf?foo=bar",
headers={"Cookie": "asdf=" + cookie_value})
data = json_decode(response.body)
self.assertEqual(data, {})
response = self.fetch("/typecheck/asdf?foo=bar", method="POST",
headers={"Cookie": "asdf=" + cookie_value},
body="foo=bar")
def test_decode_argument(self):
# These urls all decode to the same thing
urls = ["/decode_arg/%C3%A9?foo=%C3%A9&encoding=utf-8",
"/decode_arg/%E9?foo=%E9&encoding=latin1",
"/decode_arg_kw/%E9?foo=%E9&encoding=latin1",
]
for req_url in urls:
response = self.fetch(req_url)
response.rethrow()
data = json_decode(response.body)
self.assertEqual(data, {u'path': [u'unicode', u'\u00e9'],
u'query': [u'unicode', u'\u00e9'],
})
response = self.fetch("/decode_arg/%C3%A9?foo=%C3%A9")
response.rethrow()
data = json_decode(response.body)
self.assertEqual(data, {u'path': [u'bytes', u'c3a9'],
u'query': [u'bytes', u'c3a9'],
})
def test_decode_argument_invalid_unicode(self):
# test that invalid unicode in URLs causes 400, not 500
with ExpectLog(gen_log, ".*Invalid unicode.*"):
response = self.fetch("/typecheck/invalid%FF")
self.assertEqual(response.code, 400)
response = self.fetch("/typecheck/invalid?foo=%FF")
self.assertEqual(response.code, 400)
def test_decode_argument_plus(self):
# These urls are all equivalent.
urls = ["/decode_arg/1%20%2B%201?foo=1%20%2B%201&encoding=utf-8",
"/decode_arg/1%20+%201?foo=1+%2B+1&encoding=utf-8"]
for req_url in urls:
response = self.fetch(req_url)
response.rethrow()
data = json_decode(response.body)
self.assertEqual(data, {u'path': [u'unicode', u'1 + 1'],
u'query': [u'unicode', u'1 + 1'],
})
def test_reverse_url(self):
self.assertEqual(self.app.reverse_url('decode_arg', 'foo'),
'/decode_arg/foo')
self.assertEqual(self.app.reverse_url('decode_arg', 42),
'/decode_arg/42')
self.assertEqual(self.app.reverse_url('decode_arg', b'\xe9'),
'/decode_arg/%E9')
self.assertEqual(self.app.reverse_url('decode_arg', u'\u00e9'),
'/decode_arg/%C3%A9')
self.assertEqual(self.app.reverse_url('decode_arg', '1 + 1'),
'/decode_arg/1%20%2B%201')
def test_uimodule_unescaped(self):
response = self.fetch("/linkify")
self.assertEqual(response.body,
b"<a href=\"http://example.com\">http://example.com</a>")
def test_uimodule_resources(self):
response = self.fetch("/uimodule_resources")
self.assertEqual(response.body, b"""\
<html><head><link href="/base.css" type="text/css" rel="stylesheet"/><link href="/foo.css" type="text/css" rel="stylesheet"/>
<style type="text/css">
.entry { margin-bottom: 1em; }
</style>
<meta>
</head><body>
<div class="entry">...</div>
<div class="entry">...</div>
<script src="/common.js" type="text/javascript"></script>
<script type="text/javascript">
//<![CDATA[
js_embed()
//]]>
</script>
<script src="/analytics.js"/>
</body></html>""")
def test_optional_path(self):
self.assertEqual(self.fetch_json("/optional_path/foo"),
{u"path": u"foo"})
self.assertEqual(self.fetch_json("/optional_path/"),
{u"path": None})
def test_multi_header(self):
response = self.fetch("/multi_header")
self.assertEqual(response.headers["x-overwrite"], "2")
self.assertEqual(response.headers.get_list("x-multi"), ["3", "4"])
def test_redirect(self):
response = self.fetch("/redirect?permanent=1", follow_redirects=False)
self.assertEqual(response.code, 301)
response = self.fetch("/redirect?permanent=0", follow_redirects=False)
self.assertEqual(response.code, 302)
response = self.fetch("/redirect?status=307", follow_redirects=False)
self.assertEqual(response.code, 307)
def test_web_redirect(self):
response = self.fetch("/web_redirect_permanent", follow_redirects=False)
self.assertEqual(response.code, 301)
self.assertEqual(response.headers['Location'], '/web_redirect_newpath')
response = self.fetch("/web_redirect", follow_redirects=False)
self.assertEqual(response.code, 302)
self.assertEqual(response.headers['Location'], '/web_redirect_newpath')
def test_web_redirect_double_slash(self):
response = self.fetch("//web_redirect_double_slash", follow_redirects=False)
self.assertEqual(response.code, 301)
self.assertEqual(response.headers['Location'], '/web_redirect_newpath')
def test_header_injection(self):
response = self.fetch("/header_injection")
self.assertEqual(response.body, b"ok")
def test_get_argument(self):
response = self.fetch("/get_argument?foo=bar")
self.assertEqual(response.body, b"bar")
response = self.fetch("/get_argument?foo=")
self.assertEqual(response.body, b"")
response = self.fetch("/get_argument")
self.assertEqual(response.body, b"default")
# Test merging of query and body arguments.
# In singular form, body arguments take precedence over query arguments.
body = urllib_parse.urlencode(dict(foo="hello"))
response = self.fetch("/get_argument?foo=bar", method="POST", body=body)
self.assertEqual(response.body, b"hello")
# In plural methods they are merged.
response = self.fetch("/get_arguments?foo=bar",
method="POST", body=body)
self.assertEqual(json_decode(response.body),
dict(default=['bar', 'hello'],
query=['bar'],
body=['hello']))
def test_get_query_arguments(self):
# send as a post so we can ensure the separation between query
# string and body arguments.
body = urllib_parse.urlencode(dict(foo="hello"))
response = self.fetch("/get_argument?source=query&foo=bar",
method="POST", body=body)
self.assertEqual(response.body, b"bar")
response = self.fetch("/get_argument?source=query&foo=",
method="POST", body=body)
self.assertEqual(response.body, b"")
response = self.fetch("/get_argument?source=query",
method="POST", body=body)
self.assertEqual(response.body, b"default")
def test_get_body_arguments(self):
body = urllib_parse.urlencode(dict(foo="bar"))
response = self.fetch("/get_argument?source=body&foo=hello",
method="POST", body=body)
self.assertEqual(response.body, b"bar")
body = urllib_parse.urlencode(dict(foo=""))
response = self.fetch("/get_argument?source=body&foo=hello",
method="POST", body=body)
self.assertEqual(response.body, b"")
body = urllib_parse.urlencode(dict())
response = self.fetch("/get_argument?source=body&foo=hello",
method="POST", body=body)
self.assertEqual(response.body, b"default")
def test_no_gzip(self):
response = self.fetch('/get_argument')
self.assertNotIn('Accept-Encoding', response.headers.get('Vary', ''))
self.assertNotIn('gzip', response.headers.get('Content-Encoding', ''))
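# These handlers rely on asynchronous flush callbacks, which do not work under
# WSGI, so this test case is intentionally not marked @wsgi_safe.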
class NonWSGIWebTests(WebTestCase):
def get_handlers(self):
return [("/flow_control", FlowControlHandler),
("/empty_flush", EmptyFlushCallbackHandler),
]
def test_flow_control(self):
self.assertEqual(self.fetch("/flow_control").body, b"123")
def test_empty_flush(self):
response = self.fetch("/empty_flush")
self.assertEqual(response.body, b"ok")
@wsgi_safe
class ErrorResponseTest(WebTestCase):
def get_handlers(self):
class DefaultHandler(RequestHandler):
def get(self):
if self.get_argument("status", None):
raise HTTPError(int(self.get_argument("status")))
1 / 0
class WriteErrorHandler(RequestHandler):
def get(self):
if self.get_argument("status", None):
self.send_error(int(self.get_argument("status")))
else:
1 / 0
def write_error(self, status_code, **kwargs):
self.set_header("Content-Type", "text/plain")
if "exc_info" in kwargs:
self.write("Exception: %s" % kwargs["exc_info"][0].__name__)
else:
self.write("Status: %d" % status_code)
class FailedWriteErrorHandler(RequestHandler):
def get(self):
1 / 0
def write_error(self, status_code, **kwargs):
raise Exception("exception in write_error")
return [url("/default", DefaultHandler),
url("/write_error", WriteErrorHandler),
url("/failed_write_error", FailedWriteErrorHandler),
]
def test_default(self):
with ExpectLog(app_log, "Uncaught exception"):
response = self.fetch("/default")
self.assertEqual(response.code, 500)
self.assertTrue(b"500: Internal Server Error" in response.body)
response = self.fetch("/default?status=503")
self.assertEqual(response.code, 503)
self.assertTrue(b"503: Service Unavailable" in response.body)
def test_write_error(self):
with ExpectLog(app_log, "Uncaught exception"):
response = self.fetch("/write_error")
self.assertEqual(response.code, 500)
self.assertEqual(b"Exception: ZeroDivisionError", response.body)
response = self.fetch("/write_error?status=503")
self.assertEqual(response.code, 503)
self.assertEqual(b"Status: 503", response.body)
def test_failed_write_error(self):
with ExpectLog(app_log, "Uncaught exception"):
response = self.fetch("/failed_write_error")
self.assertEqual(response.code, 500)
self.assertEqual(b"", response.body)
@wsgi_safe
class StaticFileTest(WebTestCase):
# The expected MD5 hash of robots.txt, used in tests that call
# StaticFileHandler.get_version
robots_txt_hash = b"f71d20196d4caf35b6a670db8c70b03d"
static_dir = os.path.join(os.path.dirname(__file__), 'static')
def get_handlers(self):
class StaticUrlHandler(RequestHandler):
def get(self, path):
with_v = int(self.get_argument('include_version', 1))
self.write(self.static_url(path, include_version=with_v))
class AbsoluteStaticUrlHandler(StaticUrlHandler):
include_host = True
class OverrideStaticUrlHandler(RequestHandler):
def get(self, path):
                # Cast to int first so that include_host=0 is treated as falsy.
                do_include = bool(int(self.get_argument("include_host")))
self.include_host = not do_include
regular_url = self.static_url(path)
override_url = self.static_url(path, include_host=do_include)
if override_url == regular_url:
return self.write(str(False))
protocol = self.request.protocol + "://"
protocol_length = len(protocol)
check_regular = regular_url.find(protocol, 0, protocol_length)
check_override = override_url.find(protocol, 0, protocol_length)
if do_include:
result = (check_override == 0 and check_regular == -1)
else:
result = (check_override == -1 and check_regular == 0)
self.write(str(result))
return [('/static_url/(.*)', StaticUrlHandler),
('/abs_static_url/(.*)', AbsoluteStaticUrlHandler),
('/override_static_url/(.*)', OverrideStaticUrlHandler),
('/root_static/(.*)', StaticFileHandler, dict(path='/'))]
def get_app_kwargs(self):
return dict(static_path=relpath('static'))
def test_static_files(self):
response = self.fetch('/robots.txt')
self.assertTrue(b"Disallow: /" in response.body)
response = self.fetch('/static/robots.txt')
self.assertTrue(b"Disallow: /" in response.body)
self.assertEqual(response.headers.get("Content-Type"), "text/plain")
def test_static_compressed_files(self):
response = self.fetch("/static/sample.xml.gz")
self.assertEqual(response.headers.get("Content-Type"),
"application/gzip")
response = self.fetch("/static/sample.xml.bz2")
self.assertEqual(response.headers.get("Content-Type"),
"application/octet-stream")
# make sure the uncompressed file still has the correct type
response = self.fetch("/static/sample.xml")
self.assertTrue(response.headers.get("Content-Type")
in set(("text/xml", "application/xml")))
def test_static_url(self):
response = self.fetch("/static_url/robots.txt")
self.assertEqual(response.body,
b"/static/robots.txt?v=" + self.robots_txt_hash)
def test_absolute_static_url(self):
response = self.fetch("/abs_static_url/robots.txt")
self.assertEqual(response.body, (
utf8(self.get_url("/")) +
b"static/robots.txt?v=" +
self.robots_txt_hash
))
def test_relative_version_exclusion(self):
response = self.fetch("/static_url/robots.txt?include_version=0")
self.assertEqual(response.body, b"/static/robots.txt")
def test_absolute_version_exclusion(self):
response = self.fetch("/abs_static_url/robots.txt?include_version=0")
self.assertEqual(response.body,
utf8(self.get_url("/") + "static/robots.txt"))
def test_include_host_override(self):
self._trigger_include_host_check(False)
self._trigger_include_host_check(True)
def _trigger_include_host_check(self, include_host):
path = "/override_static_url/robots.txt?include_host=%s"
response = self.fetch(path % int(include_host))
self.assertEqual(response.body, utf8(str(True)))
def get_and_head(self, *args, **kwargs):
"""Performs a GET and HEAD request and returns the GET response.
Fails if any ``Content-*`` headers returned by the two requests
differ.
"""
head_response = self.fetch(*args, method="HEAD", **kwargs)
get_response = self.fetch(*args, method="GET", **kwargs)
content_headers = set()
for h in itertools.chain(head_response.headers, get_response.headers):
if h.startswith('Content-'):
content_headers.add(h)
for h in content_headers:
self.assertEqual(head_response.headers.get(h),
get_response.headers.get(h),
"%s differs between GET (%s) and HEAD (%s)" %
(h, head_response.headers.get(h),
get_response.headers.get(h)))
return get_response
def test_static_304_if_modified_since(self):
response1 = self.get_and_head("/static/robots.txt")
response2 = self.get_and_head("/static/robots.txt", headers={
'If-Modified-Since': response1.headers['Last-Modified']})
self.assertEqual(response2.code, 304)
self.assertTrue('Content-Length' not in response2.headers)
self.assertTrue('Last-Modified' not in response2.headers)
def test_static_304_if_none_match(self):
response1 = self.get_and_head("/static/robots.txt")
response2 = self.get_and_head("/static/robots.txt", headers={
'If-None-Match': response1.headers['Etag']})
self.assertEqual(response2.code, 304)
def test_static_if_modified_since_pre_epoch(self):
# On windows, the functions that work with time_t do not accept
# negative values, and at least one client (processing.js) seems
# to use if-modified-since 1/1/1960 as a cache-busting technique.
response = self.get_and_head("/static/robots.txt", headers={
'If-Modified-Since': 'Fri, 01 Jan 1960 00:00:00 GMT'})
self.assertEqual(response.code, 200)
def test_static_if_modified_since_time_zone(self):
# Instead of the value from Last-Modified, make requests with times
# chosen just before and after the known modification time
# of the file to ensure that the right time zone is being used
# when parsing If-Modified-Since.
stat = os.stat(relpath('static/robots.txt'))
response = self.get_and_head('/static/robots.txt', headers={
'If-Modified-Since': format_timestamp(stat.st_mtime - 1)})
self.assertEqual(response.code, 200)
response = self.get_and_head('/static/robots.txt', headers={
'If-Modified-Since': format_timestamp(stat.st_mtime + 1)})
self.assertEqual(response.code, 304)
def test_static_etag(self):
response = self.get_and_head('/static/robots.txt')
self.assertEqual(utf8(response.headers.get("Etag")),
b'"' + self.robots_txt_hash + b'"')
def test_static_with_range(self):
response = self.get_and_head('/static/robots.txt', headers={
'Range': 'bytes=0-9'})
self.assertEqual(response.code, 206)
self.assertEqual(response.body, b"User-agent")
self.assertEqual(utf8(response.headers.get("Etag")),
b'"' + self.robots_txt_hash + b'"')
self.assertEqual(response.headers.get("Content-Length"), "10")
self.assertEqual(response.headers.get("Content-Range"),
"bytes 0-9/26")
def test_static_with_range_full_file(self):
response = self.get_and_head('/static/robots.txt', headers={
'Range': 'bytes=0-'})
# Note: Chrome refuses to play audio if it gets an HTTP 206 in response
# to ``Range: bytes=0-`` :(
self.assertEqual(response.code, 200)
robots_file_path = os.path.join(self.static_dir, "robots.txt")
with open(robots_file_path) as f:
self.assertEqual(response.body, utf8(f.read()))
self.assertEqual(response.headers.get("Content-Length"), "26")
self.assertEqual(response.headers.get("Content-Range"), None)
def test_static_with_range_full_past_end(self):
response = self.get_and_head('/static/robots.txt', headers={
'Range': 'bytes=0-10000000'})
self.assertEqual(response.code, 200)
robots_file_path = os.path.join(self.static_dir, "robots.txt")
with open(robots_file_path) as f:
self.assertEqual(response.body, utf8(f.read()))
self.assertEqual(response.headers.get("Content-Length"), "26")
self.assertEqual(response.headers.get("Content-Range"), None)
def test_static_with_range_partial_past_end(self):
response = self.get_and_head('/static/robots.txt', headers={
'Range': 'bytes=1-10000000'})
self.assertEqual(response.code, 206)
robots_file_path = os.path.join(self.static_dir, "robots.txt")
with open(robots_file_path) as f:
self.assertEqual(response.body, utf8(f.read()[1:]))
self.assertEqual(response.headers.get("Content-Length"), "25")
self.assertEqual(response.headers.get("Content-Range"), "bytes 1-25/26")
def test_static_with_range_end_edge(self):
response = self.get_and_head('/static/robots.txt', headers={
'Range': 'bytes=22-'})
self.assertEqual(response.body, b": /\n")
self.assertEqual(response.headers.get("Content-Length"), "4")
self.assertEqual(response.headers.get("Content-Range"),
"bytes 22-25/26")
def test_static_with_range_neg_end(self):
response = self.get_and_head('/static/robots.txt', headers={
'Range': 'bytes=-4'})
self.assertEqual(response.body, b": /\n")
self.assertEqual(response.headers.get("Content-Length"), "4")
self.assertEqual(response.headers.get("Content-Range"),
"bytes 22-25/26")
def test_static_invalid_range(self):
response = self.get_and_head('/static/robots.txt', headers={
'Range': 'asdf'})
self.assertEqual(response.code, 200)
def test_static_unsatisfiable_range_zero_suffix(self):
response = self.get_and_head('/static/robots.txt', headers={
'Range': 'bytes=-0'})
self.assertEqual(response.headers.get("Content-Range"),
"bytes */26")
self.assertEqual(response.code, 416)
def test_static_unsatisfiable_range_invalid_start(self):
response = self.get_and_head('/static/robots.txt', headers={
'Range': 'bytes=26'})
self.assertEqual(response.code, 416)
self.assertEqual(response.headers.get("Content-Range"),
"bytes */26")
def test_static_head(self):
response = self.fetch('/static/robots.txt', method='HEAD')
self.assertEqual(response.code, 200)
# No body was returned, but we did get the right content length.
self.assertEqual(response.body, b'')
self.assertEqual(response.headers['Content-Length'], '26')
self.assertEqual(utf8(response.headers['Etag']),
b'"' + self.robots_txt_hash + b'"')
def test_static_head_range(self):
response = self.fetch('/static/robots.txt', method='HEAD',
headers={'Range': 'bytes=1-4'})
self.assertEqual(response.code, 206)
self.assertEqual(response.body, b'')
self.assertEqual(response.headers['Content-Length'], '4')
self.assertEqual(utf8(response.headers['Etag']),
b'"' + self.robots_txt_hash + b'"')
def test_static_range_if_none_match(self):
response = self.get_and_head('/static/robots.txt', headers={
'Range': 'bytes=1-4',
'If-None-Match': b'"' + self.robots_txt_hash + b'"'})
self.assertEqual(response.code, 304)
self.assertEqual(response.body, b'')
self.assertTrue('Content-Length' not in response.headers)
self.assertEqual(utf8(response.headers['Etag']),
b'"' + self.robots_txt_hash + b'"')
def test_static_404(self):
response = self.get_and_head('/static/blarg')
self.assertEqual(response.code, 404)
def test_path_traversal_protection(self):
# curl_httpclient processes ".." on the client side, so we
# must test this with simple_httpclient.
self.http_client.close()
self.http_client = SimpleAsyncHTTPClient()
with ExpectLog(gen_log, ".*not in root static directory"):
response = self.get_and_head('/static/../static_foo.txt')
# Attempted path traversal should result in 403, not 200
# (which means the check failed and the file was served)
# or 404 (which means that the file didn't exist and
# is probably a packaging error).
self.assertEqual(response.code, 403)
@unittest.skipIf(os.name != 'posix', 'non-posix OS')
def test_root_static_path(self):
# Sometimes people set the StaticFileHandler's path to '/'
# to disable Tornado's path validation (in conjunction with
# their own validation in get_absolute_path). Make sure
# that the stricter validation in 4.2.1 doesn't break them.
path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
'static/robots.txt')
response = self.get_and_head('/root_static' + urllib_parse.quote(path))
self.assertEqual(response.code, 200)
@wsgi_safe
class StaticDefaultFilenameTest(WebTestCase):
def get_app_kwargs(self):
return dict(static_path=relpath('static'),
static_handler_args=dict(default_filename='index.html'))
def get_handlers(self):
return []
def test_static_default_filename(self):
response = self.fetch('/static/dir/', follow_redirects=False)
self.assertEqual(response.code, 200)
self.assertEqual(b'this is the index\n', response.body)
def test_static_default_redirect(self):
response = self.fetch('/static/dir', follow_redirects=False)
self.assertEqual(response.code, 301)
self.assertTrue(response.headers['Location'].endswith('/static/dir/'))
@wsgi_safe
class StaticFileWithPathTest(WebTestCase):
def get_app_kwargs(self):
return dict(static_path=relpath('static'),
static_handler_args=dict(default_filename='index.html'))
def get_handlers(self):
return [("/foo/(.*)", StaticFileHandler, {
"path": relpath("templates/"),
})]
def test_serve(self):
response = self.fetch("/foo/utf8.html")
self.assertEqual(response.body, b"H\xc3\xa9llo\n")
@wsgi_safe
class CustomStaticFileTest(WebTestCase):
def get_handlers(self):
class MyStaticFileHandler(StaticFileHandler):
@classmethod
def make_static_url(cls, settings, path):
version_hash = cls.get_version(settings, path)
extension_index = path.rindex('.')
before_version = path[:extension_index]
after_version = path[(extension_index + 1):]
return '/static/%s.%s.%s' % (before_version, version_hash,
after_version)
def parse_url_path(self, url_path):
extension_index = url_path.rindex('.')
version_index = url_path.rindex('.', 0, extension_index)
return '%s%s' % (url_path[:version_index],
url_path[extension_index:])
@classmethod
def get_absolute_path(cls, settings, path):
return 'CustomStaticFileTest:' + path
def validate_absolute_path(self, root, absolute_path):
return absolute_path
@classmethod
            def get_content(cls, path, start=None, end=None):
assert start is None and end is None
if path == 'CustomStaticFileTest:foo.txt':
return b'bar'
raise Exception("unexpected path %r" % path)
def get_content_size(self):
if self.absolute_path == 'CustomStaticFileTest:foo.txt':
return 3
raise Exception("unexpected path %r" % self.absolute_path)
def get_modified_time(self):
return None
@classmethod
def get_version(cls, settings, path):
return "42"
class StaticUrlHandler(RequestHandler):
def get(self, path):
self.write(self.static_url(path))
self.static_handler_class = MyStaticFileHandler
return [("/static_url/(.*)", StaticUrlHandler)]
def get_app_kwargs(self):
return dict(static_path="dummy",
static_handler_class=self.static_handler_class)
def test_serve(self):
response = self.fetch("/static/foo.42.txt")
self.assertEqual(response.body, b"bar")
def test_static_url(self):
with ExpectLog(gen_log, "Could not open static file", required=False):
response = self.fetch("/static_url/foo.txt")
self.assertEqual(response.body, b"/static/foo.42.txt")
@wsgi_safe
class HostMatchingTest(WebTestCase):
class Handler(RequestHandler):
def initialize(self, reply):
self.reply = reply
def get(self):
self.write(self.reply)
def get_handlers(self):
return [("/foo", HostMatchingTest.Handler, {"reply": "wildcard"})]
def test_host_matching(self):
self.app.add_handlers("www.example.com",
[("/foo", HostMatchingTest.Handler, {"reply": "[0]"})])
self.app.add_handlers(r"www\.example\.com",
[("/bar", HostMatchingTest.Handler, {"reply": "[1]"})])
self.app.add_handlers("www.example.com",
[("/baz", HostMatchingTest.Handler, {"reply": "[2]"})])
self.app.add_handlers("www.e.*e.com",
[("/baz", HostMatchingTest.Handler, {"reply": "[3]"})])
response = self.fetch("/foo")
self.assertEqual(response.body, b"wildcard")
response = self.fetch("/bar")
self.assertEqual(response.code, 404)
response = self.fetch("/baz")
self.assertEqual(response.code, 404)
response = self.fetch("/foo", headers={'Host': 'www.example.com'})
self.assertEqual(response.body, b"[0]")
response = self.fetch("/bar", headers={'Host': 'www.example.com'})
self.assertEqual(response.body, b"[1]")
response = self.fetch("/baz", headers={'Host': 'www.example.com'})
self.assertEqual(response.body, b"[2]")
response = self.fetch("/baz", headers={'Host': 'www.exe.com'})
self.assertEqual(response.body, b"[3]")
@wsgi_safe
class DefaultHostMatchingTest(WebTestCase):
def get_handlers(self):
return []
def get_app_kwargs(self):
return {'default_host': "www.example.com"}
def test_default_host_matching(self):
self.app.add_handlers("www.example.com",
[("/foo", HostMatchingTest.Handler, {"reply": "[0]"})])
self.app.add_handlers(r"www\.example\.com",
[("/bar", HostMatchingTest.Handler, {"reply": "[1]"})])
self.app.add_handlers("www.test.com",
[("/baz", HostMatchingTest.Handler, {"reply": "[2]"})])
response = self.fetch("/foo")
self.assertEqual(response.body, b"[0]")
response = self.fetch("/bar")
self.assertEqual(response.body, b"[1]")
response = self.fetch("/baz")
self.assertEqual(response.code, 404)
response = self.fetch("/foo", headers={"X-Real-Ip": "127.0.0.1"})
self.assertEqual(response.code, 404)
self.app.default_host = "www.test.com"
response = self.fetch("/baz")
self.assertEqual(response.body, b"[2]")
@wsgi_safe
class NamedURLSpecGroupsTest(WebTestCase):
def get_handlers(self):
class EchoHandler(RequestHandler):
def get(self, path):
self.write(path)
return [("/str/(?P<path>.*)", EchoHandler),
(u"/unicode/(?P<path>.*)", EchoHandler)]
def test_named_urlspec_groups(self):
response = self.fetch("/str/foo")
self.assertEqual(response.body, b"foo")
response = self.fetch("/unicode/bar")
self.assertEqual(response.body, b"bar")
@wsgi_safe
class ClearHeaderTest(SimpleHandlerTestCase):
class Handler(RequestHandler):
def get(self):
self.set_header("h1", "foo")
self.set_header("h2", "bar")
self.clear_header("h1")
self.clear_header("nonexistent")
def test_clear_header(self):
response = self.fetch("/")
self.assertTrue("h1" not in response.headers)
self.assertEqual(response.headers["h2"], "bar")
class Header204Test(SimpleHandlerTestCase):
class Handler(RequestHandler):
def get(self):
self.set_status(204)
self.finish()
def test_204_headers(self):
response = self.fetch('/')
self.assertEqual(response.code, 204)
self.assertNotIn("Content-Length", response.headers)
self.assertNotIn("Transfer-Encoding", response.headers)
@wsgi_safe
class Header304Test(SimpleHandlerTestCase):
class Handler(RequestHandler):
def get(self):
self.set_header("Content-Language", "en_US")
self.write("hello")
def test_304_headers(self):
response1 = self.fetch('/')
self.assertEqual(response1.headers["Content-Length"], "5")
self.assertEqual(response1.headers["Content-Language"], "en_US")
response2 = self.fetch('/', headers={
'If-None-Match': response1.headers["Etag"]})
self.assertEqual(response2.code, 304)
self.assertTrue("Content-Length" not in response2.headers)
self.assertTrue("Content-Language" not in response2.headers)
# Not an entity header, but should not be added to 304s by chunking
self.assertTrue("Transfer-Encoding" not in response2.headers)
@wsgi_safe
class StatusReasonTest(SimpleHandlerTestCase):
class Handler(RequestHandler):
def get(self):
reason = self.request.arguments.get('reason', [])
self.set_status(int(self.get_argument('code')),
reason=reason[0] if reason else None)
def get_http_client(self):
# simple_httpclient only: curl doesn't expose the reason string
return SimpleAsyncHTTPClient(io_loop=self.io_loop)
def test_status(self):
response = self.fetch("/?code=304")
self.assertEqual(response.code, 304)
self.assertEqual(response.reason, "Not Modified")
response = self.fetch("/?code=304&reason=Foo")
self.assertEqual(response.code, 304)
self.assertEqual(response.reason, "Foo")
response = self.fetch("/?code=682&reason=Bar")
self.assertEqual(response.code, 682)
self.assertEqual(response.reason, "Bar")
with ExpectLog(app_log, 'Uncaught exception'):
response = self.fetch("/?code=682")
self.assertEqual(response.code, 500)
@wsgi_safe
class DateHeaderTest(SimpleHandlerTestCase):
class Handler(RequestHandler):
def get(self):
self.write("hello")
def test_date_header(self):
response = self.fetch('/')
header_date = datetime.datetime(
*email.utils.parsedate(response.headers['Date'])[:6])
self.assertTrue(header_date - datetime.datetime.utcnow() <
datetime.timedelta(seconds=2))
@wsgi_safe
class RaiseWithReasonTest(SimpleHandlerTestCase):
class Handler(RequestHandler):
def get(self):
raise HTTPError(682, reason="Foo")
def get_http_client(self):
# simple_httpclient only: curl doesn't expose the reason string
return SimpleAsyncHTTPClient(io_loop=self.io_loop)
def test_raise_with_reason(self):
response = self.fetch("/")
self.assertEqual(response.code, 682)
self.assertEqual(response.reason, "Foo")
self.assertIn(b'682: Foo', response.body)
def test_httperror_str(self):
self.assertEqual(str(HTTPError(682, reason="Foo")), "HTTP 682: Foo")
def test_httperror_str_from_httputil(self):
self.assertEqual(str(HTTPError(682)), "HTTP 682: Unknown")
@wsgi_safe
class ErrorHandlerXSRFTest(WebTestCase):
def get_handlers(self):
# note that if the handlers list is empty we get the default_host
# redirect fallback instead of a 404, so test with both an
# explicitly defined error handler and an implicit 404.
return [('/error', ErrorHandler, dict(status_code=417))]
def get_app_kwargs(self):
return dict(xsrf_cookies=True)
def test_error_xsrf(self):
response = self.fetch('/error', method='POST', body='')
self.assertEqual(response.code, 417)
def test_404_xsrf(self):
response = self.fetch('/404', method='POST', body='')
self.assertEqual(response.code, 404)
@wsgi_safe
class GzipTestCase(SimpleHandlerTestCase):
class Handler(RequestHandler):
def get(self):
for v in self.get_arguments('vary'):
self.add_header('Vary', v)
# Must write at least MIN_LENGTH bytes to activate compression.
self.write('hello world' + ('!' * GZipContentEncoding.MIN_LENGTH))
def get_app_kwargs(self):
return dict(
gzip=True,
static_path=os.path.join(os.path.dirname(__file__), 'static'))
def assert_compressed(self, response):
# simple_httpclient renames the content-encoding header;
# curl_httpclient doesn't.
self.assertEqual(
response.headers.get(
'Content-Encoding',
response.headers.get('X-Consumed-Content-Encoding')),
'gzip')
def test_gzip(self):
response = self.fetch('/')
self.assert_compressed(response)
self.assertEqual(response.headers['Vary'], 'Accept-Encoding')
def test_gzip_static(self):
# The streaming responses in StaticFileHandler have subtle
# interactions with the gzip output so test this case separately.
response = self.fetch('/robots.txt')
self.assert_compressed(response)
self.assertEqual(response.headers['Vary'], 'Accept-Encoding')
def test_gzip_not_requested(self):
response = self.fetch('/', use_gzip=False)
self.assertNotIn('Content-Encoding', response.headers)
self.assertEqual(response.headers['Vary'], 'Accept-Encoding')
def test_vary_already_present(self):
response = self.fetch('/?vary=Accept-Language')
self.assert_compressed(response)
self.assertEqual([s.strip() for s in response.headers['Vary'].split(',')],
['Accept-Language', 'Accept-Encoding'])
def test_vary_already_present_multiple(self):
# Regression test for https://github.com/tornadoweb/tornado/issues/1670
response = self.fetch('/?vary=Accept-Language&vary=Cookie')
self.assert_compressed(response)
self.assertEqual([s.strip() for s in response.headers['Vary'].split(',')],
['Accept-Language', 'Cookie', 'Accept-Encoding'])
@wsgi_safe
class PathArgsInPrepareTest(WebTestCase):
class Handler(RequestHandler):
def prepare(self):
self.write(dict(args=self.path_args, kwargs=self.path_kwargs))
def get(self, path):
assert path == 'foo'
self.finish()
def get_handlers(self):
return [('/pos/(.*)', self.Handler),
('/kw/(?P<path>.*)', self.Handler)]
def test_pos(self):
response = self.fetch('/pos/foo')
response.rethrow()
data = json_decode(response.body)
self.assertEqual(data, {'args': ['foo'], 'kwargs': {}})
def test_kw(self):
response = self.fetch('/kw/foo')
response.rethrow()
data = json_decode(response.body)
self.assertEqual(data, {'args': [], 'kwargs': {'path': 'foo'}})
@wsgi_safe
class ClearAllCookiesTest(SimpleHandlerTestCase):
class Handler(RequestHandler):
def get(self):
self.clear_all_cookies()
self.write('ok')
def test_clear_all_cookies(self):
response = self.fetch('/', headers={'Cookie': 'foo=bar; baz=xyzzy'})
set_cookies = sorted(response.headers.get_list('Set-Cookie'))
# Python 3.5 sends 'baz="";'; older versions use 'baz=;'
self.assertTrue(set_cookies[0].startswith('baz=;') or
set_cookies[0].startswith('baz="";'))
self.assertTrue(set_cookies[1].startswith('foo=;') or
set_cookies[1].startswith('foo="";'))
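# Local exception type used by ExceptionHandlerTest to exercise custom
# write_error/log_exception handling (it shadows the Python 3 builtin of the
# same name within this module).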
class PermissionError(Exception):
pass
@wsgi_safe
class ExceptionHandlerTest(SimpleHandlerTestCase):
class Handler(RequestHandler):
def get(self):
exc = self.get_argument('exc')
if exc == 'http':
raise HTTPError(410, "no longer here")
elif exc == 'zero':
1 / 0
elif exc == 'permission':
raise PermissionError('not allowed')
def write_error(self, status_code, **kwargs):
if 'exc_info' in kwargs:
typ, value, tb = kwargs['exc_info']
if isinstance(value, PermissionError):
self.set_status(403)
self.write('PermissionError')
return
RequestHandler.write_error(self, status_code, **kwargs)
def log_exception(self, typ, value, tb):
if isinstance(value, PermissionError):
app_log.warning('custom logging for PermissionError: %s',
value.args[0])
else:
RequestHandler.log_exception(self, typ, value, tb)
def test_http_error(self):
# HTTPErrors are logged as warnings with no stack trace.
# TODO: extend ExpectLog to test this more precisely
with ExpectLog(gen_log, '.*no longer here'):
response = self.fetch('/?exc=http')
self.assertEqual(response.code, 410)
def test_unknown_error(self):
# Unknown errors are logged as errors with a stack trace.
with ExpectLog(app_log, 'Uncaught exception'):
response = self.fetch('/?exc=zero')
self.assertEqual(response.code, 500)
def test_known_error(self):
# log_exception can override logging behavior, and write_error
# can override the response.
with ExpectLog(app_log,
'custom logging for PermissionError: not allowed'):
response = self.fetch('/?exc=permission')
self.assertEqual(response.code, 403)
@wsgi_safe
class BuggyLoggingTest(SimpleHandlerTestCase):
class Handler(RequestHandler):
def get(self):
1 / 0
def log_exception(self, typ, value, tb):
1 / 0
def test_buggy_log_exception(self):
# Something gets logged even though the application's
# logger is broken.
with ExpectLog(app_log, '.*'):
self.fetch('/')
@wsgi_safe
class UIMethodUIModuleTest(SimpleHandlerTestCase):
"""Test that UI methods and modules are created correctly and
associated with the handler.
"""
class Handler(RequestHandler):
def get(self):
self.render('foo.html')
def value(self):
return self.get_argument("value")
def get_app_kwargs(self):
def my_ui_method(handler, x):
return "In my_ui_method(%s) with handler value %s." % (
x, handler.value())
class MyModule(UIModule):
def render(self, x):
return "In MyModule(%s) with handler value %s." % (
x, self.handler.value())
loader = DictLoader({
'foo.html': '{{ my_ui_method(42) }} {% module MyModule(123) %}',
})
return dict(template_loader=loader,
ui_methods={'my_ui_method': my_ui_method},
ui_modules={'MyModule': MyModule})
def tearDown(self):
super(UIMethodUIModuleTest, self).tearDown()
# TODO: fix template loader caching so this isn't necessary.
RequestHandler._template_loaders.clear()
def test_ui_method(self):
response = self.fetch('/?value=asdf')
self.assertEqual(response.body,
b'In my_ui_method(42) with handler value asdf. '
b'In MyModule(123) with handler value asdf.')
@wsgi_safe
class GetArgumentErrorTest(SimpleHandlerTestCase):
class Handler(RequestHandler):
def get(self):
try:
self.get_argument('foo')
self.write({})
except MissingArgumentError as e:
self.write({'arg_name': e.arg_name,
'log_message': e.log_message})
def test_catch_error(self):
response = self.fetch('/')
self.assertEqual(json_decode(response.body),
{'arg_name': 'foo',
'log_message': 'Missing argument foo'})
class MultipleExceptionTest(SimpleHandlerTestCase):
class Handler(RequestHandler):
exc_count = 0
@asynchronous
def get(self):
from tornado.ioloop import IOLoop
IOLoop.current().add_callback(lambda: 1 / 0)
IOLoop.current().add_callback(lambda: 1 / 0)
def log_exception(self, typ, value, tb):
MultipleExceptionTest.Handler.exc_count += 1
def test_multi_exception(self):
# This test verifies that multiple exceptions raised into the same
# ExceptionStackContext do not generate extraneous log entries
# due to "Cannot send error response after headers written".
# log_exception is called, but it does not proceed to send_error.
response = self.fetch('/')
self.assertEqual(response.code, 500)
response = self.fetch('/')
self.assertEqual(response.code, 500)
# Each of our two requests generated two exceptions, we should have
# seen at least three of them by now (the fourth may still be
# in the queue).
self.assertGreater(MultipleExceptionTest.Handler.exc_count, 2)
@wsgi_safe
class SetLazyPropertiesTest(SimpleHandlerTestCase):
class Handler(RequestHandler):
def prepare(self):
self.current_user = 'Ben'
self.locale = locale.get('en_US')
def get_user_locale(self):
raise NotImplementedError()
def get_current_user(self):
raise NotImplementedError()
def get(self):
self.write('Hello %s (%s)' % (self.current_user, self.locale.code))
def test_set_properties(self):
# Ensure that current_user can be assigned to normally for apps
# that want to forgo the lazy get_current_user property
response = self.fetch('/')
self.assertEqual(response.body, b'Hello Ben (en_US)')
@wsgi_safe
class GetCurrentUserTest(WebTestCase):
def get_app_kwargs(self):
class WithoutUserModule(UIModule):
def render(self):
return ''
class WithUserModule(UIModule):
def render(self):
return str(self.current_user)
loader = DictLoader({
'without_user.html': '',
'with_user.html': '{{ current_user }}',
'without_user_module.html': '{% module WithoutUserModule() %}',
'with_user_module.html': '{% module WithUserModule() %}',
})
return dict(template_loader=loader,
ui_modules={'WithUserModule': WithUserModule,
'WithoutUserModule': WithoutUserModule})
def tearDown(self):
super(GetCurrentUserTest, self).tearDown()
RequestHandler._template_loaders.clear()
def get_handlers(self):
class CurrentUserHandler(RequestHandler):
def prepare(self):
self.has_loaded_current_user = False
def get_current_user(self):
self.has_loaded_current_user = True
return ''
class WithoutUserHandler(CurrentUserHandler):
def get(self):
self.render_string('without_user.html')
self.finish(str(self.has_loaded_current_user))
class WithUserHandler(CurrentUserHandler):
def get(self):
self.render_string('with_user.html')
self.finish(str(self.has_loaded_current_user))
class CurrentUserModuleHandler(CurrentUserHandler):
def get_template_namespace(self):
# If RequestHandler.get_template_namespace is called, then
# get_current_user is evaluated. Until #820 is fixed, this
# is a small hack to circumvent the issue.
return self.ui
class WithoutUserModuleHandler(CurrentUserModuleHandler):
def get(self):
self.render_string('without_user_module.html')
self.finish(str(self.has_loaded_current_user))
class WithUserModuleHandler(CurrentUserModuleHandler):
def get(self):
self.render_string('with_user_module.html')
self.finish(str(self.has_loaded_current_user))
return [('/without_user', WithoutUserHandler),
('/with_user', WithUserHandler),
('/without_user_module', WithoutUserModuleHandler),
('/with_user_module', WithUserModuleHandler)]
@unittest.skip('needs fix')
def test_get_current_user_is_lazy(self):
# TODO: Make this test pass. See #820.
response = self.fetch('/without_user')
self.assertEqual(response.body, b'False')
def test_get_current_user_works(self):
response = self.fetch('/with_user')
self.assertEqual(response.body, b'True')
def test_get_current_user_from_ui_module_is_lazy(self):
response = self.fetch('/without_user_module')
self.assertEqual(response.body, b'False')
def test_get_current_user_from_ui_module_works(self):
response = self.fetch('/with_user_module')
self.assertEqual(response.body, b'True')
@wsgi_safe
class UnimplementedHTTPMethodsTest(SimpleHandlerTestCase):
class Handler(RequestHandler):
pass
def test_unimplemented_standard_methods(self):
for method in ['HEAD', 'GET', 'DELETE', 'OPTIONS']:
response = self.fetch('/', method=method)
self.assertEqual(response.code, 405)
for method in ['POST', 'PUT']:
response = self.fetch('/', method=method, body=b'')
self.assertEqual(response.code, 405)
class UnimplementedNonStandardMethodsTest(SimpleHandlerTestCase):
# wsgiref.validate complains about unknown methods in a way that makes
# this test not wsgi_safe.
class Handler(RequestHandler):
def other(self):
# Even though this method exists, it won't get called automatically
# because it is not in SUPPORTED_METHODS.
self.write('other')
def test_unimplemented_patch(self):
# PATCH is recently standardized; Tornado supports it by default
# but wsgiref.validate doesn't like it.
response = self.fetch('/', method='PATCH', body=b'')
self.assertEqual(response.code, 405)
def test_unimplemented_other(self):
response = self.fetch('/', method='OTHER',
allow_nonstandard_methods=True)
self.assertEqual(response.code, 405)
@wsgi_safe
class AllHTTPMethodsTest(SimpleHandlerTestCase):
class Handler(RequestHandler):
def method(self):
self.write(self.request.method)
get = delete = options = post = put = method
def test_standard_methods(self):
response = self.fetch('/', method='HEAD')
self.assertEqual(response.body, b'')
for method in ['GET', 'DELETE', 'OPTIONS']:
response = self.fetch('/', method=method)
self.assertEqual(response.body, utf8(method))
for method in ['POST', 'PUT']:
response = self.fetch('/', method=method, body=b'')
self.assertEqual(response.body, utf8(method))
class PatchMethodTest(SimpleHandlerTestCase):
class Handler(RequestHandler):
SUPPORTED_METHODS = RequestHandler.SUPPORTED_METHODS + ('OTHER',)
def patch(self):
self.write('patch')
def other(self):
self.write('other')
def test_patch(self):
response = self.fetch('/', method='PATCH', body=b'')
self.assertEqual(response.body, b'patch')
def test_other(self):
response = self.fetch('/', method='OTHER',
allow_nonstandard_methods=True)
self.assertEqual(response.body, b'other')
@wsgi_safe
class FinishInPrepareTest(SimpleHandlerTestCase):
class Handler(RequestHandler):
def prepare(self):
self.finish('done')
def get(self):
# It's difficult to assert for certain that a method did not
# or will not be called in an asynchronous context, but this
# will be logged noisily if it is reached.
raise Exception('should not reach this method')
def test_finish_in_prepare(self):
response = self.fetch('/')
self.assertEqual(response.body, b'done')
@wsgi_safe
class Default404Test(WebTestCase):
def get_handlers(self):
# If there are no handlers at all a default redirect handler gets added.
return [('/foo', RequestHandler)]
def test_404(self):
response = self.fetch('/')
self.assertEqual(response.code, 404)
self.assertEqual(response.body,
b'<html><title>404: Not Found</title>'
b'<body>404: Not Found</body></html>')
@wsgi_safe
class Custom404Test(WebTestCase):
def get_handlers(self):
return [('/foo', RequestHandler)]
def get_app_kwargs(self):
class Custom404Handler(RequestHandler):
def get(self):
self.set_status(404)
self.write('custom 404 response')
return dict(default_handler_class=Custom404Handler)
def test_404(self):
response = self.fetch('/')
self.assertEqual(response.code, 404)
self.assertEqual(response.body, b'custom 404 response')
@wsgi_safe
class DefaultHandlerArgumentsTest(WebTestCase):
def get_handlers(self):
return [('/foo', RequestHandler)]
def get_app_kwargs(self):
return dict(default_handler_class=ErrorHandler,
default_handler_args=dict(status_code=403))
def test_403(self):
response = self.fetch('/')
self.assertEqual(response.code, 403)
@wsgi_safe
class HandlerByNameTest(WebTestCase):
def get_handlers(self):
# All three are equivalent.
return [('/hello1', HelloHandler),
('/hello2', 'tornado.test.web_test.HelloHandler'),
url('/hello3', 'tornado.test.web_test.HelloHandler'),
]
def test_handler_by_name(self):
resp = self.fetch('/hello1')
self.assertEqual(resp.body, b'hello')
resp = self.fetch('/hello2')
self.assertEqual(resp.body, b'hello')
resp = self.fetch('/hello3')
self.assertEqual(resp.body, b'hello')
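# Exercises @stream_request_body handlers over a raw socket so the test can
# control exactly when each chunk of the request body is sent.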
class StreamingRequestBodyTest(WebTestCase):
def get_handlers(self):
@stream_request_body
class StreamingBodyHandler(RequestHandler):
def initialize(self, test):
self.test = test
def prepare(self):
self.test.prepared.set_result(None)
def data_received(self, data):
self.test.data.set_result(data)
def get(self):
self.test.finished.set_result(None)
self.write({})
@stream_request_body
class EarlyReturnHandler(RequestHandler):
def prepare(self):
# If we finish the response in prepare, it won't continue to
# the (non-existent) data_received.
raise HTTPError(401)
@stream_request_body
class CloseDetectionHandler(RequestHandler):
def initialize(self, test):
self.test = test
def on_connection_close(self):
super(CloseDetectionHandler, self).on_connection_close()
self.test.close_future.set_result(None)
return [('/stream_body', StreamingBodyHandler, dict(test=self)),
('/early_return', EarlyReturnHandler),
('/close_detection', CloseDetectionHandler, dict(test=self))]
def connect(self, url, connection_close):
# Use a raw connection so we can control the sending of data.
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
s.connect(("127.0.0.1", self.get_http_port()))
stream = IOStream(s, io_loop=self.io_loop)
stream.write(b"GET " + url + b" HTTP/1.1\r\n")
if connection_close:
stream.write(b"Connection: close\r\n")
stream.write(b"Transfer-Encoding: chunked\r\n\r\n")
return stream
@gen_test
def test_streaming_body(self):
self.prepared = Future()
self.data = Future()
self.finished = Future()
stream = self.connect(b"/stream_body", connection_close=True)
yield self.prepared
stream.write(b"4\r\nasdf\r\n")
# Ensure the first chunk is received before we send the second.
data = yield self.data
self.assertEqual(data, b"asdf")
self.data = Future()
stream.write(b"4\r\nqwer\r\n")
data = yield self.data
        self.assertEqual(data, b"qwer")
stream.write(b"0\r\n")
yield self.finished
data = yield gen.Task(stream.read_until_close)
# This would ideally use an HTTP1Connection to read the response.
self.assertTrue(data.endswith(b"{}"))
stream.close()
@gen_test
def test_early_return(self):
stream = self.connect(b"/early_return", connection_close=False)
data = yield gen.Task(stream.read_until_close)
self.assertTrue(data.startswith(b"HTTP/1.1 401"))
@gen_test
def test_early_return_with_data(self):
stream = self.connect(b"/early_return", connection_close=False)
stream.write(b"4\r\nasdf\r\n")
data = yield gen.Task(stream.read_until_close)
self.assertTrue(data.startswith(b"HTTP/1.1 401"))
@gen_test
def test_close_during_upload(self):
self.close_future = Future()
stream = self.connect(b"/close_detection", connection_close=False)
stream.close()
yield self.close_future
# Each method in this handler returns a yieldable object and yields to the
# IOLoop so the future is not immediately ready. Ensure that the
# yieldables are respected and no method is called before the previous
# one has completed.
@stream_request_body
class BaseFlowControlHandler(RequestHandler):
def initialize(self, test):
self.test = test
self.method = None
self.methods = []
@contextlib.contextmanager
def in_method(self, method):
if self.method is not None:
self.test.fail("entered method %s while in %s" %
(method, self.method))
self.method = method
self.methods.append(method)
try:
yield
finally:
self.method = None
@gen.coroutine
def prepare(self):
# Note that asynchronous prepare() does not block data_received,
# so we don't use in_method here.
self.methods.append('prepare')
yield gen.Task(IOLoop.current().add_callback)
@gen.coroutine
def post(self):
with self.in_method('post'):
yield gen.Task(IOLoop.current().add_callback)
self.write(dict(methods=self.methods))
class BaseStreamingRequestFlowControlTest(object):
def get_httpserver_options(self):
# Use a small chunk size so flow control is relevant even though
# all the data arrives at once.
return dict(chunk_size=10, decompress_request=True)
def get_http_client(self):
# simple_httpclient only: curl doesn't support body_producer.
return SimpleAsyncHTTPClient(io_loop=self.io_loop)
# Test all the slightly different code paths for fixed, chunked, etc bodies.
def test_flow_control_fixed_body(self):
response = self.fetch('/', body='abcdefghijklmnopqrstuvwxyz',
method='POST')
response.rethrow()
self.assertEqual(json_decode(response.body),
dict(methods=['prepare', 'data_received',
'data_received', 'data_received',
'post']))
def test_flow_control_chunked_body(self):
chunks = [b'abcd', b'efgh', b'ijkl']
@gen.coroutine
def body_producer(write):
for i in chunks:
yield write(i)
response = self.fetch('/', body_producer=body_producer, method='POST')
response.rethrow()
self.assertEqual(json_decode(response.body),
dict(methods=['prepare', 'data_received',
'data_received', 'data_received',
'post']))
def test_flow_control_compressed_body(self):
bytesio = BytesIO()
gzip_file = gzip.GzipFile(mode='w', fileobj=bytesio)
gzip_file.write(b'abcdefghijklmnopqrstuvwxyz')
gzip_file.close()
compressed_body = bytesio.getvalue()
response = self.fetch('/', body=compressed_body, method='POST',
headers={'Content-Encoding': 'gzip'})
response.rethrow()
self.assertEqual(json_decode(response.body),
dict(methods=['prepare', 'data_received',
'data_received', 'data_received',
'post']))
class DecoratedStreamingRequestFlowControlTest(
BaseStreamingRequestFlowControlTest,
WebTestCase):
def get_handlers(self):
class DecoratedFlowControlHandler(BaseFlowControlHandler):
@gen.coroutine
def data_received(self, data):
with self.in_method('data_received'):
yield gen.Task(IOLoop.current().add_callback)
return [('/', DecoratedFlowControlHandler, dict(test=self))]
@skipBefore35
class NativeStreamingRequestFlowControlTest(
BaseStreamingRequestFlowControlTest,
WebTestCase):
def get_handlers(self):
class NativeFlowControlHandler(BaseFlowControlHandler):
data_received = exec_test(globals(), locals(), """
async def data_received(self, data):
with self.in_method('data_received'):
await gen.Task(IOLoop.current().add_callback)
""")["data_received"]
return [('/', NativeFlowControlHandler, dict(test=self))]
@wsgi_safe
class IncorrectContentLengthTest(SimpleHandlerTestCase):
def get_handlers(self):
test = self
self.server_error = None
# Manually set a content-length that doesn't match the actual content.
class TooHigh(RequestHandler):
def get(self):
self.set_header("Content-Length", "42")
try:
self.finish("ok")
except Exception as e:
test.server_error = e
raise
class TooLow(RequestHandler):
def get(self):
self.set_header("Content-Length", "2")
try:
self.finish("hello")
except Exception as e:
test.server_error = e
raise
return [('/high', TooHigh),
('/low', TooLow)]
def test_content_length_too_high(self):
# When the content-length is too high, the connection is simply
# closed without completing the response. An error is logged on
# the server.
with ExpectLog(app_log, "(Uncaught exception|Exception in callback)"):
with ExpectLog(gen_log,
"(Cannot send error response after headers written"
"|Failed to flush partial response)"):
response = self.fetch("/high")
self.assertEqual(response.code, 599)
self.assertEqual(str(self.server_error),
"Tried to write 40 bytes less than Content-Length")
def test_content_length_too_low(self):
# When the content-length is too low, the connection is closed
# without writing the last chunk, so the client never sees the request
# complete (which would be a framing error).
with ExpectLog(app_log, "(Uncaught exception|Exception in callback)"):
with ExpectLog(gen_log,
"(Cannot send error response after headers written"
"|Failed to flush partial response)"):
response = self.fetch("/low")
self.assertEqual(response.code, 599)
self.assertEqual(str(self.server_error),
"Tried to write more data than Content-Length")
class ClientCloseTest(SimpleHandlerTestCase):
class Handler(RequestHandler):
def get(self):
if self.request.version.startswith('HTTP/1'):
# Simulate a connection closed by the client during
# request processing. The client will see an error, but the
# server should respond gracefully (without logging errors
# because we were unable to write out as many bytes as
# Content-Length said we would)
self.request.connection.stream.close()
self.write('hello')
else:
# TODO: add a HTTP2-compatible version of this test.
self.write('requires HTTP/1.x')
def test_client_close(self):
response = self.fetch('/')
if response.body == b'requires HTTP/1.x':
self.skipTest('requires HTTP/1.x')
self.assertEqual(response.code, 599)
class SignedValueTest(unittest.TestCase):
SECRET = "It's a secret to everybody"
SECRET_DICT = {0: "asdfbasdf", 1: "12312312", 2: "2342342"}
def past(self):
return self.present() - 86400 * 32
def present(self):
return 1300000000
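    # The expected strings below follow the signed-value layouts seen in the
    # assertions: v1 is "base64(value)|timestamp|hex signature" (40 hex chars,
    # HMAC-SHA1), while v2 uses length-prefixed fields,
    # "2|1:key_version|10:timestamp|len:name|len:base64(value)|hex signature"
    # (64 hex chars, HMAC-SHA256).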
def test_known_values(self):
signed_v1 = create_signed_value(SignedValueTest.SECRET, "key", "value",
version=1, clock=self.present)
self.assertEqual(
signed_v1,
b"dmFsdWU=|1300000000|31c934969f53e48164c50768b40cbd7e2daaaa4f")
signed_v2 = create_signed_value(SignedValueTest.SECRET, "key", "value",
version=2, clock=self.present)
self.assertEqual(
signed_v2,
b"2|1:0|10:1300000000|3:key|8:dmFsdWU=|"
b"3d4e60b996ff9c5d5788e333a0cba6f238a22c6c0f94788870e1a9ecd482e152")
signed_default = create_signed_value(SignedValueTest.SECRET,
"key", "value", clock=self.present)
self.assertEqual(signed_default, signed_v2)
decoded_v1 = decode_signed_value(SignedValueTest.SECRET, "key",
signed_v1, min_version=1,
clock=self.present)
self.assertEqual(decoded_v1, b"value")
decoded_v2 = decode_signed_value(SignedValueTest.SECRET, "key",
signed_v2, min_version=2,
clock=self.present)
self.assertEqual(decoded_v2, b"value")
def test_name_swap(self):
signed1 = create_signed_value(SignedValueTest.SECRET, "key1", "value",
clock=self.present)
signed2 = create_signed_value(SignedValueTest.SECRET, "key2", "value",
clock=self.present)
# Try decoding each string with the other's "name"
decoded1 = decode_signed_value(SignedValueTest.SECRET, "key2", signed1,
clock=self.present)
self.assertIs(decoded1, None)
decoded2 = decode_signed_value(SignedValueTest.SECRET, "key1", signed2,
clock=self.present)
self.assertIs(decoded2, None)
def test_expired(self):
signed = create_signed_value(SignedValueTest.SECRET, "key1", "value",
clock=self.past)
decoded_past = decode_signed_value(SignedValueTest.SECRET, "key1",
signed, clock=self.past)
self.assertEqual(decoded_past, b"value")
decoded_present = decode_signed_value(SignedValueTest.SECRET, "key1",
signed, clock=self.present)
self.assertIs(decoded_present, None)
def test_payload_tampering(self):
# These cookies are variants of the one in test_known_values.
sig = "3d4e60b996ff9c5d5788e333a0cba6f238a22c6c0f94788870e1a9ecd482e152"
def validate(prefix):
return (b'value' ==
decode_signed_value(SignedValueTest.SECRET, "key",
prefix + sig, clock=self.present))
self.assertTrue(validate("2|1:0|10:1300000000|3:key|8:dmFsdWU=|"))
# Change key version
self.assertFalse(validate("2|1:1|10:1300000000|3:key|8:dmFsdWU=|"))
# length mismatch (field too short)
self.assertFalse(validate("2|1:0|10:130000000|3:key|8:dmFsdWU=|"))
# length mismatch (field too long)
self.assertFalse(validate("2|1:0|10:1300000000|3:keey|8:dmFsdWU=|"))
def test_signature_tampering(self):
prefix = "2|1:0|10:1300000000|3:key|8:dmFsdWU=|"
def validate(sig):
return (b'value' ==
decode_signed_value(SignedValueTest.SECRET, "key",
prefix + sig, clock=self.present))
self.assertTrue(validate(
"3d4e60b996ff9c5d5788e333a0cba6f238a22c6c0f94788870e1a9ecd482e152"))
# All zeros
self.assertFalse(validate("0" * 32))
# Change one character
self.assertFalse(validate(
"4d4e60b996ff9c5d5788e333a0cba6f238a22c6c0f94788870e1a9ecd482e152"))
# Change another character
self.assertFalse(validate(
"3d4e60b996ff9c5d5788e333a0cba6f238a22c6c0f94788870e1a9ecd482e153"))
# Truncate
self.assertFalse(validate(
"3d4e60b996ff9c5d5788e333a0cba6f238a22c6c0f94788870e1a9ecd482e15"))
# Lengthen
self.assertFalse(validate(
"3d4e60b996ff9c5d5788e333a0cba6f238a22c6c0f94788870e1a9ecd482e1538"))
def test_non_ascii(self):
value = b"\xe9"
signed = create_signed_value(SignedValueTest.SECRET, "key", value,
clock=self.present)
decoded = decode_signed_value(SignedValueTest.SECRET, "key", signed,
clock=self.present)
self.assertEqual(value, decoded)
def test_key_versioning_read_write_default_key(self):
value = b"\xe9"
signed = create_signed_value(SignedValueTest.SECRET_DICT,
"key", value, clock=self.present,
key_version=0)
decoded = decode_signed_value(SignedValueTest.SECRET_DICT,
"key", signed, clock=self.present)
self.assertEqual(value, decoded)
def test_key_versioning_read_write_non_default_key(self):
value = b"\xe9"
signed = create_signed_value(SignedValueTest.SECRET_DICT,
"key", value, clock=self.present,
key_version=1)
decoded = decode_signed_value(SignedValueTest.SECRET_DICT,
"key", signed, clock=self.present)
self.assertEqual(value, decoded)
def test_key_versioning_invalid_key(self):
value = b"\xe9"
signed = create_signed_value(SignedValueTest.SECRET_DICT,
"key", value, clock=self.present,
key_version=0)
newkeys = SignedValueTest.SECRET_DICT.copy()
newkeys.pop(0)
decoded = decode_signed_value(newkeys,
"key", signed, clock=self.present)
self.assertEqual(None, decoded)
def test_key_version_retrieval(self):
value = b"\xe9"
signed = create_signed_value(SignedValueTest.SECRET_DICT,
"key", value, clock=self.present,
key_version=1)
key_version = get_signature_key_version(signed)
self.assertEqual(1, key_version)
@wsgi_safe
class XSRFTest(SimpleHandlerTestCase):
class Handler(RequestHandler):
def get(self):
version = int(self.get_argument("version", "2"))
# This would be a bad idea in a real app, but in this test
# it's fine.
self.settings["xsrf_cookie_version"] = version
self.write(self.xsrf_token)
def post(self):
self.write("ok")
def get_app_kwargs(self):
return dict(xsrf_cookies=True)
def setUp(self):
super(XSRFTest, self).setUp()
self.xsrf_token = self.get_token()
def get_token(self, old_token=None, version=None):
if old_token is not None:
headers = self.cookie_headers(old_token)
else:
headers = None
response = self.fetch(
"/" if version is None else ("/?version=%d" % version),
headers=headers)
response.rethrow()
return native_str(response.body)
def cookie_headers(self, token=None):
if token is None:
token = self.xsrf_token
return {"Cookie": "_xsrf=" + token}
def test_xsrf_fail_no_token(self):
with ExpectLog(gen_log, ".*'_xsrf' argument missing"):
response = self.fetch("/", method="POST", body=b"")
self.assertEqual(response.code, 403)
def test_xsrf_fail_body_no_cookie(self):
with ExpectLog(gen_log, ".*XSRF cookie does not match POST"):
response = self.fetch(
"/", method="POST",
body=urllib_parse.urlencode(dict(_xsrf=self.xsrf_token)))
self.assertEqual(response.code, 403)
def test_xsrf_fail_argument_invalid_format(self):
with ExpectLog(gen_log, ".*'_xsrf' argument has invalid format"):
response = self.fetch(
"/", method="POST",
headers=self.cookie_headers(),
body=urllib_parse.urlencode(dict(_xsrf='3|')))
self.assertEqual(response.code, 403)
def test_xsrf_fail_cookie_invalid_format(self):
with ExpectLog(gen_log, ".*XSRF cookie does not match POST"):
response = self.fetch(
"/", method="POST",
headers=self.cookie_headers(token='3|'),
body=urllib_parse.urlencode(dict(_xsrf=self.xsrf_token)))
self.assertEqual(response.code, 403)
def test_xsrf_fail_cookie_no_body(self):
with ExpectLog(gen_log, ".*'_xsrf' argument missing"):
response = self.fetch(
"/", method="POST", body=b"",
headers=self.cookie_headers())
self.assertEqual(response.code, 403)
def test_xsrf_success_short_token(self):
response = self.fetch(
"/", method="POST",
body=urllib_parse.urlencode(dict(_xsrf='deadbeef')),
headers=self.cookie_headers(token='deadbeef'))
self.assertEqual(response.code, 200)
def test_xsrf_success_non_hex_token(self):
response = self.fetch(
"/", method="POST",
body=urllib_parse.urlencode(dict(_xsrf='xoxo')),
headers=self.cookie_headers(token='xoxo'))
self.assertEqual(response.code, 200)
def test_xsrf_success_post_body(self):
response = self.fetch(
"/", method="POST",
body=urllib_parse.urlencode(dict(_xsrf=self.xsrf_token)),
headers=self.cookie_headers())
self.assertEqual(response.code, 200)
def test_xsrf_success_query_string(self):
response = self.fetch(
"/?" + urllib_parse.urlencode(dict(_xsrf=self.xsrf_token)),
method="POST", body=b"",
headers=self.cookie_headers())
self.assertEqual(response.code, 200)
def test_xsrf_success_header(self):
response = self.fetch("/", method="POST", body=b"",
headers=dict({"X-Xsrftoken": self.xsrf_token}, # type: ignore
**self.cookie_headers()))
self.assertEqual(response.code, 200)
def test_distinct_tokens(self):
# Every request gets a distinct token.
NUM_TOKENS = 10
tokens = set()
for i in range(NUM_TOKENS):
tokens.add(self.get_token())
self.assertEqual(len(tokens), NUM_TOKENS)
def test_cross_user(self):
token2 = self.get_token()
# Each token can be used to authenticate its own request.
for token in (self.xsrf_token, token2):
response = self.fetch(
"/", method="POST",
body=urllib_parse.urlencode(dict(_xsrf=token)),
headers=self.cookie_headers(token))
self.assertEqual(response.code, 200)
# Sending one in the cookie and the other in the body is not allowed.
for cookie_token, body_token in ((self.xsrf_token, token2),
(token2, self.xsrf_token)):
with ExpectLog(gen_log, '.*XSRF cookie does not match POST'):
response = self.fetch(
"/", method="POST",
body=urllib_parse.urlencode(dict(_xsrf=body_token)),
headers=self.cookie_headers(cookie_token))
self.assertEqual(response.code, 403)
def test_refresh_token(self):
token = self.xsrf_token
tokens_seen = set([token])
# A user's token is stable over time. Refreshing the page in one tab
# might update the cookie while an older tab still has the old cookie
# in its DOM. Simulate this scenario by passing a constant token
# in the body and re-querying for the token.
for i in range(5):
token = self.get_token(token)
# Tokens are encoded uniquely each time
tokens_seen.add(token)
response = self.fetch(
"/", method="POST",
body=urllib_parse.urlencode(dict(_xsrf=self.xsrf_token)),
headers=self.cookie_headers(token))
self.assertEqual(response.code, 200)
self.assertEqual(len(tokens_seen), 6)
def test_versioning(self):
# Version 1 still produces distinct tokens per request.
self.assertNotEqual(self.get_token(version=1),
self.get_token(version=1))
# Refreshed v1 tokens are all identical.
v1_token = self.get_token(version=1)
for i in range(5):
self.assertEqual(self.get_token(v1_token, version=1), v1_token)
# Upgrade to a v2 version of the same token
v2_token = self.get_token(v1_token)
self.assertNotEqual(v1_token, v2_token)
# Each v1 token can map to many v2 tokens.
self.assertNotEqual(v2_token, self.get_token(v1_token))
# The tokens are cross-compatible.
for cookie_token, body_token in ((v1_token, v2_token),
(v2_token, v1_token)):
response = self.fetch(
"/", method="POST",
body=urllib_parse.urlencode(dict(_xsrf=body_token)),
headers=self.cookie_headers(cookie_token))
self.assertEqual(response.code, 200)
@wsgi_safe
class XSRFCookieKwargsTest(SimpleHandlerTestCase):
class Handler(RequestHandler):
def get(self):
self.write(self.xsrf_token)
def get_app_kwargs(self):
return dict(xsrf_cookies=True,
xsrf_cookie_kwargs=dict(httponly=True))
def test_xsrf_httponly(self):
response = self.fetch("/")
self.assertIn('httponly;', response.headers['Set-Cookie'].lower())
@wsgi_safe
class FinishExceptionTest(SimpleHandlerTestCase):
class Handler(RequestHandler):
def get(self):
self.set_status(401)
self.set_header('WWW-Authenticate', 'Basic realm="something"')
if self.get_argument('finish_value', ''):
raise Finish('authentication required')
else:
self.write('authentication required')
raise Finish()
def test_finish_exception(self):
for u in ['/', '/?finish_value=1']:
response = self.fetch(u)
self.assertEqual(response.code, 401)
self.assertEqual('Basic realm="something"',
response.headers.get('WWW-Authenticate'))
self.assertEqual(b'authentication required', response.body)
@wsgi_safe
class DecoratorTest(WebTestCase):
def get_handlers(self):
class RemoveSlashHandler(RequestHandler):
@removeslash
def get(self):
pass
class AddSlashHandler(RequestHandler):
@addslash
def get(self):
pass
return [("/removeslash/", RemoveSlashHandler),
("/addslash", AddSlashHandler),
]
def test_removeslash(self):
response = self.fetch("/removeslash/", follow_redirects=False)
self.assertEqual(response.code, 301)
self.assertEqual(response.headers['Location'], "/removeslash")
response = self.fetch("/removeslash/?foo=bar", follow_redirects=False)
self.assertEqual(response.code, 301)
self.assertEqual(response.headers['Location'], "/removeslash?foo=bar")
def test_addslash(self):
response = self.fetch("/addslash", follow_redirects=False)
self.assertEqual(response.code, 301)
self.assertEqual(response.headers['Location'], "/addslash/")
response = self.fetch("/addslash?foo=bar", follow_redirects=False)
self.assertEqual(response.code, 301)
self.assertEqual(response.headers['Location'], "/addslash/?foo=bar")
@wsgi_safe
class CacheTest(WebTestCase):
def get_handlers(self):
class EtagHandler(RequestHandler):
def get(self, computed_etag):
self.write(computed_etag)
def compute_etag(self):
return self._write_buffer[0]
return [
('/etag/(.*)', EtagHandler)
]
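    # The cases below exercise the handler's etag check, which does a weak
    # comparison: a W/ prefix on an If-None-Match value is ignored when
    # deciding whether to reply 304 Not Modified.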
def test_wildcard_etag(self):
computed_etag = '"xyzzy"'
etags = '*'
self._test_etag(computed_etag, etags, 304)
def test_strong_etag_match(self):
computed_etag = '"xyzzy"'
etags = '"xyzzy"'
self._test_etag(computed_etag, etags, 304)
def test_multiple_strong_etag_match(self):
computed_etag = '"xyzzy1"'
etags = '"xyzzy1", "xyzzy2"'
self._test_etag(computed_etag, etags, 304)
def test_strong_etag_not_match(self):
computed_etag = '"xyzzy"'
etags = '"xyzzy1"'
self._test_etag(computed_etag, etags, 200)
def test_multiple_strong_etag_not_match(self):
computed_etag = '"xyzzy"'
etags = '"xyzzy1", "xyzzy2"'
self._test_etag(computed_etag, etags, 200)
def test_weak_etag_match(self):
computed_etag = '"xyzzy1"'
etags = 'W/"xyzzy1"'
self._test_etag(computed_etag, etags, 304)
def test_multiple_weak_etag_match(self):
computed_etag = '"xyzzy2"'
etags = 'W/"xyzzy1", W/"xyzzy2"'
self._test_etag(computed_etag, etags, 304)
def test_weak_etag_not_match(self):
computed_etag = '"xyzzy2"'
etags = 'W/"xyzzy1"'
self._test_etag(computed_etag, etags, 200)
def test_multiple_weak_etag_not_match(self):
computed_etag = '"xyzzy3"'
etags = 'W/"xyzzy1", W/"xyzzy2"'
self._test_etag(computed_etag, etags, 200)
def _test_etag(self, computed_etag, etags, status_code):
response = self.fetch(
'/etag/' + computed_etag,
headers={'If-None-Match': etags}
)
self.assertEqual(response.code, status_code)
@wsgi_safe
class RequestSummaryTest(SimpleHandlerTestCase):
class Handler(RequestHandler):
def get(self):
# remote_ip is optional, although it's set by
# both HTTPServer and WSGIAdapter.
# Clobber it to make sure it doesn't break logging.
self.request.remote_ip = None
self.finish(self._request_summary())
def test_missing_remote_ip(self):
resp = self.fetch("/")
self.assertEqual(resp.body, b"GET / (None)")
class HTTPErrorTest(unittest.TestCase):
def test_copy(self):
e = HTTPError(403, reason="Go away")
e2 = copy.copy(e)
self.assertIsNot(e, e2)
self.assertEqual(e.status_code, e2.status_code)
self.assertEqual(e.reason, e2.reason)
class ApplicationTest(AsyncTestCase):
def test_listen(self):
app = Application([])
server = app.listen(0, address='127.0.0.1')
server.stop()
class URLSpecReverseTest(unittest.TestCase):
def test_reverse(self):
self.assertEqual('/favicon.ico', url(r'/favicon\.ico', None).reverse())
self.assertEqual('/favicon.ico', url(r'^/favicon\.ico$', None).reverse())
def test_non_reversible(self):
# URLSpecs are non-reversible if they include non-constant
# regex features outside capturing groups. Currently, this is
# only strictly enforced for backslash-escaped character
# classes.
paths = [
r'^/api/v\d+/foo/(\w+)$',
]
for path in paths:
# A URLSpec can still be created even if it cannot be reversed.
url_spec = url(path, None)
try:
result = url_spec.reverse()
self.fail("did not get expected exception when reversing %s. "
"result: %s" % (path, result))
except ValueError:
pass
def test_reverse_arguments(self):
self.assertEqual('/api/v1/foo/bar',
url(r'^/api/v1/foo/(\w+)$', None).reverse('bar'))
class RedirectHandlerTest(WebTestCase):
def get_handlers(self):
return [
('/src', WebRedirectHandler, {'url': '/dst'}),
(r'/(.*?)/(.*?)/(.*)', WebRedirectHandler, {'url': '/{1}/{0}/{2}'})]
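    # The target URL is formatted with the captured groups, so the second
    # pattern swaps the first two path segments ('/a/b/c' -> '/b/a/c').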
def test_basic_redirect(self):
response = self.fetch('/src', follow_redirects=False)
self.assertEqual(response.code, 301)
self.assertEqual(response.headers['Location'], '/dst')
def test_redirect_pattern(self):
response = self.fetch('/a/b/c', follow_redirects=False)
self.assertEqual(response.code, 301)
self.assertEqual(response.headers['Location'], '/b/a/c')
| {
"repo_name": "ammarkhann/FinalSeniorCode",
"path": "lib/python2.7/site-packages/tornado/test/web_test.py",
"copies": "18",
"size": "114101",
"license": "mit",
"hash": -3832307563025421300,
"line_mean": 38.4949809623,
"line_max": 364,
"alpha_frac": 0.5886100911,
"autogenerated": false,
"ratio": 4.0375442321302195,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00020578156913580604,
"num_lines": 2889
} |
from __future__ import absolute_import, division, print_function
from tornado.concurrent import Future
from tornado import gen
from tornado import netutil
from tornado.iostream import IOStream, SSLIOStream, PipeIOStream, StreamClosedError
from tornado.httputil import HTTPHeaders
from tornado.log import gen_log, app_log
from tornado.netutil import ssl_wrap_socket
from tornado.stack_context import NullContext
from tornado.tcpserver import TCPServer
from tornado.testing import AsyncHTTPTestCase, AsyncHTTPSTestCase, AsyncTestCase, bind_unused_port, ExpectLog, gen_test
from tornado.test.util import unittest, skipIfNonUnix, refusing_port
from tornado.web import RequestHandler, Application
import errno
import logging
import os
import platform
import socket
import ssl
import sys
try:
from unittest import mock # type: ignore
except ImportError:
try:
import mock # type: ignore
except ImportError:
mock = None
def _server_ssl_options():
return dict(
certfile=os.path.join(os.path.dirname(__file__), 'test.crt'),
keyfile=os.path.join(os.path.dirname(__file__), 'test.key'),
)
class HelloHandler(RequestHandler):
def get(self):
self.write("Hello")
class TestIOStreamWebMixin(object):
def _make_client_iostream(self):
raise NotImplementedError()
def get_app(self):
return Application([('/', HelloHandler)])
def test_connection_closed(self):
# When a server sends a response and then closes the connection,
# the client must be allowed to read the data before the IOStream
# closes itself. Epoll reports closed connections with a separate
# EPOLLRDHUP event delivered at the same time as the read event,
# while kqueue reports them as a second read/write event with an EOF
# flag.
response = self.fetch("/", headers={"Connection": "close"})
response.rethrow()
def test_read_until_close(self):
stream = self._make_client_iostream()
stream.connect(('127.0.0.1', self.get_http_port()), callback=self.stop)
self.wait()
stream.write(b"GET / HTTP/1.0\r\n\r\n")
stream.read_until_close(self.stop)
data = self.wait()
self.assertTrue(data.startswith(b"HTTP/1.1 200"))
self.assertTrue(data.endswith(b"Hello"))
def test_read_zero_bytes(self):
self.stream = self._make_client_iostream()
self.stream.connect(("127.0.0.1", self.get_http_port()),
callback=self.stop)
self.wait()
self.stream.write(b"GET / HTTP/1.0\r\n\r\n")
# normal read
self.stream.read_bytes(9, self.stop)
data = self.wait()
self.assertEqual(data, b"HTTP/1.1 ")
# zero bytes
self.stream.read_bytes(0, self.stop)
data = self.wait()
self.assertEqual(data, b"")
# another normal read
self.stream.read_bytes(3, self.stop)
data = self.wait()
self.assertEqual(data, b"200")
self.stream.close()
def test_write_while_connecting(self):
stream = self._make_client_iostream()
connected = [False]
def connected_callback():
connected[0] = True
self.stop()
stream.connect(("127.0.0.1", self.get_http_port()),
callback=connected_callback)
# unlike the previous tests, try to write before the connection
# is complete.
written = [False]
def write_callback():
written[0] = True
self.stop()
stream.write(b"GET / HTTP/1.0\r\nConnection: close\r\n\r\n",
callback=write_callback)
self.assertTrue(not connected[0])
# by the time the write has flushed, the connection callback has
# also run
try:
self.wait(lambda: connected[0] and written[0])
finally:
logging.debug((connected, written))
stream.read_until_close(self.stop)
data = self.wait()
self.assertTrue(data.endswith(b"Hello"))
stream.close()
@gen_test
def test_future_interface(self):
"""Basic test of IOStream's ability to return Futures."""
stream = self._make_client_iostream()
connect_result = yield stream.connect(
("127.0.0.1", self.get_http_port()))
self.assertIs(connect_result, stream)
yield stream.write(b"GET / HTTP/1.0\r\n\r\n")
first_line = yield stream.read_until(b"\r\n")
self.assertEqual(first_line, b"HTTP/1.1 200 OK\r\n")
# callback=None is equivalent to no callback.
header_data = yield stream.read_until(b"\r\n\r\n", callback=None)
headers = HTTPHeaders.parse(header_data.decode('latin1'))
content_length = int(headers['Content-Length'])
body = yield stream.read_bytes(content_length)
self.assertEqual(body, b'Hello')
stream.close()
@gen_test
def test_future_close_while_reading(self):
stream = self._make_client_iostream()
yield stream.connect(("127.0.0.1", self.get_http_port()))
yield stream.write(b"GET / HTTP/1.0\r\n\r\n")
with self.assertRaises(StreamClosedError):
yield stream.read_bytes(1024 * 1024)
stream.close()
@gen_test
def test_future_read_until_close(self):
# Ensure that the data comes through before the StreamClosedError.
stream = self._make_client_iostream()
yield stream.connect(("127.0.0.1", self.get_http_port()))
yield stream.write(b"GET / HTTP/1.0\r\nConnection: close\r\n\r\n")
yield stream.read_until(b"\r\n\r\n")
body = yield stream.read_until_close()
self.assertEqual(body, b"Hello")
# Nothing else to read; the error comes immediately without waiting
# for yield.
with self.assertRaises(StreamClosedError):
stream.read_bytes(1)
class TestIOStreamMixin(object):
def _make_server_iostream(self, connection, **kwargs):
raise NotImplementedError()
def _make_client_iostream(self, connection, **kwargs):
raise NotImplementedError()
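    # make_iostream_pair returns a connected (server, client) pair of streams
    # over a loopback socket; **kwargs are forwarded to both stream
    # constructors (e.g. read_chunk_size, max_buffer_size).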
def make_iostream_pair(self, **kwargs):
listener, port = bind_unused_port()
streams = [None, None]
def accept_callback(connection, address):
streams[0] = self._make_server_iostream(connection, **kwargs)
self.stop()
def connect_callback():
streams[1] = client_stream
self.stop()
netutil.add_accept_handler(listener, accept_callback)
client_stream = self._make_client_iostream(socket.socket(), **kwargs)
client_stream.connect(('127.0.0.1', port),
callback=connect_callback)
self.wait(condition=lambda: all(streams))
self.io_loop.remove_handler(listener.fileno())
listener.close()
return streams
def test_streaming_callback_with_data_in_buffer(self):
server, client = self.make_iostream_pair()
client.write(b"abcd\r\nefgh")
server.read_until(b"\r\n", self.stop)
data = self.wait()
self.assertEqual(data, b"abcd\r\n")
def closed_callback(chunk):
self.fail()
server.read_until_close(callback=closed_callback,
streaming_callback=self.stop)
# self.io_loop.add_timeout(self.io_loop.time() + 0.01, self.stop)
data = self.wait()
self.assertEqual(data, b"efgh")
server.close()
client.close()
def test_write_zero_bytes(self):
# Attempting to write zero bytes should run the callback without
# going into an infinite loop.
server, client = self.make_iostream_pair()
server.write(b'', callback=self.stop)
self.wait()
server.close()
client.close()
def test_connection_refused(self):
# When a connection is refused, the connect callback should not
# be run. (The kqueue IOLoop used to behave differently from the
# epoll IOLoop in this respect)
cleanup_func, port = refusing_port()
self.addCleanup(cleanup_func)
stream = IOStream(socket.socket())
self.connect_called = False
def connect_callback():
self.connect_called = True
self.stop()
stream.set_close_callback(self.stop)
# log messages vary by platform and ioloop implementation
with ExpectLog(gen_log, ".*", required=False):
stream.connect(("127.0.0.1", port), connect_callback)
self.wait()
self.assertFalse(self.connect_called)
self.assertTrue(isinstance(stream.error, socket.error), stream.error)
if sys.platform != 'cygwin':
_ERRNO_CONNREFUSED = (errno.ECONNREFUSED,)
if hasattr(errno, "WSAECONNREFUSED"):
_ERRNO_CONNREFUSED += (errno.WSAECONNREFUSED,)
# cygwin's errnos don't match those used on native windows python
self.assertTrue(stream.error.args[0] in _ERRNO_CONNREFUSED)
@unittest.skipIf(mock is None, 'mock package not present')
def test_gaierror(self):
# Test that IOStream sets its exc_info on getaddrinfo error.
# It's difficult to reliably trigger a getaddrinfo error;
        # some resolvers won't even return errors for malformed names,
# so we mock it instead. If IOStream changes to call a Resolver
# before sock.connect, the mock target will need to change too.
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
stream = IOStream(s)
stream.set_close_callback(self.stop)
with mock.patch('socket.socket.connect',
side_effect=socket.gaierror(errno.EIO, 'boom')):
with ExpectLog(gen_log, "Connect error"):
stream.connect(('localhost', 80), callback=self.stop)
self.wait()
self.assertIsInstance(stream.error, socket.gaierror)
def test_read_callback_error(self):
# Test that IOStream sets its exc_info when a read callback throws
server, client = self.make_iostream_pair()
try:
server.set_close_callback(self.stop)
with ExpectLog(
app_log, "(Uncaught exception|Exception in callback)"
):
# Clear ExceptionStackContext so IOStream catches error
with NullContext():
server.read_bytes(1, callback=lambda data: 1 / 0)
client.write(b"1")
self.wait()
self.assertTrue(isinstance(server.error, ZeroDivisionError))
finally:
server.close()
client.close()
def test_streaming_callback(self):
server, client = self.make_iostream_pair()
try:
chunks = []
final_called = []
def streaming_callback(data):
chunks.append(data)
self.stop()
def final_callback(data):
self.assertFalse(data)
final_called.append(True)
self.stop()
server.read_bytes(6, callback=final_callback,
streaming_callback=streaming_callback)
client.write(b"1234")
self.wait(condition=lambda: chunks)
client.write(b"5678")
self.wait(condition=lambda: final_called)
self.assertEqual(chunks, [b"1234", b"56"])
# the rest of the last chunk is still in the buffer
server.read_bytes(2, callback=self.stop)
data = self.wait()
self.assertEqual(data, b"78")
finally:
server.close()
client.close()
def test_streaming_until_close(self):
server, client = self.make_iostream_pair()
try:
chunks = []
closed = [False]
def streaming_callback(data):
chunks.append(data)
self.stop()
def close_callback(data):
assert not data, data
closed[0] = True
self.stop()
client.read_until_close(callback=close_callback,
streaming_callback=streaming_callback)
server.write(b"1234")
self.wait(condition=lambda: len(chunks) == 1)
server.write(b"5678", self.stop)
self.wait()
server.close()
self.wait(condition=lambda: closed[0])
self.assertEqual(chunks, [b"1234", b"5678"])
finally:
server.close()
client.close()
def test_streaming_until_close_future(self):
server, client = self.make_iostream_pair()
try:
chunks = []
@gen.coroutine
def client_task():
yield client.read_until_close(streaming_callback=chunks.append)
@gen.coroutine
def server_task():
yield server.write(b"1234")
yield gen.sleep(0.01)
yield server.write(b"5678")
server.close()
@gen.coroutine
def f():
yield [client_task(), server_task()]
self.io_loop.run_sync(f)
self.assertEqual(chunks, [b"1234", b"5678"])
finally:
server.close()
client.close()
def test_delayed_close_callback(self):
# The scenario: Server closes the connection while there is a pending
# read that can be served out of buffered data. The client does not
# run the close_callback as soon as it detects the close, but rather
# defers it until after the buffered read has finished.
server, client = self.make_iostream_pair()
try:
client.set_close_callback(self.stop)
server.write(b"12")
chunks = []
def callback1(data):
chunks.append(data)
client.read_bytes(1, callback2)
server.close()
def callback2(data):
chunks.append(data)
client.read_bytes(1, callback1)
self.wait() # stopped by close_callback
self.assertEqual(chunks, [b"1", b"2"])
finally:
server.close()
client.close()
def test_future_delayed_close_callback(self):
# Same as test_delayed_close_callback, but with the future interface.
server, client = self.make_iostream_pair()
# We can't call make_iostream_pair inside a gen_test function
# because the ioloop is not reentrant.
@gen_test
def f(self):
server.write(b"12")
chunks = []
chunks.append((yield client.read_bytes(1)))
server.close()
chunks.append((yield client.read_bytes(1)))
self.assertEqual(chunks, [b"1", b"2"])
try:
f(self)
finally:
server.close()
client.close()
def test_close_buffered_data(self):
# Similar to the previous test, but with data stored in the OS's
# socket buffers instead of the IOStream's read buffer. Out-of-band
# close notifications must be delayed until all data has been
# drained into the IOStream buffer. (epoll used to use out-of-band
# close events with EPOLLRDHUP, but no longer)
#
# This depends on the read_chunk_size being smaller than the
# OS socket buffer, so make it small.
server, client = self.make_iostream_pair(read_chunk_size=256)
try:
server.write(b"A" * 512)
client.read_bytes(256, self.stop)
data = self.wait()
self.assertEqual(b"A" * 256, data)
server.close()
# Allow the close to propagate to the client side of the
# connection. Using add_callback instead of add_timeout
# doesn't seem to work, even with multiple iterations
self.io_loop.add_timeout(self.io_loop.time() + 0.01, self.stop)
self.wait()
client.read_bytes(256, self.stop)
data = self.wait()
self.assertEqual(b"A" * 256, data)
finally:
server.close()
client.close()
def test_read_until_close_after_close(self):
# Similar to test_delayed_close_callback, but read_until_close takes
# a separate code path so test it separately.
server, client = self.make_iostream_pair()
try:
server.write(b"1234")
server.close()
# Read one byte to make sure the client has received the data.
# It won't run the close callback as long as there is more buffered
# data that could satisfy a later read.
client.read_bytes(1, self.stop)
data = self.wait()
self.assertEqual(data, b"1")
client.read_until_close(self.stop)
data = self.wait()
self.assertEqual(data, b"234")
finally:
server.close()
client.close()
@unittest.skipIf(mock is None, 'mock package not present')
def test_read_until_close_with_error(self):
server, client = self.make_iostream_pair()
try:
with mock.patch('tornado.iostream.BaseIOStream._try_inline_read',
side_effect=IOError('boom')):
with self.assertRaisesRegexp(IOError, 'boom'):
client.read_until_close(self.stop)
finally:
server.close()
client.close()
def test_streaming_read_until_close_after_close(self):
# Same as the preceding test but with a streaming_callback.
# All data should go through the streaming callback,
# and the final read callback just gets an empty string.
server, client = self.make_iostream_pair()
try:
server.write(b"1234")
server.close()
client.read_bytes(1, self.stop)
data = self.wait()
self.assertEqual(data, b"1")
streaming_data = []
client.read_until_close(self.stop,
streaming_callback=streaming_data.append)
data = self.wait()
self.assertEqual(b'', data)
self.assertEqual(b''.join(streaming_data), b"234")
finally:
server.close()
client.close()
def test_large_read_until(self):
# Performance test: read_until used to have a quadratic component
# so a read_until of 4MB would take 8 seconds; now it takes 0.25
# seconds.
server, client = self.make_iostream_pair()
try:
# This test fails on pypy with ssl. I think it's because
            # pypy's gc moves objects, breaking the
# "frozen write buffer" assumption.
if (isinstance(server, SSLIOStream) and
platform.python_implementation() == 'PyPy'):
raise unittest.SkipTest(
"pypy gc causes problems with openssl")
NUM_KB = 4096
for i in range(NUM_KB):
client.write(b"A" * 1024)
client.write(b"\r\n")
server.read_until(b"\r\n", self.stop)
data = self.wait()
self.assertEqual(len(data), NUM_KB * 1024 + 2)
finally:
server.close()
client.close()
def test_close_callback_with_pending_read(self):
# Regression test for a bug that was introduced in 2.3
# where the IOStream._close_callback would never be called
# if there were pending reads.
OK = b"OK\r\n"
server, client = self.make_iostream_pair()
client.set_close_callback(self.stop)
try:
server.write(OK)
client.read_until(b"\r\n", self.stop)
res = self.wait()
self.assertEqual(res, OK)
server.close()
client.read_until(b"\r\n", lambda x: x)
# If _close_callback (self.stop) is not called,
# an AssertionError: Async operation timed out after 5 seconds
# will be raised.
res = self.wait()
self.assertTrue(res is None)
finally:
server.close()
client.close()
@skipIfNonUnix
def test_inline_read_error(self):
# An error on an inline read is raised without logging (on the
# assumption that it will eventually be noticed or logged further
# up the stack).
#
# This test is posix-only because windows os.close() doesn't work
# on socket FDs, but we can't close the socket object normally
# because we won't get the error we want if the socket knows
# it's closed.
server, client = self.make_iostream_pair()
try:
os.close(server.socket.fileno())
with self.assertRaises(socket.error):
server.read_bytes(1, lambda data: None)
finally:
server.close()
client.close()
def test_async_read_error_logging(self):
# Socket errors on asynchronous reads should be logged (but only
# once).
server, client = self.make_iostream_pair()
server.set_close_callback(self.stop)
try:
# Start a read that will be fulfilled asynchronously.
server.read_bytes(1, lambda data: None)
client.write(b'a')
# Stub out read_from_fd to make it fail.
def fake_read_from_fd():
os.close(server.socket.fileno())
server.__class__.read_from_fd(server)
server.read_from_fd = fake_read_from_fd
# This log message is from _handle_read (not read_from_fd).
with ExpectLog(gen_log, "error on read"):
self.wait()
finally:
server.close()
client.close()
def test_future_close_callback(self):
# Regression test for interaction between the Future read interfaces
# and IOStream._maybe_add_error_listener.
server, client = self.make_iostream_pair()
closed = [False]
def close_callback():
closed[0] = True
self.stop()
server.set_close_callback(close_callback)
try:
client.write(b'a')
future = server.read_bytes(1)
self.io_loop.add_future(future, self.stop)
self.assertEqual(self.wait().result(), b'a')
self.assertFalse(closed[0])
client.close()
self.wait()
self.assertTrue(closed[0])
finally:
server.close()
client.close()
def test_write_memoryview(self):
server, client = self.make_iostream_pair()
try:
client.read_bytes(4, self.stop)
server.write(memoryview(b"hello"))
data = self.wait()
self.assertEqual(data, b"hell")
finally:
server.close()
client.close()
def test_read_bytes_partial(self):
server, client = self.make_iostream_pair()
try:
# Ask for more than is available with partial=True
client.read_bytes(50, self.stop, partial=True)
server.write(b"hello")
data = self.wait()
self.assertEqual(data, b"hello")
# Ask for less than what is available; num_bytes is still
# respected.
client.read_bytes(3, self.stop, partial=True)
server.write(b"world")
data = self.wait()
self.assertEqual(data, b"wor")
# Partial reads won't return an empty string, but read_bytes(0)
# will.
client.read_bytes(0, self.stop, partial=True)
data = self.wait()
self.assertEqual(data, b'')
finally:
server.close()
client.close()
def test_read_until_max_bytes(self):
server, client = self.make_iostream_pair()
client.set_close_callback(lambda: self.stop("closed"))
try:
# Extra room under the limit
client.read_until(b"def", self.stop, max_bytes=50)
server.write(b"abcdef")
data = self.wait()
self.assertEqual(data, b"abcdef")
# Just enough space
client.read_until(b"def", self.stop, max_bytes=6)
server.write(b"abcdef")
data = self.wait()
self.assertEqual(data, b"abcdef")
            # Not enough space, but we don't know it until the data arrives;
            # all we can do then is log a warning and close the connection.
with ExpectLog(gen_log, "Unsatisfiable read"):
client.read_until(b"def", self.stop, max_bytes=5)
server.write(b"123456")
data = self.wait()
self.assertEqual(data, "closed")
finally:
server.close()
client.close()
def test_read_until_max_bytes_inline(self):
server, client = self.make_iostream_pair()
client.set_close_callback(lambda: self.stop("closed"))
try:
# Similar to the error case in the previous test, but the
# server writes first so client reads are satisfied
# inline. For consistency with the out-of-line case, we
# do not raise the error synchronously.
server.write(b"123456")
with ExpectLog(gen_log, "Unsatisfiable read"):
client.read_until(b"def", self.stop, max_bytes=5)
data = self.wait()
self.assertEqual(data, "closed")
finally:
server.close()
client.close()
def test_read_until_max_bytes_ignores_extra(self):
server, client = self.make_iostream_pair()
client.set_close_callback(lambda: self.stop("closed"))
try:
            # Even though data that matches arrives in the same packet that
# puts us over the limit, we fail the request because it was not
# found within the limit.
server.write(b"abcdef")
with ExpectLog(gen_log, "Unsatisfiable read"):
client.read_until(b"def", self.stop, max_bytes=5)
data = self.wait()
self.assertEqual(data, "closed")
finally:
server.close()
client.close()
def test_read_until_regex_max_bytes(self):
server, client = self.make_iostream_pair()
client.set_close_callback(lambda: self.stop("closed"))
try:
# Extra room under the limit
client.read_until_regex(b"def", self.stop, max_bytes=50)
server.write(b"abcdef")
data = self.wait()
self.assertEqual(data, b"abcdef")
# Just enough space
client.read_until_regex(b"def", self.stop, max_bytes=6)
server.write(b"abcdef")
data = self.wait()
self.assertEqual(data, b"abcdef")
            # Not enough space, but we don't know it until the data arrives;
            # all we can do then is log a warning and close the connection.
with ExpectLog(gen_log, "Unsatisfiable read"):
client.read_until_regex(b"def", self.stop, max_bytes=5)
server.write(b"123456")
data = self.wait()
self.assertEqual(data, "closed")
finally:
server.close()
client.close()
def test_read_until_regex_max_bytes_inline(self):
server, client = self.make_iostream_pair()
client.set_close_callback(lambda: self.stop("closed"))
try:
# Similar to the error case in the previous test, but the
# server writes first so client reads are satisfied
# inline. For consistency with the out-of-line case, we
# do not raise the error synchronously.
server.write(b"123456")
with ExpectLog(gen_log, "Unsatisfiable read"):
client.read_until_regex(b"def", self.stop, max_bytes=5)
data = self.wait()
self.assertEqual(data, "closed")
finally:
server.close()
client.close()
def test_read_until_regex_max_bytes_ignores_extra(self):
server, client = self.make_iostream_pair()
client.set_close_callback(lambda: self.stop("closed"))
try:
            # Even though data that matches arrives in the same packet that
# puts us over the limit, we fail the request because it was not
# found within the limit.
server.write(b"abcdef")
with ExpectLog(gen_log, "Unsatisfiable read"):
client.read_until_regex(b"def", self.stop, max_bytes=5)
data = self.wait()
self.assertEqual(data, "closed")
finally:
server.close()
client.close()
def test_small_reads_from_large_buffer(self):
# 10KB buffer size, 100KB available to read.
# Read 1KB at a time and make sure that the buffer is not eagerly
# filled.
server, client = self.make_iostream_pair(max_buffer_size=10 * 1024)
try:
server.write(b"a" * 1024 * 100)
for i in range(100):
client.read_bytes(1024, self.stop)
data = self.wait()
self.assertEqual(data, b"a" * 1024)
finally:
server.close()
client.close()
def test_small_read_untils_from_large_buffer(self):
# 10KB buffer size, 100KB available to read.
# Read 1KB at a time and make sure that the buffer is not eagerly
# filled.
server, client = self.make_iostream_pair(max_buffer_size=10 * 1024)
try:
server.write((b"a" * 1023 + b"\n") * 100)
for i in range(100):
client.read_until(b"\n", self.stop, max_bytes=4096)
data = self.wait()
self.assertEqual(data, b"a" * 1023 + b"\n")
finally:
server.close()
client.close()
def test_flow_control(self):
MB = 1024 * 1024
server, client = self.make_iostream_pair(max_buffer_size=5 * MB)
try:
# Client writes more than the server will accept.
client.write(b"a" * 10 * MB)
# The server pauses while reading.
server.read_bytes(MB, self.stop)
self.wait()
self.io_loop.call_later(0.1, self.stop)
self.wait()
# The client's writes have been blocked; the server can
# continue to read gradually.
for i in range(9):
server.read_bytes(MB, self.stop)
self.wait()
finally:
server.close()
client.close()
def test_future_write(self):
"""
Test that write() Futures are never orphaned.
"""
# Run concurrent writers that will write enough bytes so as to
# clog the socket buffer and accumulate bytes in our write buffer.
m, n = 10000, 1000
nproducers = 10
total_bytes = m * n * nproducers
server, client = self.make_iostream_pair(max_buffer_size=total_bytes)
@gen.coroutine
def produce():
data = b'x' * m
for i in range(n):
yield server.write(data)
@gen.coroutine
def consume():
nread = 0
while nread < total_bytes:
res = yield client.read_bytes(m)
nread += len(res)
@gen.coroutine
def main():
yield [produce() for i in range(nproducers)] + [consume()]
try:
self.io_loop.run_sync(main)
finally:
server.close()
client.close()
class TestIOStreamWebHTTP(TestIOStreamWebMixin, AsyncHTTPTestCase):
def _make_client_iostream(self):
return IOStream(socket.socket())
class TestIOStreamWebHTTPS(TestIOStreamWebMixin, AsyncHTTPSTestCase):
def _make_client_iostream(self):
return SSLIOStream(socket.socket(),
ssl_options=dict(cert_reqs=ssl.CERT_NONE))
class TestIOStream(TestIOStreamMixin, AsyncTestCase):
def _make_server_iostream(self, connection, **kwargs):
return IOStream(connection, **kwargs)
def _make_client_iostream(self, connection, **kwargs):
return IOStream(connection, **kwargs)
class TestIOStreamSSL(TestIOStreamMixin, AsyncTestCase):
def _make_server_iostream(self, connection, **kwargs):
connection = ssl.wrap_socket(connection,
server_side=True,
do_handshake_on_connect=False,
**_server_ssl_options())
return SSLIOStream(connection, **kwargs)
def _make_client_iostream(self, connection, **kwargs):
return SSLIOStream(connection,
ssl_options=dict(cert_reqs=ssl.CERT_NONE),
**kwargs)
# This will run some tests that are basically redundant but it's the
# simplest way to make sure that it works to pass an SSLContext
# instead of an ssl_options dict to the SSLIOStream constructor.
@unittest.skipIf(not hasattr(ssl, 'SSLContext'), 'ssl.SSLContext not present')
class TestIOStreamSSLContext(TestIOStreamMixin, AsyncTestCase):
def _make_server_iostream(self, connection, **kwargs):
context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
context.load_cert_chain(
os.path.join(os.path.dirname(__file__), 'test.crt'),
os.path.join(os.path.dirname(__file__), 'test.key'))
connection = ssl_wrap_socket(connection, context,
server_side=True,
do_handshake_on_connect=False)
return SSLIOStream(connection, **kwargs)
def _make_client_iostream(self, connection, **kwargs):
context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
return SSLIOStream(connection, ssl_options=context, **kwargs)
class TestIOStreamStartTLS(AsyncTestCase):
def setUp(self):
try:
super(TestIOStreamStartTLS, self).setUp()
self.listener, self.port = bind_unused_port()
self.server_stream = None
self.server_accepted = Future()
netutil.add_accept_handler(self.listener, self.accept)
self.client_stream = IOStream(socket.socket())
self.io_loop.add_future(self.client_stream.connect(
('127.0.0.1', self.port)), self.stop)
self.wait()
self.io_loop.add_future(self.server_accepted, self.stop)
self.wait()
except Exception as e:
print(e)
raise
def tearDown(self):
if self.server_stream is not None:
self.server_stream.close()
if self.client_stream is not None:
self.client_stream.close()
self.listener.close()
super(TestIOStreamStartTLS, self).tearDown()
def accept(self, connection, address):
if self.server_stream is not None:
self.fail("should only get one connection")
self.server_stream = IOStream(connection)
self.server_accepted.set_result(None)
@gen.coroutine
def client_send_line(self, line):
self.client_stream.write(line)
recv_line = yield self.server_stream.read_until(b"\r\n")
self.assertEqual(line, recv_line)
@gen.coroutine
def server_send_line(self, line):
self.server_stream.write(line)
recv_line = yield self.client_stream.read_until(b"\r\n")
self.assertEqual(line, recv_line)
def client_start_tls(self, ssl_options=None, server_hostname=None):
client_stream = self.client_stream
self.client_stream = None
return client_stream.start_tls(False, ssl_options, server_hostname)
def server_start_tls(self, ssl_options=None):
server_stream = self.server_stream
self.server_stream = None
return server_stream.start_tls(True, ssl_options)
@gen_test
def test_start_tls_smtp(self):
# This flow is simplified from RFC 3207 section 5.
# We don't really need all of this, but it helps to make sure
# that after realistic back-and-forth traffic the buffers end up
# in a sane state.
yield self.server_send_line(b"220 mail.example.com ready\r\n")
yield self.client_send_line(b"EHLO mail.example.com\r\n")
yield self.server_send_line(b"250-mail.example.com welcome\r\n")
yield self.server_send_line(b"250 STARTTLS\r\n")
yield self.client_send_line(b"STARTTLS\r\n")
yield self.server_send_line(b"220 Go ahead\r\n")
client_future = self.client_start_tls(dict(cert_reqs=ssl.CERT_NONE))
server_future = self.server_start_tls(_server_ssl_options())
self.client_stream = yield client_future
self.server_stream = yield server_future
self.assertTrue(isinstance(self.client_stream, SSLIOStream))
self.assertTrue(isinstance(self.server_stream, SSLIOStream))
yield self.client_send_line(b"EHLO mail.example.com\r\n")
yield self.server_send_line(b"250 mail.example.com welcome\r\n")
@gen_test
def test_handshake_fail(self):
server_future = self.server_start_tls(_server_ssl_options())
# Certificates are verified with the default configuration.
client_future = self.client_start_tls(server_hostname="localhost")
with ExpectLog(gen_log, "SSL Error"):
with self.assertRaises(ssl.SSLError):
yield client_future
with self.assertRaises((ssl.SSLError, socket.error)):
yield server_future
@unittest.skipIf(not hasattr(ssl, 'create_default_context'),
'ssl.create_default_context not present')
@gen_test
def test_check_hostname(self):
# Test that server_hostname parameter to start_tls is being used.
# The check_hostname functionality is only available in python 2.7 and
# up and in python 3.4 and up.
server_future = self.server_start_tls(_server_ssl_options())
client_future = self.client_start_tls(
ssl.create_default_context(),
server_hostname=b'127.0.0.1')
with ExpectLog(gen_log, "SSL Error"):
with self.assertRaises(ssl.SSLError):
# The client fails to connect with an SSL error.
yield client_future
with self.assertRaises(Exception):
# The server fails to connect, but the exact error is unspecified.
yield server_future
class WaitForHandshakeTest(AsyncTestCase):
@gen.coroutine
def connect_to_server(self, server_cls):
server = client = None
try:
sock, port = bind_unused_port()
server = server_cls(ssl_options=_server_ssl_options())
server.add_socket(sock)
client = SSLIOStream(socket.socket(),
ssl_options=dict(cert_reqs=ssl.CERT_NONE))
yield client.connect(('127.0.0.1', port))
self.assertIsNotNone(client.socket.cipher())
finally:
if server is not None:
server.stop()
if client is not None:
client.close()
@gen_test
def test_wait_for_handshake_callback(self):
test = self
handshake_future = Future()
class TestServer(TCPServer):
def handle_stream(self, stream, address):
# The handshake has not yet completed.
test.assertIsNone(stream.socket.cipher())
self.stream = stream
stream.wait_for_handshake(self.handshake_done)
def handshake_done(self):
# Now the handshake is done and ssl information is available.
test.assertIsNotNone(self.stream.socket.cipher())
handshake_future.set_result(None)
yield self.connect_to_server(TestServer)
yield handshake_future
@gen_test
def test_wait_for_handshake_future(self):
test = self
handshake_future = Future()
class TestServer(TCPServer):
def handle_stream(self, stream, address):
test.assertIsNone(stream.socket.cipher())
test.io_loop.spawn_callback(self.handle_connection, stream)
@gen.coroutine
def handle_connection(self, stream):
yield stream.wait_for_handshake()
handshake_future.set_result(None)
yield self.connect_to_server(TestServer)
yield handshake_future
@gen_test
def test_wait_for_handshake_already_waiting_error(self):
test = self
handshake_future = Future()
class TestServer(TCPServer):
def handle_stream(self, stream, address):
stream.wait_for_handshake(self.handshake_done)
test.assertRaises(RuntimeError, stream.wait_for_handshake)
def handshake_done(self):
handshake_future.set_result(None)
yield self.connect_to_server(TestServer)
yield handshake_future
@gen_test
def test_wait_for_handshake_already_connected(self):
handshake_future = Future()
class TestServer(TCPServer):
def handle_stream(self, stream, address):
self.stream = stream
stream.wait_for_handshake(self.handshake_done)
def handshake_done(self):
self.stream.wait_for_handshake(self.handshake2_done)
def handshake2_done(self):
handshake_future.set_result(None)
yield self.connect_to_server(TestServer)
yield handshake_future
@skipIfNonUnix
class TestPipeIOStream(AsyncTestCase):
def test_pipe_iostream(self):
r, w = os.pipe()
rs = PipeIOStream(r)
ws = PipeIOStream(w)
ws.write(b"hel")
ws.write(b"lo world")
rs.read_until(b' ', callback=self.stop)
data = self.wait()
self.assertEqual(data, b"hello ")
rs.read_bytes(3, self.stop)
data = self.wait()
self.assertEqual(data, b"wor")
ws.close()
rs.read_until_close(self.stop)
data = self.wait()
self.assertEqual(data, b"ld")
rs.close()
def test_pipe_iostream_big_write(self):
r, w = os.pipe()
rs = PipeIOStream(r)
ws = PipeIOStream(w)
NUM_BYTES = 1048576
# Write 1MB of data, which should fill the buffer
ws.write(b"1" * NUM_BYTES)
rs.read_bytes(NUM_BYTES, self.stop)
data = self.wait()
self.assertEqual(data, b"1" * NUM_BYTES)
ws.close()
rs.close()
| {
"repo_name": "eklitzke/tornado",
"path": "tornado/test/iostream_test.py",
"copies": "4",
"size": "43030",
"license": "apache-2.0",
"hash": -6732686946741691000,
"line_mean": 36.7787532924,
"line_max": 119,
"alpha_frac": 0.5815942366,
"autogenerated": false,
"ratio": 4.057520037718057,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0000177683013503909,
"num_lines": 1139
} |
from __future__ import absolute_import, division, print_function
from webob import Request, Response
from .visitor import Visitor
from .util import nonce, transparent_pixel, pixel_tag, Signer, SignerError
class ManhattanMiddleware(object):
def __init__(self, app, log, secret, cookie_name='manhattan',
pixel_path='/vpixel.gif', host_map=None, buffer_writes=True):
self.app = app
self.cookie_name = cookie_name
self.log = log
self.signer = Signer(secret)
self.pixel_path = pixel_path
self.host_map = host_map or {}
self.buffer_writes = buffer_writes
def inject_pixel(self, resp):
tag = pixel_tag(self.pixel_path)
def wrap_iter(orig_iter):
for chunk in orig_iter:
yield chunk.replace('</body>', '%s</body>' % tag)
resp.app_iter = wrap_iter(resp.app_iter)
def handle_pixel(self, visitor, fresh):
if not fresh:
visitor.pixel()
resp = Response(transparent_pixel)
resp.content_type = 'image/gif'
return resp
def count_page(self, req):
return (req.method in ('GET', 'POST') and
req.headers.get('X-Purpose') != 'preview')
def get_visitor_id(self, req):
signed_value = req.cookies[self.cookie_name]
return self.signer.unsign(signed_value)
def __call__(self, environ, start_response):
req = Request(environ)
fresh = vid = None
if self.cookie_name in req.cookies:
try:
vid = self.get_visitor_id(req)
except SignerError:
pass
else:
fresh = False
if not vid:
vid = nonce()
fresh = True
site_id = self.host_map.get(req.host.split(':', 1)[0], 0)
req.environ['manhattan.visitor'] = visitor = Visitor(
vid, self.log, site_id, self.buffer_writes)
if self.pixel_path and req.path_info == self.pixel_path:
resp = self.handle_pixel(visitor, fresh)
else:
resp = req.get_response(self.app)
if self.count_page(req):
visitor.page(req)
if fresh:
resp.set_cookie(self.cookie_name, self.signer.sign(visitor.id),
httponly=True)
if self.pixel_path and resp.content_type == 'text/html':
self.inject_pixel(resp)
visitor.flush()
return resp(environ, start_response)
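# Illustrative wiring (sketch only; "wsgi_app" and "event_log" are
# placeholder names, not part of this module):
#   app = ManhattanMiddleware(wsgi_app, event_log, secret='s3cr3t',
#                             host_map={'www.example.com': 1})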
| {
"repo_name": "storborg/manhattan",
"path": "manhattan/middleware.py",
"copies": "1",
"size": "2526",
"license": "mit",
"hash": 5807826770790484000,
"line_mean": 30.1851851852,
"line_max": 79,
"alpha_frac": 0.5629453682,
"autogenerated": false,
"ratio": 3.8564885496183208,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9919433917818321,
"avg_score": 0,
"num_lines": 81
} |
from __future__ import absolute_import, division, print_function
from wsgiref.validate import validator
from tornado.escape import json_decode
from tornado.test.httpserver_test import TypeCheckHandler
from tornado.test.util import ignore_deprecation
from tornado.testing import AsyncHTTPTestCase
from tornado.web import RequestHandler, Application
from tornado.wsgi import WSGIApplication, WSGIContainer, WSGIAdapter
from tornado.test import httpserver_test
from tornado.test import web_test
class WSGIContainerTest(AsyncHTTPTestCase):
def wsgi_app(self, environ, start_response):
status = "200 OK"
response_headers = [("Content-Type", "text/plain")]
start_response(status, response_headers)
return [b"Hello world!"]
def get_app(self):
return WSGIContainer(validator(self.wsgi_app))
def test_simple(self):
response = self.fetch("/")
self.assertEqual(response.body, b"Hello world!")
class WSGIAdapterTest(AsyncHTTPTestCase):
def get_app(self):
class HelloHandler(RequestHandler):
def get(self):
self.write("Hello world!")
class PathQuotingHandler(RequestHandler):
def get(self, path):
self.write(path)
# It would be better to run the wsgiref server implementation in
# another thread instead of using our own WSGIContainer, but this
# fits better in our async testing framework and the wsgiref
# validator should keep us honest
with ignore_deprecation():
return WSGIContainer(validator(WSGIAdapter(
Application([
("/", HelloHandler),
("/path/(.*)", PathQuotingHandler),
("/typecheck", TypeCheckHandler),
]))))
def test_simple(self):
response = self.fetch("/")
self.assertEqual(response.body, b"Hello world!")
def test_path_quoting(self):
response = self.fetch("/path/foo%20bar%C3%A9")
self.assertEqual(response.body, u"foo bar\u00e9".encode("utf-8"))
def test_types(self):
headers = {"Cookie": "foo=bar"}
response = self.fetch("/typecheck?foo=bar", headers=headers)
data = json_decode(response.body)
self.assertEqual(data, {})
response = self.fetch("/typecheck", method="POST", body="foo=bar", headers=headers)
data = json_decode(response.body)
self.assertEqual(data, {})
# This is kind of hacky, but run some of the HTTPServer and web tests
# through WSGIContainer and WSGIApplication to make sure everything
# survives repeated disassembly and reassembly.
class WSGIConnectionTest(httpserver_test.HTTPConnectionTest):
def get_app(self):
with ignore_deprecation():
return WSGIContainer(validator(WSGIAdapter(Application(self.get_handlers()))))
def wrap_web_tests_application():
result = {}
for cls in web_test.wsgi_safe_tests:
def class_factory():
class WSGIApplicationWrappedTest(cls): # type: ignore
def setUp(self):
self.warning_catcher = ignore_deprecation()
self.warning_catcher.__enter__()
super(WSGIApplicationWrappedTest, self).setUp()
def tearDown(self):
super(WSGIApplicationWrappedTest, self).tearDown()
self.warning_catcher.__exit__(None, None, None)
def get_app(self):
self.app = WSGIApplication(self.get_handlers(),
**self.get_app_kwargs())
return WSGIContainer(validator(self.app))
result["WSGIApplication_" + cls.__name__] = class_factory()
return result
globals().update(wrap_web_tests_application())
def wrap_web_tests_adapter():
result = {}
for cls in web_test.wsgi_safe_tests:
class WSGIAdapterWrappedTest(cls): # type: ignore
def get_app(self):
self.app = Application(self.get_handlers(),
**self.get_app_kwargs())
with ignore_deprecation():
return WSGIContainer(validator(WSGIAdapter(self.app)))
result["WSGIAdapter_" + cls.__name__] = WSGIAdapterWrappedTest
return result
globals().update(wrap_web_tests_adapter())
| {
"repo_name": "hhru/tornado",
"path": "tornado/test/wsgi_test.py",
"copies": "2",
"size": "4383",
"license": "apache-2.0",
"hash": 1837799232772868000,
"line_mean": 36.1440677966,
"line_max": 91,
"alpha_frac": 0.6242299795,
"autogenerated": false,
"ratio": 4.396188565697091,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00018524217124069742,
"num_lines": 118
} |
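The tests above wrap Tornado handlers in WSGIContainer so the wsgiref validator can exercise them. Outside the test harness, the same class serves a plain WSGI callable through Tornado's HTTP server; the sketch below assumes that setup, and the port and response body are illustrative rather than taken from the tests.

import tornado.httpserver
import tornado.ioloop
import tornado.wsgi

def simple_wsgi_app(environ, start_response):
    # An ordinary synchronous WSGI callable.
    start_response("200 OK", [("Content-Type", "text/plain")])
    return [b"Hello from WSGI under Tornado\n"]

if __name__ == "__main__":
    container = tornado.wsgi.WSGIContainer(simple_wsgi_app)
    server = tornado.httpserver.HTTPServer(container)
    server.listen(8888)  # illustrative port
    tornado.ioloop.IOLoop.current().start()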
from __future__ import (absolute_import, division, print_function)
from xml.dom import minidom
import numpy as np
class Utilities:
""" Utilities related to work in master table """
def __init__(self, parent=None):
self.parent = parent
self.table_ui = parent.processing_ui.h3_table
def get_row_index_from_row_key(self, row_key=None):
""" This methods returns the row for the given row key """
if row_key is None:
return -1
master_table_row_ui = self.parent.master_table_list_ui
nbr_row = self.table_ui.rowCount()
checkbox_ui_of_row_key = master_table_row_ui[row_key]['active']
for _row in np.arange(nbr_row):
_ui_checkbox = self.table_ui.cellWidget(_row, 0).children()[1]
if _ui_checkbox == checkbox_ui_of_row_key:
return _row
return -1
def get_row_key_from_row_index(self, row=-1):
""" This method returns the key (random key) of the given row in master table.
An example of its use is if we want to retrieve the placzek settings for this row
as they are saved in the master_table_row_ui using random key as the key
"""
if row == -1:
return None
master_table_row_ui = self.parent.master_table_list_ui
for _key in master_table_row_ui.keys():
_activate_ui = master_table_row_ui[_key]["active"]
_activate_ui_of_given_row = self.table_ui.cellWidget(row, 0).children()[1]
if _activate_ui == _activate_ui_of_given_row:
return _key
class LoadGroupingFile:
""" This class reads the XML file and will return the
number of groups <group ID=""> found in that file
"""
def __init__(self, filename=''):
self.filename = filename
def get_number_of_groups(self):
try:
xmldoc = minidom.parse(self.filename)
itemlist = xmldoc.getElementsByTagName('group')
return len(itemlist)
except:
return 'N/A'
| {
"repo_name": "neutrons/FastGR",
"path": "addie/processing/mantid/master_table/utilities.py",
"copies": "1",
"size": "2053",
"license": "mit",
"hash": 608491601324772200,
"line_mean": 29.6417910448,
"line_max": 89,
"alpha_frac": 0.5986361422,
"autogenerated": false,
"ratio": 3.7463503649635035,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9836856261203012,
"avg_score": 0.001626049192098257,
"num_lines": 67
} |
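LoadGroupingFile counts <group> elements in a grouping XML file via minidom. A tiny self-contained sketch of the same idea against an in-memory document; the element names and IDs are made up for illustration.

from xml.dom import minidom

sample = '<detector-grouping><group ID="1"/><group ID="2"/></detector-grouping>'
doc = minidom.parseString(sample)
print(len(doc.getElementsByTagName('group')))  # -> 2, like get_number_of_groups()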
from __future__ import absolute_import, division, print_function
## Glyph Processor
## Version 0.1 by Jens Kutilek 2013-03-27
import vanilla
from mojo.roboFont import CurrentFont, CurrentGlyph
from defconAppKit.windows.baseWindow import BaseWindowController
from mojo.extensions import getExtensionDefault, setExtensionDefault
from mojo.UI import UpdateCurrentGlyphView
from mojo.events import addObserver, removeObserver
class GlyphProcessorUI(BaseWindowController):
def __init__(self):
self.extensionID = self.getExtensionID()
self.font = CurrentFont()
self.glyph = CurrentGlyph()
self._initSettings()
self._loadSettings()
self.w = self._buildUI()
self._addObservers()
self.setUpBaseWindowBehavior()
self.w.open()
#self.glyph.prepareUndo("Glyph Processor")
UpdateCurrentGlyphView()
def windowCloseCallback(self, sender):
#self.glyph.performUndo()
self._removeObservers()
self._saveSettings()
super(GlyphProcessorUI, self).windowCloseCallback(sender)
UpdateCurrentGlyphView()
def getExtensionID(self):
return "com.netzallee.glyphProcessor"
def _getObservers(self):
return {
#"draw": ["_curvePreview",],
#"drawInactive": ["_curvePreview"],
"currentGlyphChanged": ["_currentGlyphChangedObserver",],
"fontDidOpen": ["_fontDidOpenOrCloseObserver",],
"fontWillClose": ["_fontWillCloseObserver",],
"fontDidClose": ["_fontDidOpenOrCloseObserver",],
}
def _addObservers(self):
for event, observer in self._getObservers().items():
for method in observer:
addObserver(self, method, event)
def _initSettings(self):
self.settings = {}
def _loadSettings(self):
#print("Load settings ...")
for k, v in self.settings.items():
#print(" Setting: '%s': (Default: %s)" % (k, v),)
self.settings[k] = getExtensionDefault("%s.%s" %(self.extensionID, k), v)
#print(self.settings[k])
def _loadSettingsFromFont(self):
if self.font is None:
self._initSettings()
else:
if self.extensionID in self.font.lib.keys():
self.settings = self.font.lib[self.extensionID]
else:
self._initSettings()
def _saveSettings(self):
#print("Save settings ...")
for k, v in self.settings.items():
#print(" Setting: '%s': %s" % (k, v))
setExtensionDefault("%s.%s" % (self.extensionID, k), v)
def _saveSettingsToFont(self):
if self.font is not None:
self.font.lib[self.extensionID] = self.settings
def _removeObservers(self):
for event in self._getObservers().keys():
removeObserver(self, event)
def _buildUI(self):
w = vanilla.FloatingWindow((200, 100), "Glyph Processor")
return w
# Callbacks and observers
def _currentGlyphChangedObserver(self, info=None):
self.glyph = CurrentGlyph()
if self.font != CurrentFont():
self._saveSettingsToFont()
del self.settings
self.font = CurrentFont()
self._noFontCallback()
self._loadSettingsFromFont()
UpdateCurrentGlyphView()
def _fontDidOpenOrCloseObserver(self, info=None):
self.font = CurrentFont()
self._noFontCallback()
self._loadSettingsFromFont()
UpdateCurrentGlyphView()
def _fontWillCloseObserver(self, info=None):
if info["font"] == self.font:
self._saveSettingsToFont()
def _noFontCallback(self):
# Enable or disable UI elements ...
if self.font is None:
print("Font is None")
else:
print("Font is not None")
| {
"repo_name": "jenskutilek/jkRFoTools",
"path": "Lib/jkRFoTools/GlyphProcessor.py",
"copies": "2",
"size": "3990",
"license": "mit",
"hash": -1860964783216833500,
"line_mean": 32.25,
"line_max": 85,
"alpha_frac": 0.5937343358,
"autogenerated": false,
"ratio": 4.186778593913956,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.032016637582779846,
"num_lines": 120
} |
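GlyphProcessorUI is a base class: a concrete RoboFont tool mostly overrides getExtensionID, _initSettings and _buildUI and inherits the settings persistence and observer wiring. A rough sketch of such a subclass, assuming it runs inside RoboFont (so the mojo and vanilla imports above resolve) and that the jkRFoTools package is importable; the extension ID and the "strength" setting are invented for the example.

import vanilla
from jkRFoTools.GlyphProcessor import GlyphProcessorUI

class MyGlyphTool(GlyphProcessorUI):
    def getExtensionID(self):
        return "com.example.myGlyphTool"  # hypothetical extension ID

    def _initSettings(self):
        self.settings = {"strength": 50}  # hypothetical default setting

    def _buildUI(self):
        w = vanilla.FloatingWindow((220, 60), "My Glyph Tool")
        w.strength = vanilla.Slider((10, 10, -10, 22),
                                    minValue=0, maxValue=100,
                                    value=self.settings["strength"])
        return w

# MyGlyphTool()  # run from a RoboFont script or menu item to open the window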
from __future__ import absolute_import, division, print_function
import argparse
from functools import partial
import sys
import nfldb
import nflcmd
__all__ = ['run']
def eprint(*args, **kwargs):
kwargs['file'] = sys.stderr
print(*args, **kwargs)
def run():
"""Runs the `nflrank` command."""
db = nfldb.connect()
_, cur_year, _ = nfldb.current(db)
parser = argparse.ArgumentParser(
description='Show NFL player rankings for statistical categories.')
aa = parser.add_argument
aa(dest='categories', metavar='CATEGORY', nargs='+')
aa('--years', type=str, default=str(cur_year),
help='Show rankings only for the inclusive range of years given,\n'
'e.g., "2010-2011". Other valid examples: "2010", "-2010",\n'
'"2010-".')
aa('--weeks', type=str, default='',
help='Show rankings only for the inclusive range of weeks given,\n'
'e.g., "4-8". Other valid examples: "4", "-8",\n'
'"4-".')
aa('--pre', action='store_true',
help='When set, only data from the preseason will be used.')
aa('--post', action='store_true',
help='When set, only data from the postseason will be used.')
aa('--pos', type=str, default=[], nargs='+',
help='When set, only show players in the given positions.')
aa('--teams', type=str, default=[], nargs='+',
help='When set, only show players currently on the given teams.')
aa('--limit', type=int, default=10,
help='Restrict the number of results shown.')
args = parser.parse_args()
for cat in args.categories:
if cat not in nfldb.stat_categories:
eprint("%s is not a valid statistical category.", cat)
sys.exit(1)
stype = 'Regular'
if args.pre:
stype = 'Preseason'
if args.post:
stype = 'Postseason'
years = nflcmd.arg_range(args.years, 2009, cur_year)
weeks = nflcmd.arg_range(args.weeks, 1, 17)
def to_games(agg):
syrs = years[0] if len(years) == 1 else '%d-%d' % (years[0], years[-1])
qgames = nflcmd.query_games(db, agg.player, years, stype, weeks)
return nflcmd.Games(db, syrs, qgames.as_games(), agg)
catq = nfldb.QueryOR(db)
for cat in args.categories:
k = cat + '__ne'
catq.play_player(**{k: 0})
q = nfldb.Query(db)
q.game(season_year=years, season_type=stype, week=weeks)
q.andalso(catq)
if len(args.pos) > 0:
posq = nfldb.QueryOR(db)
for pos in args.pos:
posq.player(position=nfldb.Enums.player_pos[pos])
q.andalso(posq)
if len(args.teams) > 0:
q.player(team=args.teams)
q.sort([(cat, 'desc') for cat in args.categories])
q.limit(args.limit)
pstats = map(to_games, q.as_aggregate())
spec = ['name', 'team', 'game_count'] + args.categories
rows = [nflcmd.header_row(spec)]
rows += map(partial(nflcmd.pstat_to_row, spec), pstats)
print(nflcmd.table(rows))
| {
"repo_name": "BurntSushi/nflcmd",
"path": "nflcmd/cmds/rank.py",
"copies": "1",
"size": "2980",
"license": "unlicense",
"hash": -2045149952611657000,
"line_mean": 32.4831460674,
"line_max": 79,
"alpha_frac": 0.6003355705,
"autogenerated": false,
"ratio": 3.2964601769911503,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9396795747491151,
"avg_score": 0,
"num_lines": 89
} |
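The --years and --weeks flags accept inclusive range strings ("2010-2011", "-2010", "2010-", "2010") that nflcmd.arg_range expands against lower and upper bounds. arg_range's implementation is not shown here, so the sketch below is an independent illustration of that contract rather than its actual code.

def parse_range(text, lo, hi):
    # "4-8" -> [4..8], "-8" -> [lo..8], "4-" -> [4..hi], "4" -> [4], "" -> [lo..hi]
    text = text.strip()
    if not text:
        return list(range(lo, hi + 1))
    if '-' not in text:
        return [int(text)]
    start, _, end = text.partition('-')
    start = int(start) if start else lo
    end = int(end) if end else hi
    return list(range(start, end + 1))

assert parse_range("2010-2011", 2009, 2017) == [2010, 2011]
assert parse_range("-4", 1, 17) == [1, 2, 3, 4]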
from __future__ import absolute_import, division, print_function
import argparse
from functools import partial
import sys
import nfldb
import nflcmd
__all__ = ['run']
prefix_game = ['week', 'outcome', 'game_date', 'opp']
prefix_season = ['year', 'teams', 'game_count']
def eprint(*args, **kwargs):
kwargs['file'] = sys.stderr
print(*args, **kwargs)
def show_game_table(db, player, year, stype, week_range=None, pos=None):
if pos is None:
pos = player.position
games = nflcmd.query_games(db, player, year, stype, week_range).as_games()
pstats = map(partial(nflcmd.Game.make, db, player), games)
spec = prefix_game + nflcmd.columns['game'][nflcmd.pcolumns[pos]]
rows = [nflcmd.header_row(spec)]
rows += map(partial(nflcmd.pstat_to_row, spec), pstats)
if len(pstats) > 1:
summary = nfldb.aggregate(pstat._pstat for pstat in pstats)[0]
allrows = nflcmd.Game(db, None, '-', summary)
allrows._fgs = []
for pstat in pstats:
allrows._fgs += pstat.fgs
rows.append(nflcmd.pstat_to_row(spec, allrows))
print(nflcmd.table(rows))
def show_season_table(db, player, stype, week_range=None, pos=None):
if pos is None:
pos = player.position
_, cur_year, _ = nfldb.current(db)
pstats = []
for year in range(2009, cur_year+1):
qgames = nflcmd.query_games(db, player, year, stype, week_range)
games = qgames.as_games()
if len(games) == 0:
continue
game_stats = map(partial(nflcmd.Game.make, db, player), games)
agg = qgames.sort([]).as_aggregate()
pstats.append(nflcmd.Games(db, year, game_stats, agg[0]))
spec = prefix_season + nflcmd.columns['season'][nflcmd.pcolumns[pos]]
rows = [nflcmd.header_row(spec)]
rows += map(partial(nflcmd.pstat_to_row, spec), pstats)
if len(pstats) > 1:
summary = nfldb.aggregate(pstat._pstat for pstat in pstats)[0]
allrows = nflcmd.Games(db, '-', [], summary)
allrows._fgs = []
for pstat in pstats:
allrows._fgs += pstat.fgs
allrows.games += pstat.games
rows.append(nflcmd.pstat_to_row(spec, allrows))
print(nflcmd.table(rows))
def run():
"""Runs the `nflstats` command."""
db = nfldb.connect()
_, cur_year, _ = nfldb.current(db)
parser = argparse.ArgumentParser(
description='Show NFL game stats for a player.')
aa = parser.add_argument
aa(dest='player_query', metavar='PLAYER', nargs='+')
aa('--team', type=str, default=None,
help='Specify the team of the player to help the search.')
aa('--pos', type=str, default=None,
help='Specify the position of the player to help the search.')
aa('--soundex', action='store_true',
help='When set, player names are compared using Soundex instead '
'of Levenshtein.')
aa('--year', type=str, default=cur_year,
help='Show game logs for only this year. (Not applicable if '
'--season is set.)')
aa('--pre', action='store_true',
help='When set, only games from the preseason will be used.')
aa('--post', action='store_true',
help='When set, only games from the postseason will be used.')
aa('--weeks', type=str, default='',
help='Show stats only for the inclusive range of weeks given,\n'
'e.g., "4-8". Other valid examples: "4", "-8",\n'
'"4-". Has no effect when --season is used.')
aa('--season', action='store_true',
help='When set, statistics are shown by season instead of by game.')
aa('--show-as', type=str, default=None,
help='Force display of player as a particular position. This may need '
'to be set for inactive players.')
args = parser.parse_args()
args.player_query = ' '.join(args.player_query)
player = nflcmd.search(db, args.player_query, args.team, args.pos,
args.soundex)
if player is None:
eprint("Could not find a player given the criteria.")
sys.exit(1)
print('Player matched: %s' % player)
week_range = nflcmd.arg_range(args.weeks, 1, 17)
stype = 'Regular'
if args.pre:
stype = 'Preseason'
if args.post:
stype = 'Postseason'
pos = None
if args.show_as is not None:
pos = nfldb.Enums.player_pos[args.show_as]
elif player.position == nfldb.Enums.player_pos.UNK:
q = nfldb.Query(db)
q.play_player(player_id=player.player_id)
q.sort(('gsis_id', 'desc'))
pos = nfldb.guess_position(q.as_play_players())
if pos == nfldb.Enums.player_pos.UNK:
eprint("The player matched is not active and I could not guess\n"
"his position. Specify it with the '--show-as' flag.")
sys.exit(1)
print("Guessed position: %s" % pos)
if args.season:
show_season_table(db, player, stype, week_range, pos)
else:
show_game_table(db, player, args.year, stype, week_range, pos)
| {
"repo_name": "BurntSushi/nflcmd",
"path": "nflcmd/cmds/stats.py",
"copies": "1",
"size": "5032",
"license": "unlicense",
"hash": -7886024349477219000,
"line_mean": 34.9428571429,
"line_max": 78,
"alpha_frac": 0.6069157393,
"autogenerated": false,
"ratio": 3.2888888888888888,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4395804628188889,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division, print_function
import argparse
import csrmesh as cm
from time import sleep
from phue import Bridge
from settings import *
import traceback
def read_bulb_state():
state = None
try:
b = Bridge(HUE_BRIGE_IP)
# If the app is not registered and the button is not pressed, press the button and call connect() (this only needs to be run a single time)
b.connect()
bulb_state = b.get_light(HUE_BULB_NAME)
if 'name' in bulb_state:
print(bulb_state['state']['on'])
print(bulb_state['state']['bri'])
state = bulb_state['state']
else:
print("Error reading bulb state: ", bulb_state[0]['error'])
except Exception as e:
s = traceback.format_exc()
print("unexpected failure, {} ,{}".format(e, s))
return state
def set_homebrite_bulb(level):
cm.lightbulb.set_light(HOMEBRITE_MAC, HOMEBRITE_PIN, level, 255, 255, 255, 0)
if __name__ == "__main__":
while True:
state = read_bulb_state()
print(state)
if (state is not None):
if state['on']:
level = state['bri']
else:
level = 0
print("Setting level to ", level)
set_homebrite_bulb(level)
sleep(0.5) | {
"repo_name": "OrenLederman/csrmesh-hue-bridge",
"path": "csrmesh-hue-bridge.py",
"copies": "1",
"size": "1335",
"license": "mit",
"hash": -8443289592093393000,
"line_mean": 28.0434782609,
"line_max": 147,
"alpha_frac": 0.5790262172,
"autogenerated": false,
"ratio": 3.588709677419355,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4667735894619355,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division, print_function
import argparse
import json
import redis
from manhattan.log.timerotating import TimeRotatingLog
DEFAULT_REDIS_KEY = 'manhattan:log:queue'
def make_redis(kwargs=None, **defaults):
if kwargs is None:
kwargs = {}
for k in defaults:
if k not in kwargs:
kwargs[k] = defaults[k]
return redis.Redis(**kwargs)
class RemoteLog(object):
"""Sends log entries to a remote server."""
def __init__(self, key=DEFAULT_REDIS_KEY, redis_kwargs=None):
self.key = key
self.db = make_redis(redis_kwargs, socket_timeout=1)
def write(self, *records):
"""Send ``records`` to remote logger."""
self.db.rpush(self.key, json.dumps(records))
def send_command(self, command):
self.db.rpush(self.key, command)
class RemoteLogServer(object):
"""Consumes log entries from a Redis queue and writes them to a log."""
def __init__(self, log, key=DEFAULT_REDIS_KEY, redis_kwargs=None):
self.log = log
self.key = key
self.db = make_redis(redis_kwargs)
self.running = False
def run(self):
self.running = True
while self.running:
records = self.db.blpop(self.key)[1]
if records == 'STOP':
self.stop()
else:
records = json.loads(records)
self.log.write(*records)
def stop(self):
self.running = False
def server(argv=None):
parser = argparse.ArgumentParser()
parser.add_argument('-p', '--path', default='log/manhattan.log')
parser.add_argument('-k', '--key', default=DEFAULT_REDIS_KEY)
args = parser.parse_args(argv)
log_server = RemoteLogServer(TimeRotatingLog(args.path), args.key)
log_server.run()
| {
"repo_name": "storborg/manhattan",
"path": "manhattan/log/remote.py",
"copies": "1",
"size": "1821",
"license": "mit",
"hash": -5704861250133079000,
"line_mean": 25.7794117647,
"line_max": 75,
"alpha_frac": 0.6172432729,
"autogenerated": false,
"ratio": 3.6202783300198806,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9737521602919881,
"avg_score": 0,
"num_lines": 68
} |
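RemoteLog and RemoteLogServer form a producer/consumer pair over a single Redis list: write() RPUSHes a JSON-encoded batch of records, and the server BLPOPs batches and replays them into a local TimeRotatingLog until it sees the STOP command. A hedged usage sketch, assuming a Redis server is reachable with default connection settings; the record fields are illustrative.

from manhattan.log.remote import RemoteLog, RemoteLogServer
from manhattan.log.timerotating import TimeRotatingLog

# Producer side: queue one record batch.
client = RemoteLog()
client.write(['page', 'vid123', '/about'])

# Consumer side: drain the queue into a local log file.
server = RemoteLogServer(TimeRotatingLog('log/manhattan.log'))
# server.run()                   # blocks, writing records as they arrive
# client.send_command('STOP')    # from another process, to stop the server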
from __future__ import absolute_import, division, print_function
import argparse
import sys
import os
import py
import pytest
from _pytest.config import argparsing as parseopt
@pytest.fixture
def parser():
return parseopt.Parser()
class TestParser(object):
def test_no_help_by_default(self, capsys):
parser = parseopt.Parser(usage="xyz")
pytest.raises(SystemExit, lambda: parser.parse(["-h"]))
out, err = capsys.readouterr()
assert err.find("error: unrecognized arguments") != -1
def test_argument(self):
with pytest.raises(parseopt.ArgumentError):
# need a short or long option
argument = parseopt.Argument()
argument = parseopt.Argument("-t")
assert argument._short_opts == ["-t"]
assert argument._long_opts == []
assert argument.dest == "t"
argument = parseopt.Argument("-t", "--test")
assert argument._short_opts == ["-t"]
assert argument._long_opts == ["--test"]
assert argument.dest == "test"
argument = parseopt.Argument("-t", "--test", dest="abc")
assert argument.dest == "abc"
assert str(argument) == (
"Argument(_short_opts: ['-t'], _long_opts: ['--test'], dest: 'abc')"
)
def test_argument_type(self):
argument = parseopt.Argument("-t", dest="abc", type=int)
assert argument.type is int
argument = parseopt.Argument("-t", dest="abc", type=str)
assert argument.type is str
argument = parseopt.Argument("-t", dest="abc", type=float)
assert argument.type is float
with pytest.warns(DeprecationWarning):
with pytest.raises(KeyError):
argument = parseopt.Argument("-t", dest="abc", type="choice")
argument = parseopt.Argument(
"-t", dest="abc", type=str, choices=["red", "blue"]
)
assert argument.type is str
def test_argument_processopt(self):
argument = parseopt.Argument("-t", type=int)
argument.default = 42
argument.dest = "abc"
res = argument.attrs()
assert res["default"] == 42
assert res["dest"] == "abc"
def test_group_add_and_get(self, parser):
group = parser.getgroup("hello", description="desc")
assert group.name == "hello"
assert group.description == "desc"
def test_getgroup_simple(self, parser):
group = parser.getgroup("hello", description="desc")
assert group.name == "hello"
assert group.description == "desc"
group2 = parser.getgroup("hello")
assert group2 is group
def test_group_ordering(self, parser):
parser.getgroup("1")
parser.getgroup("2")
parser.getgroup("3", after="1")
groups = parser._groups
groups_names = [x.name for x in groups]
assert groups_names == list("132")
def test_group_addoption(self):
group = parseopt.OptionGroup("hello")
group.addoption("--option1", action="store_true")
assert len(group.options) == 1
assert isinstance(group.options[0], parseopt.Argument)
def test_group_addoption_conflict(self):
group = parseopt.OptionGroup("hello again")
group.addoption("--option1", "--option-1", action="store_true")
with pytest.raises(ValueError) as err:
group.addoption("--option1", "--option-one", action="store_true")
assert str({"--option1"}) in str(err.value)
def test_group_shortopt_lowercase(self, parser):
group = parser.getgroup("hello")
pytest.raises(
ValueError,
"""
group.addoption("-x", action="store_true")
""",
)
assert len(group.options) == 0
group._addoption("-x", action="store_true")
assert len(group.options) == 1
def test_parser_addoption(self, parser):
group = parser.getgroup("custom options")
assert len(group.options) == 0
group.addoption("--option1", action="store_true")
assert len(group.options) == 1
def test_parse(self, parser):
parser.addoption("--hello", dest="hello", action="store")
args = parser.parse(["--hello", "world"])
assert args.hello == "world"
assert not getattr(args, parseopt.FILE_OR_DIR)
def test_parse2(self, parser):
args = parser.parse([py.path.local()])
assert getattr(args, parseopt.FILE_OR_DIR)[0] == py.path.local()
def test_parse_known_args(self, parser):
parser.parse_known_args([py.path.local()])
parser.addoption("--hello", action="store_true")
ns = parser.parse_known_args(["x", "--y", "--hello", "this"])
assert ns.hello
assert ns.file_or_dir == ["x"]
def test_parse_known_and_unknown_args(self, parser):
parser.addoption("--hello", action="store_true")
ns, unknown = parser.parse_known_and_unknown_args(
["x", "--y", "--hello", "this"]
)
assert ns.hello
assert ns.file_or_dir == ["x"]
assert unknown == ["--y", "this"]
def test_parse_will_set_default(self, parser):
parser.addoption("--hello", dest="hello", default="x", action="store")
option = parser.parse([])
assert option.hello == "x"
del option.hello
parser.parse_setoption([], option)
assert option.hello == "x"
def test_parse_setoption(self, parser):
parser.addoption("--hello", dest="hello", action="store")
parser.addoption("--world", dest="world", default=42)
class A(object):
pass
option = A()
args = parser.parse_setoption(["--hello", "world"], option)
assert option.hello == "world"
assert option.world == 42
assert not args
def test_parse_special_destination(self, parser):
parser.addoption("--ultimate-answer", type=int)
args = parser.parse(["--ultimate-answer", "42"])
assert args.ultimate_answer == 42
def test_parse_split_positional_arguments(self, parser):
parser.addoption("-R", action="store_true")
parser.addoption("-S", action="store_false")
args = parser.parse(["-R", "4", "2", "-S"])
assert getattr(args, parseopt.FILE_OR_DIR) == ["4", "2"]
args = parser.parse(["-R", "-S", "4", "2", "-R"])
assert getattr(args, parseopt.FILE_OR_DIR) == ["4", "2"]
assert args.R is True
assert args.S is False
args = parser.parse(["-R", "4", "-S", "2"])
assert getattr(args, parseopt.FILE_OR_DIR) == ["4", "2"]
assert args.R is True
assert args.S is False
def test_parse_defaultgetter(self):
def defaultget(option):
if not hasattr(option, "type"):
return
if option.type is int:
option.default = 42
elif option.type is str:
option.default = "world"
parser = parseopt.Parser(processopt=defaultget)
parser.addoption("--this", dest="this", type=int, action="store")
parser.addoption("--hello", dest="hello", type=str, action="store")
parser.addoption("--no", dest="no", action="store_true")
option = parser.parse([])
assert option.hello == "world"
assert option.this == 42
assert option.no is False
def test_drop_short_helper(self):
parser = argparse.ArgumentParser(
formatter_class=parseopt.DropShorterLongHelpFormatter
)
parser.add_argument(
"-t", "--twoword", "--duo", "--two-word", "--two", help="foo"
).map_long_option = {"two": "two-word"}
# throws error on --deux only!
parser.add_argument(
"-d", "--deuxmots", "--deux-mots", action="store_true", help="foo"
).map_long_option = {"deux": "deux-mots"}
parser.add_argument("-s", action="store_true", help="single short")
parser.add_argument("--abc", "-a", action="store_true", help="bar")
parser.add_argument("--klm", "-k", "--kl-m", action="store_true", help="bar")
parser.add_argument(
"-P", "--pq-r", "-p", "--pqr", action="store_true", help="bar"
)
parser.add_argument(
"--zwei-wort", "--zweiwort", "--zweiwort", action="store_true", help="bar"
)
parser.add_argument(
"-x", "--exit-on-first", "--exitfirst", action="store_true", help="spam"
).map_long_option = {"exitfirst": "exit-on-first"}
parser.add_argument("files_and_dirs", nargs="*")
args = parser.parse_args(["-k", "--duo", "hallo", "--exitfirst"])
assert args.twoword == "hallo"
assert args.klm is True
assert args.zwei_wort is False
assert args.exit_on_first is True
assert args.s is False
args = parser.parse_args(["--deux-mots"])
with pytest.raises(AttributeError):
assert args.deux_mots is True
assert args.deuxmots is True
args = parser.parse_args(["file", "dir"])
assert "|".join(args.files_and_dirs) == "file|dir"
def test_drop_short_0(self, parser):
parser.addoption("--funcarg", "--func-arg", action="store_true")
parser.addoption("--abc-def", "--abc-def", action="store_true")
parser.addoption("--klm-hij", action="store_true")
args = parser.parse(["--funcarg", "--k"])
assert args.funcarg is True
assert args.abc_def is False
assert args.klm_hij is True
def test_drop_short_2(self, parser):
parser.addoption("--func-arg", "--doit", action="store_true")
args = parser.parse(["--doit"])
assert args.func_arg is True
def test_drop_short_3(self, parser):
parser.addoption("--func-arg", "--funcarg", "--doit", action="store_true")
args = parser.parse(["abcd"])
assert args.func_arg is False
assert args.file_or_dir == ["abcd"]
def test_drop_short_help0(self, parser, capsys):
parser.addoption("--func-args", "--doit", help="foo", action="store_true")
parser.parse([])
help = parser.optparser.format_help()
assert "--func-args, --doit foo" in help
# testing would be more helpful with all help generated
def test_drop_short_help1(self, parser, capsys):
group = parser.getgroup("general")
group.addoption("--doit", "--func-args", action="store_true", help="foo")
group._addoption(
"-h",
"--help",
action="store_true",
dest="help",
help="show help message and configuration info",
)
parser.parse(["-h"])
help = parser.optparser.format_help()
assert "-doit, --func-args foo" in help
def test_multiple_metavar_help(self, parser):
"""
Help text for options with a metavar tuple should display help
in the form "--preferences=value1 value2 value3" (#2004).
"""
group = parser.getgroup("general")
group.addoption(
"--preferences", metavar=("value1", "value2", "value3"), nargs=3
)
group._addoption("-h", "--help", action="store_true", dest="help")
parser.parse(["-h"])
help = parser.optparser.format_help()
assert "--preferences=value1 value2 value3" in help
def test_argcomplete(testdir, monkeypatch):
if not py.path.local.sysfind("bash"):
pytest.skip("bash not available")
script = str(testdir.tmpdir.join("test_argcomplete"))
pytest_bin = sys.argv[0]
if "pytest" not in os.path.basename(pytest_bin):
pytest.skip("need to be run with pytest executable, not {}".format(pytest_bin))
with open(str(script), "w") as fp:
# redirect output from argcomplete to stdin and stderr is not trivial
# http://stackoverflow.com/q/12589419/1307905
# so we use bash
fp.write('COMP_WORDBREAKS="$COMP_WORDBREAKS" %s 8>&1 9>&2' % pytest_bin)
    # An alternative would be to extend Testdir.{run(),_run(),popen()} to accept
    # a keyword argument env that replaces os.environ in popen or extends a copy
    # of it; the advantage would be that restoring it could not be forgotten.
monkeypatch.setenv("_ARGCOMPLETE", "1")
monkeypatch.setenv("_ARGCOMPLETE_IFS", "\x0b")
monkeypatch.setenv("COMP_WORDBREAKS", " \\t\\n\"\\'><=;|&(:")
arg = "--fu"
monkeypatch.setenv("COMP_LINE", "pytest " + arg)
monkeypatch.setenv("COMP_POINT", str(len("pytest " + arg)))
result = testdir.run("bash", str(script), arg)
if result.ret == 255:
# argcomplete not found
pytest.skip("argcomplete not available")
elif not result.stdout.str():
pytest.skip("bash provided no output, argcomplete not available?")
else:
result.stdout.fnmatch_lines(["--funcargs", "--fulltrace"])
os.mkdir("test_argcomplete.d")
arg = "test_argc"
monkeypatch.setenv("COMP_LINE", "pytest " + arg)
monkeypatch.setenv("COMP_POINT", str(len("pytest " + arg)))
result = testdir.run("bash", str(script), arg)
result.stdout.fnmatch_lines(["test_argcomplete", "test_argcomplete.d/"])
| {
"repo_name": "ddboline/pytest",
"path": "testing/test_parseopt.py",
"copies": "2",
"size": "13148",
"license": "mit",
"hash": -3864088925302201300,
"line_mean": 39.2079510703,
"line_max": 87,
"alpha_frac": 0.5851840584,
"autogenerated": false,
"ratio": 3.7673352435530085,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5352519301953008,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division, print_function
import argparse
import time
import json
import os
import sys
import urllib2
import xml.dom.minidom as xml
import nflgame
from nflgame import OrderedDict
def year_phase_week(year=None, phase=None, week=None):
cur_year, _ = nflgame.live.current_year_and_week()
season_types = (
('PRE', xrange(0, 4 + 1)),
('REG', xrange(1, 17 + 1)),
('POST', xrange(1, 4 + 1)),
)
for y in range(2009, cur_year+1):
if year is not None and year != y:
continue
for p, weeks in season_types:
if phase is not None and phase != p:
continue
for w in weeks:
if week is not None and week != w:
continue
yield y, p, w
def schedule_url(year, stype, week):
"""
Returns the NFL.com XML schedule URL. `year` should be an
integer, `stype` should be one of the strings `PRE`, `REG` or
`POST`, and `gsis_week` should be a value in the range
`[0, 17]`.
"""
xmlurl = 'http://www.nfl.com/ajax/scorestrip?'
if stype == 'POST':
week += 17
if week == 21: # NFL.com you so silly
week += 1
return '%sseason=%d&seasonType=%s&week=%d' % (xmlurl, year, stype, week)
def week_schedule(year, stype, week):
"""
Returns a list of dictionaries with information about each game in
the week specified. The games are ordered by gsis_id. `year` should
be an integer, `stype` should be one of the strings `PRE`, `REG` or
`POST`, and `gsis_week` should be a value in the range `[1, 17]`.
"""
url = schedule_url(year, stype, week)
try:
dom = xml.parse(urllib2.urlopen(url))
except urllib2.HTTPError:
        print('Could not load %s' % url, file=sys.stderr)
return []
games = []
for g in dom.getElementsByTagName("g"):
gsis_id = g.getAttribute('eid')
games.append({
'eid': gsis_id,
'wday': g.getAttribute('d'),
'year': year,
'month': int(gsis_id[4:6]),
'day': int(gsis_id[6:8]),
'time': g.getAttribute('t'),
'season_type': stype,
'week': week,
'home': g.getAttribute('h'),
'away': g.getAttribute('v'),
'gamekey': g.getAttribute('gsis'),
})
return games
def new_schedule():
"""
Builds an entire schedule from scratch.
"""
sched = OrderedDict()
for year, stype, week in year_phase_week():
update_week(sched, year, stype, week)
return sched
def update_week(sched, year, stype, week):
"""
Updates the schedule for the given week in place. `year` should be
an integer year, `stype` should be one of the strings `PRE`, `REG`
or `POST`, and `week` should be an integer in the range `[1, 17]`.
"""
for game in week_schedule(year, stype, week):
sched[game['eid']] = game
def write_schedule(fpath, sched):
alist = []
for gsis_id in sorted(sched):
alist.append([gsis_id, sched[gsis_id]])
json.dump({'time': time.time(), 'games': alist},
open(fpath, 'w+'), indent=1, sort_keys=True,
separators=(',', ': '))
def eprint(*args, **kwargs):
kwargs['file'] = sys.stderr
print(*args, **kwargs)
def run():
parser = argparse.ArgumentParser(
description='Updates nflgame\'s schedule to correspond to the latest '
'information.',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
aa = parser.add_argument
aa('--json-update-file', type=str, default=None,
help='When set, the file provided will be updated in place with new '
'schedule data from NFL.com. If this option is not set, then the '
'"schedule.json" file that comes with nflgame will be updated '
'instead.')
aa('--rebuild', action='store_true',
help='When set, the entire schedule will be rebuilt.')
aa('--year', default=None, type=int,
help='Force the update to a specific year.')
aa('--phase', default=None, choices=['PRE', 'REG', 'POST'],
help='Force the update to a specific phase.')
aa('--week', default=None, type=int,
help='Force the update to a specific week.')
args = parser.parse_args()
if args.json_update_file is None:
args.json_update_file = nflgame.sched._sched_json_file
# Before doing anything laborious, make sure we have write access to
# the JSON database.
if not os.access(args.json_update_file, os.W_OK):
eprint('I do not have write access to "%s".' % args.json_update_file)
eprint('Without write access, I cannot update the schedule.')
sys.exit(1)
if args.rebuild:
sched = new_schedule()
else:
sched, last = nflgame.sched._create_schedule(args.json_update_file)
print('Last updated: %s' % last)
if (args.year, args.phase, args.week) == (None, None, None):
year, week = nflgame.live.current_year_and_week()
phase = nflgame.live._cur_season_phase
update_week(sched, year, phase, week)
else:
for y, p, w in year_phase_week(args.year, args.phase, args.week):
print('Updating (%d, %s, %d)...' % (y, p, w))
update_week(sched, y, p, w)
write_schedule(args.json_update_file, sched)
if __name__ == '__main__':
run()
| {
"repo_name": "jaythaceo/nflgame",
"path": "nflgame/update_sched.py",
"copies": "9",
"size": "5485",
"license": "unlicense",
"hash": -9002906548550877000,
"line_mean": 32.6503067485,
"line_max": 78,
"alpha_frac": 0.5799453054,
"autogenerated": false,
"ratio": 3.6038107752956634,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00022722108611679162,
"num_lines": 163
} |
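schedule_url only formats the scorestrip query string; the one subtle part is the postseason week shift (add 17, and skip NFL.com's unused week 21). A small runnable restatement of just that shift, with the resulting query values noted as comments:

def postseason_week_param(week):
    # Mirrors the adjustment schedule_url applies when stype == 'POST'.
    week += 17
    if week == 21:  # NFL.com skips week 21
        week += 1
    return week

assert postseason_week_param(1) == 18   # ...&seasonType=POST&week=18
assert postseason_week_param(4) == 22   # 4 + 17 = 21, bumped to 22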
from __future__ import absolute_import, division, print_function
import argparse
import time
import json
import os
import sys
import xml.dom.minidom as xml
import nflgame
from nflgame.compat import OrderedDict, range, urllib
def year_phase_week(year=None, phase=None, week=None):
cur_year, _ = nflgame.live.current_year_and_week()
season_types = (
('PRE', range(0, 4 + 1)),
('REG', range(1, 17 + 1)),
('POST', range(1, 4 + 1)),
)
for y in range(2009, cur_year+1):
if year is not None and year != y:
continue
for p, weeks in season_types:
if phase is not None and phase != p:
continue
for w in weeks:
if week is not None and week != w:
continue
yield y, p, w
def schedule_url(year, stype, week):
"""
Returns the NFL.com XML schedule URL. `year` should be an
integer, `stype` should be one of the strings `PRE`, `REG` or
`POST`, and `gsis_week` should be a value in the range
`[0, 17]`.
"""
xmlurl = 'http://www.nfl.com/ajax/scorestrip?'
if stype == 'POST':
week += 17
if week == 21: # NFL.com you so silly
week += 1
return '%sseason=%d&seasonType=%s&week=%d' % (xmlurl, year, stype, week)
def week_schedule(year, stype, week):
"""
Returns a list of dictionaries with information about each game in
the week specified. The games are ordered by gsis_id. `year` should
be an integer, `stype` should be one of the strings `PRE`, `REG` or
`POST`, and `gsis_week` should be a value in the range `[1, 17]`.
"""
url = schedule_url(year, stype, week)
try:
dom = xml.parse(urllib.urlopen(url))
except urllib.HTTPError:
print('Could not load %s' % url, file=sys.stderr)
return []
games = []
for g in dom.getElementsByTagName("g"):
gsis_id = g.getAttribute('eid')
games.append({
'eid': gsis_id,
'wday': g.getAttribute('d'),
'year': year,
'month': int(gsis_id[4:6]),
'day': int(gsis_id[6:8]),
'time': g.getAttribute('t'),
'season_type': stype,
'week': week,
'home': g.getAttribute('h'),
'away': g.getAttribute('v'),
'gamekey': g.getAttribute('gsis'),
})
return games
def new_schedule():
"""
Builds an entire schedule from scratch.
"""
sched = OrderedDict()
for year, stype, week in year_phase_week():
update_week(sched, year, stype, week)
return sched
def update_week(sched, year, stype, week):
"""
Updates the schedule for the given week in place. `year` should be
an integer year, `stype` should be one of the strings `PRE`, `REG`
or `POST`, and `week` should be an integer in the range `[1, 17]`.
"""
for game in week_schedule(year, stype, week):
sched[game['eid']] = game
def write_schedule(fpath, sched):
alist = []
for gsis_id in sorted(sched):
alist.append([gsis_id, sched[gsis_id]])
json.dump({'time': time.time(), 'games': alist},
open(fpath, 'w+'), indent=1, sort_keys=True,
separators=(',', ': '))
def eprint(*args, **kwargs):
kwargs['file'] = sys.stderr
print(*args, **kwargs)
def run():
parser = argparse.ArgumentParser(
description='Updates nflgame\'s schedule to correspond to the latest '
'information.',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
aa = parser.add_argument
aa('--json-update-file', type=str, default=None,
help='When set, the file provided will be updated in place with new '
'schedule data from NFL.com. If this option is not set, then the '
'"schedule.json" file that comes with nflgame will be updated '
'instead.')
aa('--rebuild', action='store_true',
help='When set, the entire schedule will be rebuilt.')
aa('--year', default=None, type=int,
help='Force the update to a specific year.')
aa('--phase', default=None, choices=['PRE', 'REG', 'POST'],
help='Force the update to a specific phase.')
aa('--week', default=None, type=int,
help='Force the update to a specific week.')
args = parser.parse_args()
if args.json_update_file is None:
args.json_update_file = nflgame.sched._sched_json_file
# Before doing anything laborious, make sure we have write access to
# the JSON database.
if not os.access(args.json_update_file, os.W_OK):
eprint('I do not have write access to "%s".' % args.json_update_file)
eprint('Without write access, I cannot update the schedule.')
sys.exit(1)
if args.rebuild:
sched = new_schedule()
else:
sched, last = nflgame.sched._create_schedule(args.json_update_file)
print('Last updated: %s' % last)
if (args.year, args.phase, args.week) == (None, None, None):
year, week = nflgame.live.current_year_and_week()
phase = nflgame.live._cur_season_phase
update_week(sched, year, phase, week)
else:
for y, p, w in year_phase_week(args.year, args.phase, args.week):
print('Updating (%d, %s, %d)...' % (y, p, w))
update_week(sched, y, p, w)
write_schedule(args.json_update_file, sched)
if __name__ == '__main__':
run()
| {
"repo_name": "playpauseandstop/nflgame",
"path": "nflgame/update_sched.py",
"copies": "1",
"size": "5490",
"license": "unlicense",
"hash": -6328514256151307000,
"line_mean": 32.8888888889,
"line_max": 78,
"alpha_frac": 0.5799635701,
"autogenerated": false,
"ratio": 3.6023622047244093,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4682325774824409,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division, print_function
import argparse
import zmq
import code
default_bind = 'tcp://127.0.0.1:5555'
ctx = zmq.Context()
class ServerError(Exception):
pass
class TimeoutError(Exception):
pass
class Client(object):
def __init__(self, connect=default_bind, wait=3000):
self.sock = ctx.socket(zmq.REQ)
self.sock.setsockopt(zmq.LINGER, 0)
self.sock.connect(connect)
self.poller = zmq.Poller()
self.poller.register(self.sock, zmq.POLLIN)
self.wait = wait
def __getattr__(self, name):
def rpc_method(*args, **kwargs):
req = [name, args, kwargs]
self.sock.send_json(req)
if self.poller.poll(self.wait):
status, resp = self.sock.recv_json()
if status == 'ok':
return resp
else:
raise ServerError(resp)
else:
raise TimeoutError('Timed out after %d ms waiting for reply' %
self.wait)
return rpc_method
def main():
p = argparse.ArgumentParser(description='Run a Manhattan client.')
p.add_argument('--connect', type=str,
default=default_bind,
help='ZeroMQ socket description to connect to')
args = p.parse_args()
client = Client(args.connect)
code.interact("The 'client' object is available for queries.",
local=dict(client=client))
| {
"repo_name": "storborg/manhattan",
"path": "manhattan/client.py",
"copies": "1",
"size": "1522",
"license": "mit",
"hash": 1259590162801695200,
"line_mean": 25.2413793103,
"line_max": 78,
"alpha_frac": 0.5689881735,
"autogenerated": false,
"ratio": 4.1024258760107815,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5171414049510781,
"avg_score": null,
"num_lines": null
} |
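Client proxies arbitrary method calls over a ZeroMQ REQ socket: __getattr__ turns client.some_method(*args, **kwargs) into a JSON request [name, args, kwargs] and waits up to `wait` milliseconds for a (status, payload) reply. A hedged usage sketch; the method name is hypothetical and depends on what the matching server actually exposes.

from manhattan.client import Client, ServerError, TimeoutError

client = Client(connect='tcp://127.0.0.1:5555', wait=3000)
try:
    result = client.count('add_to_cart')   # hypothetical server-side method
except TimeoutError:
    print('no server replied within 3 seconds')
except ServerError as exc:
    print('server rejected the call:', exc)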
from __future__ import absolute_import, division, print_function
import argparse
import codecs
import datetime
import imp
import os
import os.path as path
import subprocess
import sys
import tempfile
import glob
import pdoc
# `xrange` is `range` with Python3.
try:
xrange = xrange
except NameError:
xrange = range
version_suffix = '%d.%d' % (sys.version_info[0], sys.version_info[1])
default_http_dir = path.join(tempfile.gettempdir(), 'pdoc-%s' % version_suffix)
parser = argparse.ArgumentParser(
description='Automatically generate API docs for Python modules.',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
aa = parser.add_argument
aa('module_name', type=str, nargs='?',
help='The Python module name. This may be an import path resolvable in '
'the current environment, or a file path to a Python module or '
'package.')
aa('ident_name', type=str, nargs='?',
help='When specified, only identifiers containing the name given '
'will be shown in the output. Search is case sensitive. '
'Has no effect when --http is set.')
aa('--version', action='store_true',
help='Print the version of pdoc and exit.')
aa('--html', action='store_true',
help='When set, the output will be HTML formatted.')
aa('--html-dir', type=str, default='.',
help='The directory to output HTML files to. This option is ignored when '
'outputting documentation as plain text.')
aa('--html-no-source', action='store_true',
help='When set, source code will not be viewable in the generated HTML. '
'This can speed up the time required to document large modules.')
aa('--overwrite', action='store_true',
help='Overwrites any existing HTML files instead of producing an error.')
aa('--all-submodules', action='store_true',
help='When set, every submodule will be included, regardless of whether '
'__all__ is set and contains the submodule.')
aa('--external-links', action='store_true',
help='When set, identifiers to external modules are turned into links. '
'This is automatically set when using --http.')
aa('--template-dir', type=str, default=None,
help='Specify a directory containing Mako templates. '
'Alternatively, put your templates in $XDG_CONFIG_HOME/pdoc and '
'pdoc will automatically find them.')
aa('--notebook-dir', type=str, default=None,
help='Specify a directory containing Notebooks. ')
aa('--link-prefix', type=str, default='',
help='A prefix to use for every link in the generated documentation. '
'No link prefix results in all links being relative. '
'Has no effect when combined with --http.')
aa('--only-pypath', action='store_true',
help='When set, only modules in your PYTHONPATH will be documented.')
aa('--http', action='store_true',
help='When set, pdoc will run as an HTTP server providing documentation '
'of all installed modules. Only modules found in PYTHONPATH will be '
'listed.')
aa('--http-dir', type=str, default=default_http_dir,
help='The directory to cache HTML documentation when running as an HTTP '
'server.')
aa('--http-host', type=str, default='localhost',
help='The host on which to run the HTTP server.')
aa('--http-port', type=int, default=8080,
help='The port on which to run the HTTP server.')
aa('--http-html', action='store_true',
help='Internal use only. Do not set.')
args = parser.parse_args()
def quick_desc(imp, name, ispkg):
if not hasattr(imp, 'path'):
# See issue #7.
return ''
if ispkg:
fp = path.join(imp.path, name, '__init__.py')
else:
fp = path.join(imp.path, '%s.py' % name)
if os.path.isfile(fp):
with codecs.open(fp, 'r', 'utf-8') as f:
quotes = None
doco = []
for i, line in enumerate(f):
if i == 0:
if len(line) >= 3 and line[0:3] in ("'''", '"""'):
quotes = line[0:3]
line = line[3:]
else:
break
line = line.rstrip()
if line.endswith(quotes):
doco.append(line[0:-3])
break
else:
doco.append(line)
desc = '\n'.join(doco)
if len(desc) > 200:
desc = desc[0:200] + '...'
return desc
return ''
def _eprint(*args, **kwargs):
kwargs['file'] = sys.stderr
print(*args, **kwargs)
def last_modified(fp):
try:
return datetime.datetime.fromtimestamp(os.stat(fp).st_mtime)
except:
return datetime.datetime.min
def module_file(m):
mbase = path.join(args.html_dir, *m.name.split('.'))
if m.is_package():
return path.join(mbase, pdoc.html_package_name)
else:
return '%s%s' % (mbase, pdoc.html_module_suffix)
def quit_if_exists(m):
def check_file(f):
if os.access(f, os.R_OK):
_eprint('%s already exists. Delete it or run with --overwrite' % f)
sys.exit(1)
if args.overwrite:
return
f = module_file(m)
check_file(f)
# If this is a package, make sure the package directory doesn't exist
# either.
if m.is_package():
check_file(path.dirname(f))
def html_out(m, html=True, all_notebooks=[]):
f = module_file(m)
if not html:
f = module_file(m).replace(".html", ".md")
dirpath = path.dirname(f)
if not os.access(dirpath, os.R_OK):
os.makedirs(dirpath)
try:
with codecs.open(f, 'w+', 'utf-8') as w:
if not html:
out = m.text()
else:
out = m.html(external_links=args.external_links,
link_prefix=args.link_prefix,
http_server=args.http_html,
source=not args.html_no_source,
notebook=None,
all_notebooks=all_notebooks)
print(out, file=w)
except Exception:
try:
os.unlink(f)
except:
pass
raise
for submodule in m.submodules():
html_out(submodule, html, all_notebooks=all_notebooks)
def html_out_notebook(m, notebook, all_notebooks=[]):
f = module_file(m)
f = f.rsplit(sep="/", maxsplit=1)[0] + "/notebooks/" + notebook.rsplit(sep="/", maxsplit=1)[-1][:-3] + ".html"
dirpath = path.dirname(f)
if not os.access(dirpath, os.R_OK):
os.makedirs(dirpath)
try:
with codecs.open(f, 'w+', 'utf-8') as w:
out = m.html(external_links=args.external_links,
link_prefix=args.link_prefix,
http_server=args.http_html,
source=not args.html_no_source,
notebook=notebook,
all_notebooks=all_notebooks)
print(out, file=w)
except Exception:
try:
os.unlink(f)
except:
pass
raise
def process_html_out(impath):
# This unfortunate kludge is the only reasonable way I could think of
# to support reloading of modules. It's just too difficult to get
# modules to reload in the same process.
cmd = [sys.executable,
path.realpath(__file__),
'--html',
'--html-dir', args.html_dir,
'--http-html',
'--overwrite',
'--link-prefix', args.link_prefix]
if args.external_links:
cmd.append('--external-links')
if args.all_submodules:
cmd.append('--all-submodules')
if args.only_pypath:
cmd.append('--only-pypath')
if args.html_no_source:
cmd.append('--html-no-source')
if args.template_dir:
cmd.append('--template-dir')
cmd.append(args.template_dir)
cmd.append(impath)
# Can we make a good faith attempt to support 2.6?
# YES WE CAN!
p = subprocess.Popen(cmd, stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
out = p.communicate()[0].strip().decode('utf-8')
if p.returncode > 0:
err = subprocess.CalledProcessError(p.returncode, cmd)
err.output = out
raise err
if len(out) > 0:
print(out)
if __name__ == '__main__':
if args.version:
print(pdoc.__version__)
sys.exit(0)
# We close stdin because some modules, upon import, are not very polite
# and block on stdin.
try:
sys.stdin.close()
except:
pass
if not args.http and args.module_name is None:
_eprint('No module name specified.')
sys.exit(1)
if args.template_dir is not None:
pdoc.tpl_lookup.directories.insert(0, args.template_dir)
# If PYTHONPATH is set, let it override everything if we want it to.
pypath = os.getenv('PYTHONPATH')
if args.only_pypath and pypath is not None and len(pypath) > 0:
pdoc.import_path = pypath.split(path.pathsep)
docfilter = None
if args.ident_name and len(args.ident_name.strip()) > 0:
search = args.ident_name.strip()
def docfilter(o):
rname = o.refname
if rname.find(search) > -1 or search.find(o.name) > -1:
return True
if isinstance(o, pdoc.Class):
return search in o.doc or search in o.doc_init
return False
# Try to do a real import first. I think it's better to prefer
# import paths over files. If a file is really necessary, then
# specify the absolute path, which is guaranteed not to be a
# Python import path.
try:
module = pdoc.import_module(args.module_name)
except Exception as e:
module = None
# Get the module that we're documenting. Accommodate for import paths,
# files and directories.
if module is None:
print(module)
isdir = path.isdir(args.module_name)
isfile = path.isfile(args.module_name)
if isdir or isfile:
fp = path.realpath(args.module_name)
module_name = path.basename(fp)
if isdir:
fp = path.join(fp, '__init__.py')
else:
module_name, _ = path.splitext(module_name)
# Use a special module name to avoid import conflicts.
# It is hidden from view via the `Module` class.
with open(fp) as f:
module = imp.load_source('__pdoc_file_module__', fp, f)
if isdir:
module.__path__ = [path.realpath(args.module_name)]
module.__pdoc_module_name = module_name
else:
module = pdoc.import_module(args.module_name)
module = pdoc.Module(module, docfilter=docfilter,
allsubmodules=args.all_submodules)
# Plain text?
if not args.html and not args.all_submodules:
output = module.text()
try:
print(output)
except IOError as e:
# This seems to happen for long documentation.
# This is obviously a hack. What's the real cause? Dunno.
if e.errno == 32:
pass
else:
raise e
sys.exit(0)
# Hook notebook generation
all_notebooks = []
if args.notebook_dir:
all_notebooks = [f for f in sorted(glob.glob("%s/*.md" % args.notebook_dir))]
for notebook in all_notebooks:
html_out_notebook(module, notebook, all_notebooks=all_notebooks)
# HTML output depends on whether the module being documented is a package
# or not. If not, then output is written to {MODULE_NAME}.html in
# `html-dir`. If it is a package, then a directory called {MODULE_NAME}
# is created, and output is written to {MODULE_NAME}/index.html.
# Submodules are written to {MODULE_NAME}/{MODULE_NAME}.m.html and
# subpackages are written to {MODULE_NAME}/{MODULE_NAME}/index.html. And
# so on... The same rules apply for `http_dir` when `pdoc` is run as an
# HTTP server.
if not args.http:
quit_if_exists(module)
html_out(module, args.html, all_notebooks=all_notebooks)
sys.exit(0)
| {
"repo_name": "diana-hep/carl",
"path": "ci/make_doc.py",
"copies": "5",
"size": "12250",
"license": "bsd-3-clause",
"hash": -846070401630320800,
"line_mean": 34.100286533,
"line_max": 114,
"alpha_frac": 0.588,
"autogenerated": false,
"ratio": 3.8341158059467917,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.000989465785531969,
"num_lines": 349
} |
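module_file decides where each module's HTML lands: packages become a directory containing an index-style page, plain modules a single suffixed file, as the comment block near the end of the script describes. Below is a standalone restatement of that path logic, assuming pdoc's usual 'index.html' and '.m.html' names; the html_dir value is illustrative.

import os.path as path

def module_file(html_dir, dotted_name, is_package,
                package_name='index.html', module_suffix='.m.html'):
    # Mirrors the layout described above; the file names are assumed defaults.
    base = path.join(html_dir, *dotted_name.split('.'))
    return path.join(base, package_name) if is_package else base + module_suffix

print(module_file('doc', 'foo', True))        # doc/foo/index.html
print(module_file('doc', 'foo.bar', False))   # doc/foo/bar.m.html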
from __future__ import absolute_import, division, print_function
import ast
from jaspyx.ast_util import ast_call, ast_load
from jaspyx.visitor import BaseVisitor
class Compare(BaseVisitor):
def visit_Compare(self, node):
if len(node.ops) > 1:
self.output('(')
first = True
left = node.left
for op, comparator in zip(node.ops, node.comparators):
if not first:
self.output(' && ')
else:
first = False
comp_op = getattr(self, 'CmpOp_%s' % op.__class__.__name__)
comp_op(left, comparator)
left = comparator
if len(node.ops) > 1:
self.output(')')
for key, value in {
'Eq': '==',
'NotEq': '!=',
'Lt': '<',
'LtE': '<=',
'Gt': '>',
'GtE': '>=',
'Is': '===',
'IsNot': '!==',
}.items():
def gen_op(op):
def f_op(self, left, comparator):
self.group([left, op, comparator])
return f_op
exec('CmpOp_%s = gen_op("%s")' % (key, value))
def CmpOp_In(self, left, comparator):
self.visit(ast_call(
ast.FunctionDef(
'',
ast.arguments([ast_load('l'), ast_load('c')], None, None, []),
[
ast.Return(
ast.IfExp(
ast_call(
ast_load('Array.isArray'),
ast_load('c'),
),
ast.Compare(
ast_call(
ast_load('Array.prototype.indexOf.call'),
ast_load('c'),
ast_load('l'),
),
[ast.Gt()],
[ast.Num(-1)]
),
ast_call(
ast_load('JS'),
ast.Str("l in c"),
)
)
)
],
[]
),
left,
comparator
))
def CmpOp_NotIn(self, left, comparator):
self.visit(
ast.UnaryOp(
ast.Not(),
ast.Compare(
left,
[ast.In()],
[comparator]
)
)
) | {
"repo_name": "ztane/jaspyx",
"path": "jaspyx/visitor/compare.py",
"copies": "1",
"size": "2590",
"license": "mit",
"hash": 6692318726731458000,
"line_mean": 29.8452380952,
"line_max": 78,
"alpha_frac": 0.3355212355,
"autogenerated": false,
"ratio": 4.787430683918669,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.562295191941867,
"avg_score": null,
"num_lines": null
} |
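visit_Compare splits a chained Python comparison into pairwise JavaScript comparisons joined with &&, reusing each comparator as the next left operand, and maps Is/IsNot to ===/!==. A small snippet showing the AST shape it consumes; the exact spacing of the emitted JavaScript depends on group() and the other visitors.

import ast

tree = ast.parse("a < b <= c", mode="eval").body
print(type(tree).__name__)                       # Compare
print([type(op).__name__ for op in tree.ops])    # ['Lt', 'LtE']
# visit_Compare walks these pairwise, so the emitted JS is roughly
# ((a < b) && (b <= c)).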
from __future__ import absolute_import, division, print_function
import ast
from jaspyx.ast_util import ast_call, ast_store, ast_load
from jaspyx.visitor import BaseVisitor
class ListComp(BaseVisitor):
def visit_ListComp(self, node):
body = [
ast.Expr(
ast_call(
ast_load('$$.push'),
node.elt
)
)
]
        for generator in reversed(node.generators):
if not isinstance(generator.target, ast.Name):
raise TypeError('dereferencing assignment not supported')
if generator.ifs:
if len(generator.ifs) > 1:
cond = ast.BoolOp(ast.And(), generator.ifs)
else:
cond = generator.ifs[0]
body = [
ast.If(
cond,
body,
[]
)
]
body = [
ast.Assign(
[ast_store('$$' + generator.target.id)],
generator.iter
),
ast.For(
ast_store('$' + generator.target.id),
ast_call(
ast_load('range'),
ast.Num(0),
ast.Attribute(
ast_load('$$' + generator.target.id),
'length',
ast.Load()
)
),
[
ast.Assign(
[generator.target],
ast.Subscript(
ast_load('$$' + generator.target.id),
ast.Index(ast_load('$' + generator.target.id)),
ast.Load()
)
),
] + body,
[]
)
]
self.visit(
ast_call(
ast.FunctionDef(
'',
ast.arguments(
[
], None, None, []
),
[
ast.Assign(
[
ast_store('$$')
],
ast.List(
[],
ast.Load()
)
),
] + body + [
ast.Return(ast_load('$$')),
],
[]
),
)
)
| {
"repo_name": "ztane/jaspyx",
"path": "jaspyx/visitor/listcomp.py",
"copies": "1",
"size": "2837",
"license": "mit",
"hash": -6384909622047512000,
"line_mean": 30.8764044944,
"line_max": 88,
"alpha_frac": 0.2936200211,
"autogenerated": false,
"ratio": 5.985232067510548,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.6778852088610549,
"avg_score": null,
"num_lines": null
} |
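visit_ListComp rewrites a Python list comprehension as an immediately invoked function that pushes into a $$ accumulator, nesting one loop (plus an optional if) per generator, innermost generator first. Expressed as Python-ish pseudocode of the AST it builds; names such as $$ and $x are the helper identifiers used above, and how that AST becomes JavaScript is left to the other visitors.

# [elt for x in xs if cond] is rebuilt as the AST equivalent of:
#
#     def _comp():
#         $$ = []
#         $$x = xs
#         for $x in range(0, $$x.length):
#             x = $$x[$x]
#             if cond:
#                 $$.push(elt)
#         return $$
#     _comp()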
from __future__ import absolute_import, division, print_function
import _ast
from jaspyx.ast_util import ast_store
class TestAstStoreName:
def setUp(self):
self.o = ast_store('foo')
def test_type(self):
assert isinstance(self.o, _ast.Name)
def test_id(self):
assert self.o.id == 'foo'
def test_ctx(self):
assert isinstance(self.o.ctx, _ast.Store)
class TestAstStoreAttr:
def setUp(self):
self.o = ast_store('foo', 'bar')
def test_type(self):
assert isinstance(self.o, _ast.Attribute)
def test_attr(self):
assert self.o.attr == 'bar'
def test_ctx(self):
assert isinstance(self.o.ctx, _ast.Store)
def test_value_type(self):
assert isinstance(self.o.value, _ast.Name)
def test_value_id(self):
assert self.o.value.id == 'foo'
def test_value_ctx(self):
assert isinstance(self.o.value.ctx, _ast.Load)
class TestAstStoreAttrWithDot:
def setUp(self):
self.o = ast_store('foo.bar')
def test_type(self):
assert isinstance(self.o, _ast.Attribute)
def test_attr(self):
assert self.o.attr == 'bar'
def test_ctx(self):
assert isinstance(self.o.ctx, _ast.Store)
def test_value_type(self):
assert isinstance(self.o.value, _ast.Name)
def test_value_id(self):
assert self.o.value.id == 'foo'
def test_value_ctx(self):
assert isinstance(self.o.value.ctx, _ast.Load)
class TestAstStoreAttrWithDotAndStarArgs:
def setUp(self):
self.o = ast_store('foo.bar', 'baz')
def test_type(self):
assert isinstance(self.o, _ast.Attribute)
def test_attr(self):
assert self.o.attr == 'baz'
def test_ctx(self):
assert isinstance(self.o.ctx, _ast.Store)
def test_value_type(self):
assert isinstance(self.o.value, _ast.Attribute)
def test_value_attr(self):
assert self.o.value.attr == 'bar'
def test_value_ctx(self):
assert isinstance(self.o.value.ctx, _ast.Load)
def test_value_value_type(self):
assert isinstance(self.o.value.value, _ast.Name)
def test_value_value_id(self):
assert self.o.value.value.id == 'foo'
def test_value_value_ctx(self):
assert isinstance(self.o.value.value.ctx, _ast.Load)
| {
"repo_name": "ztane/jaspyx",
"path": "jaspyx/tests/test_ast_store.py",
"copies": "1",
"size": "2330",
"license": "mit",
"hash": -7645406573170899000,
"line_mean": 23.5263157895,
"line_max": 64,
"alpha_frac": 0.6223175966,
"autogenerated": false,
"ratio": 3.2496513249651326,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.43719689215651325,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division, print_function
import ast
from jaspyx.context.module import ModuleContext
from jaspyx.compat import basestring
class BaseVisitor(ast.NodeVisitor):
def __init__(self, path, registry, indent=0):
self.path = path
self.registry = registry
self.default_indent = indent
self.stack = []
self.module = None
def push(self, context):
"""
Push a new context on the stack.
:param context: An instance of one of the available context from
the jaspyx.context package.
"""
self.stack.append(context)
def pop(self):
"""
Pop the current context from the stack and append it to the previous
context as content.
"""
self.stack[-2].add(self.stack.pop())
def output(self, s):
"""
Append literal output to the current context.
        This will also automatically prepend indentation.
"""
self.stack[-1].add(s)
def indent(self):
self.output(' ' * (self.stack[-1].indent + 2))
def finish(self):
self.output(';\n')
def group(self, values, prefix='(', infix=' ', infix_node=None, suffix=')'):
"""
Append a group of values with a configurable prefix, suffix and infix
to the output buffer. This is used to render a list of AST nodes with
fixed surroundings.
:param values: A list of AST nodes.
:param prefix: Text to prepend before the output.
:param infix: Text to put between the rendered AST nodes. If
infix_node is also specified, infix_node will be
surrounded by infix.
:param infix_node: An AST node to render in between the values.
:param suffix: Text to append after the output.
"""
self.output(prefix)
first = True
for value in values:
if not first:
if infix:
self.output(infix)
if infix_node is not None:
self.visit(infix_node)
if infix:
self.output(infix)
else:
first = False
if isinstance(value, basestring):
self.output(value)
else:
self.visit(value)
self.output(suffix)
def block(self, nodes, context=None):
"""
Process a block of AST nodes and treat all of them as statements. It
        will also control automatic indentation and appending semicolons and
carriage returns to the output. Can optionally push a context on the
stack before processing and pop it after it's done.
:param nodes: A list of AST nodes to render.
:param context: An optional context to push / pop.
"""
if context is not None:
self.push(context)
for node in nodes:
self.visit(node)
if context is not None:
self.pop()
def visit_Module(self, node):
"""
Handler for top-level AST nodes. Sets this visitor's module
attribute to a newly generated ModuleContext.
:param node: The current AST node being visited.
"""
self.module = ModuleContext()
self.module.indent = self.default_indent
self.push(self.module)
self.block(node.body)
def visit_Expr(self, node):
self.indent()
self.visit(node.value)
self.finish()
def visit_Pass(self, node):
pass
def visit_NameConstant(self, node):
self.visit(ast.Name(str(node.value), ast.Load()))
def generic_visit(self, node):
"""
        Generic AST node handler. Raises an exception. This is called by
ast.NodeVisitor when no suitable visit_<name> method is found.
:param node: The current AST node being visited.
"""
raise NotImplementedError('Unsupported AST node %s' % node)
| {
"repo_name": "ztane/jaspyx",
"path": "jaspyx/visitor/base_visitor.py",
"copies": "1",
"size": "4005",
"license": "mit",
"hash": 1708581140341607400,
"line_mean": 30.7857142857,
"line_max": 80,
"alpha_frac": 0.581772784,
"autogenerated": false,
"ratio": 4.4303097345132745,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5512082518513275,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division, print_function
import ast
from jaspyx.visitor import BaseVisitor
class AugAssign(BaseVisitor):
def visit_AugAssign(self, node):
attr = getattr(self, 'AugAssign_%s' % node.op.__class__.__name__, None)
if attr is None:
# Rewrite the expression as an assignment using a BinOp
self.visit(ast.Assign(
[node.target],
ast.BinOp(
ast.Name(node.target.id, ast.Load()),
node.op,
node.value
)
))
else:
attr(node.target, node.value)
for key, value in {
'Add': ' += ',
'Sub': ' -= ',
'Mult': ' *= ',
'Div': ' /= ',
'Mod': ' %= ',
'BitAnd': ' &= ',
'BitOr': ' |= ',
'BitXor': ' ^= ',
}.items():
def gen_op(op):
def f_op(self, target, value):
self.indent()
self.group(
[target, value],
prefix='',
infix=op,
suffix='',
)
self.finish()
return f_op
exec('AugAssign_%s = gen_op("%s")' % (key, value))
| {
"repo_name": "ztane/jaspyx",
"path": "jaspyx/visitor/augassign.py",
"copies": "1",
"size": "1275",
"license": "mit",
"hash": 7210283874704621000,
"line_mean": 28.6511627907,
"line_max": 79,
"alpha_frac": 0.4133333333,
"autogenerated": false,
"ratio": 4.153094462540716,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5066427795840716,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division, print_function
import ast
from nose.tools import raises
from jaspyx.ast_util import ast_store, ast_load, ast_call
from jaspyx.tests.visitor.v8_helper import V8Helper
class TestAssignSingleTarget(V8Helper):
def test_single_assign(self):
assert self.run(
[
ast.Assign(
[ast_store('test')],
ast.Str('test'))
],
'test'
) == 'test'
def test_multi_assign(self):
assert self.run(
[
ast.Assign(
[
ast_store('test1'),
ast_store('test2')
],
ast.Str('test')
)
],
'test1 + "+" + test2'
) == 'test+test'
@raises(Exception)
def test_assign_multiple_slice(self):
self.v.visit(
ast.Assign(
[
ast.Subscript(
ast_load('foo'),
ast.Slice(),
ast.Store()
),
ast_store('bar'),
],
ast.Str('test')
)
)
def _slice_assign(self, start, end):
result = self.run(
[
ast.Assign(
[ast_store('test')],
ast.List([ast.Num(x) for x in range(10)], ast.Load())
),
ast.Assign(
[
ast.Subscript(
ast_load('test'),
ast.Slice(start and ast.Num(start), end and ast.Num(end), None),
ast.Store()
),
],
ast.List([ast.Num(42), ast.Num(43)], ast.Load())
),
],
'test',
list
)
return result
def test_assign_slice_full(self):
assert self._slice_assign(None, None) == [42, 43]
def test_assign_slice_start(self):
assert self._slice_assign(5, None) == [0, 1, 2, 3, 4, 42, 43]
def test_assign_slice_neg_start(self):
assert self._slice_assign(-6, None) == [0, 1, 2, 3, 42, 43]
def test_assign_slice_end(self):
assert self._slice_assign(None, 5) == [42, 43, 5, 6, 7, 8, 9]
def test_assign_slice_neg_end(self):
assert self._slice_assign(None, -1) == [42, 43, 9]
def test_assign_slice_start_end(self):
assert self._slice_assign(2, 8) == [0, 1, 42, 43, 8, 9]
def test_assign_slice_neg_start_end(self):
assert self._slice_assign(-8, 8) == [0, 1, 42, 43, 8, 9]
def test_assign_slice_neg_start_neg_end(self):
assert self._slice_assign(-8, -2) == [0, 1, 42, 43, 8, 9]
def test_assign_expr_slice(self):
assert self.run(
[
ast.Assign(
[ast_store('test')],
ast.List([ast.Num(x) for x in range(10)], ast.Load())
),
ast.FunctionDef(
'f_test',
ast.arguments([], None, None, []),
[
ast.Return(ast_load('test')),
],
[]
),
ast.Assign(
[
ast.Subscript(
ast_call(ast_load('f_test')),
ast.Slice(ast.Num(2), ast.Num(8), None),
ast.Store()
),
],
ast.List([ast.Num(42), ast.Num(43)], ast.Load())
),
],
'test',
list
) == [0, 1, 42, 43, 8, 9]
def test_destructure(self):
assert self.run(
[
ast.Assign(
[ast_store('test2')],
ast_call(
ast.FunctionDef(
'',
ast.arguments([], None, None, []),
[
ast.Global(['test1']),
ast.Assign(
[
ast.List(
[
ast_store('test1'),
ast_store('test2'),
],
ast.Store()
)
],
ast.List(
[
ast.Str('test1'),
ast.Str('test2'),
],
ast.Load()
)
),
ast.Return(ast_load('test2'))
],
[]
)
)
)
],
'test1 + "+" + test2'
) == 'test1+test2'
| {
"repo_name": "ztane/jaspyx",
"path": "jaspyx/tests/visitor/test_assign.py",
"copies": "1",
"size": "5384",
"license": "mit",
"hash": 2693377371943771000,
"line_mean": 32.0306748466,
"line_max": 92,
"alpha_frac": 0.3237369985,
"autogenerated": false,
"ratio": 4.815742397137746,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00006596741209842338,
"num_lines": 163
} |
from __future__ import absolute_import, division, print_function
import ast
import _ast
from jaspyx.ast_util import ast_call, ast_load, ast_store
from jaspyx.context.block import BlockContext
from jaspyx.visitor import BaseVisitor
class TryExcept(BaseVisitor):
def visit_TryExcept(self, node):
if node.orelse:
raise NotImplementedError('Try-except else handler not implemented')
self.indent()
self.output('try')
self.block(node.body, context=BlockContext(self.stack[-1]))
self.output(' catch($e) ')
self.push(BlockContext(self.stack[-1]))
if_start = None
if_end = None
for handler in node.handlers:
if handler.type is not None:
if handler.name is not None:
body = handler.body[:]
body.insert(0, ast.Assign(
[handler.name],
ast_call(ast_load('JS'), ast.Str('$e'))
))
else:
body = handler.body
                types = handler.type.elts if isinstance(handler.type, _ast.Tuple) else [handler.type]
conditions = [
ast_call(
ast_load('isinstance'),
ast_call(ast_load('JS'), ast.Str('$e')),
type_,
)
for type_ in types
]
_if = ast.If(
ast.BoolOp(ast.Or(), conditions),
body,
[]
)
if if_start is None:
if_start = if_end = _if
else:
if_end.orelse, if_end = [_if], _if
else:
if handler is not node.handlers[-1]:
raise SyntaxError("default 'except:' must be last")
if if_start is None:
self.block(handler.body)
else:
if_end.orelse = handler.body
if if_start is not None:
self.visit(if_start)
self.pop()
self.output('\n')
| {
"repo_name": "ztane/jaspyx",
"path": "jaspyx/visitor/tryexcept.py",
"copies": "1",
"size": "2157",
"license": "mit",
"hash": 4266103008144380000,
"line_mean": 32.1846153846,
"line_max": 95,
"alpha_frac": 0.4663885025,
"autogenerated": false,
"ratio": 4.51255230125523,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.547894080375523,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division, print_function
import _ast
import ast
from jaspyx.ast_util import ast_call, ast_load, ast_store
from jaspyx.visitor import BaseVisitor
class SimpleAssign(_ast.Assign):
pass
class Assign(BaseVisitor):
def build_Assign_Slice(self, target, value):
args = []
if target.slice.lower or target.slice.upper:
args.append(target.slice.lower or ast.Num(0))
if target.slice.upper:
args.append(target.slice.upper)
return ast_call(
ast.FunctionDef(
'',
ast.arguments(
[
ast_store('t'),
ast_store('v'),
ast_store('s'),
ast_store('e'),
], None, None, []
),
[
ast.Assign(
[ast_store('s')],
ast.IfExp(
ast.Compare(
ast_call(
ast_load('type'),
ast_load('s')
),
[ast.Eq()],
[ast.Str('undefined')],
),
ast.Num(0),
ast.IfExp(
ast.Compare(
ast_load('s'),
[ast.Lt()],
[ast.Num(0)],
),
ast.BinOp(
ast_load('s'),
ast.Add(),
ast_load('t.length')
),
ast_load('s')
)
)
),
ast.Assign(
[ast_store('e')],
ast.IfExp(
ast.Compare(
ast_call(
ast_load('type'),
ast_load('e')
),
[ast.Eq()],
[ast.Str('undefined')],
),
ast_load('t.length'),
ast.IfExp(
ast.Compare(
ast_load('e'),
[ast.Lt()],
[ast.Num(0)],
),
ast.BinOp(
ast_load('e'),
ast.Add(),
ast_load('t.length')
),
ast_load('e')
)
)
),
ast.Expr(
ast_call(
ast_load('Array.prototype.splice.apply'),
ast_load('t'),
ast_call(
ast.Attribute(
ast.List([
ast_load('s'),
ast.BinOp(
ast_load('e'),
ast.Sub(),
ast_load('s')
),
], ast.Load()),
'concat',
ast.Load(),
),
ast_load('v'),
)
)
),
ast.Return(ast_load('v')),
],
[]
),
target.value,
value,
*args
)
def build_Assign_Destructuring(self, targets, value):
scope = self.stack[-1].scope
global_scope = scope.get_global_scope()
assignments = []
for target, i in zip(targets.elts, range(len(targets.elts))):
if isinstance(target, _ast.Name):
if scope.is_global(target.id):
global_scope.declare(target.id)
else:
scope.declare(target.id)
target = ast.Name(target.id, ast.Load())
assignments.append(
ast.Assign(
[target],
ast.Subscript(
ast_load('v'),
ast.Index(ast.Num(i)),
ast.Load()
)
)
)
return ast_call(
ast.FunctionDef(
'',
ast.arguments([ast_store('v')], None, None, []),
assignments + [
ast.Return(ast_load('v'))
],
[]
),
value
)
def visit_SimpleAssign(self, node):
self.visit(node.targets[0])
self.output(' = ')
self.visit(node.value)
def visit_Assign(self, node):
body = node.value
for target in reversed(node.targets):
if isinstance(target, _ast.List) or isinstance(target, _ast.Tuple):
body = self.build_Assign_Destructuring(target, body)
elif isinstance(target, _ast.Subscript) and \
isinstance(target.slice, _ast.Slice):
body = self.build_Assign_Slice(target, body)
else:
body = SimpleAssign([target], body)
self.indent()
self.visit(body)
self.finish()
| {
"repo_name": "ztane/jaspyx",
"path": "jaspyx/visitor/assign.py",
"copies": "1",
"size": "6066",
"license": "mit",
"hash": -5871456101861678000,
"line_mean": 34.8934911243,
"line_max": 79,
"alpha_frac": 0.2970656116,
"autogenerated": false,
"ratio": 5.866537717601547,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 169
} |
from __future__ import absolute_import, division, print_function
import ast
import _ast
from jaspyx.ast_util import ast_call
class TestAstCallWithoutArgs:
def setUp(self):
self.c = ast_call(ast.Name('foo', ast.Load()))
def test_type(self):
assert isinstance(self.c, _ast.Call)
def test_func_type(self):
assert isinstance(self.c.func, _ast.Name)
def test_func_id(self):
assert self.c.func.id == 'foo'
def test_args(self):
assert self.c.args == ()
def test_keywords(self):
assert self.c.keywords is None
def test_starargs(self):
assert self.c.starargs is None
def test_kwargs(self):
assert self.c.kwargs is None
class TestAstCallWithArgs:
def setUp(self):
self.c = ast_call(ast.Name('foo', ast.Load()), 'a', 'b', 'c')
def test_type(self):
assert isinstance(self.c, _ast.Call)
def test_func_type(self):
assert isinstance(self.c.func, _ast.Name)
def test_func_id(self):
assert self.c.func.id == 'foo'
def test_args(self):
assert self.c.args == ('a', 'b', 'c')
def test_keywords(self):
assert self.c.keywords is None
def test_starargs(self):
assert self.c.starargs is None
def test_kwargs(self):
assert self.c.kwargs is None
| {
"repo_name": "ztane/jaspyx",
"path": "jaspyx/tests/test_ast_call.py",
"copies": "1",
"size": "1330",
"license": "mit",
"hash": 2218057956845251000,
"line_mean": 22.75,
"line_max": 69,
"alpha_frac": 0.6135338346,
"autogenerated": false,
"ratio": 3.325,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.44385338346000003,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division, print_function
import _ast
import ast
from jaspyx.ast_util import ast_load, ast_call
from jaspyx.visitor import BaseVisitor
class Call(BaseVisitor):
def visit_Call(self, node):
if node.keywords:
raise Exception('keyword arguments are not supported')
if node.kwargs is not None:
raise Exception('kwargs is not supported')
if isinstance(node.func, _ast.Name):
func = getattr(self, 'func_%s' % node.func.id, None)
if func is not None:
# noinspection PyCallingNonCallable
return func(*node.args)
if not node.starargs:
self.visit(node.func)
self.group(node.args, infix=', ')
else:
# Rewrite the call without starargs using apply.
if isinstance(node.func, _ast.Attribute):
this = node.func.value
else:
this = ast_load('this')
if not node.args:
args = node.starargs
else:
args = ast_call(
ast.Attribute(
ast.List(node.args, ast.Load()),
'concat',
ast.Load()
),
node.starargs
)
self.visit(
ast_call(
ast.Attribute(
node.func,
'apply',
ast.Load()
),
this,
args,
)
)
| {
"repo_name": "ztane/jaspyx",
"path": "jaspyx/visitor/call.py",
"copies": "1",
"size": "1639",
"license": "mit",
"hash": -2049542589013894000,
"line_mean": 31.137254902,
"line_max": 66,
"alpha_frac": 0.4551555827,
"autogenerated": false,
"ratio": 4.877976190476191,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5833131773176191,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division, print_function
import _ast
import ast
from jaspyx.context.block import BlockContext
from jaspyx.visitor import BaseVisitor
class For(BaseVisitor):
def visit_For(self, node):
if node.orelse:
raise Exception('for-else is not supported.')
if isinstance(node.iter, _ast.Call) and isinstance(node.iter.func, _ast.Name) and \
node.iter.func.id == 'range':
if len(node.iter.args) == 1:
start = ast.Num(0)
stop = node.iter.args[0]
step = ast.Num(1)
cmp_op = ast.Lt()
elif len(node.iter.args) == 2:
start = node.iter.args[0]
stop = node.iter.args[1]
step = ast.Num(1)
cmp_op = ast.Lt()
elif len(node.iter.args) == 3:
start = node.iter.args[0]
stop = node.iter.args[1]
step = node.iter.args[2]
if not isinstance(step, _ast.Num):
raise Exception('range() only supports literal numeric step')
if step.n >= 0:
cmp_op = ast.Lt()
else:
cmp_op = ast.Gt()
else:
raise Exception('range() expects 1, 2 or 3 parameters')
self.indent()
self.output('for(')
self.visit(node.target)
self.output(' = ')
self.visit(start)
self.output('; ')
self.visit(
ast.Compare(
node.target,
[cmp_op],
[stop]
)
)
self.output('; ')
self.visit(node.target)
self.output(' += ')
self.visit(step)
self.output(') ')
else:
self.indent()
self.output('for(')
self.visit(node.target)
self.output(' in ')
self.visit(node.iter)
self.output(') ')
self.block(node.body, context=BlockContext(self.stack[-1]))
self.output('\n')
| {
"repo_name": "ztane/jaspyx",
"path": "jaspyx/visitor/for_.py",
"copies": "1",
"size": "2167",
"license": "mit",
"hash": -5950195012116832000,
"line_mean": 32.3384615385,
"line_max": 91,
"alpha_frac": 0.4586986617,
"autogenerated": false,
"ratio": 4.159309021113244,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5118007682813244,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division, print_function
import ast
import os
from jaspyx.ast_util import ast_load, ast_store, ast_call
from jaspyx.visitor import BaseVisitor
class Import(BaseVisitor):
import_path = ['.']
def load_module(self, pieces):
module_name = '.'.join(pieces)
if module_name in self.registry:
return
if len(pieces) > 1:
parent = self.registry['.'.join(pieces[:-1])]
import_path = [os.path.split(parent.path)[0]]
else:
import_path = self.import_path
for path in import_path:
module_path = os.path.join(path, pieces[-1], '__init__.jpx')
if os.path.exists(module_path):
break
module_path = os.path.join(path, pieces[-1]) + '.jpx'
if os.path.isfile(module_path):
break
else:
raise ImportError('module %s not found' % module_name)
c = ast.parse(open(module_path).read(), module_path)
self.registry[module_name] = v = self.__class__(module_path, self.registry, indent=self.default_indent)
v.import_path = self.import_path
v.visit(c)
def init_module(self, module_path):
for i in range(len(module_path)):
self.load_module(module_path[:i + 1])
return ast_call(
ast_call(ast_load('JS'), ast.Str('__import__')),
ast_call(ast_load('JS'), ast.Str('__module__')),
ast.Str('.'.join(module_path))
)
def visit_Import(self, node):
for name in node.names:
module_path = name.name.split('.')
import_module = self.init_module(module_path)
if not name.asname:
self.visit(ast.Expr(import_module))
self.visit(
ast.Assign(
[ast_store(module_path[0])],
self.init_module(module_path[:1])
)
)
else:
self.visit(
ast.Assign(
[ast_store(name.asname)],
import_module
)
)
def visit_ImportFrom(self, node):
if node.level:
raise NotImplementedError('Relative imports are not supported')
module_path = node.module.split('.')
import_module = self.init_module(module_path)
if len(node.names) > 1 or node.names[0].name == '*':
self.visit(ast.Assign(
[ast_store('$t1')],
import_module
))
import_from = ast_load('$t1')
else:
import_from = import_module
if node.names[0].name == '*':
name = node.names[0]
if name.name == '*':
if self.stack[-1].scope.prefix != ['__module__']:
raise NotImplementedError('from x import * only implemented at module level')
self.visit(ast.For(
ast_store('$t2'),
import_from,
[
ast.Assign(
[
ast.Subscript(
ast_call(ast_load('JS'), ast.Str('__module__')),
ast.Index(ast_load('$t2')),
ast.Load()
)
],
ast.Subscript(
import_from,
ast.Index(ast_load('$t2')),
ast.Load(),
)
),
],
[]
))
else:
for name in node.names:
asname = name.asname if name.asname else name.name
self.visit(
ast.Assign(
[ast_store(asname)],
ast.Attribute(
import_from,
name.name,
ast.Load()
)
)
)
| {
"repo_name": "ztane/jaspyx",
"path": "jaspyx/visitor/import_.py",
"copies": "1",
"size": "4248",
"license": "mit",
"hash": -7834830787499370000,
"line_mean": 32.984,
"line_max": 111,
"alpha_frac": 0.4241996234,
"autogenerated": false,
"ratio": 4.632497273718648,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5556696897118648,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division, print_function
import codecs
try:
from collections import OrderedDict
except ImportError:
from ordereddict import OrderedDict
import copy
import os
import os.path as path
import sys
import toml
import nfldb
import nflfan.provider as provider
import nflfan.score as score
_xdg_home = os.getenv('XDG_CONFIG_HOME')
"""XDG user configuration directory."""
if not _xdg_home:
home = os.getenv('HOME')
if not home:
_xdg_home = ''
else:
_xdg_home = path.join(home, '.config')
_data_paths = [
path.join(_xdg_home, 'nflfan'),
path.join(sys.prefix, 'share', 'nflfan'),
]
"""A list of paths to check for loading data files."""
builtin_providers = {
'yahoo': provider.Yahoo,
'espn': provider.ESPN,
}
"""The default set of providers defined by nflfan."""
def load_config(providers=builtin_providers, file_path=''):
"""
Reads and loads the configuration file containing fantasy football
league information.
The return value is a dictionary mapping provider name (e.g.,
`yahoo`) to a list of leagues for that provider. Each league
    is guaranteed to have at least the `name`, `season`, `phase`
    and `scoring` attributes filled in with values that are not
    `None`. Providers may also require their own specific mandatory fields.
If no configuration file can be found, then an `IOError` is raised.
"""
def prov_leagues(d):
return ((k, d[k]) for k in sorted(d.keys()) if isinstance(d[k], dict))
schema = {
'all': {
'req': provider.Provider.conf_required,
'opt': provider.Provider.conf_optional,
},
}
for prov in providers.values():
schema[prov.provider_name] = {
'req': prov.conf_required, 'opt': prov.conf_optional,
}
raw = toml.loads(get_data('config.toml', file_path=file_path))
scoring = merge(raw['scoring'])
conf = {'leagues': OrderedDict()}
for pname in sorted(raw.keys()):
prov = raw[pname]
if pname == 'scoring':
continue
if not isinstance(prov, dict):
conf[pname] = prov
continue
conf['leagues'][pname] = OrderedDict()
for lg_name, lg in prov_leagues(prov):
lg['league_name'] = lg_name
lg['provider_class'] = providers[pname]
apply_schema(schema, scoring, pname, prov, lg)
lg = provider.League(lg['season'], lg['phase'], lg['league_id'],
pname, lg_name, lg['scoring'], lg)
conf['leagues'][pname][lg_name] = lg
return conf
def merge(s):
"""
    Given a nesting of TOML dictionaries, return a flat dictionary of
    every scheme in `s`. This applies the inheritance used in configuration
    files so that each scheme has every attribute fully resolved.
"""
def settings_and_subschemes(d, defaults):
settings, subs = {}, {}
for k, v in d.items():
if isinstance(v, dict):
subs[k] = v
else:
settings[k] = v
for k, v in defaults.items():
if k not in settings:
settings[k] = v
return copy.deepcopy(settings), subs
def merge(d, defaults, name):
settings, subs = settings_and_subschemes(d, defaults)
schemes[name] = settings
for subname, subscheme in subs.items():
fullname = '%s.%s' % (name, subname)
merge(subscheme, settings, fullname)
schemes = {}
for name, scheme in s.items():
merge(scheme, {}, name)
return schemes
def get_data(name, file_path=''):
"""
Reads the contents of a configuration data file with name
`name`. If `file_path` is given, then it is used if it exists.
If no file can be found, then an `IOError` is raised.
"""
if file_path:
paths = [file_path] + _data_paths
else:
paths = _data_paths
for fp in map(lambda p: path.join(p, name), paths):
try:
with codecs.open(fp) as fp:
return fp.read()
except IOError:
pass
raise IOError("Could not find configuration file %s" % name)
def cache_path():
"""
Returns a file path to the cache directory. If a cache directory
does not exist, one is created.
If there is a problem creating a cache directory, an `IOError`
exception is raised.
"""
for fp in _data_paths:
if os.access(fp, os.R_OK):
cdir = path.join(fp, 'data')
if not os.access(cdir, os.R_OK):
try:
os.mkdir(cdir)
                except (IOError, OSError) as e:
                    raise IOError(str(e) + ' (please create a cache directory)')
return cdir
raise IOError('could not find or create a cache directory')
def apply_schema(schema, scoring, prov_name, prov, lg):
"""
Applies the scheme for the provider `prov_name` to the league `lg`
while using `prov` as a dictionary of default values for `lg`.
`scoring` should be a dictionary mapping names to scoring schemes.
The `schema` should be a dictionary mapping provider name to its
set of required and optional fields. Namely, each value should be
a dictionary with two keys: `req` and `opt`, where each correspond
to a list of required and optional fields, respectively. There
must also be an `all` key in `schema` that specifies required and
optional fields for every provider.
If a required field in the provider's scheme is missing, then a
`ValueError` is raised.
"""
def get_scoring(ref):
try:
return score.ScoreSchema(ref, scoring[ref])
except KeyError:
raise KeyError("Scoring scheme %s does not exist." % ref)
def val(key, required=False):
v = lg.get(key, prov.get(key, None))
if required and v is None:
raise ValueError("Provider %s must have %s." % (prov_name, key))
elif key == 'scoring':
return get_scoring(v)
elif key == 'phase':
v = nfldb.Enums.season_phase[v.lower().title()]
return v
for r in schema['all']['req'] + schema[prov_name]['req']:
lg[r] = val(r, required=True)
for o in schema['all']['opt'] + schema[prov_name]['opt']:
lg[o] = val(o)
| {
"repo_name": "codeaudit/nflfan",
"path": "nflfan/config.py",
"copies": "2",
"size": "6357",
"license": "unlicense",
"hash": -4512103232873625000,
"line_mean": 30.9447236181,
"line_max": 78,
"alpha_frac": 0.5999685386,
"autogenerated": false,
"ratio": 3.8341375150784076,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5434106053678407,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division, print_function
import code
import os
import os.path
import sys
import time
from pprint import pformat
from threading import Event, Thread
from sqlalchemy import create_engine
from manhattan.server import Server, main as server_main, logging_config
from manhattan.client import (ServerError, TimeoutError, Client,
main as client_main)
from manhattan.log.timerotating import TimeRotatingLog
from . import data
from .base import BaseTest, work_path
from .test_combinations import drop_existing_tables
class MockBackend(object):
def foo(self, a, b):
return u"foo: %s %s" % (pformat(a), pformat(b))
def bar(self, *args, **kw):
return u"bar: %s %s" % (pformat(args), pformat(kw))
def failme(self, a):
raise ValueError('sad')
class TestClientServer(BaseTest):
def test_basic(self):
backend = MockBackend()
server = Server(backend, 'tcp://127.0.0.1:31338')
client = Client('tcp://127.0.0.1:31338')
try:
server.start()
# This test is screwy. If simplejson is installed, it will convert
# all strings to str objects. Without it, you get unicode objects.
# We require simplejson so assume it's gonna be str.
self.assertEqual(client.foo(4, 'blah'), "foo: 4 'blah'")
self.assertEqual(
client.bar('hello', 'world', **dict(a=12, b='blah')),
"bar: ('hello', 'world') {'a': 12, 'b': 'blah'}")
with self.assertRaisesRegexp(ServerError, 'ValueError: sad'):
client.failme(42)
finally:
server.kill()
def test_timeout(self):
client = Client('tcp://127.0.0.1:31339', wait=10)
with self.assertRaisesRegexp(TimeoutError,
'Timed out after 10 ms waiting'):
client.foo()
def _run_server_with_args(self, args, path, url, bind):
log = TimeRotatingLog(path)
data.run_clickstream(log)
drop_existing_tables(create_engine(url))
sys.argv = args
killed_event = Event()
th = Thread(target=server_main, args=(killed_event,))
orig_interact = code.interact
try:
th.start()
# Give the server time to process all the records before querying
# it.
time.sleep(0.5)
def fake_interact(banner, local):
client = local['client']
self.assertEqual(client.count(u'add to cart', site_id=1), 5)
self.assertEqual(client.count(u'began checkout', site_id=1), 4)
self.assertEqual(client.count(u'viewed page', site_id=1), 6)
self.assertEqual(client.count(u'abandoned cart', site_id=1), 1)
self.assertEqual(
client.count(u'abandoned after validation failure',
site_id=1), 0)
code.interact = fake_interact
sys.argv = ['manhattan-client', '--connect=%s' % bind]
client_main()
finally:
code.interact = orig_interact
killed_event.set()
# Wait for thread to die before returning.
time.sleep(0.5)
def test_clientserver_executable(self):
path = work_path('clientserver-executable')
log_path = work_path('debug.log')
url = 'sqlite:////tmp/manhattan-clientserver.db'
bind = 'tcp://127.0.0.1:5555'
args = [
'manhattan-server',
'--url=%s' % url,
'--path=%s' % path,
'--log=%s' % log_path,
'--bind=%s' % bind,
'--complex="abandoned cart|add to cart|began checkout"',
'--complex="abandoned checkout|began checkout|completed checkout"',
'--complex="abandoned after validation failure|'
'began checkout,checkout validation failed|completed checkout"',
'--complex="abandoned after payment failure|'
'began checkout,payment failed|completed checkout"',
]
self._run_server_with_args(args, path, url, bind)
self.assertTrue(os.path.exists(log_path))
def test_configure_logging(self):
cfg = logging_config(filename=None)
self.assertNotIn('root_file', cfg)
def test_clientserver_python_config(self):
path = data.sampleconfig['input_log_path']
url = data.sampleconfig['sqlalchemy_url']
bind = data.sampleconfig['bind']
log_path = data.sampleconfig['error_log_path']
args = ['manhattan-server',
'--config=manhattan.tests.data.sampleconfig']
self._run_server_with_args(args, path, url, bind)
self.assertTrue(os.path.exists(log_path))
| {
"repo_name": "storborg/manhattan",
"path": "manhattan/tests/test_clientserver.py",
"copies": "1",
"size": "4816",
"license": "mit",
"hash": 4133486379407250400,
"line_mean": 34.1532846715,
"line_max": 79,
"alpha_frac": 0.582641196,
"autogenerated": false,
"ratio": 3.905920519059205,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4988561715059205,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division, print_function
import collections
import six
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# TODO: This is a temporary dispatch mechanism. This whole system
# is to be changed to automatic discovery of the for_* modules
from configman.def_sources import for_mappings
from configman.def_sources import for_modules
from configman.def_sources import for_json
definition_dispatch = {
collections.Mapping: for_mappings.setup_definitions,
type(for_modules): for_modules.setup_definitions,
six.binary_type: for_json.setup_definitions,
six.text_type: for_json.setup_definitions,
}
try:
from configman.def_sources import for_argparse
import argparse
definition_dispatch[argparse.ArgumentParser] = \
for_argparse.setup_definitions
except ImportError:
# silently ignore that argparse doesn't exist
pass
class UnknownDefinitionTypeException(Exception):
pass
def setup_definitions(source, destination):
target_setup_func = None
try:
target_setup_func = definition_dispatch[type(source)]
except KeyError:
for a_key in definition_dispatch.keys():
if isinstance(source, a_key):
target_setup_func = definition_dispatch[a_key]
break
if not target_setup_func:
raise UnknownDefinitionTypeException(repr(type(source)))
target_setup_func(source, destination)
| {
"repo_name": "twobraids/configman",
"path": "configman/def_sources/__init__.py",
"copies": "2",
"size": "1595",
"license": "mpl-2.0",
"hash": -2594688366198818000,
"line_mean": 31.5510204082,
"line_max": 69,
"alpha_frac": 0.721630094,
"autogenerated": false,
"ratio": 4.079283887468031,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 49
} |
from __future__ import absolute_import, division, print_function
import ConfigParser
import datetime
import os
import os.path as path
import re
import sys
import psycopg2
from psycopg2.extras import RealDictCursor
from psycopg2.extensions import TRANSACTION_STATUS_INTRANS
from psycopg2.extensions import new_type, register_type
import pytz
import nfldb.team
__pdoc__ = {}
api_version = 3
__pdoc__['api_version'] = \
"""
The schema version that this library corresponds to. When the schema
version of the database is less than this value, `nfldb.connect` will
automatically update the schema to the latest version before doing
anything else.
"""
_config_home = os.getenv('XDG_CONFIG_HOME')
if not _config_home:
home = os.getenv('HOME')
if not home:
_config_home = ''
else:
_config_home = path.join(home, '.config')
def config(config_path=''):
"""
Reads and loads the configuration file containing PostgreSQL
connection information. This function is used automatically
by `nfldb.connect`.
The return value is a dictionary mapping a key in the configuration
file to its corresponding value. All values are strings, except for
`port`, which is always an integer.
A total of three possible file paths are tried before
giving up and returning `None`. The file paths, in
    order, are: `config_path`, `sys.prefix/share/nfldb/config.ini` and
`$XDG_CONFIG_HOME/nfldb/config.ini`.
"""
paths = [
config_path,
path.join(sys.prefix, 'share', 'nfldb', 'config.ini'),
path.join(_config_home, 'nfldb', 'config.ini'),
]
cp = ConfigParser.RawConfigParser()
for p in paths:
try:
with open(p) as fp:
cp.readfp(fp)
return {
'timezone': cp.get('pgsql', 'timezone'),
'database': cp.get('pgsql', 'database'),
'user': cp.get('pgsql', 'user'),
'password': cp.get('pgsql', 'password'),
'host': cp.get('pgsql', 'host'),
'port': cp.getint('pgsql', 'port'),
}
except IOError:
pass
return None
def connect(database=None, user=None, password=None, host=None, port=None,
timezone=None, config_path=''):
"""
Returns a `psycopg2._psycopg.connection` object from the
`psycopg2.connect` function. If database is `None`, then `connect`
will look for a configuration file using `nfldb.config` with
`config_path`. Otherwise, the connection will use the parameters
given.
If `database` is `None` and no config file can be found, then an
`IOError` exception is raised.
This function will also compare the current schema version of the
database against the API version `nfldb.api_version` and assert
    that they are equivalent. If the schema version is less
    than the API version, then the schema will be automatically
upgraded. If the schema version is newer than the library version,
then this function will raise an assertion error. An assertion
error will also be raised if the schema version is 0 and the
database is not empty.
N.B. The `timezone` parameter should be set to a value that
PostgreSQL will accept. Select from the `pg_timezone_names` view
to get a list of valid time zones.
"""
if database is None:
conf = config(config_path=config_path)
if conf is None:
raise IOError("Could not find valid configuration file.")
timezone, database = conf['timezone'], conf['database']
user, password = conf['user'], conf['password']
host, port = conf['host'], conf['port']
conn = psycopg2.connect(database=database, user=user, password=password,
host=host, port=port)
# Start the migration. Make sure if this is the initial setup that
# the DB is empty.
sversion = schema_version(conn)
assert sversion <= api_version, \
'Library with version %d is older than the schema with version %d' \
% (api_version, sversion)
assert sversion > 0 or (sversion == 0 and _is_empty(conn)), \
'Schema has version 0 but is not empty.'
set_timezone(conn, 'UTC')
_migrate(conn, api_version)
if timezone is not None:
set_timezone(conn, timezone)
# Bind SQL -> Python casting functions.
from nfldb.types import Clock, _Enum, Enums, FieldPosition, PossessionTime
_bind_type(conn, 'game_phase', _Enum._pg_cast(Enums.game_phase))
_bind_type(conn, 'season_phase', _Enum._pg_cast(Enums.season_phase))
_bind_type(conn, 'game_day', _Enum._pg_cast(Enums.game_day))
_bind_type(conn, 'player_pos', _Enum._pg_cast(Enums.player_pos))
_bind_type(conn, 'player_status', _Enum._pg_cast(Enums.player_status))
_bind_type(conn, 'game_time', Clock._pg_cast)
_bind_type(conn, 'pos_period', PossessionTime._pg_cast)
_bind_type(conn, 'field_pos', FieldPosition._pg_cast)
return conn
def schema_version(conn):
"""
Returns the schema version of the given database. If the version
is not stored in the database, then `0` is returned.
"""
with Tx(conn) as c:
try:
c.execute('SELECT version FROM meta LIMIT 1', ['version'])
except psycopg2.ProgrammingError:
conn.rollback()
return 0
if c.rowcount == 0:
return 0
return c.fetchone()['version']
def set_timezone(conn, timezone):
"""
Sets the timezone for which all datetimes will be displayed
as. Valid values are exactly the same set of values accepted
by PostgreSQL. (Select from the `pg_timezone_names` view to
get a list of valid time zones.)
Note that all datetimes are stored in UTC. This setting only
affects how datetimes are viewed from select queries.
"""
with Tx(conn) as c:
c.execute('SET timezone = %s', (timezone,))
def now():
"""
Returns the current date/time in UTC as a `datetime.datetime`
object. It can be used to compare against date/times in any of the
`nfldb` objects without worrying about timezones.
"""
return datetime.datetime.now(pytz.utc)
def _bind_type(conn, sql_type_name, cast):
"""
Binds a `cast` function to the SQL type in the connection `conn`
given by `sql_type_name`. `cast` must be a function with two
parameters: the SQL value and a cursor object. It should return the
appropriate Python object.
Note that `sql_type_name` is not escaped.
"""
with Tx(conn) as c:
c.execute('SELECT NULL::%s' % sql_type_name)
typ = new_type((c.description[0].type_code,), sql_type_name, cast)
register_type(typ)
def _db_name(conn):
    m = re.search(r'dbname=(\S+)', conn.dsn)
return m.group(1)
def _is_empty(conn):
"""
Returns `True` if and only if there are no tables in the given
database.
"""
with Tx(conn) as c:
c.execute('''
SELECT COUNT(*) AS count FROM information_schema.tables
WHERE table_catalog = %s AND table_schema = 'public'
''', [_db_name(conn)])
if c.fetchone()['count'] == 0:
return True
return False
def _mogrify(cursor, xs):
"""Shortcut for mogrifying a list as if it were a tuple."""
return cursor.mogrify('%s', (tuple(xs),))
def _num_rows(cursor, table):
"""Returns the number of rows in table."""
cursor.execute('SELECT COUNT(*) AS rowcount FROM %s' % table)
return cursor.fetchone()['rowcount']
class Tx (object):
"""
Tx is a `with` compatible class that abstracts a transaction given
a connection. If an exception occurs inside the `with` block, then
rollback is automatically called. Otherwise, upon exit of the with
block, commit is called.
Tx blocks can be nested inside other Tx blocks. Nested Tx blocks
never commit or rollback a transaction. Instead, the exception is
passed along to the caller. Only the outermost transaction will
commit or rollback the entire transaction.
Use it like so:
#!python
with Tx(conn) as cursor:
...
Which is meant to be roughly equivalent to the following:
#!python
with conn:
with conn.cursor() as curs:
...
This should only be used when you're running SQL queries directly.
(Or when interfacing with another part of the API that requires
a database cursor.)
"""
def __init__(self, psycho_conn, name=None, factory=None):
"""
`psycho_conn` is a DB connection returned from `nfldb.connect`,
`name` is passed as the `name` argument to the cursor
constructor (for server-side cursors), and `factory` is passed
as the `cursor_factory` parameter to the cursor constructor.
Note that the default cursor factory is
`psycopg2.extras.RealDictCursor`. However, using
`psycopg2.extensions.cursor` (the default tuple cursor) can be
much more efficient when fetching large result sets.
"""
tstatus = psycho_conn.get_transaction_status()
self.__name = name
self.__nested = tstatus == TRANSACTION_STATUS_INTRANS
self.__conn = psycho_conn
self.__cursor = None
self.__factory = factory
if self.__factory is None:
self.__factory = RealDictCursor
def __enter__(self):
self.__cursor = self.__conn.cursor(name=self.__name,
cursor_factory=self.__factory)
return self.__cursor
def __exit__(self, typ, value, traceback):
if not self.__cursor.closed:
self.__cursor.close()
if typ is not None:
if not self.__nested:
self.__conn.rollback()
return False
else:
if not self.__nested:
self.__conn.commit()
return True
def _big_insert(cursor, table, datas):
"""
Given a database cursor, table name and a list of asssociation
lists of data (column name and value), perform a single large
insert. Namely, each association list should correspond to a single
row in `table`.
Each association list must have exactly the same number of columns
in exactly the same order.
"""
stamped = table in ('game', 'drive', 'play')
insert_fields = [k for k, _ in datas[0]]
if stamped:
insert_fields.append('time_inserted')
insert_fields.append('time_updated')
insert_fields = ', '.join(insert_fields)
def times(xs):
if stamped:
xs.append('NOW()')
xs.append('NOW()')
return xs
def vals(xs):
return [v for _, v in xs]
values = ', '.join(_mogrify(cursor, times(vals(data))) for data in datas)
cursor.execute('INSERT INTO %s (%s) VALUES %s'
% (table, insert_fields, values))
def _upsert(cursor, table, data, pk):
"""
Performs an arbitrary "upsert" given a table, an association list
mapping key to value, and an association list representing the
primary key.
Note that this is **not** free of race conditions. It is the
caller's responsibility to avoid race conditions. (e.g., By using a
table or row lock.)
If the table is `game`, `drive` or `play`, then the `time_insert`
and `time_updated` fields are automatically populated.
"""
stamped = table in ('game', 'drive', 'play')
update_set = ['%s = %s' % (k, '%s') for k, _ in data]
if stamped:
update_set.append('time_updated = NOW()')
update_set = ', '.join(update_set)
insert_fields = [k for k, _ in data]
insert_places = ['%s' for _ in data]
if stamped:
insert_fields.append('time_inserted')
insert_fields.append('time_updated')
insert_places.append('NOW()')
insert_places.append('NOW()')
insert_fields = ', '.join(insert_fields)
insert_places = ', '.join(insert_places)
pk_cond = ' AND '.join(['%s = %s' % (k, '%s') for k, _ in pk])
q = '''
UPDATE %s SET %s WHERE %s;
''' % (table, update_set, pk_cond)
q += '''
INSERT INTO %s (%s)
SELECT %s WHERE NOT EXISTS (SELECT 1 FROM %s WHERE %s)
''' % (table, insert_fields, insert_places, table, pk_cond)
values = [v for _, v in data]
pk_values = [v for _, v in pk]
try:
cursor.execute(q, values + pk_values + values + pk_values)
except psycopg2.ProgrammingError as e:
print(cursor.query)
raise e
def _drop_stat_indexes(c):
from nfldb.types import _play_categories, _player_categories
for cat in _player_categories.values():
c.execute('DROP INDEX play_player_in_%s' % cat)
for cat in _play_categories.values():
c.execute('DROP INDEX play_in_%s' % cat)
def _create_stat_indexes(c):
from nfldb.types import _play_categories, _player_categories
for cat in _player_categories.values():
c.execute('CREATE INDEX play_player_in_%s ON play_player (%s ASC)'
% (cat, cat))
for cat in _play_categories.values():
c.execute('CREATE INDEX play_in_%s ON play (%s ASC)' % (cat, cat))
# What follows are the migration functions. They follow the naming
# convention "_migrate_{VERSION}" where VERSION is an integer that
# corresponds to the version that the schema will be after the
# migration function runs. Each migration function is only responsible
# for running the queries required to update schema. It does not
# need to update the schema version.
#
# The migration functions should accept a cursor as a parameter,
# which are created in the higher-order _migrate. In particular,
# each migration function is run in its own transaction. Commits
# and rollbacks are handled automatically.
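# Hedged sketch of what a future migration could look like under this
# convention (the function name and DDL below are purely illustrative and
# not part of the actual schema history):
#
#   def _migrate_4(c):
#       c.execute('ALTER TABLE player ADD COLUMN nickname varchar (100) NULL')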
def _migrate(conn, to):
current = schema_version(conn)
assert current <= to
globs = globals()
for v in xrange(current+1, to+1):
fname = '_migrate_%d' % v
with Tx(conn) as c:
assert fname in globs, 'Migration function %d not defined.' % v
globs[fname](c)
c.execute("UPDATE meta SET version = %s", (v,))
def _migrate_1(c):
c.execute('''
CREATE DOMAIN utctime AS timestamp with time zone
CHECK (EXTRACT(TIMEZONE FROM VALUE) = '0')
''')
c.execute('''
CREATE TABLE meta (
version smallint,
last_roster_download utctime NOT NULL
)
''')
c.execute('''
INSERT INTO meta
(version, last_roster_download)
VALUES (1, '0001-01-01T00:00:00Z')
''')
def _migrate_2(c):
from nfldb.types import Enums, _play_categories, _player_categories
# Create some types and common constraints.
c.execute('''
CREATE DOMAIN gameid AS character varying (10)
CHECK (char_length(VALUE) = 10)
''')
c.execute('''
CREATE DOMAIN usmallint AS smallint
CHECK (VALUE >= 0)
''')
c.execute('''
CREATE DOMAIN game_clock AS smallint
CHECK (VALUE >= 0 AND VALUE <= 900)
''')
c.execute('''
CREATE DOMAIN field_offset AS smallint
CHECK (VALUE >= -50 AND VALUE <= 50)
''')
c.execute('''
CREATE TYPE game_phase AS ENUM %s
''' % _mogrify(c, Enums.game_phase))
c.execute('''
CREATE TYPE season_phase AS ENUM %s
''' % _mogrify(c, Enums.season_phase))
c.execute('''
CREATE TYPE game_day AS ENUM %s
''' % _mogrify(c, Enums.game_day))
c.execute('''
CREATE TYPE player_pos AS ENUM %s
''' % _mogrify(c, Enums.player_pos))
c.execute('''
CREATE TYPE player_status AS ENUM %s
''' % _mogrify(c, Enums.player_status))
c.execute('''
CREATE TYPE game_time AS (
phase game_phase,
elapsed game_clock
)
''')
c.execute('''
CREATE TYPE pos_period AS (
elapsed usmallint
)
''')
c.execute('''
CREATE TYPE field_pos AS (
pos field_offset
)
''')
# Now that some types have been made, add current state to meta table.
c.execute('''
ALTER TABLE meta
ADD season_type season_phase NULL,
ADD season_year usmallint NULL
CHECK (season_year >= 1960 AND season_year <= 2100),
ADD week usmallint NULL
CHECK (week >= 1 AND week <= 25)
''')
# Create the team table and populate it.
c.execute('''
CREATE TABLE team (
team_id character varying (3) NOT NULL,
city character varying (50) NOT NULL,
name character varying (50) NOT NULL,
PRIMARY KEY (team_id)
)
''')
c.execute('''
INSERT INTO team (team_id, city, name) VALUES %s
''' % (', '.join(_mogrify(c, team[0:3]) for team in nfldb.team.teams)))
c.execute('''
CREATE TABLE player (
player_id character varying (10) NOT NULL
CHECK (char_length(player_id) = 10),
gsis_name character varying (75) NULL,
full_name character varying (100) NULL,
first_name character varying (100) NULL,
last_name character varying (100) NULL,
team character varying (3) NOT NULL,
position player_pos NOT NULL,
profile_id integer NULL,
profile_url character varying (255) NULL,
uniform_number usmallint NULL,
birthdate character varying (75) NULL,
college character varying (255) NULL,
height character varying (100) NULL,
weight character varying (100) NULL,
years_pro usmallint NULL,
status player_status NOT NULL,
PRIMARY KEY (player_id),
FOREIGN KEY (team)
REFERENCES team (team_id)
ON DELETE RESTRICT
ON UPDATE CASCADE
)
''')
c.execute('''
CREATE TABLE game (
gsis_id gameid NOT NULL,
gamekey character varying (5) NULL,
start_time utctime NOT NULL,
week usmallint NOT NULL
CHECK (week >= 1 AND week <= 25),
day_of_week game_day NOT NULL,
season_year usmallint NOT NULL
CHECK (season_year >= 1960 AND season_year <= 2100),
season_type season_phase NOT NULL,
finished boolean NOT NULL,
home_team character varying (3) NOT NULL,
home_score usmallint NOT NULL,
home_score_q1 usmallint NULL,
home_score_q2 usmallint NULL,
home_score_q3 usmallint NULL,
home_score_q4 usmallint NULL,
home_score_q5 usmallint NULL,
home_turnovers usmallint NOT NULL,
away_team character varying (3) NOT NULL,
away_score usmallint NOT NULL,
away_score_q1 usmallint NULL,
away_score_q2 usmallint NULL,
away_score_q3 usmallint NULL,
away_score_q4 usmallint NULL,
away_score_q5 usmallint NULL,
away_turnovers usmallint NOT NULL,
time_inserted utctime NOT NULL,
time_updated utctime NOT NULL,
PRIMARY KEY (gsis_id),
FOREIGN KEY (home_team)
REFERENCES team (team_id)
ON DELETE RESTRICT
ON UPDATE CASCADE,
FOREIGN KEY (away_team)
REFERENCES team (team_id)
ON DELETE RESTRICT
ON UPDATE CASCADE
)
''')
c.execute('''
CREATE TABLE drive (
gsis_id gameid NOT NULL,
drive_id usmallint NOT NULL,
start_field field_pos NULL,
start_time game_time NOT NULL,
end_field field_pos NULL,
end_time game_time NOT NULL,
pos_team character varying (3) NOT NULL,
pos_time pos_period NULL,
first_downs usmallint NOT NULL,
result text NULL,
penalty_yards smallint NOT NULL,
yards_gained smallint NOT NULL,
play_count usmallint NOT NULL,
time_inserted utctime NOT NULL,
time_updated utctime NOT NULL,
PRIMARY KEY (gsis_id, drive_id),
FOREIGN KEY (gsis_id)
REFERENCES game (gsis_id)
ON DELETE CASCADE,
FOREIGN KEY (pos_team)
REFERENCES team (team_id)
ON DELETE RESTRICT
ON UPDATE CASCADE
)
''')
# I've taken the approach of using a sparse table to represent
# sparse play statistic data. See issue #2:
# https://github.com/BurntSushi/nfldb/issues/2
c.execute('''
CREATE TABLE play (
gsis_id gameid NOT NULL,
drive_id usmallint NOT NULL,
play_id usmallint NOT NULL,
time game_time NOT NULL,
pos_team character varying (3) NOT NULL,
yardline field_pos NULL,
down smallint NULL
CHECK (down >= 1 AND down <= 4),
yards_to_go smallint NULL
CHECK (yards_to_go >= 0 AND yards_to_go <= 100),
description text NULL,
note text NULL,
time_inserted utctime NOT NULL,
time_updated utctime NOT NULL,
%s,
PRIMARY KEY (gsis_id, drive_id, play_id),
FOREIGN KEY (gsis_id, drive_id)
REFERENCES drive (gsis_id, drive_id)
ON DELETE CASCADE,
FOREIGN KEY (gsis_id)
REFERENCES game (gsis_id)
ON DELETE CASCADE,
FOREIGN KEY (pos_team)
REFERENCES team (team_id)
ON DELETE RESTRICT
ON UPDATE CASCADE
)
''' % ', '.join([cat._sql_field for cat in _play_categories.values()]))
c.execute('''
CREATE TABLE play_player (
gsis_id gameid NOT NULL,
drive_id usmallint NOT NULL,
play_id usmallint NOT NULL,
player_id character varying (10) NOT NULL,
team character varying (3) NOT NULL,
%s,
PRIMARY KEY (gsis_id, drive_id, play_id, player_id),
FOREIGN KEY (gsis_id, drive_id, play_id)
REFERENCES play (gsis_id, drive_id, play_id)
ON DELETE CASCADE,
FOREIGN KEY (gsis_id, drive_id)
REFERENCES drive (gsis_id, drive_id)
ON DELETE CASCADE,
FOREIGN KEY (gsis_id)
REFERENCES game (gsis_id)
ON DELETE CASCADE,
FOREIGN KEY (player_id)
REFERENCES player (player_id)
ON DELETE RESTRICT,
FOREIGN KEY (team)
REFERENCES team (team_id)
ON DELETE RESTRICT
ON UPDATE CASCADE
)
''' % ', '.join(cat._sql_field for cat in _player_categories.values()))
def _migrate_3(c):
_create_stat_indexes(c)
c.execute('''
CREATE INDEX player_in_gsis_name ON player (gsis_name ASC);
CREATE INDEX player_in_full_name ON player (full_name ASC);
CREATE INDEX player_in_team ON player (team ASC);
CREATE INDEX player_in_position ON player (position ASC);
''')
c.execute('''
CREATE INDEX game_in_gamekey ON game (gamekey ASC);
CREATE INDEX game_in_start_time ON game (start_time ASC);
CREATE INDEX game_in_week ON game (week ASC);
CREATE INDEX game_in_day_of_week ON game (day_of_week ASC);
CREATE INDEX game_in_season_year ON game (season_year ASC);
CREATE INDEX game_in_season_type ON game (season_type ASC);
CREATE INDEX game_in_finished ON game (finished ASC);
CREATE INDEX game_in_home_team ON game (home_team ASC);
CREATE INDEX game_in_away_team ON game (away_team ASC);
CREATE INDEX game_in_home_score ON game (home_score ASC);
CREATE INDEX game_in_away_score ON game (away_score ASC);
CREATE INDEX game_in_home_turnovers ON game (home_turnovers ASC);
CREATE INDEX game_in_away_turnovers ON game (away_turnovers ASC);
''')
c.execute('''
CREATE INDEX drive_in_gsis_id ON drive (gsis_id ASC);
CREATE INDEX drive_in_drive_id ON drive (drive_id ASC);
CREATE INDEX drive_in_start_field ON drive
(((start_field).pos) ASC);
CREATE INDEX drive_in_end_field ON drive
(((end_field).pos) ASC);
CREATE INDEX drive_in_start_time ON drive
(((start_time).phase) ASC, ((start_time).elapsed) ASC);
CREATE INDEX drive_in_end_time ON drive
(((end_time).phase) ASC, ((end_time).elapsed) ASC);
CREATE INDEX drive_in_pos_team ON drive (pos_team ASC);
CREATE INDEX drive_in_pos_time ON drive
(((pos_time).elapsed) DESC);
CREATE INDEX drive_in_first_downs ON drive (first_downs DESC);
CREATE INDEX drive_in_penalty_yards ON drive (penalty_yards DESC);
CREATE INDEX drive_in_yards_gained ON drive (yards_gained DESC);
CREATE INDEX drive_in_play_count ON drive (play_count DESC);
''')
c.execute('''
CREATE INDEX play_in_gsis_id ON play (gsis_id ASC);
CREATE INDEX play_in_gsis_drive_id ON play (gsis_id ASC, drive_id ASC);
CREATE INDEX play_in_time ON play
(((time).phase) ASC, ((time).elapsed) ASC);
CREATE INDEX play_in_pos_team ON play (pos_team ASC);
CREATE INDEX play_in_yardline ON play
(((yardline).pos) ASC);
CREATE INDEX play_in_down ON play (down ASC);
CREATE INDEX play_in_yards_to_go ON play (yards_to_go DESC);
''')
c.execute('''
CREATE INDEX pp_in_gsis_id ON play_player (gsis_id ASC);
CREATE INDEX pp_in_player_id ON play_player (player_id ASC);
CREATE INDEX pp_in_gsis_drive_id ON play_player
(gsis_id ASC, drive_id ASC);
CREATE INDEX pp_in_gsis_drive_play_id ON play_player
(gsis_id ASC, drive_id ASC, play_id ASC);
CREATE INDEX pp_in_gsis_player_id ON play_player
(gsis_id ASC, player_id ASC);
CREATE INDEX pp_in_team ON play_player (team ASC);
''')
| {
"repo_name": "webflint/nfldb",
"path": "nfldb/db.py",
"copies": "1",
"size": "26481",
"license": "unlicense",
"hash": 3361983381442575400,
"line_mean": 35.1268758527,
"line_max": 79,
"alpha_frac": 0.5927646237,
"autogenerated": false,
"ratio": 3.933016485964652,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00003100582909587002,
"num_lines": 733
} |
from __future__ import absolute_import, division, print_function
import copy
import logging
import six
import numpy as np
from numpy.testing import (assert_equal, assert_array_almost_equal)
from nose.tools import assert_true, raises, assert_raises
from skxray.core.fitting.base.parameter_data import get_para, e_calibration
from skxray.core.fitting.xrf_model import (
ModelSpectrum, ParamController, linear_spectrum_fitting,
construct_linear_model, trim, sum_area, compute_escape_peak,
register_strategy, update_parameter_dict, _set_parameter_hint,
_STRATEGY_REGISTRY
)
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s',
level=logging.INFO,
filemode='w')
def synthetic_spectrum():
param = get_para()
x = np.arange(2000)
pileup_peak = ['Si_Ka1-Si_Ka1', 'Si_Ka1-Ce_La1']
elemental_lines = ['Ar_K', 'Fe_K', 'Ce_L', 'Pt_M'] + pileup_peak
elist, matv, area_v = construct_linear_model(x, param, elemental_lines, default_area=1e5)
return np.sum(matv, 1) + 100 # avoid zero values
def test_param_controller_fail():
param = get_para()
PC = ParamController(param, [])
assert_raises(ValueError, PC._add_area_param, 'Ar')
def test_parameter_controller():
param = get_para()
pileup_peak = ['Si_Ka1-Si_Ka1', 'Si_Ka1-Ce_La1']
elemental_lines = ['Ar_K', 'Fe_K', 'Ce_L', 'Pt_M'] + pileup_peak
PC = ParamController(param, elemental_lines)
set_opt = dict(pos='hi', width='lohi', area='hi', ratio='lo')
PC.update_element_prop(['Fe_K', 'Ce_L', pileup_peak[0]], **set_opt)
PC.set_strategy('linear')
# check boundary value
for k, v in six.iteritems(PC.params):
if 'Fe' in k:
if 'ratio' in k:
assert_equal(str(v['bound_type']), set_opt['ratio'])
if 'center' in k:
assert_equal(str(v['bound_type']), set_opt['pos'])
elif 'area' in k:
assert_equal(str(v['bound_type']), set_opt['area'])
elif 'sigma' in k:
assert_equal(str(v['bound_type']), set_opt['width'])
elif ('pileup_'+pileup_peak[0].replace('-', '_')) in k:
if 'ratio' in k:
assert_equal(str(v['bound_type']), set_opt['ratio'])
if 'center' in k:
assert_equal(str(v['bound_type']), set_opt['pos'])
elif 'area' in k:
assert_equal(str(v['bound_type']), set_opt['area'])
elif 'sigma' in k:
assert_equal(str(v['bound_type']), set_opt['width'])
def test_fit():
param = get_para()
pileup_peak = ['Si_Ka1-Si_Ka1', 'Si_Ka1-Ce_La1']
elemental_lines = ['Ar_K', 'Fe_K', 'Ce_L', 'Pt_M'] + pileup_peak
x0 = np.arange(2000)
y0 = synthetic_spectrum()
x, y = trim(x0, y0, 100, 1300)
MS = ModelSpectrum(param, elemental_lines)
MS.assemble_models()
result = MS.model_fit(x, y, weights=1/np.sqrt(y), maxfev=200)
# check area of each element
for k, v in six.iteritems(result.values):
if 'area' in k:
# error smaller than 1%
assert_true((v-1e5)/1e5 < 1e-2)
# multiple peaks summed, so the value should be larger than a single peak area of 1e5
sum_Fe = sum_area('Fe_K', result)
assert_true(sum_Fe > 1e5)
sum_Ce = sum_area('Ce_L', result)
assert_true(sum_Ce > 1e5)
sum_Pt = sum_area('Pt_M', result)
assert_true(sum_Pt > 1e5)
# create full list of parameters
PC = ParamController(param, elemental_lines)
new_params = PC.params
# update values
update_parameter_dict(new_params, result)
for k, v in six.iteritems(new_params):
if 'area' in k:
assert_equal(v['value'], result.values[k])
MS = ModelSpectrum(new_params, elemental_lines)
MS.assemble_models()
result = MS.model_fit(x, y, weights=1/np.sqrt(y), maxfev=200)
# check area of each element
for k, v in six.iteritems(result.values):
if 'area' in k:
# error smaller than 0.1%
assert_true((v-1e5)/1e5 < 1e-3)
def test_register():
new_strategy = e_calibration
register_strategy('e_calibration', new_strategy, overwrite=False)
assert_equal(len(_STRATEGY_REGISTRY), 5)
new_strategy = copy.deepcopy(e_calibration)
new_strategy['coherent_sct_amplitude'] = 'fixed'
register_strategy('new_strategy', new_strategy)
assert_equal(len(_STRATEGY_REGISTRY), 6)
@raises(RuntimeError)
def test_register_error():
new_strategy = copy.deepcopy(e_calibration)
new_strategy['coherent_sct_amplitude'] = 'fixed'
register_strategy('e_calibration', new_strategy, overwrite=False)
def test_pre_fit():
y0 = synthetic_spectrum()
x0 = np.arange(len(y0))
# the following items should appear
item_list = ['Ar_K', 'Fe_K', 'compton', 'elastic']
param = get_para()
# with weight pre fit
x, y_total, area_v = linear_spectrum_fitting(x0, y0, param)
for v in item_list:
assert_true(v in y_total)
# no weight pre fit
x, y_total, area_v = linear_spectrum_fitting(x0, y0, param, constant_weight=None)
for v in item_list:
assert_true(v in y_total)
def test_escape_peak():
y0 = synthetic_spectrum()
ratio = 0.01
param = get_para()
xnew, ynew = compute_escape_peak(y0, ratio, param)
# ratio should be the same
assert_array_almost_equal(np.sum(ynew)/np.sum(y0), ratio, decimal=3)
def test_set_param_hint():
param = get_para()
elemental_lines = ['Ar_K', 'Fe_K', 'Ce_L', 'Pt_M']
bound_options = ['none', 'lohi', 'fixed', 'lo', 'hi']
MS = ModelSpectrum(param, elemental_lines)
MS.assemble_models()
# get compton model
compton = MS.mod.components[0]
for v in bound_options:
input_param = {'bound_type': v, 'max': 13.0, 'min': 9.0, 'value': 11.0}
_set_parameter_hint('coherent_sct_energy', input_param, compton)
p = compton.make_params()
if v == 'fixed':
assert_equal(p['coherent_sct_energy'].vary, False)
else:
assert_equal(p['coherent_sct_energy'].vary, True)
@raises(ValueError)
def test_set_param():
param = get_para()
elemental_lines = ['Ar_K', 'Fe_K', 'Ce_L', 'Pt_M']
MS = ModelSpectrum(param, elemental_lines)
MS.assemble_models()
# get compton model
compton = MS.mod.components[0]
input_param = {'bound_type': 'other', 'max': 13.0, 'min': 9.0, 'value': 11.0}
_set_parameter_hint('coherent_sct_energy', input_param, compton)
| {
"repo_name": "giltis/scikit-xray",
"path": "skxray/core/fitting/tests/test_xrf_fit.py",
"copies": "1",
"size": "6513",
"license": "bsd-3-clause",
"hash": -9140348721233390000,
"line_mean": 31.8939393939,
"line_max": 93,
"alpha_frac": 0.6078612007,
"autogenerated": false,
"ratio": 3.064941176470588,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4172802377170588,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division, print_function
import copy
import matplotlib.cm as mcm
from matplotlib.colors import LogNorm
def show_label_array(ax, label_array, cmap=None, **kwargs):
"""
Display a labeled array nicely
Additional kwargs are passed through to `ax.imshow`.
If `vmin` is in kwargs, it is clipped to a minimum of 0.5.
Parameters
----------
ax : Axes
The `Axes` object to add the artist to
label_array : ndarray
Expected to be an unsigned integer array. 0 is background,
positive integers label regions of interest
cmap : str or colormap, optional
Color map to use, defaults to 'Paired'
Returns
-------
img : AxesImage
The artist added to the axes
"""
if cmap is None:
cmap = 'Paired'
_cmap = copy.copy((mcm.get_cmap(cmap)))
_cmap.set_under('w', 0)
vmin = max(.5, kwargs.pop('vmin', .5))
ax.set_aspect('equal')
# use the copied colormap so the 'under' color applies to the background (0)
im = ax.imshow(label_array, cmap=_cmap,
interpolation='nearest',
vmin=vmin,
**kwargs)
return im
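# Minimal usage sketch (illustrative only; the label array and figure below are
# assumptions for the example and assume numpy and pyplot are already imported):
#
#     fig, ax = plt.subplots()
#     labels = np.zeros((64, 64), dtype=np.uint8)
#     labels[10:20, 10:30] = 1      # ROI 1
#     labels[40:55, 35:50] = 2      # ROI 2
#     show_label_array(ax, labels)  # background (0) is drawn with the 'under' color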
def show_label_array_on_image(ax, image, label_array, cmap=None,
imshow_cmap='gray', norm=LogNorm(), **kwargs):
"""
Plot the required ROIs (labeled array) on the image
Additional kwargs are passed through to `ax.imshow`.
If `vmin` is in kwargs, it is clipped to a minimum of 0.5.
Parameters
----------
ax : Axes
The `Axes` object to add the artist to
image : array
The image array
label_array : array
Expected to be an unsigned integer array. 0 is background,
positive integers label regions of interest
cmap : str or colormap, optional
Color map to use for plotting the label_array, defaults to 'None'
imshow_cmap : str or colormap, optional
Color map to use for plotting the image, defaults to 'gray'
norm : Normalize, optional
Normalization used to scale the image data, defaults to LogNorm()
Returns
-------
im : AxesImage
The artist added to the axes
im_label : AxesImage
The artist added to the axes
"""
ax.set_aspect('equal')
im = ax.imshow(image, cmap=imshow_cmap, interpolation='none', norm=norm,
**kwargs)
im_label = show_label_array(ax, label_array, cmap=cmap, norm=norm,
**kwargs)
return im, im_label
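# Illustrative demo of the two helpers above. This is a sketch with made-up
# data: the image, labels, and colormap choice are assumptions, not library
# defaults.
if __name__ == '__main__':  # pragma: no cover
    import numpy as np
    import matplotlib.pyplot as plt

    rng = np.random.RandomState(0)
    image = rng.poisson(lam=50, size=(64, 64)).astype(float)  # fake detector frame
    labels = np.zeros((64, 64), dtype=np.uint8)
    labels[10:20, 10:30] = 1
    labels[40:55, 35:50] = 2

    fig, (ax1, ax2) = plt.subplots(1, 2)
    show_label_array(ax1, labels)                              # labels alone
    show_label_array_on_image(ax2, image, labels,              # labels over log-scaled image
                              cmap='Paired')
    plt.show()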
| {
"repo_name": "sameera2004/xray-vision",
"path": "xray_vision/mpl_plotting/roi.py",
"copies": "3",
"size": "2476",
"license": "bsd-3-clause",
"hash": 7066160466508348000,
"line_mean": 26.5111111111,
"line_max": 76,
"alpha_frac": 0.6013731826,
"autogenerated": false,
"ratio": 4.092561983471074,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.6193935166071074,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division, print_function
import copy
import pytest
np = pytest.importorskip('numpy')
import os
import time
from distutils.version import LooseVersion
from operator import add, sub, getitem
from threading import Lock
import warnings
from toolz import merge, countby, concat
from toolz.curried import identity
import dask
import dask.array as da
from dask.base import tokenize
from dask.delayed import delayed
from dask.async import get_sync
from dask.utils import ignoring, tmpfile, tmpdir
from dask.utils_test import inc
from dask.array import chunk
from dask.array.core import (getem, getarray, getarray_nofancy, top, dotmany,
concatenate3, broadcast_dimensions, Array, stack,
concatenate, from_array, take, elemwise, isnull,
notnull, broadcast_shapes, partial_by_order,
tensordot, choose, where, coarsen, insert,
broadcast_to, fromfunction,
blockdims_from_blockshape, store, optimize,
from_func, normalize_chunks, broadcast_chunks,
atop, from_delayed, concatenate_axes,
common_blockdim)
from dask.array.utils import assert_eq
# temporary until numpy functions migrated
try:
from numpy import nancumsum, nancumprod
except ImportError: # pragma: no cover
import dask.array.numpy_compat as npcompat
nancumsum = npcompat.nancumsum
nancumprod = npcompat.nancumprod
# Helper: check that two dask collections have identical (deterministically named) graph keys.
def same_keys(a, b):
def key(k):
if isinstance(k, str):
return (k, -1, -1, -1)
else:
return k
return sorted(a.dask, key=key) == sorted(b.dask, key=key)
def test_getem():
for fancy, getter in [(True, getarray), (False, getarray_nofancy)]:
sol = {('X', 0, 0): (getter, 'X', (slice(0, 2), slice(0, 3))),
('X', 1, 0): (getter, 'X', (slice(2, 4), slice(0, 3))),
('X', 1, 1): (getter, 'X', (slice(2, 4), slice(3, 6))),
('X', 0, 1): (getter, 'X', (slice(0, 2), slice(3, 6)))}
assert getem('X', (2, 3), shape=(4, 6), fancy=fancy) == sol
def test_top():
assert top(inc, 'z', 'ij', 'x', 'ij', numblocks={'x': (2, 2)}) == \
{('z', 0, 0): (inc, ('x', 0, 0)),
('z', 0, 1): (inc, ('x', 0, 1)),
('z', 1, 0): (inc, ('x', 1, 0)),
('z', 1, 1): (inc, ('x', 1, 1))}
assert top(add, 'z', 'ij', 'x', 'ij', 'y', 'ij',
numblocks={'x': (2, 2), 'y': (2, 2)}) == \
{('z', 0, 0): (add, ('x', 0, 0), ('y', 0, 0)),
('z', 0, 1): (add, ('x', 0, 1), ('y', 0, 1)),
('z', 1, 0): (add, ('x', 1, 0), ('y', 1, 0)),
('z', 1, 1): (add, ('x', 1, 1), ('y', 1, 1))}
assert top(dotmany, 'z', 'ik', 'x', 'ij', 'y', 'jk',
numblocks={'x': (2, 2), 'y': (2, 2)}) == \
{('z', 0, 0): (dotmany, [('x', 0, 0), ('x', 0, 1)],
[('y', 0, 0), ('y', 1, 0)]),
('z', 0, 1): (dotmany, [('x', 0, 0), ('x', 0, 1)],
[('y', 0, 1), ('y', 1, 1)]),
('z', 1, 0): (dotmany, [('x', 1, 0), ('x', 1, 1)],
[('y', 0, 0), ('y', 1, 0)]),
('z', 1, 1): (dotmany, [('x', 1, 0), ('x', 1, 1)],
[('y', 0, 1), ('y', 1, 1)])}
assert top(identity, 'z', '', 'x', 'ij', numblocks={'x': (2, 2)}) ==\
{('z',): (identity, [[('x', 0, 0), ('x', 0, 1)],
[('x', 1, 0), ('x', 1, 1)]])}
def test_top_supports_broadcasting_rules():
assert top(add, 'z', 'ij', 'x', 'ij', 'y', 'ij',
numblocks={'x': (1, 2), 'y': (2, 1)}) == \
{('z', 0, 0): (add, ('x', 0, 0), ('y', 0, 0)),
('z', 0, 1): (add, ('x', 0, 1), ('y', 0, 0)),
('z', 1, 0): (add, ('x', 0, 0), ('y', 1, 0)),
('z', 1, 1): (add, ('x', 0, 1), ('y', 1, 0))}
def test_concatenate3_on_scalars():
assert_eq(concatenate3([1, 2]), np.array([1, 2]))
def test_chunked_dot_product():
x = np.arange(400).reshape((20, 20))
o = np.ones((20, 20))
d = {'x': x, 'o': o}
getx = getem('x', (5, 5), shape=(20, 20))
geto = getem('o', (5, 5), shape=(20, 20))
result = top(dotmany, 'out', 'ik', 'x', 'ij', 'o', 'jk',
numblocks={'x': (4, 4), 'o': (4, 4)})
dsk = merge(d, getx, geto, result)
out = dask.get(dsk, [[('out', i, j) for j in range(4)] for i in range(4)])
assert_eq(np.dot(x, o), concatenate3(out))
def test_chunked_transpose_plus_one():
x = np.arange(400).reshape((20, 20))
d = {'x': x}
getx = getem('x', (5, 5), shape=(20, 20))
f = lambda x: x.T + 1
comp = top(f, 'out', 'ij', 'x', 'ji', numblocks={'x': (4, 4)})
dsk = merge(d, getx, comp)
out = dask.get(dsk, [[('out', i, j) for j in range(4)] for i in range(4)])
assert_eq(concatenate3(out), x.T + 1)
def test_transpose():
x = np.arange(240).reshape((4, 6, 10))
d = da.from_array(x, (2, 3, 4))
assert_eq(d.transpose((2, 0, 1)),
x.transpose((2, 0, 1)))
assert same_keys(d.transpose((2, 0, 1)), d.transpose((2, 0, 1)))
assert_eq(d.transpose(2, 0, 1),
x.transpose(2, 0, 1))
assert same_keys(d.transpose(2, 0, 1), d.transpose(2, 0, 1))
with pytest.raises(ValueError):
d.transpose(1, 2)
with pytest.raises(ValueError):
d.transpose((1, 2))
def test_broadcast_dimensions_works_with_singleton_dimensions():
argpairs = [('x', 'i')]
numblocks = {'x': ((1,),)}
assert broadcast_dimensions(argpairs, numblocks) == {'i': (1,)}
def test_broadcast_dimensions():
argpairs = [('x', 'ij'), ('y', 'ij')]
d = {'x': ('Hello', 1), 'y': (1, (2, 3))}
assert broadcast_dimensions(argpairs, d) == {'i': 'Hello', 'j': (2, 3)}
def test_Array():
shape = (1000, 1000)
chunks = (100, 100)
name = 'x'
dsk = merge({name: 'some-array'}, getem(name, chunks, shape=shape))
a = Array(dsk, name, chunks, shape=shape, dtype='f8')
assert a.numblocks == (10, 10)
assert a._keys() == [[('x', i, j) for j in range(10)]
for i in range(10)]
assert a.chunks == ((100,) * 10, (100,) * 10)
assert a.shape == shape
assert len(a) == shape[0]
def test_uneven_chunks():
a = Array({}, 'x', chunks=(3, 3), shape=(10, 10), dtype='f8')
assert a.chunks == ((3, 3, 3, 1), (3, 3, 3, 1))
def test_numblocks_supports_singleton_block_dims():
shape = (100, 10)
chunks = (10, 10)
name = 'x'
dsk = merge({name: 'some-array'}, getem(name, shape=shape, chunks=chunks))
a = Array(dsk, name, chunks, shape=shape, dtype='f8')
assert set(concat(a._keys())) == set([('x', i, 0) for i in range(100 // 10)])
def test_keys():
dsk = dict((('x', i, j), ()) for i in range(5) for j in range(6))
dx = Array(dsk, 'x', chunks=(10, 10), shape=(50, 60), dtype='f8')
assert dx._keys() == [[(dx.name, i, j) for j in range(6)]
for i in range(5)]
d = Array({}, 'x', (), shape=(), dtype='f8')
assert d._keys() == [('x',)]
def test_Array_computation():
a = Array({('x', 0, 0): np.eye(3)}, 'x', shape=(3, 3), chunks=(3, 3), dtype='f8')
assert_eq(np.array(a), np.eye(3))
assert isinstance(a.compute(), np.ndarray)
assert float(a[0, 0]) == 1
def test_stack():
a, b, c = [Array(getem(name, chunks=(2, 3), shape=(4, 6)),
name, chunks=(2, 3), dtype='f8', shape=(4, 6))
for name in 'ABC']
s = stack([a, b, c], axis=0)
colon = slice(None, None, None)
assert s.shape == (3, 4, 6)
assert s.chunks == ((1, 1, 1), (2, 2), (3, 3))
assert s.dask[(s.name, 0, 1, 0)] == (getitem, ('A', 1, 0),
(None, colon, colon))
assert s.dask[(s.name, 2, 1, 0)] == (getitem, ('C', 1, 0),
(None, colon, colon))
assert same_keys(s, stack([a, b, c], axis=0))
s2 = stack([a, b, c], axis=1)
assert s2.shape == (4, 3, 6)
assert s2.chunks == ((2, 2), (1, 1, 1), (3, 3))
assert s2.dask[(s2.name, 0, 1, 0)] == (getitem, ('B', 0, 0),
(colon, None, colon))
assert s2.dask[(s2.name, 1, 1, 0)] == (getitem, ('B', 1, 0),
(colon, None, colon))
assert same_keys(s2, stack([a, b, c], axis=1))
s2 = stack([a, b, c], axis=2)
assert s2.shape == (4, 6, 3)
assert s2.chunks == ((2, 2), (3, 3), (1, 1, 1))
assert s2.dask[(s2.name, 0, 1, 0)] == (getitem, ('A', 0, 1),
(colon, colon, None))
assert s2.dask[(s2.name, 1, 1, 2)] == (getitem, ('C', 1, 1),
(colon, colon, None))
assert same_keys(s2, stack([a, b, c], axis=2))
pytest.raises(ValueError, lambda: stack([a, b, c], axis=3))
assert set(b.dask.keys()).issubset(s2.dask.keys())
assert stack([a, b, c], axis=-1).chunks == stack([a, b, c], axis=2).chunks
def test_short_stack():
x = np.array([1])
d = da.from_array(x, chunks=(1,))
s = da.stack([d])
assert s.shape == (1, 1)
assert Array._get(s.dask, s._keys())[0][0].shape == (1, 1)
def test_stack_scalars():
d = da.arange(4, chunks=2)
s = da.stack([d.mean(), d.sum()])
assert s.compute().tolist() == [np.arange(4).mean(), np.arange(4).sum()]
@pytest.mark.skipif(LooseVersion(np.__version__) < '1.10.0',
reason="NumPy doesn't yet support stack")
def test_stack_rechunk():
x = da.random.random(10, chunks=5)
y = da.random.random(10, chunks=4)
z = da.stack([x, y], axis=0)
assert z.shape == (2, 10)
assert z.chunks == ((1, 1), (4, 1, 3, 2))
assert_eq(z, np.stack([x.compute(), y.compute()], axis=0))
def test_concatenate():
a, b, c = [Array(getem(name, chunks=(2, 3), shape=(4, 6)),
name, chunks=(2, 3), dtype='f8', shape=(4, 6))
for name in 'ABC']
x = concatenate([a, b, c], axis=0)
assert x.shape == (12, 6)
assert x.chunks == ((2, 2, 2, 2, 2, 2), (3, 3))
assert x.dask[(x.name, 0, 1)] == ('A', 0, 1)
assert x.dask[(x.name, 5, 0)] == ('C', 1, 0)
assert same_keys(x, concatenate([a, b, c], axis=0))
y = concatenate([a, b, c], axis=1)
assert y.shape == (4, 18)
assert y.chunks == ((2, 2), (3, 3, 3, 3, 3, 3))
assert y.dask[(y.name, 1, 0)] == ('A', 1, 0)
assert y.dask[(y.name, 1, 5)] == ('C', 1, 1)
assert same_keys(y, concatenate([a, b, c], axis=1))
assert set(b.dask.keys()).issubset(y.dask.keys())
assert (concatenate([a, b, c], axis=-1).chunks ==
concatenate([a, b, c], axis=1).chunks)
pytest.raises(ValueError, lambda: concatenate([a, b, c], axis=2))
def test_concatenate_rechunk():
x = da.random.random((6, 6), chunks=(3, 3))
y = da.random.random((6, 6), chunks=(2, 2))
z = da.concatenate([x, y], axis=0)
assert z.shape == (12, 6)
assert z.chunks == ((3, 3, 2, 2, 2), (2, 1, 1, 2))
assert_eq(z, np.concatenate([x.compute(), y.compute()], axis=0))
z = da.concatenate([x, y], axis=1)
assert z.shape == (6, 12)
assert z.chunks == ((2, 1, 1, 2), (3, 3, 2, 2, 2))
assert_eq(z, np.concatenate([x.compute(), y.compute()], axis=1))
def test_concatenate_fixlen_strings():
x = np.array(['a', 'b', 'c'])
y = np.array(['aa', 'bb', 'cc'])
a = da.from_array(x, chunks=(2,))
b = da.from_array(y, chunks=(2,))
assert_eq(np.concatenate([x, y]),
da.concatenate([a, b]))
def test_vstack():
x = np.arange(5)
y = np.ones(5)
a = da.arange(5, chunks=2)
b = da.ones(5, chunks=2)
assert_eq(np.vstack((x, y)), da.vstack((a, b)))
assert_eq(np.vstack((x, y[None, :])), da.vstack((a, b[None, :])))
def test_hstack():
x = np.arange(5)
y = np.ones(5)
a = da.arange(5, chunks=2)
b = da.ones(5, chunks=2)
assert_eq(np.hstack((x[None, :], y[None, :])),
da.hstack((a[None, :], b[None, :])))
assert_eq(np.hstack((x, y)), da.hstack((a, b)))
def test_dstack():
x = np.arange(5)
y = np.ones(5)
a = da.arange(5, chunks=2)
b = da.ones(5, chunks=2)
assert_eq(np.dstack((x[None, None, :], y[None, None, :])),
da.dstack((a[None, None, :], b[None, None, :])))
assert_eq(np.dstack((x[None, :], y[None, :])),
da.dstack((a[None, :], b[None, :])))
assert_eq(np.dstack((x, y)), da.dstack((a, b)))
def test_take():
x = np.arange(400).reshape((20, 20))
a = from_array(x, chunks=(5, 5))
assert_eq(np.take(x, 3, axis=0), take(a, 3, axis=0))
assert_eq(np.take(x, [3, 4, 5], axis=-1), take(a, [3, 4, 5], axis=-1))
pytest.raises(ValueError, lambda: take(a, 3, axis=2))
assert same_keys(take(a, [3, 4, 5], axis=-1), take(a, [3, 4, 5], axis=-1))
def test_compress():
x = np.arange(25).reshape((5, 5))
a = from_array(x, chunks=(2, 2))
assert_eq(np.compress([True, False, True, False, True], x, axis=0),
da.compress([True, False, True, False, True], a, axis=0))
assert_eq(np.compress([True, False, True, False, True], x, axis=1),
da.compress([True, False, True, False, True], a, axis=1))
assert_eq(np.compress([True, False], x, axis=1),
da.compress([True, False], a, axis=1))
with pytest.raises(NotImplementedError):
da.compress([True, False], a)
with pytest.raises(ValueError):
da.compress([True, False], a, axis=100)
with pytest.raises(ValueError):
da.compress([[True], [False]], a, axis=100)
def test_binops():
a = Array(dict((('a', i), np.array([0])) for i in range(3)),
'a', chunks=((1, 1, 1),), dtype='i8')
b = Array(dict((('b', i), np.array([0])) for i in range(3)),
'b', chunks=((1, 1, 1),), dtype='i8')
result = elemwise(add, a, b, name='c')
assert result.dask == merge(a.dask, b.dask,
dict((('c', i), (add, ('a', i), ('b', i)))
for i in range(3)))
result = elemwise(pow, a, 2, name='c')
assert "'a', 0" in str(result.dask[('c', 0)])
assert "2" in str(result.dask[('c', 0)])
def test_isnull():
x = np.array([1, np.nan])
a = from_array(x, chunks=(2,))
with ignoring(ImportError):
assert_eq(isnull(a), np.isnan(x))
assert_eq(notnull(a), ~np.isnan(x))
def test_isclose():
x = np.array([0, np.nan, 1, 1.5])
y = np.array([1e-9, np.nan, 1, 2])
a = from_array(x, chunks=(2,))
b = from_array(y, chunks=(2,))
assert_eq(da.isclose(a, b, equal_nan=True),
np.isclose(x, y, equal_nan=True))
def test_broadcast_shapes():
assert (3, 4, 5) == broadcast_shapes((3, 4, 5), (4, 1), ())
assert (3, 4) == broadcast_shapes((3, 1), (1, 4), (4,))
assert (5, 6, 7, 3, 4) == broadcast_shapes((3, 1), (), (5, 6, 7, 1, 4))
pytest.raises(ValueError, lambda: broadcast_shapes((3,), (3, 4)))
pytest.raises(ValueError, lambda: broadcast_shapes((2, 3), (2, 3, 1)))
def test_elemwise_on_scalars():
x = np.arange(10)
a = from_array(x, chunks=(5,))
assert len(a._keys()) == 2
assert_eq(a.sum()**2, x.sum()**2)
x = np.arange(11)
a = from_array(x, chunks=(5,))
assert len(a._keys()) == 3
assert_eq(a, x)
def test_partial_by_order():
assert partial_by_order(5, function=add, other=[(1, 20)]) == 25
def test_elemwise_with_ndarrays():
x = np.arange(3)
y = np.arange(12).reshape(4, 3)
a = from_array(x, chunks=(3,))
b = from_array(y, chunks=(2, 3))
assert_eq(x + a, 2 * x)
assert_eq(a + x, 2 * x)
assert_eq(x + b, x + y)
assert_eq(b + x, x + y)
assert_eq(a + y, x + y)
assert_eq(y + a, x + y)
# Error on shape mismatch
pytest.raises(ValueError, lambda: a + y.T)
pytest.raises(ValueError, lambda: a + np.arange(2))
def test_elemwise_differently_chunked():
x = np.arange(3)
y = np.arange(12).reshape(4, 3)
a = from_array(x, chunks=(3,))
b = from_array(y, chunks=(2, 2))
assert_eq(a + b, x + y)
assert_eq(b + a, x + y)
def test_operators():
x = np.arange(10)
y = np.arange(10).reshape((10, 1))
a = from_array(x, chunks=(5,))
b = from_array(y, chunks=(5, 1))
c = a + 1
assert_eq(c, x + 1)
c = a + b
assert_eq(c, x + x.reshape((10, 1)))
expr = (3 / a * b)**2 > 5
assert_eq(expr, (3 / x * y)**2 > 5)
c = da.exp(a)
assert_eq(c, np.exp(x))
assert_eq(abs(-a), a)
assert_eq(a, +x)
def test_operator_dtype_promotion():
x = np.arange(10, dtype=np.float32)
y = np.array([1])
a = from_array(x, chunks=(5,))
assert_eq(x + 1, a + 1) # still float32
assert_eq(x + 1e50, a + 1e50) # now float64
assert_eq(x + y, a + y) # also float64
def test_field_access():
x = np.array([(1, 1.0), (2, 2.0)], dtype=[('a', 'i4'), ('b', 'f4')])
y = from_array(x, chunks=(1,))
assert_eq(y['a'], x['a'])
assert_eq(y[['b', 'a']], x[['b', 'a']])
assert same_keys(y[['b', 'a']], y[['b', 'a']])
def test_field_access_with_shape():
dtype = [('col1', ('f4', (3, 2))), ('col2', ('f4', 3))]
data = np.ones((100, 50), dtype=dtype)
x = da.from_array(data, 10)
assert_eq(x['col1'], data['col1'])
assert_eq(x[['col1']], data[['col1']])
assert_eq(x['col2'], data['col2'])
assert_eq(x[['col1', 'col2']], data[['col1', 'col2']])
def test_tensordot():
x = np.arange(400).reshape((20, 20))
a = from_array(x, chunks=(5, 4))
y = np.arange(200).reshape((20, 10))
b = from_array(y, chunks=(4, 5))
for axes in [1, (1, 0)]:
assert_eq(tensordot(a, b, axes=axes), np.tensordot(x, y, axes=axes))
assert_eq(tensordot(x, b, axes=axes), np.tensordot(x, y, axes=axes))
assert_eq(tensordot(a, y, axes=axes), np.tensordot(x, y, axes=axes))
assert same_keys(tensordot(a, b, axes=(1, 0)), tensordot(a, b, axes=(1, 0)))
assert not same_keys(tensordot(a, b, axes=0), tensordot(a, b, axes=1))
# assert (tensordot(a, a).chunks
# == tensordot(a, a, axes=((1, 0), (0, 1))).chunks)
# assert_eq(tensordot(a, a), np.tensordot(x, x))
@pytest.mark.parametrize('axes', [
0,
1,
(0, 1),
(1, 0),
((1, 0), (2, 1)),
((1, 2), (2, 0)),
((2, 0), (1, 2))
])
def test_tensordot_2(axes):
x = np.arange(4 * 4 * 4).reshape((4, 4, 4))
y = da.from_array(x, chunks=2)
assert_eq(da.tensordot(y, y, axes=axes),
np.tensordot(x, x, axes=axes))
def test_dot_method():
x = np.arange(400).reshape((20, 20))
a = from_array(x, chunks=(5, 5))
y = np.arange(200).reshape((20, 10))
b = from_array(y, chunks=(5, 5))
assert_eq(a.dot(b), x.dot(y))
def test_T():
x = np.arange(400).reshape((20, 20))
a = from_array(x, chunks=(5, 5))
assert_eq(x.T, a.T)
def test_norm():
a = np.arange(200, dtype='f8').reshape((20, 10))
b = from_array(a, chunks=(5, 5))
assert_eq(b.vnorm(), np.linalg.norm(a))
assert_eq(b.vnorm(ord=1), np.linalg.norm(a.flatten(), ord=1))
assert_eq(b.vnorm(ord=4, axis=0), np.linalg.norm(a, ord=4, axis=0))
assert b.vnorm(ord=4, axis=0, keepdims=True).ndim == b.ndim
split_every = {0: 3, 1: 3}
assert_eq(b.vnorm(ord=1, axis=0, split_every=split_every),
np.linalg.norm(a, ord=1, axis=0))
assert_eq(b.vnorm(ord=np.inf, axis=0, split_every=split_every),
np.linalg.norm(a, ord=np.inf, axis=0))
assert_eq(b.vnorm(ord=np.inf, split_every=split_every),
np.linalg.norm(a.flatten(), ord=np.inf))
def test_choose():
x = np.random.randint(10, size=(15, 16))
d = from_array(x, chunks=(4, 5))
assert_eq(choose(d > 5, [0, d]), np.choose(x > 5, [0, x]))
assert_eq(choose(d > 5, [-d, d]), np.choose(x > 5, [-x, x]))
def test_where():
x = np.random.randint(10, size=(15, 16))
d = from_array(x, chunks=(4, 5))
y = np.random.randint(10, size=15)
e = from_array(y, chunks=(4,))
assert_eq(where(d > 5, d, 0), np.where(x > 5, x, 0))
assert_eq(where(d > 5, d, -e[:, None]), np.where(x > 5, x, -y[:, None]))
def test_where_has_informative_error():
x = da.ones(5, chunks=3)
try:
da.where(x > 0)
except Exception as e:
assert 'dask' in str(e)
def test_coarsen():
x = np.random.randint(10, size=(24, 24))
d = from_array(x, chunks=(4, 8))
assert_eq(chunk.coarsen(np.sum, x, {0: 2, 1: 4}),
coarsen(np.sum, d, {0: 2, 1: 4}))
assert_eq(chunk.coarsen(np.sum, x, {0: 2, 1: 4}),
coarsen(da.sum, d, {0: 2, 1: 4}))
def test_coarsen_with_excess():
x = da.arange(10, chunks=5)
assert_eq(coarsen(np.min, x, {0: 3}, trim_excess=True),
np.array([0, 5]))
assert_eq(coarsen(np.sum, x, {0: 3}, trim_excess=True),
np.array([0 + 1 + 2, 5 + 6 + 7]))
def test_insert():
x = np.random.randint(10, size=(10, 10))
a = from_array(x, chunks=(5, 5))
y = np.random.randint(10, size=(5, 10))
b = from_array(y, chunks=(4, 4))
assert_eq(np.insert(x, 0, -1, axis=0), insert(a, 0, -1, axis=0))
assert_eq(np.insert(x, 3, -1, axis=-1), insert(a, 3, -1, axis=-1))
assert_eq(np.insert(x, 5, -1, axis=1), insert(a, 5, -1, axis=1))
assert_eq(np.insert(x, -1, -1, axis=-2), insert(a, -1, -1, axis=-2))
assert_eq(np.insert(x, [2, 3, 3], -1, axis=1),
insert(a, [2, 3, 3], -1, axis=1))
assert_eq(np.insert(x, [2, 3, 8, 8, -2, -2], -1, axis=0),
insert(a, [2, 3, 8, 8, -2, -2], -1, axis=0))
assert_eq(np.insert(x, slice(1, 4), -1, axis=1),
insert(a, slice(1, 4), -1, axis=1))
assert_eq(np.insert(x, [2] * 3 + [5] * 2, y, axis=0),
insert(a, [2] * 3 + [5] * 2, b, axis=0))
assert_eq(np.insert(x, 0, y[0], axis=1),
insert(a, 0, b[0], axis=1))
pytest.raises(NotImplementedError, lambda: insert(a, [4, 2], -1, axis=0))
pytest.raises(IndexError, lambda: insert(a, [3], -1, axis=2))
pytest.raises(IndexError, lambda: insert(a, [3], -1, axis=-3))
assert same_keys(insert(a, [2, 3, 8, 8, -2, -2], -1, axis=0),
insert(a, [2, 3, 8, 8, -2, -2], -1, axis=0))
def test_multi_insert():
z = np.random.randint(10, size=(1, 2))
c = from_array(z, chunks=(1, 2))
assert_eq(np.insert(np.insert(z, [0, 1], -1, axis=0), [1], -1, axis=1),
insert(insert(c, [0, 1], -1, axis=0), [1], -1, axis=1))
def test_broadcast_to():
x = np.random.randint(10, size=(5, 1, 6))
a = from_array(x, chunks=(3, 1, 3))
for shape in [(5, 4, 6), (2, 5, 1, 6), (3, 4, 5, 4, 6)]:
assert_eq(chunk.broadcast_to(x, shape),
broadcast_to(a, shape))
pytest.raises(ValueError, lambda: broadcast_to(a, (2, 1, 6)))
pytest.raises(ValueError, lambda: broadcast_to(a, (3,)))
def test_ravel():
x = np.random.randint(10, size=(4, 6))
# 2d
for chunks in [(4, 6), (2, 6)]:
a = from_array(x, chunks=chunks)
assert_eq(x.ravel(), a.ravel())
assert len(a.ravel().dask) == len(a.dask) + len(a.chunks[0])
# 0d
assert_eq(x[0, 0].ravel(), a[0, 0].ravel())
# 1d
a_flat = a.ravel()
assert_eq(a_flat.ravel(), a_flat)
# 3d
x = np.random.randint(10, size=(2, 3, 4))
for chunks in [4, (1, 3, 4)]:
a = from_array(x, chunks=chunks)
assert_eq(x.ravel(), a.ravel())
assert_eq(x.flatten(), a.flatten())
assert_eq(np.ravel(x), da.ravel(a))
def _maybe_len(l):
try:
return len(l)
except TypeError:
return 0
@pytest.mark.parametrize('chunks', [(4, 6), (2, 6)])
@pytest.mark.parametrize('shift', [3, 7, 9, (3, 9), (7, 2)])
@pytest.mark.parametrize('axis', [None, 0, 1, -1, (0, 1), (1, 0)])
def test_roll(chunks, shift, axis):
x = np.random.randint(10, size=(4, 6))
a = from_array(x, chunks=chunks)
if _maybe_len(shift) != _maybe_len(axis):
with pytest.raises(TypeError if axis is None else ValueError):
da.roll(a, shift, axis)
else:
if (_maybe_len(shift) > 1 and
LooseVersion(np.__version__) < LooseVersion("1.12.0")):
pytest.skip(
"NumPy %s doesn't support multiple axes with `roll`."
" Need NumPy 1.12.0 or greater." % np.__version__
)
assert_eq(np.roll(x, shift, axis), da.roll(a, shift, axis))
@pytest.mark.parametrize('original_shape,new_shape,chunks', [
((10,), (10,), (3, 3, 4)),
((10,), (10, 1, 1), 5),
((10,), (1, 10,), 5),
((24,), (2, 3, 4), 12),
((1, 24,), (2, 3, 4), 12),
((2, 3, 4), (24,), (1, 3, 4)),
((2, 3, 4), (24,), 4),
((2, 3, 4), (24, 1), 4),
((2, 3, 4), (1, 24), 4),
((4, 4, 1), (4, 4), 2),
((4, 4), (4, 4, 1), 2),
((1, 4, 4), (4, 4), 2),
((1, 4, 4), (4, 4, 1), 2),
((1, 4, 4), (1, 1, 4, 4), 2),
((4, 4), (1, 4, 4, 1), 2),
((4, 4), (1, 4, 4), 2),
((2, 3), (2, 3), (1, 2)),
((2, 3), (3, 2), 3),
((4, 2, 3), (4, 6), 4),
((3, 4, 5, 6), (3, 4, 5, 6), (2, 3, 4, 5)),
((), (1,), 1),
((1,), (), 1),
((24,), (3, 8), 24),
((24,), (4, 6), 6),
((24,), (4, 3, 2), 6),
((24,), (4, 6, 1), 6),
((24,), (4, 6), (6, 12, 6)),
((64, 4), (8, 8, 4), (16, 2)),
((4, 64), (4, 8, 4, 2), (2, 16)),
((4, 8, 4, 2), (2, 1, 2, 32, 2), (2, 4, 2, 2)),
((4, 1, 4), (4, 4), (2, 1, 2)),
((0, 10), (0, 5, 2), (5, 5)),
((5, 0, 2), (0, 10), (5, 2, 2)),
((0,), (2, 0, 2), (4,)),
((2, 0, 2), (0,), (4, 4, 4)),
])
def test_reshape(original_shape, new_shape, chunks):
x = np.random.randint(10, size=original_shape)
a = from_array(x, chunks=chunks)
xr = x.reshape(new_shape)
ar = a.reshape(new_shape)
if a.shape == new_shape:
assert a is ar
assert_eq(xr, ar)
def test_reshape_exceptions():
x = np.random.randint(10, size=(5,))
a = from_array(x, chunks=(2,))
with pytest.raises(ValueError):
da.reshape(a, (100,))
def test_reshape_splat():
x = da.ones((5, 5), chunks=(2, 2))
assert_eq(x.reshape((25,)), x.reshape(25))
def test_reshape_fails_for_dask_only():
cases = [
((3, 4), (4, 3), 2),
]
for original_shape, new_shape, chunks in cases:
x = np.random.randint(10, size=original_shape)
a = from_array(x, chunks=chunks)
assert x.reshape(new_shape).shape == new_shape
with pytest.raises(ValueError):
da.reshape(a, new_shape)
def test_reshape_unknown_dimensions():
for original_shape in [(24,), (2, 12), (2, 3, 4)]:
for new_shape in [(-1,), (2, -1), (-1, 3, 4)]:
x = np.random.randint(10, size=original_shape)
a = from_array(x, 24)
assert_eq(x.reshape(new_shape), a.reshape(new_shape))
pytest.raises(ValueError, lambda: da.reshape(a, (-1, -1)))
def test_full():
d = da.full((3, 4), 2, chunks=((2, 1), (2, 2)))
assert d.chunks == ((2, 1), (2, 2))
assert_eq(d, np.full((3, 4), 2))
def test_map_blocks():
x = np.arange(400).reshape((20, 20))
d = from_array(x, chunks=(7, 7))
e = d.map_blocks(inc, dtype=d.dtype)
assert d.chunks == e.chunks
assert_eq(e, x + 1)
e = d.map_blocks(inc, name='increment')
assert e.name == 'increment'
d = from_array(x, chunks=(10, 10))
e = d.map_blocks(lambda x: x[::2, ::2], chunks=(5, 5), dtype=d.dtype)
assert e.chunks == ((5, 5), (5, 5))
assert_eq(e, x[::2, ::2])
d = from_array(x, chunks=(8, 8))
e = d.map_blocks(lambda x: x[::2, ::2], chunks=((4, 4, 2), (4, 4, 2)),
dtype=d.dtype)
assert_eq(e, x[::2, ::2])
def test_map_blocks2():
x = np.arange(10, dtype='i8')
d = from_array(x, chunks=(2,))
def func(block, block_id=None, c=0):
return np.ones_like(block) * sum(block_id) + c
out = d.map_blocks(func, dtype='i8')
expected = np.array([0, 0, 1, 1, 2, 2, 3, 3, 4, 4], dtype='i8')
assert_eq(out, expected)
assert same_keys(d.map_blocks(func, dtype='i8'), out)
out = d.map_blocks(func, dtype='i8', c=1)
expected = expected + 1
assert_eq(out, expected)
assert same_keys(d.map_blocks(func, dtype='i8', c=1), out)
def test_map_blocks_with_constants():
d = da.arange(10, chunks=3)
e = d.map_blocks(add, 100, dtype=d.dtype)
assert_eq(e, np.arange(10) + 100)
assert_eq(da.map_blocks(sub, d, 10, dtype=d.dtype),
np.arange(10) - 10)
assert_eq(da.map_blocks(sub, 10, d, dtype=d.dtype),
10 - np.arange(10))
def test_map_blocks_with_kwargs():
d = da.arange(10, chunks=5)
result = d.map_blocks(np.max, axis=0, keepdims=True, dtype=d.dtype,
chunks=(1,))
assert_eq(result, np.array([4, 9]))
def test_map_blocks_with_chunks():
dx = da.ones((5, 3), chunks=(2, 2))
dy = da.ones((5, 3), chunks=(2, 2))
dz = da.map_blocks(np.add, dx, dy, chunks=dx.chunks)
assert_eq(dz, np.ones((5, 3)) * 2)
def test_map_blocks_dtype_inference():
x = np.arange(50).reshape((5, 10))
y = np.arange(10)
dx = da.from_array(x, chunks=5)
dy = da.from_array(y, chunks=5)
def foo(x, *args, **kwargs):
cast = kwargs.pop('cast', 'i8')
return (x + sum(args)).astype(cast)
assert_eq(dx.map_blocks(foo, dy, 1), foo(dx, dy, 1))
assert_eq(dx.map_blocks(foo, dy, 1, cast='f8'), foo(dx, dy, 1, cast='f8'))
assert_eq(dx.map_blocks(foo, dy, 1, cast='f8', dtype='f8'),
foo(dx, dy, 1, cast='f8', dtype='f8'))
def foo(x):
raise RuntimeError("Woops")
try:
dx.map_blocks(foo)
except Exception as e:
assert e.args[0].startswith("`dtype` inference failed")
assert "Please specify the dtype explicitly" in e.args[0]
assert 'RuntimeError' in e.args[0]
else:
assert False, "Should have errored"
def test_fromfunction():
def f(x, y):
return x + y
d = fromfunction(f, shape=(5, 5), chunks=(2, 2), dtype='f8')
assert_eq(d, np.fromfunction(f, shape=(5, 5)))
assert same_keys(d, fromfunction(f, shape=(5, 5), chunks=(2, 2), dtype='f8'))
def test_from_function_requires_block_args():
x = np.arange(10)
pytest.raises(Exception, lambda: from_array(x))
def test_repr():
d = da.ones((4, 4), chunks=(2, 2))
assert d.name[:5] in repr(d)
assert str(d.shape) in repr(d)
assert str(d.dtype) in repr(d)
d = da.ones((4000, 4), chunks=(4, 2))
assert len(str(d)) < 1000
# Empty array
d = da.Array({}, 'd', ((), (3, 4)), dtype='i8')
assert str(d.shape) in repr(d)
assert str(d.dtype) in repr(d)
def test_slicing_with_ellipsis():
x = np.arange(256).reshape((4, 4, 4, 4))
d = da.from_array(x, chunks=((2, 2, 2, 2)))
assert_eq(d[..., 1], x[..., 1])
assert_eq(d[0, ..., 1], x[0, ..., 1])
def test_slicing_with_ndarray():
x = np.arange(64).reshape((8, 8))
d = da.from_array(x, chunks=((4, 4)))
assert_eq(d[np.arange(8)], x)
assert_eq(d[np.ones(8, dtype=bool)], x)
def test_dtype():
d = da.ones((4, 4), chunks=(2, 2))
assert d.dtype == d.compute().dtype
assert (d * 1.0).dtype == (d + 1.0).compute().dtype
assert d.sum().dtype == d.sum().compute().dtype # no shape
def test_blockdims_from_blockshape():
assert blockdims_from_blockshape((10, 10), (4, 3)) == ((4, 4, 2), (3, 3, 3, 1))
pytest.raises(TypeError, lambda: blockdims_from_blockshape((10,), None))
assert blockdims_from_blockshape((1e2, 3), [1e1, 3]) == ((10, ) * 10, (3, ))
assert blockdims_from_blockshape((np.int8(10), ), (5, )) == ((5, 5), )
def test_coerce():
d = da.from_array(np.array([1]), chunks=(1,))
with dask.set_options(get=dask.get):
assert bool(d)
assert int(d)
assert float(d)
assert complex(d)
def test_store_delayed_target():
from dask.delayed import delayed
d = da.ones((4, 4), chunks=(2, 2))
a, b = d + 1, d + 2
# empty buffers to be used as targets
targs = {}
def make_target(key):
a = np.empty((4, 4))
targs[key] = a
return a
# delayed calls to these targets
atd = delayed(make_target)('at')
btd = delayed(make_target)('bt')
store([a, b], [atd, btd])
at = targs['at']
bt = targs['bt']
assert_eq(at, a)
assert_eq(bt, b)
pytest.raises(ValueError, lambda: store([a], [at, bt]))
pytest.raises(ValueError, lambda: store(at, at))
pytest.raises(ValueError, lambda: store([at, bt], [at, bt]))
def test_store():
d = da.ones((4, 4), chunks=(2, 2))
a, b = d + 1, d + 2
at = np.empty(shape=(4, 4))
bt = np.empty(shape=(4, 4))
store([a, b], [at, bt])
assert (at == 2).all()
assert (bt == 3).all()
pytest.raises(ValueError, lambda: store([a], [at, bt]))
pytest.raises(ValueError, lambda: store(at, at))
pytest.raises(ValueError, lambda: store([at, bt], [at, bt]))
def test_store_regions():
d = da.ones((4, 4, 4), chunks=(2, 2, 2))
a, b = d + 1, d + 2
at = np.zeros(shape=(8, 4, 6))
bt = np.zeros(shape=(8, 4, 6))
region = (slice(None, None, 2), slice(None), [1, 2, 4, 5])
# Single region:
v = store([a, b], [at, bt], regions=region, compute=False)
assert (at == 0).all() and (bt[region] == 0).all()
v.compute()
assert (at[region] == 2).all() and (bt[region] == 3).all()
assert not (bt == 3).all() and not (bt == 0).all()
assert not (at == 3).all() and not (at == 0).all()
# Multiple regions:
at = np.zeros(shape=(8, 4, 6))
bt = np.zeros(shape=(8, 4, 6))
v = store([a, b], [at, bt], regions=[region, region], compute=False)
assert (at == 0).all() and (bt[region] == 0).all()
v.compute()
assert (at[region] == 2).all() and (bt[region] == 3).all()
assert not (bt == 3).all() and not (bt == 0).all()
assert not (at == 3).all() and not (at == 0).all()
def test_store_compute_false():
d = da.ones((4, 4), chunks=(2, 2))
a, b = d + 1, d + 2
at = np.zeros(shape=(4, 4))
bt = np.zeros(shape=(4, 4))
v = store([a, b], [at, bt], compute=False)
assert (at == 0).all() and (bt == 0).all()
v.compute()
assert (at == 2).all() and (bt == 3).all()
class ThreadSafetyError(Exception):
pass
class NonthreadSafeStore(object):
def __init__(self):
self.in_use = False
def __setitem__(self, key, value):
if self.in_use:
raise ThreadSafetyError()
self.in_use = True
time.sleep(0.001)
self.in_use = False
class ThreadSafeStore(object):
def __init__(self):
self.concurrent_uses = 0
self.max_concurrent_uses = 0
def __setitem__(self, key, value):
self.concurrent_uses += 1
self.max_concurrent_uses = max(self.concurrent_uses, self.max_concurrent_uses)
time.sleep(0.01)
self.concurrent_uses -= 1
def test_store_locks():
_Lock = type(Lock())
d = da.ones((10, 10), chunks=(2, 2))
a, b = d + 1, d + 2
at = np.zeros(shape=(10, 10))
bt = np.zeros(shape=(10, 10))
lock = Lock()
v = store([a, b], [at, bt], compute=False, lock=lock)
dsk = v.dask
locks = set(vv for v in dsk.values() for vv in v if isinstance(vv, _Lock))
assert locks == set([lock])
# Ensure same lock applies over multiple stores
at = NonthreadSafeStore()
v = store([a, b], [at, at], lock=lock,
get=dask.threaded.get, num_workers=10)
# Don't assume thread safety by default
at = NonthreadSafeStore()
store(a, at, get=dask.threaded.get, num_workers=10)
a.store(at, get=dask.threaded.get, num_workers=10)
# Ensure locks can be removed
at = ThreadSafeStore()
for i in range(10):
a.store(at, lock=False, get=dask.threaded.get, num_workers=10)
if at.max_concurrent_uses > 1:
break
if i == 9:
assert False
@pytest.mark.xfail(reason="can't lock with multiprocessing")
def test_store_multiprocessing_lock():
d = da.ones((10, 10), chunks=(2, 2))
a = d + 1
at = np.zeros(shape=(10, 10))
a.store(at, get=dask.multiprocessing.get, num_workers=10)
def test_to_hdf5():
h5py = pytest.importorskip('h5py')
x = da.ones((4, 4), chunks=(2, 2))
y = da.ones(4, chunks=2, dtype='i4')
with tmpfile('.hdf5') as fn:
x.to_hdf5(fn, '/x')
with h5py.File(fn) as f:
d = f['/x']
assert_eq(d[:], x)
assert d.chunks == (2, 2)
with tmpfile('.hdf5') as fn:
x.to_hdf5(fn, '/x', chunks=None)
with h5py.File(fn) as f:
d = f['/x']
assert_eq(d[:], x)
assert d.chunks is None
with tmpfile('.hdf5') as fn:
x.to_hdf5(fn, '/x', chunks=(1, 1))
with h5py.File(fn) as f:
d = f['/x']
assert_eq(d[:], x)
assert d.chunks == (1, 1)
with tmpfile('.hdf5') as fn:
da.to_hdf5(fn, {'/x': x, '/y': y})
with h5py.File(fn) as f:
assert_eq(f['/x'][:], x)
assert f['/x'].chunks == (2, 2)
assert_eq(f['/y'][:], y)
assert f['/y'].chunks == (2,)
def test_to_dask_dataframe():
dd = pytest.importorskip('dask.dataframe')
a = da.ones((4,), chunks=(2,))
d = a.to_dask_dataframe()
assert isinstance(d, dd.Series)
a = da.ones((4, 4), chunks=(2, 2))
d = a.to_dask_dataframe()
assert isinstance(d, dd.DataFrame)
def test_np_array_with_zero_dimensions():
d = da.ones((4, 4), chunks=(2, 2))
assert_eq(np.array(d.sum()), np.array(d.compute().sum()))
def test_unique():
x = np.array([1, 2, 4, 4, 5, 2])
d = da.from_array(x, chunks=(3,))
assert_eq(da.unique(d), np.unique(x))
def test_dtype_complex():
x = np.arange(24).reshape((4, 6)).astype('f4')
y = np.arange(24).reshape((4, 6)).astype('i8')
z = np.arange(24).reshape((4, 6)).astype('i2')
a = da.from_array(x, chunks=(2, 3))
b = da.from_array(y, chunks=(2, 3))
c = da.from_array(z, chunks=(2, 3))
# local helper: assert that two dtypes match (compared via their string form)
def assert_eq(a, b):
assert (isinstance(a, np.dtype) and
isinstance(b, np.dtype) and
str(a) == str(b))
assert_eq(a.dtype, x.dtype)
assert_eq(b.dtype, y.dtype)
assert_eq((a + 1).dtype, (x + 1).dtype)
assert_eq((a + b).dtype, (x + y).dtype)
assert_eq(a.T.dtype, x.T.dtype)
assert_eq(a[:3].dtype, x[:3].dtype)
assert_eq((a.dot(b.T)).dtype, (x.dot(y.T)).dtype)
assert_eq(stack([a, b]).dtype, np.vstack([x, y]).dtype)
assert_eq(concatenate([a, b]).dtype, np.concatenate([x, y]).dtype)
assert_eq(b.std().dtype, y.std().dtype)
assert_eq(c.sum().dtype, z.sum().dtype)
assert_eq(a.min().dtype, a.min().dtype)
assert_eq(b.std().dtype, b.std().dtype)
assert_eq(a.argmin(axis=0).dtype, a.argmin(axis=0).dtype)
assert_eq(da.sin(c).dtype, np.sin(z).dtype)
assert_eq(da.exp(b).dtype, np.exp(y).dtype)
assert_eq(da.floor(a).dtype, np.floor(x).dtype)
assert_eq(da.isnan(b).dtype, np.isnan(y).dtype)
with ignoring(ImportError):
assert da.isnull(b).dtype == 'bool'
assert da.notnull(b).dtype == 'bool'
x = np.array([('a', 1)], dtype=[('text', 'S1'), ('numbers', 'i4')])
d = da.from_array(x, chunks=(1,))
assert_eq(d['text'].dtype, x['text'].dtype)
assert_eq(d[['numbers', 'text']].dtype, x[['numbers', 'text']].dtype)
def test_astype():
x = np.ones((5, 5), dtype='f8')
d = da.from_array(x, chunks=(2,2))
assert d.astype('i8').dtype == 'i8'
assert_eq(d.astype('i8'), x.astype('i8'))
assert same_keys(d.astype('i8'), d.astype('i8'))
with pytest.raises(TypeError):
d.astype('i8', casting='safe')
with pytest.raises(TypeError):
d.astype('i8', not_a_real_kwarg='foo')
# smoketest with kwargs
assert_eq(d.astype('i8', copy=False), x.astype('i8', copy=False))
# Check it's a noop
assert d.astype('f8') is d
def test_arithmetic():
x = np.arange(5).astype('f4') + 2
y = np.arange(5).astype('i8') + 2
z = np.arange(5).astype('i4') + 2
a = da.from_array(x, chunks=(2,))
b = da.from_array(y, chunks=(2,))
c = da.from_array(z, chunks=(2,))
assert_eq(a + b, x + y)
assert_eq(a * b, x * y)
assert_eq(a - b, x - y)
assert_eq(a / b, x / y)
assert_eq(b & b, y & y)
assert_eq(b | b, y | y)
assert_eq(b ^ b, y ^ y)
assert_eq(a // b, x // y)
assert_eq(a ** b, x ** y)
assert_eq(a % b, x % y)
assert_eq(a > b, x > y)
assert_eq(a < b, x < y)
assert_eq(a >= b, x >= y)
assert_eq(a <= b, x <= y)
assert_eq(a == b, x == y)
assert_eq(a != b, x != y)
assert_eq(a + 2, x + 2)
assert_eq(a * 2, x * 2)
assert_eq(a - 2, x - 2)
assert_eq(a / 2, x / 2)
assert_eq(b & True, y & True)
assert_eq(b | True, y | True)
assert_eq(b ^ True, y ^ True)
assert_eq(a // 2, x // 2)
assert_eq(a ** 2, x ** 2)
assert_eq(a % 2, x % 2)
assert_eq(a > 2, x > 2)
assert_eq(a < 2, x < 2)
assert_eq(a >= 2, x >= 2)
assert_eq(a <= 2, x <= 2)
assert_eq(a == 2, x == 2)
assert_eq(a != 2, x != 2)
assert_eq(2 + b, 2 + y)
assert_eq(2 * b, 2 * y)
assert_eq(2 - b, 2 - y)
assert_eq(2 / b, 2 / y)
assert_eq(True & b, True & y)
assert_eq(True | b, True | y)
assert_eq(True ^ b, True ^ y)
assert_eq(2 // b, 2 // y)
assert_eq(2 ** b, 2 ** y)
assert_eq(2 % b, 2 % y)
assert_eq(2 > b, 2 > y)
assert_eq(2 < b, 2 < y)
assert_eq(2 >= b, 2 >= y)
assert_eq(2 <= b, 2 <= y)
assert_eq(2 == b, 2 == y)
assert_eq(2 != b, 2 != y)
assert_eq(-a, -x)
assert_eq(abs(a), abs(x))
assert_eq(~(a == b), ~(x == y))
assert_eq(~(a == b), ~(x == y))
assert_eq(da.logaddexp(a, b), np.logaddexp(x, y))
assert_eq(da.logaddexp2(a, b), np.logaddexp2(x, y))
assert_eq(da.exp(b), np.exp(y))
assert_eq(da.log(a), np.log(x))
assert_eq(da.log10(a), np.log10(x))
assert_eq(da.log1p(a), np.log1p(x))
assert_eq(da.expm1(b), np.expm1(y))
assert_eq(da.sqrt(a), np.sqrt(x))
assert_eq(da.square(a), np.square(x))
assert_eq(da.sin(a), np.sin(x))
assert_eq(da.cos(b), np.cos(y))
assert_eq(da.tan(a), np.tan(x))
assert_eq(da.arcsin(b / 10), np.arcsin(y / 10))
assert_eq(da.arccos(b / 10), np.arccos(y / 10))
assert_eq(da.arctan(b / 10), np.arctan(y / 10))
assert_eq(da.arctan2(b * 10, a), np.arctan2(y * 10, x))
assert_eq(da.hypot(b, a), np.hypot(y, x))
assert_eq(da.sinh(a), np.sinh(x))
assert_eq(da.cosh(b), np.cosh(y))
assert_eq(da.tanh(a), np.tanh(x))
assert_eq(da.arcsinh(b * 10), np.arcsinh(y * 10))
assert_eq(da.arccosh(b * 10), np.arccosh(y * 10))
assert_eq(da.arctanh(b / 10), np.arctanh(y / 10))
assert_eq(da.deg2rad(a), np.deg2rad(x))
assert_eq(da.rad2deg(a), np.rad2deg(x))
assert_eq(da.logical_and(a < 1, b < 4), np.logical_and(x < 1, y < 4))
assert_eq(da.logical_or(a < 1, b < 4), np.logical_or(x < 1, y < 4))
assert_eq(da.logical_xor(a < 1, b < 4), np.logical_xor(x < 1, y < 4))
assert_eq(da.logical_not(a < 1), np.logical_not(x < 1))
assert_eq(da.maximum(a, 5 - a), np.maximum(a, 5 - a))
assert_eq(da.minimum(a, 5 - a), np.minimum(a, 5 - a))
assert_eq(da.fmax(a, 5 - a), np.fmax(a, 5 - a))
assert_eq(da.fmin(a, 5 - a), np.fmin(a, 5 - a))
assert_eq(da.isreal(a + 1j * b), np.isreal(x + 1j * y))
assert_eq(da.iscomplex(a + 1j * b), np.iscomplex(x + 1j * y))
assert_eq(da.isfinite(a), np.isfinite(x))
assert_eq(da.isinf(a), np.isinf(x))
assert_eq(da.isnan(a), np.isnan(x))
assert_eq(da.signbit(a - 3), np.signbit(x - 3))
assert_eq(da.copysign(a - 3, b), np.copysign(x - 3, y))
assert_eq(da.nextafter(a - 3, b), np.nextafter(x - 3, y))
assert_eq(da.ldexp(c, c), np.ldexp(z, z))
assert_eq(da.fmod(a * 12, b), np.fmod(x * 12, y))
assert_eq(da.floor(a * 0.5), np.floor(x * 0.5))
assert_eq(da.ceil(a), np.ceil(x))
assert_eq(da.trunc(a / 2), np.trunc(x / 2))
assert_eq(da.degrees(b), np.degrees(y))
assert_eq(da.radians(a), np.radians(x))
assert_eq(da.rint(a + 0.3), np.rint(x + 0.3))
assert_eq(da.fix(a - 2.5), np.fix(x - 2.5))
assert_eq(da.angle(a + 1j), np.angle(x + 1j))
assert_eq(da.real(a + 1j), np.real(x + 1j))
assert_eq((a + 1j).real, np.real(x + 1j))
assert_eq(da.imag(a + 1j), np.imag(x + 1j))
assert_eq((a + 1j).imag, np.imag(x + 1j))
assert_eq(da.conj(a + 1j * b), np.conj(x + 1j * y))
assert_eq((a + 1j * b).conj(), (x + 1j * y).conj())
assert_eq(da.clip(b, 1, 4), np.clip(y, 1, 4))
assert_eq(b.clip(1, 4), y.clip(1, 4))
assert_eq(da.fabs(b), np.fabs(y))
assert_eq(da.sign(b - 2), np.sign(y - 2))
assert_eq(da.absolute(b - 2), np.absolute(y - 2))
assert_eq(da.absolute(b - 2 + 1j), np.absolute(y - 2 + 1j))
l1, l2 = da.frexp(a)
r1, r2 = np.frexp(x)
assert_eq(l1, r1)
assert_eq(l2, r2)
l1, l2 = da.modf(a)
r1, r2 = np.modf(x)
assert_eq(l1, r1)
assert_eq(l2, r2)
assert_eq(da.around(a, -1), np.around(x, -1))
def test_elemwise_consistent_names():
a = da.from_array(np.arange(5, dtype='f4'), chunks=(2,))
b = da.from_array(np.arange(5, dtype='f4'), chunks=(2,))
assert same_keys(a + b, a + b)
assert same_keys(a + 2, a + 2)
assert same_keys(da.exp(a), da.exp(a))
assert same_keys(da.exp(a, dtype='f8'), da.exp(a, dtype='f8'))
assert same_keys(da.maximum(a, b), da.maximum(a, b))
def test_optimize():
x = np.arange(5).astype('f4')
a = da.from_array(x, chunks=(2,))
expr = a[1:4] + 1
result = optimize(expr.dask, expr._keys())
assert isinstance(result, dict)
assert all(key in result for key in expr._keys())
def test_slicing_with_non_ndarrays():
class ARangeSlice(object):
def __init__(self, start, stop):
self.start = start
self.stop = stop
def __array__(self):
return np.arange(self.start, self.stop)
class ARangeSlicable(object):
dtype = 'i8'
def __init__(self, n):
self.n = n
@property
def shape(self):
return (self.n,)
def __getitem__(self, key):
return ARangeSlice(key[0].start, key[0].stop)
x = da.from_array(ARangeSlicable(10), chunks=(4,))
assert_eq((x + 1).sum(), (np.arange(10, dtype=x.dtype) + 1).sum())
def test_getarray():
assert type(getarray(np.matrix([[1]]), 0)) == np.ndarray
assert_eq(getarray([1, 2, 3, 4, 5], slice(1, 4)), np.array([2, 3, 4]))
assert_eq(getarray(np.arange(5), (None, slice(None, None))),
np.arange(5)[None, :])
def test_squeeze():
x = da.ones((10, 1), chunks=(3, 1))
assert_eq(x.squeeze(), x.compute().squeeze())
assert x.squeeze().chunks == ((3, 3, 3, 1),)
assert same_keys(x.squeeze(), x.squeeze())
def test_size():
x = da.ones((10, 2), chunks=(3, 1))
assert x.size == np.array(x).size
assert isinstance(x.size, int)
def test_nbytes():
x = da.ones((10, 2), chunks=(3, 1))
assert x.nbytes == np.array(x).nbytes
def test_itemsize():
x = da.ones((10, 2), chunks=(3, 1))
assert x.itemsize == 8
def test_Array_normalizes_dtype():
x = da.ones((3,), chunks=(1,), dtype=int)
assert isinstance(x.dtype, np.dtype)
def test_from_array_with_lock():
x = np.arange(10)
d = da.from_array(x, chunks=5, lock=True)
tasks = [v for k, v in d.dask.items() if k[0] == d.name]
assert hasattr(tasks[0][3], 'acquire')
assert len(set(task[3] for task in tasks)) == 1
assert_eq(d, x)
lock = Lock()
e = da.from_array(x, chunks=5, lock=lock)
f = da.from_array(x, chunks=5, lock=lock)
assert_eq(e + f, x + x)
def test_from_array_slicing_results_in_ndarray():
x = np.matrix(np.arange(100).reshape((10, 10)))
dx = da.from_array(x, chunks=(5, 5))
s1 = dx[0:5]
assert type(dx[0:5].compute()) == np.ndarray
s2 = s1[0:3]
assert type(s2.compute()) == np.ndarray
s3 = s2[:, 0]
assert type(s3.compute()) == np.ndarray
def test_asarray():
assert_eq(da.asarray([1, 2, 3]), np.asarray([1, 2, 3]))
x = da.asarray([1, 2, 3])
assert da.asarray(x) is x
def test_asarray_h5py():
h5py = pytest.importorskip('h5py')
with tmpfile('.hdf5') as fn:
with h5py.File(fn) as f:
d = f.create_dataset('/x', shape=(2, 2), dtype=float)
x = da.asarray(d)
assert d in x.dask.values()
assert not any(isinstance(v, np.ndarray) for v in x.dask.values())
def test_from_func():
x = np.arange(10)
f = lambda n: n * x
d = from_func(f, (10,), x.dtype, kwargs={'n': 2})
assert d.shape == x.shape
assert d.dtype == x.dtype
assert_eq(d.compute(), 2 * x)
assert same_keys(d, from_func(f, (10,), x.dtype, kwargs={'n': 2}))
def test_topk():
x = np.array([5, 2, 1, 6])
d = da.from_array(x, chunks=2)
e = da.topk(2, d)
assert e.chunks == ((2,),)
assert_eq(e, np.sort(x)[-1:-3:-1])
assert same_keys(da.topk(2, d), e)
def test_topk_k_bigger_than_chunk():
x = np.array([5, 2, 1, 6])
d = da.from_array(x, chunks=2)
e = da.topk(3, d)
assert e.chunks == ((3,),)
assert_eq(e, np.array([6, 5, 2]))
def test_bincount():
x = np.array([2, 1, 5, 2, 1])
d = da.from_array(x, chunks=2)
e = da.bincount(d, minlength=6)
assert_eq(e, np.bincount(x, minlength=6))
assert same_keys(da.bincount(d, minlength=6), e)
def test_bincount_with_weights():
x = np.array([2, 1, 5, 2, 1])
d = da.from_array(x, chunks=2)
weights = np.array([1, 2, 1, 0.5, 1])
dweights = da.from_array(weights, chunks=2)
e = da.bincount(d, weights=dweights, minlength=6)
assert_eq(e, np.bincount(x, weights=dweights, minlength=6))
assert same_keys(da.bincount(d, weights=dweights, minlength=6), e)
def test_bincount_raises_informative_error_on_missing_minlength_kwarg():
x = np.array([2, 1, 5, 2, 1])
d = da.from_array(x, chunks=2)
try:
da.bincount(d)
except Exception as e:
assert 'minlength' in str(e)
else:
assert False
@pytest.mark.skipif(LooseVersion(np.__version__) < '1.10.0',
reason="NumPy doesn't yet support nd digitize")
def test_digitize():
x = np.array([2, 4, 5, 6, 1])
bins = np.array([1, 2, 3, 4, 5])
for chunks in [2, 4]:
for right in [False, True]:
d = da.from_array(x, chunks=chunks)
assert_eq(da.digitize(d, bins, right=right),
np.digitize(x, bins, right=right))
x = np.random.random(size=(100, 100))
bins = np.random.random(size=13)
bins.sort()
for chunks in [(10, 10), (10, 20), (13, 17), (87, 54)]:
for right in [False, True]:
d = da.from_array(x, chunks=chunks)
assert_eq(da.digitize(d, bins, right=right),
np.digitize(x, bins, right=right))
def test_histogram():
# Test for normal, flattened input
n = 100
v = da.random.random(n, chunks=10)
bins = np.arange(0, 1.01, 0.01)
(a1, b1) = da.histogram(v, bins=bins)
(a2, b2) = np.histogram(v, bins=bins)
# Check if the sum of the bins equals the number of samples
assert a2.sum(axis=0) == n
assert a1.sum(axis=0) == n
assert_eq(a1, a2)
assert same_keys(da.histogram(v, bins=bins)[0], a1)
def test_histogram_alternative_bins_range():
v = da.random.random(100, chunks=10)
(a1, b1) = da.histogram(v, bins=10, range=(0, 1))
(a2, b2) = np.histogram(v, bins=10, range=(0, 1))
assert_eq(a1, a2)
assert_eq(b1, b2)
def test_histogram_return_type():
v = da.random.random(100, chunks=10)
bins = np.arange(0, 1.01, 0.01)
# Check if return type is same as hist
bins = np.arange(0, 11, 1, dtype='i4')
assert_eq(da.histogram(v * 10, bins=bins)[0],
np.histogram(v * 10, bins=bins)[0])
def test_histogram_extra_args_and_shapes():
# Check for extra args and shapes
bins = np.arange(0, 1.01, 0.01)
v = da.random.random(100, chunks=10)
data = [(v, bins, da.ones(100, chunks=v.chunks) * 5),
(da.random.random((50, 50), chunks=10), bins, da.ones((50, 50), chunks=10) * 5)]
for v, bins, w in data:
# normed
assert_eq(da.histogram(v, bins=bins, normed=True)[0],
np.histogram(v, bins=bins, normed=True)[0])
# density
assert_eq(da.histogram(v, bins=bins, density=True)[0],
np.histogram(v, bins=bins, density=True)[0])
# weights
assert_eq(da.histogram(v, bins=bins, weights=w)[0],
np.histogram(v, bins=bins, weights=w)[0])
assert_eq(da.histogram(v, bins=bins, weights=w, density=True)[0],
np.histogram(v, bins=bins, weights=w, density=True)[0])
def test_concatenate3_2():
x = np.array([1, 2])
assert_eq(concatenate3([x, x, x]),
np.array([1, 2, 1, 2, 1, 2]))
x = np.array([[1, 2]])
assert (concatenate3([[x, x, x], [x, x, x]]) ==
np.array([[1, 2, 1, 2, 1, 2],
[1, 2, 1, 2, 1, 2]])).all()
assert (concatenate3([[x, x], [x, x], [x, x]]) ==
np.array([[1, 2, 1, 2],
[1, 2, 1, 2],
[1, 2, 1, 2]])).all()
x = np.arange(12).reshape((2, 2, 3))
assert_eq(concatenate3([[[x, x, x], [x, x, x]],
[[x, x, x], [x, x, x]]]),
np.array([[[ 0, 1, 2, 0, 1, 2, 0, 1, 2],
[ 3, 4, 5, 3, 4, 5, 3, 4, 5],
[ 0, 1, 2, 0, 1, 2, 0, 1, 2],
[ 3, 4, 5, 3, 4, 5, 3, 4, 5]],
[[ 6, 7, 8, 6, 7, 8, 6, 7, 8],
[ 9, 10, 11, 9, 10, 11, 9, 10, 11],
[ 6, 7, 8, 6, 7, 8, 6, 7, 8],
[ 9, 10, 11, 9, 10, 11, 9, 10, 11]],
[[ 0, 1, 2, 0, 1, 2, 0, 1, 2],
[ 3, 4, 5, 3, 4, 5, 3, 4, 5],
[ 0, 1, 2, 0, 1, 2, 0, 1, 2],
[ 3, 4, 5, 3, 4, 5, 3, 4, 5]],
[[ 6, 7, 8, 6, 7, 8, 6, 7, 8],
[ 9, 10, 11, 9, 10, 11, 9, 10, 11],
[ 6, 7, 8, 6, 7, 8, 6, 7, 8],
[ 9, 10, 11, 9, 10, 11, 9, 10, 11]]]))
def test_map_blocks3():
x = np.arange(10)
y = np.arange(10) * 2
d = da.from_array(x, chunks=5)
e = da.from_array(y, chunks=5)
assert_eq(da.core.map_blocks(lambda a, b: a + 2 * b, d, e, dtype=d.dtype),
x + 2 * y)
z = np.arange(100).reshape((10, 10))
f = da.from_array(z, chunks=5)
func = lambda a, b: a + 2 * b
res = da.core.map_blocks(func, d, f, dtype=d.dtype)
assert_eq(res, x + 2 * z)
assert same_keys(da.core.map_blocks(func, d, f, dtype=d.dtype), res)
assert_eq(da.map_blocks(func, f, d, dtype=d.dtype), z + 2 * x)
def test_from_array_with_missing_chunks():
x = np.random.randn(2, 4, 3)
d = da.from_array(x, chunks=(None, 2, None))
assert d.chunks == da.from_array(x, chunks=(2, 2, 3)).chunks
def test_cache():
x = da.arange(15, chunks=5)
y = 2 * x + 1
z = y.cache()
assert len(z.dask) == 3 # very short graph
assert_eq(y, z)
cache = np.empty(15, dtype=y.dtype)
z = y.cache(store=cache)
assert len(z.dask) < 6 # very short graph
assert z.chunks == y.chunks
assert_eq(y, z)
def test_take_dask_from_numpy():
x = np.arange(5).astype('f8')
y = da.from_array(np.array([1, 2, 3, 3, 2, 1]), chunks=3)
z = da.take(x * 2, y)
assert z.chunks == y.chunks
assert_eq(z, np.array([2., 4., 6., 6., 4., 2.]))
def test_normalize_chunks():
assert normalize_chunks(3, (4, 6)) == ((3, 1), (3, 3))
def test_raise_on_no_chunks():
x = da.ones(6, chunks=3)
try:
Array(x.dask, x.name, chunks=None, dtype=x.dtype, shape=None)
assert False
except ValueError as e:
assert "dask.pydata.org" in str(e)
pytest.raises(ValueError, lambda: da.ones(6))
def test_chunks_is_immutable():
x = da.ones(6, chunks=3)
try:
x.chunks = 2
assert False
except TypeError as e:
assert 'rechunk(2)' in str(e)
def test_raise_on_bad_kwargs():
x = da.ones(5, chunks=3)
try:
da.minimum(x, out=None)
except TypeError as e:
assert 'minimum' in str(e)
assert 'out' in str(e)
def test_long_slice():
x = np.arange(10000)
d = da.from_array(x, chunks=1)
assert_eq(d[8000:8200], x[8000:8200])
def test_h5py_newaxis():
h5py = pytest.importorskip('h5py')
with tmpfile('h5') as fn:
with h5py.File(fn) as f:
x = f.create_dataset('/x', shape=(10, 10), dtype='f8')
d = da.from_array(x, chunks=(5, 5))
assert d[None, :, :].compute(get=get_sync).shape == (1, 10, 10)
assert d[:, None, :].compute(get=get_sync).shape == (10, 1, 10)
assert d[:, :, None].compute(get=get_sync).shape == (10, 10, 1)
assert same_keys(d[:, :, None], d[:, :, None])
def test_ellipsis_slicing():
assert_eq(da.ones(4, chunks=2)[...], np.ones(4))
def test_point_slicing():
x = np.arange(56).reshape((7, 8))
d = da.from_array(x, chunks=(3, 4))
result = d.vindex[[1, 2, 5, 5], [3, 1, 6, 1]]
assert_eq(result, x[[1, 2, 5, 5], [3, 1, 6, 1]])
result = d.vindex[[0, 1, 6, 0], [0, 1, 0, 7]]
assert_eq(result, x[[0, 1, 6, 0], [0, 1, 0, 7]])
assert same_keys(result, d.vindex[[0, 1, 6, 0], [0, 1, 0, 7]])
def test_point_slicing_with_full_slice():
from dask.array.core import _vindex_transpose, _get_axis
x = np.arange(4 * 5 * 6 * 7).reshape((4, 5, 6, 7))
d = da.from_array(x, chunks=(2, 3, 3, 4))
inds = [[[1, 2, 3], None, [3, 2, 1], [5, 3, 4]],
[[1, 2, 3], None, [4, 3, 2], None],
[[1, 2, 3], [3, 2, 1]],
[[1, 2, 3], [3, 2, 1], [3, 2, 1], [5, 3, 4]],
[[], [], [], None],
[np.array([1, 2, 3]), None, np.array([4, 3, 2]), None],
[None, None, [1, 2, 3], [4, 3, 2]],
[None, [0, 2, 3], None, [0, 3, 2]]]
for ind in inds:
slc = [i if isinstance(i, (np.ndarray, list)) else slice(None, None)
for i in ind]
result = d.vindex[tuple(slc)]
# Rotate the expected result accordingly
axis = _get_axis(ind)
expected = _vindex_transpose(x[tuple(slc)], axis)
assert_eq(result, expected)
# Always have the first axis be the length of the points
k = len(next(i for i in ind if isinstance(i, (np.ndarray, list))))
assert result.shape[0] == k
def test_slice_with_floats():
d = da.ones((5,), chunks=(3,))
with pytest.raises(IndexError):
d[1.5]
with pytest.raises(IndexError):
d[0:1.5]
with pytest.raises(IndexError):
d[[1, 1.5]]
def test_vindex_errors():
d = da.ones((5, 5, 5), chunks=(3, 3, 3))
pytest.raises(IndexError, lambda: d.vindex[0])
pytest.raises(IndexError, lambda: d.vindex[[1, 2, 3]])
pytest.raises(IndexError, lambda: d.vindex[[1, 2, 3], [1, 2, 3], 0])
pytest.raises(IndexError, lambda: d.vindex[[1], [1, 2, 3]])
pytest.raises(IndexError, lambda: d.vindex[[1, 2, 3], [[1], [2], [3]]])
def test_vindex_merge():
from dask.array.core import _vindex_merge
locations = [1], [2, 0]
values = [np.array([[1, 2, 3]]),
np.array([[10, 20, 30], [40, 50, 60]])]
assert (_vindex_merge(locations, values) == np.array([[40, 50, 60],
[1, 2, 3],
[10, 20, 30]])).all()
def test_empty_array():
assert_eq(np.arange(0), da.arange(0, chunks=5))
def test_array():
x = np.ones(5, dtype='i4')
d = da.ones(5, chunks=3, dtype='i4')
assert_eq(da.array(d, ndmin=3, dtype='i8'),
np.array(x, ndmin=3, dtype='i8'))
def test_cov():
x = np.arange(56).reshape((7, 8))
d = da.from_array(x, chunks=(4, 4))
assert_eq(da.cov(d), np.cov(x))
assert_eq(da.cov(d, rowvar=0), np.cov(x, rowvar=0))
assert_eq(da.cov(d, ddof=10), np.cov(x, ddof=10))
assert_eq(da.cov(d, bias=1), np.cov(x, bias=1))
assert_eq(da.cov(d, d), np.cov(x, x))
y = np.arange(8)
e = da.from_array(y, chunks=(4,))
assert_eq(da.cov(d, e), np.cov(x, y))
assert_eq(da.cov(e, d), np.cov(y, x))
pytest.raises(ValueError, lambda: da.cov(d, ddof=1.5))
def test_corrcoef():
x = np.arange(56).reshape((7, 8))
d = da.from_array(x, chunks=(4, 4))
assert_eq(da.corrcoef(d), np.corrcoef(x))
assert_eq(da.corrcoef(d, rowvar=0), np.corrcoef(x, rowvar=0))
assert_eq(da.corrcoef(d, d), np.corrcoef(x, x))
y = np.arange(8)
e = da.from_array(y, chunks=(4,))
assert_eq(da.corrcoef(d, e), np.corrcoef(x, y))
assert_eq(da.corrcoef(e, d), np.corrcoef(y, x))
def test_memmap():
with tmpfile('npy') as fn_1:
with tmpfile('npy') as fn_2:
try:
x = da.arange(100, chunks=15)
target = np.memmap(fn_1, shape=x.shape, mode='w+', dtype=x.dtype)
x.store(target)
assert_eq(target, x)
np.save(fn_2, target)
assert_eq(np.load(fn_2, mmap_mode='r'), x)
finally:
target._mmap.close()
def test_to_npy_stack():
x = np.arange(5 * 10 * 10).reshape((5, 10, 10))
d = da.from_array(x, chunks=(2, 4, 4))
with tmpdir() as dirname:
da.to_npy_stack(dirname, d, axis=0)
assert os.path.exists(os.path.join(dirname, '0.npy'))
assert (np.load(os.path.join(dirname, '1.npy')) == x[2:4]).all()
e = da.from_npy_stack(dirname)
assert_eq(d, e)
def test_view():
x = np.arange(56).reshape((7, 8))
d = da.from_array(x, chunks=(2, 3))
assert_eq(x.view('i4'), d.view('i4'))
assert_eq(x.view('i2'), d.view('i2'))
assert all(isinstance(s, int) for s in d.shape)
x = np.arange(8, dtype='i1')
d = da.from_array(x, chunks=(4,))
assert_eq(x.view('i4'), d.view('i4'))
with pytest.raises(ValueError):
x = np.arange(8, dtype='i1')
d = da.from_array(x, chunks=(3,))
d.view('i4')
with pytest.raises(ValueError):
d.view('i4', order='asdf')
def test_view_fortran():
x = np.asfortranarray(np.arange(64).reshape((8, 8)))
d = da.from_array(x, chunks=(2, 3))
assert_eq(x.view('i4'), d.view('i4', order='F'))
assert_eq(x.view('i2'), d.view('i2', order='F'))
def test_h5py_tokenize():
h5py = pytest.importorskip('h5py')
with tmpfile('hdf5') as fn1:
with tmpfile('hdf5') as fn2:
f = h5py.File(fn1)
g = h5py.File(fn2)
f['x'] = np.arange(10).astype(float)
g['x'] = np.ones(10).astype(float)
x1 = f['x']
x2 = g['x']
assert tokenize(x1) != tokenize(x2)
def test_map_blocks_with_changed_dimension():
x = np.arange(56).reshape((7, 8))
d = da.from_array(x, chunks=(7, 4))
e = d.map_blocks(lambda b: b.sum(axis=0), chunks=(4,), drop_axis=0,
dtype=d.dtype)
assert e.chunks == ((4, 4),)
assert_eq(e, x.sum(axis=0))
# Provided chunks have wrong shape
with pytest.raises(ValueError):
d.map_blocks(lambda b: b.sum(axis=0), chunks=(7, 4), drop_axis=0)
with pytest.raises(ValueError):
d.map_blocks(lambda b: b.sum(axis=0), chunks=((4, 4, 4),), drop_axis=0)
# Can't drop axis with more than 1 block
with pytest.raises(ValueError):
d.map_blocks(lambda b: b.sum(axis=1), drop_axis=1, dtype=d.dtype)
# Can't use both drop_axis and new_axis
with pytest.raises(ValueError):
d.map_blocks(lambda b: b, drop_axis=1, new_axis=1)
d = da.from_array(x, chunks=(4, 8))
e = d.map_blocks(lambda b: b.sum(axis=1), drop_axis=1, dtype=d.dtype)
assert e.chunks == ((4, 3),)
assert_eq(e, x.sum(axis=1))
x = np.arange(64).reshape((8, 8))
d = da.from_array(x, chunks=(4, 4))
e = d.map_blocks(lambda b: b[None, :, :, None],
chunks=(1, 4, 4, 1), new_axis=[0, 3], dtype=d.dtype)
assert e.chunks == ((1,), (4, 4), (4, 4), (1,))
assert_eq(e, x[None, :, :, None])
e = d.map_blocks(lambda b: b[None, :, :, None],
new_axis=[0, 3], dtype=d.dtype)
assert e.chunks == ((1,), (4, 4), (4, 4), (1,))
assert_eq(e, x[None, :, :, None])
def test_broadcast_chunks():
assert broadcast_chunks(((5, 5),), ((5, 5),)) == ((5, 5),)
a = ((10, 10, 10), (5, 5),)
b = ((5, 5),)
assert broadcast_chunks(a, b) == ((10, 10, 10), (5, 5),)
assert broadcast_chunks(b, a) == ((10, 10, 10), (5, 5),)
a = ((10, 10, 10), (5, 5),)
b = ((1,), (5, 5),)
assert broadcast_chunks(a, b) == ((10, 10, 10), (5, 5),)
a = ((10, 10, 10), (5, 5),)
b = ((3, 3,), (5, 5),)
with pytest.raises(ValueError):
broadcast_chunks(a, b)
a = ((1,), (5, 5),)
b = ((1,), (5, 5),)
assert broadcast_chunks(a, b) == a
def test_chunks_error():
x = np.ones((10, 10))
with pytest.raises(ValueError):
da.from_array(x, chunks=(5,))
def test_array_compute_forward_kwargs():
x = da.arange(10, chunks=2).sum()
x.compute(bogus_keyword=10)
def test_dont_fuse_outputs():
dsk = {('x', 0): np.array([1, 2]),
('x', 1): (inc, ('x', 0))}
a = da.Array(dsk, 'x', chunks=(2,), shape=(4,), dtype=np.array([1]).dtype)
assert_eq(a, np.array([1, 2, 2, 3], dtype=a.dtype))
def test_dont_dealias_outputs():
dsk = {('x', 0, 0): np.ones((2, 2)),
('x', 0, 1): np.ones((2, 2)),
('x', 1, 0): np.ones((2, 2)),
('x', 1, 1): ('x', 0, 0)}
a = da.Array(dsk, 'x', chunks=(2, 2), shape=(4, 4), dtype=np.ones(1).dtype)
assert_eq(a, np.ones((4, 4)))
def test_timedelta_op():
x = np.array([np.timedelta64(10, 'h')])
y = np.timedelta64(1, 'h')
a = da.from_array(x, chunks=(1,)) / y
assert a.compute() == x / y
def test_to_delayed():
x = da.random.random((4, 4), chunks=(2, 2))
y = x + 10
[[a, b], [c, d]] = y.to_delayed()
assert_eq(a.compute(), y[:2, :2])
s = 2
x = da.from_array(np.array(s), chunks=0)
a = x.to_delayed()[tuple()]
assert a.compute() == s
def test_cumulative():
x = da.arange(20, chunks=5)
assert_eq(x.cumsum(axis=0), np.arange(20).cumsum())
assert_eq(x.cumprod(axis=0), np.arange(20).cumprod())
assert_eq(da.nancumsum(x, axis=0), nancumsum(np.arange(20)))
assert_eq(da.nancumprod(x, axis=0), nancumprod(np.arange(20)))
a = np.random.random((20))
rs = np.random.RandomState(0)
a[rs.rand(*a.shape) < 0.5] = np.nan
x = da.from_array(a, chunks=5)
assert_eq(da.nancumsum(x, axis=0), nancumsum(a))
assert_eq(da.nancumprod(x, axis=0), nancumprod(a))
a = np.random.random((20, 24))
x = da.from_array(a, chunks=(6, 5))
assert_eq(x.cumsum(axis=0), a.cumsum(axis=0))
assert_eq(x.cumsum(axis=1), a.cumsum(axis=1))
assert_eq(x.cumprod(axis=0), a.cumprod(axis=0))
assert_eq(x.cumprod(axis=1), a.cumprod(axis=1))
assert_eq(da.nancumsum(x, axis=0), nancumsum(a, axis=0))
assert_eq(da.nancumsum(x, axis=1), nancumsum(a, axis=1))
assert_eq(da.nancumprod(x, axis=0), nancumprod(a, axis=0))
assert_eq(da.nancumprod(x, axis=1), nancumprod(a, axis=1))
a = np.random.random((20, 24))
rs = np.random.RandomState(0)
a[rs.rand(*a.shape) < 0.5] = np.nan
x = da.from_array(a, chunks=(6, 5))
assert_eq(da.nancumsum(x, axis=0), nancumsum(a, axis=0))
assert_eq(da.nancumsum(x, axis=1), nancumsum(a, axis=1))
assert_eq(da.nancumprod(x, axis=0), nancumprod(a, axis=0))
assert_eq(da.nancumprod(x, axis=1), nancumprod(a, axis=1))
a = np.random.random((20, 24, 13))
x = da.from_array(a, chunks=(6, 5, 4))
for axis in [0, 1, 2, -1, -2, -3]:
assert_eq(x.cumsum(axis=axis), a.cumsum(axis=axis))
assert_eq(x.cumprod(axis=axis), a.cumprod(axis=axis))
assert_eq(da.nancumsum(x, axis=axis), nancumsum(a, axis=axis))
assert_eq(da.nancumprod(x, axis=axis), nancumprod(a, axis=axis))
a = np.random.random((20, 24, 13))
rs = np.random.RandomState(0)
a[rs.rand(*a.shape) < 0.5] = np.nan
x = da.from_array(a, chunks=(6, 5, 4))
for axis in [0, 1, 2, -1, -2, -3]:
assert_eq(da.nancumsum(x, axis=axis), nancumsum(a, axis=axis))
assert_eq(da.nancumprod(x, axis=axis), nancumprod(a, axis=axis))
with pytest.raises(ValueError):
x.cumsum(axis=3)
with pytest.raises(ValueError):
x.cumsum(axis=-4)
def test_eye():
assert_eq(da.eye(9, chunks=3), np.eye(9))
assert_eq(da.eye(10, chunks=3), np.eye(10))
assert_eq(da.eye(9, chunks=3, M=11), np.eye(9, M=11))
assert_eq(da.eye(11, chunks=3, M=9), np.eye(11, M=9))
assert_eq(da.eye(7, chunks=3, M=11), np.eye(7, M=11))
assert_eq(da.eye(11, chunks=3, M=7), np.eye(11, M=7))
assert_eq(da.eye(9, chunks=3, k=2), np.eye(9, k=2))
assert_eq(da.eye(9, chunks=3, k=-2), np.eye(9, k=-2))
assert_eq(da.eye(7, chunks=3, M=11, k=5), np.eye(7, M=11, k=5))
assert_eq(da.eye(11, chunks=3, M=7, k=-6), np.eye(11, M=7, k=-6))
assert_eq(da.eye(6, chunks=3, M=9, k=7), np.eye(6, M=9, k=7))
assert_eq(da.eye(12, chunks=3, M=6, k=-3), np.eye(12, M=6, k=-3))
assert_eq(da.eye(9, chunks=3, dtype=int), np.eye(9, dtype=int))
assert_eq(da.eye(10, chunks=3, dtype=int), np.eye(10, dtype=int))
def test_diag():
v = np.arange(11)
assert_eq(da.diag(v), np.diag(v))
v = da.arange(11, chunks=3)
darr = da.diag(v)
nparr = np.diag(v)
assert_eq(darr, nparr)
assert sorted(da.diag(v).dask) == sorted(da.diag(v).dask)
v = v + v + 3
darr = da.diag(v)
nparr = np.diag(v)
assert_eq(darr, nparr)
v = da.arange(11, chunks=11)
darr = da.diag(v)
nparr = np.diag(v)
assert_eq(darr, nparr)
assert sorted(da.diag(v).dask) == sorted(da.diag(v).dask)
x = np.arange(64).reshape((8, 8))
assert_eq(da.diag(x), np.diag(x))
d = da.from_array(x, chunks=(4, 4))
assert_eq(da.diag(d), np.diag(x))
def test_tril_triu():
A = np.random.randn(20, 20)
for chk in [5, 4]:
dA = da.from_array(A, (chk, chk))
assert np.allclose(da.triu(dA).compute(), np.triu(A))
assert np.allclose(da.tril(dA).compute(), np.tril(A))
for k in [-25, -20, -19, -15, -14, -9, -8, -6, -5, -1,
1, 4, 5, 6, 8, 10, 11, 15, 16, 19, 20, 21]:
assert np.allclose(da.triu(dA, k).compute(), np.triu(A, k))
assert np.allclose(da.tril(dA, k).compute(), np.tril(A, k))
def test_tril_triu_errors():
A = np.random.random_integers(0, 10, (10, 10, 10))
dA = da.from_array(A, chunks=(5, 5, 5))
pytest.raises(ValueError, lambda: da.triu(dA))
A = np.random.random_integers(0, 10, (30, 35))
dA = da.from_array(A, chunks=(5, 5))
pytest.raises(NotImplementedError, lambda: da.triu(dA))
def test_atop_names():
x = da.ones(5, chunks=(2,))
y = atop(add, 'i', x, 'i', dtype=x.dtype)
assert y.name.startswith('add')
def test_atop_new_axes():
def f(x):
return x[:, None] * np.ones((1, 7))
x = da.ones(5, chunks=2)
y = atop(f, 'aq', x, 'a', new_axes={'q': 7}, concatenate=True,
dtype=x.dtype)
assert y.chunks == ((2, 2, 1), (7,))
assert_eq(y, np.ones((5, 7)))
def f(x):
return x[None, :] * np.ones((7, 1))
x = da.ones(5, chunks=2)
y = atop(f, 'qa', x, 'a', new_axes={'q': 7}, concatenate=True,
dtype=x.dtype)
assert y.chunks == ((7,), (2, 2, 1))
assert_eq(y, np.ones((7, 5)))
def f(x):
y = x.sum(axis=1)
return y[:, None] * np.ones((1, 5))
x = da.ones((4, 6), chunks=(2, 2))
y = atop(f, 'aq', x, 'ab', new_axes={'q': 5}, concatenate=True,
dtype=x.dtype)
assert y.chunks == ((2, 2), (5,))
assert_eq(y, np.ones((4, 5)) * 6)
def test_atop_kwargs():
def f(a, b=0):
return a + b
x = da.ones(5, chunks=(2,))
y = atop(f, 'i', x, 'i', b=10, dtype=x.dtype)
assert_eq(y, np.ones(5) + 10)
def test_atop_chunks():
x = da.ones((5, 5), chunks=((2, 1, 2), (3, 2)))
def double(a, axis=0):
return np.concatenate([a, a], axis=axis)
y = atop(double, 'ij', x, 'ij',
adjust_chunks={'i': lambda n: 2 * n}, axis=0, dtype=x.dtype)
assert y.chunks == ((4, 2, 4), (3, 2))
assert_eq(y, np.ones((10, 5)))
y = atop(double, 'ij', x, 'ij',
adjust_chunks={'j': lambda n: 2 * n}, axis=1, dtype=x.dtype)
assert y.chunks == ((2, 1, 2), (6, 4))
assert_eq(y, np.ones((5, 10)))
x = da.ones((10, 10), chunks=(5, 5))
y = atop(double, 'ij', x, 'ij', axis=0,
adjust_chunks={'i': 10}, dtype=x.dtype)
assert y.chunks == ((10, 10), (5, 5))
assert_eq(y, np.ones((20, 10)))
y = atop(double, 'ij', x, 'ij', axis=0,
adjust_chunks={'i': (10, 10)}, dtype=x.dtype)
assert y.chunks == ((10, 10), (5, 5))
assert_eq(y, np.ones((20, 10)))
def test_from_delayed():
v = delayed(np.ones)((5, 3))
x = from_delayed(v, shape=(5, 3), dtype=np.ones(0).dtype)
assert isinstance(x, Array)
assert_eq(x, np.ones((5, 3)))
def test_A_property():
x = da.ones(5, chunks=(2,))
assert x.A is x
def test_copy_mutate():
x = da.arange(5, chunks=(2,))
y = x.copy()
memo = {}
y2 = copy.deepcopy(x, memo=memo)
x[x % 2 == 0] = -1
xx = np.arange(5)
xx[xx % 2 == 0] = -1
assert_eq(x, xx)
assert_eq(y, np.arange(5))
assert_eq(y2, np.arange(5))
assert memo[id(x)] is y2
def test_npartitions():
assert da.ones(5, chunks=(2,)).npartitions == 3
assert da.ones((5, 5), chunks=(2, 3)).npartitions == 6
def test_astype_gh1151():
a = np.arange(5).astype(np.int32)
b = da.from_array(a, (1,))
assert_eq(a.astype(np.int16), b.astype(np.int16))
def test_elemwise_name():
assert (da.ones(5, chunks=2) + 1).name.startswith('add-')
def test_map_blocks_name():
assert da.ones(5, chunks=2).map_blocks(inc).name.startswith('inc-')
def test_from_array_names():
pytest.importorskip('distributed')
from distributed.utils import key_split
x = np.ones(10)
d = da.from_array(x, chunks=2)
names = countby(key_split, d.dask)
assert set(names.values()) == set([1, 5])
def test_array_picklable():
from pickle import loads, dumps
a = da.arange(100, chunks=25)
a2 = loads(dumps(a))
assert_eq(a, a2)
def test_swapaxes():
x = np.random.normal(0, 10, size=(10, 12, 7))
d = da.from_array(x, chunks=(4, 5, 2))
assert_eq(np.swapaxes(x, 0, 1), da.swapaxes(d, 0, 1))
assert_eq(np.swapaxes(x, 2, 1), da.swapaxes(d, 2, 1))
assert_eq(x.swapaxes(2, 1), d.swapaxes(2, 1))
assert_eq(x.swapaxes(0, 0), d.swapaxes(0, 0))
assert_eq(x.swapaxes(1, 2), d.swapaxes(1, 2))
assert_eq(x.swapaxes(0, -1), d.swapaxes(0, -1))
assert_eq(x.swapaxes(-1, 1), d.swapaxes(-1, 1))
assert d.swapaxes(0, 1).name == d.swapaxes(0, 1).name
assert d.swapaxes(0, 1).name != d.swapaxes(1, 0).name
def test_from_array_raises_on_bad_chunks():
x = np.ones(10)
with pytest.raises(ValueError):
da.from_array(x, chunks=(5, 5, 5))
# with pytest.raises(ValueError):
# da.from_array(x, chunks=100)
with pytest.raises(ValueError):
da.from_array(x, chunks=((5, 5, 5),))
def test_concatenate_axes():
x = np.ones((2, 2, 2))
assert_eq(concatenate_axes([x, x], axes=[0]),
np.ones((4, 2, 2)))
assert_eq(concatenate_axes([x, x, x], axes=[0]),
np.ones((6, 2, 2)))
assert_eq(concatenate_axes([x, x], axes=[1]),
np.ones((2, 4, 2)))
assert_eq(concatenate_axes([[x, x], [x, x]], axes=[0, 1]),
np.ones((4, 4, 2)))
assert_eq(concatenate_axes([[x, x], [x, x]], axes=[0, 2]),
np.ones((4, 2, 4)))
assert_eq(concatenate_axes([[x, x, x], [x, x, x]], axes=[1, 2]),
np.ones((2, 4, 6)))
with pytest.raises(ValueError):
concatenate_axes([[x, x], [x, x]], axes=[0]) # not all nested lists accounted for
with pytest.raises(ValueError):
concatenate_axes([x, x], axes=[0, 1, 2, 3]) # too many axes
def test_atop_concatenate():
x = da.ones((4, 4, 4), chunks=(2, 2, 2))
y = da.ones((4, 4), chunks=(2, 2))
def f(a, b):
assert isinstance(a, np.ndarray)
assert isinstance(b, np.ndarray)
assert a.shape == (2, 4, 4)
assert b.shape == (4, 4)
return (a + b).sum(axis=(1, 2))
z = atop(f, 'i', x, 'ijk', y, 'jk', concatenate=True, dtype=x.dtype)
assert_eq(z, np.ones(4) * 32)
z = atop(add, 'ij', y, 'ij', y, 'ij', concatenate=True, dtype=x.dtype)
assert_eq(z, np.ones((4, 4)) * 2)
def f(a, b, c):
assert isinstance(a, np.ndarray)
assert isinstance(b, np.ndarray)
assert isinstance(c, np.ndarray)
assert a.shape == (4, 2, 4)
assert b.shape == (4, 4)
assert c.shape == (4, 2)
return np.ones(5)
z = atop(f, 'j', x, 'ijk', y, 'ki', y, 'ij', concatenate=True,
dtype=x.dtype)
assert_eq(z, np.ones(10), check_shape=False)
def test_common_blockdim():
assert common_blockdim([(5,), (5,)]) == (5,)
assert common_blockdim([(5,), (2, 3,)]) == (2, 3)
assert common_blockdim([(5, 5), (2, 3, 5)]) == (2, 3, 5)
assert common_blockdim([(5, 5), (2, 3, 5)]) == (2, 3, 5)
assert common_blockdim([(5, 2, 3), (2, 3, 5)]) == (2, 3, 2, 3)
assert common_blockdim([(1, 2), (2, 1)]) == (1, 1, 1)
assert common_blockdim([(1, 2, 2), (2, 1, 2), (2, 2, 1)]) == (1, 1, 1, 1, 1)
def test_uneven_chunks_that_fit_neatly():
x = da.arange(10, chunks=((5, 5),))
y = da.ones(10, chunks=((5, 2, 3),))
assert_eq(x + y, np.arange(10) + np.ones(10))
z = x + y
assert z.chunks == ((5, 2, 3),)
def test_elemwise_uneven_chunks():
x = da.arange(10, chunks=((4, 6),))
y = da.ones(10, chunks=((6, 4),))
assert_eq(x + y, np.arange(10) + np.ones(10))
z = x + y
assert z.chunks == ((4, 2, 4),)
x = da.random.random((10, 10), chunks=((4, 6), (5, 2, 3)))
y = da.random.random((4, 10, 10), chunks=((2, 2), (6, 4), (2, 3, 5)))
z = x + y
assert_eq(x + y, x.compute() + y.compute())
assert z.chunks == ((2, 2), (4, 2, 4), (2, 3, 2, 3))
def test_uneven_chunks_atop():
x = da.random.random((10, 10), chunks=((2, 3, 2, 3), (5, 5)))
y = da.random.random((10, 10), chunks=((4, 4, 2), (4, 2, 4)))
z = atop(np.dot, 'ik', x, 'ij', y, 'jk', dtype=x.dtype, concatenate=True)
assert z.chunks == (x.chunks[0], y.chunks[1])
assert_eq(z, x.compute().dot(y))
def test_warn_bad_rechunking():
x = da.ones((20, 20), chunks=(20, 1))
y = da.ones((20, 20), chunks=(1, 20))
with warnings.catch_warnings(record=True) as record:
x + y
assert record
assert '20' in record[0].message.args[0]
def test_optimize_fuse_keys():
x = da.ones(10, chunks=(5,))
y = x + 1
z = y + 1
dsk = z._optimize(z.dask, z._keys())
assert not set(y.dask) & set(dsk)
dsk = z._optimize(z.dask, z._keys(), fuse_keys=y._keys())
assert all(k in dsk for k in y._keys())
def test_round():
x = np.random.random(10)
d = da.from_array(x, chunks=4)
for i in (0, 1, 4, 5):
assert_eq(x.round(i), d.round(i))
assert_eq(d.round(2), da.round(d, 2))
def test_repeat():
x = np.random.random((10, 11, 13))
d = da.from_array(x, chunks=(4, 5, 3))
repeats = [1, 2, 5]
axes = [0, 1, 2]
for r in repeats:
for a in axes:
assert_eq(x.repeat(r, axis=a), d.repeat(r, axis=a))
assert_eq(d.repeat(2, 0), da.repeat(d, 2, 0))
with pytest.raises(NotImplementedError):
da.repeat(d, np.arange(10))
with pytest.raises(NotImplementedError):
da.repeat(d, 2, None)
with pytest.raises(NotImplementedError):
da.repeat(d, 2)
x = np.arange(5)
d = da.arange(5, chunks=(2,))
assert_eq(x.repeat(3), d.repeat(3))
for r in [1, 2, 3, 4]:
assert all(concat(d.repeat(r).chunks))
@pytest.mark.parametrize('shape, chunks', [
((10,), (1,)),
((10, 11, 13), (4, 5, 3)),
])
@pytest.mark.parametrize('reps', [0, 1, 2, 3, 5])
def test_tile(shape, chunks, reps):
x = np.random.random(shape)
d = da.from_array(x, chunks=chunks)
assert_eq(np.tile(x, reps), da.tile(d, reps))
@pytest.mark.parametrize('shape, chunks', [
((10,), (1,)),
((10, 11, 13), (4, 5, 3)),
])
@pytest.mark.parametrize('reps', [-1, -5])
def test_tile_neg_reps(shape, chunks, reps):
x = np.random.random(shape)
d = da.from_array(x, chunks=chunks)
with pytest.raises(ValueError):
da.tile(d, reps)
@pytest.mark.parametrize('shape, chunks', [
((10,), (1,)),
((10, 11, 13), (4, 5, 3)),
])
@pytest.mark.parametrize('reps', [[1], [1, 2]])
def test_tile_array_reps(shape, chunks, reps):
x = np.random.random(shape)
d = da.from_array(x, chunks=chunks)
with pytest.raises(NotImplementedError):
da.tile(d, reps)
def test_concatenate_stack_dont_warn():
with warnings.catch_warnings(record=True) as record:
da.concatenate([da.ones(2, chunks=1)] * 62)
assert not record
with warnings.catch_warnings(record=True) as record:
da.stack([da.ones(2, chunks=1)] * 62)
assert not record
def test_map_blocks_delayed():
x = da.ones((10, 10), chunks=(5, 5))
y = np.ones((5, 5))
z = x.map_blocks(add, y, dtype=x.dtype)
yy = delayed(y)
zz = x.map_blocks(add, yy, dtype=x.dtype)
assert_eq(z, zz)
assert yy.key in zz.dask
def test_no_chunks():
X = np.arange(11)
dsk = {('x', 0): np.arange(5), ('x', 1): np.arange(5, 11)}
x = Array(dsk, 'x', ((np.nan, np.nan,),), np.arange(1).dtype)
assert_eq(x + 1, X + 1)
assert_eq(x.sum(), X.sum())
assert_eq((x + 1).std(), (X + 1).std())
assert_eq((x + x).std(), (X + X).std())
assert_eq((x + x).std(keepdims=True), (X + X).std(keepdims=True))
def test_no_chunks_2d():
X = np.arange(24).reshape((4, 6))
x = da.from_array(X, chunks=(2, 2))
x._chunks = ((np.nan, np.nan), (np.nan, np.nan, np.nan))
assert_eq(da.log(x), np.log(X))
assert_eq(x.T, X.T)
assert_eq(x.sum(axis=0, keepdims=True), X.sum(axis=0, keepdims=True))
assert_eq(x.sum(axis=1, keepdims=True), X.sum(axis=1, keepdims=True))
assert_eq(x.dot(x.T + 1), X.dot(X.T + 1))
def test_no_chunks_yes_chunks():
X = np.arange(24).reshape((4, 6))
x = da.from_array(X, chunks=(2, 2))
x._chunks = ((2, 2), (np.nan, np.nan, np.nan))
assert (x + 1).chunks == ((2, 2), (np.nan, np.nan, np.nan))
assert (x.T).chunks == ((np.nan, np.nan, np.nan), (2, 2))
assert (x.dot(x.T)).chunks == ((2, 2), (2, 2))
def test_raise_informative_errors_no_chunks():
X = np.arange(10)
a = da.from_array(X, chunks=(5, 5))
a._chunks = ((np.nan, np.nan),)
b = da.from_array(X, chunks=(4, 4, 2))
b._chunks = ((np.nan, np.nan, np.nan),)
for op in [lambda: a + b,
lambda: a[1],
lambda: a[::2],
lambda: a[-5],
lambda: a.rechunk(3),
lambda: a.reshape(2, 5)]:
with pytest.raises(ValueError) as e:
op()
if 'chunk' not in str(e) or 'unknown' not in str(e):
op()
def test_no_chunks_slicing_2d():
X = np.arange(24).reshape((4, 6))
x = da.from_array(X, chunks=(2, 2))
x._chunks = ((2, 2), (np.nan, np.nan, np.nan))
assert_eq(x[0], X[0])
for op in [lambda: x[:, 4],
lambda: x[:, ::2],
lambda: x[0, 2:4]]:
with pytest.raises(ValueError) as e:
op()
assert 'chunk' in str(e) and 'unknown' in str(e)
def test_index_array_with_array_1d():
x = np.arange(10)
dx = da.from_array(x, chunks=(5,))
dx._chunks = ((np.nan, np.nan),)
assert_eq(x[x > 6], dx[dx > 6])
assert_eq(x[x % 2 == 0], dx[dx % 2 == 0])
dy = da.ones(11, chunks=(3,))
with pytest.raises(ValueError):
dx[dy > 5]
def test_index_array_with_array_2d():
x = np.arange(24).reshape((4, 6))
dx = da.from_array(x, chunks=(2, 2))
dx._chunks = ((2, 2), (np.nan, np.nan, np.nan))
assert (sorted(x[x % 2 == 0].tolist()) ==
sorted(dx[dx % 2 == 0].compute().tolist()))
assert (sorted(x[x > 6].tolist()) ==
sorted(dx[dx > 6].compute().tolist()))
def test_setitem_1d():
x = np.arange(10)
dx = da.from_array(x.copy(), chunks=(5,))
x[x > 6] = -1
x[x % 2 == 0] = -2
dx[dx > 6] = -1
dx[dx % 2 == 0] = -2
assert_eq(x, dx)
def test_setitem_2d():
x = np.arange(24).reshape((4, 6))
dx = da.from_array(x.copy(), chunks=(2, 2))
x[x > 6] = -1
x[x % 2 == 0] = -2
dx[dx > 6] = -1
dx[dx % 2 == 0] = -2
assert_eq(x, dx)
def test_setitem_mixed_d():
x = np.arange(24).reshape((4, 6))
dx = da.from_array(x, chunks=(2, 2))
x[x[0, None] > 2] = -1
dx[dx[0, None] > 2] = -1
assert_eq(x, dx)
x[x[None, 0] > 2] = -1
dx[dx[None, 0] > 2] = -1
assert_eq(x, dx)
def test_setitem_errs():
x = da.ones((4, 4), chunks=(2, 2))
with pytest.raises(ValueError):
x[x > 1] = x
def test_zero_slice_dtypes():
x = da.arange(5, chunks=1)
y = x[[]]
assert y.dtype == x.dtype
assert y.shape == (0,)
assert_eq(x[[]], np.arange(5)[[]])
def test_zero_sized_array_rechunk():
x = da.arange(5, chunks=1)[:0]
y = da.atop(identity, 'i', x, 'i', dtype=x.dtype)
assert_eq(x, y)
def test_atop_zero_shape():
da.atop(lambda x: x, 'i',
da.arange(10, chunks=10), 'i',
da.from_array(np.ones((0, 2)), ((), 2)), 'ab',
da.from_array(np.ones((0,)), ((),)), 'a',
dtype='float64')
def test_atop_zero_shape_new_axes():
da.atop(lambda x: np.ones(42), 'i',
da.from_array(np.ones((0, 2)), ((), 2)), 'ab',
da.from_array(np.ones((0,)), ((),)), 'a',
dtype='float64', new_axes={'i': 42})
def test_broadcast_against_zero_shape():
assert_eq(da.arange(1, chunks=1)[:0] + 0,
np.arange(1)[:0] + 0)
assert_eq(da.arange(1, chunks=1)[:0] + 0.1,
np.arange(1)[:0] + 0.1)
assert_eq(da.ones((5, 5), chunks=(2, 3))[:0] + 0,
np.ones((5, 5))[:0] + 0)
assert_eq(da.ones((5, 5), chunks=(2, 3))[:0] + 0.1,
np.ones((5, 5))[:0] + 0.1)
assert_eq(da.ones((5, 5), chunks=(2, 3))[:, :0] + 0,
np.ones((5, 5))[:0] + 0)
assert_eq(da.ones((5, 5), chunks=(2, 3))[:, :0] + 0.1,
np.ones((5, 5))[:0] + 0.1)
def test_fast_from_array():
x = np.zeros(10000000)
start = time.time()
da.from_array(x, chunks=x.shape[0] / 10, name='x')
end = time.time()
assert end - start < 0.100
def test_random_from_array():
x = np.zeros(500000000)
start = time.time()
y = da.from_array(x, chunks=x.shape[0] / 10, name=False)
end = time.time()
assert end - start < 0.400
y2 = da.from_array(x, chunks=x.shape[0] / 10, name=False)
assert y.name != y2.name
def test_concatenate_errs():
with pytest.raises(ValueError) as e:
da.concatenate([da.zeros((2, 1), chunks=(2, 1)),
da.zeros((2, 3), chunks=(2, 3))])
assert 'shape' in str(e).lower()
assert '(2, 1)' in str(e)
with pytest.raises(ValueError):
da.concatenate([da.zeros((1, 2), chunks=(1, 2)),
da.zeros((3, 2), chunks=(3, 2))], axis=1)
def test_stack_errs():
with pytest.raises(ValueError) as e:
da.stack([da.zeros((2), chunks=(2)),
da.zeros((3), chunks=(3))])
assert 'shape' in str(e).lower()
assert '(2,)' in str(e)
def test_transpose_negative_axes():
x = np.ones((2, 3, 4, 5))
y = da.ones((2, 3, 4, 5), chunks=3)
assert_eq(x.transpose([-1, -2, 0, 1]),
y.transpose([-1, -2, 0, 1]))
def test_atop_with_numpy_arrays():
x = np.ones(10)
y = da.ones(10, chunks=(5,))
assert_eq(x + y, x + x)
s = da.sum(x)
assert any(x is v for v in s.dask.values())
@pytest.mark.parametrize('chunks', (100, 6))
@pytest.mark.parametrize('other', [[0, 0, 1], [2, 1, 3], (0, 0, 1)])
def test_elemwise_with_lists(chunks, other):
x = np.arange(12).reshape((4, 3))
d = da.arange(12, chunks=chunks).reshape((4, 3))
x2 = np.vstack([x[:, 0], x[:, 1], x[:, 2]]).T
d2 = da.vstack([d[:, 0], d[:, 1], d[:, 2]]).T
assert_eq(x2, d2)
x3 = x2 * other
d3 = d2 * other
assert_eq(x3, d3)
def test_constructor_plugin():
L = []
L2 = []
with dask.set_options(array_plugins=[L.append, L2.append]):
x = da.ones(10, chunks=5)
y = x + 1
assert L == L2 == [x, y]
with dask.set_options(array_plugins=[lambda x: x.compute()]):
x = da.ones(10, chunks=5)
y = x + 1
assert isinstance(y, np.ndarray)
assert len(L) == 2
| {
"repo_name": "cpcloud/dask",
"path": "dask/array/tests/test_array_core.py",
"copies": "1",
"size": "89460",
"license": "bsd-3-clause",
"hash": -4884996991044134000,
"line_mean": 29.1110737126,
"line_max": 92,
"alpha_frac": 0.5208137715,
"autogenerated": false,
"ratio": 2.6553085393725326,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.8673538173261598,
"avg_score": 0.0005168275221869622,
"num_lines": 2971
} |
from __future__ import absolute_import, division, print_function
import copy
import matplotlib.cm as mcm
def show_label_array(ax, label_array, cmap=None, **kwargs):
"""
Display a labeled array nicely
Additional kwargs are passed through to `ax.imshow`.
If `vmin` is in kwargs, it is clipped to minimum of 0.5.
Parameters
----------
ax : Axes
        The `Axes` object to add the artist to
    label_array : ndarray
        Expected to be an unsigned integer array. 0 is background,
        positive integers label regions of interest
cmap : str or colormap, optional
Color map to use, defaults to 'Paired'
Returns
-------
img : AxesImage
The artist added to the axes
"""
if cmap is None:
cmap = 'Paired'
    _cmap = copy.copy(mcm.get_cmap(cmap))
    _cmap.set_under('w', 0)
    vmin = max(.5, kwargs.pop('vmin', .5))
    ax.set_aspect('equal')
    im = ax.imshow(label_array, cmap=_cmap,
interpolation='nearest',
vmin=vmin,
**kwargs)
return im
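# Hypothetical usage sketch (not part of the original module; the label
# array and the Matplotlib figure below are illustrative assumptions):
# how show_label_array might be called on a small labeled image.
def _example_show_label_array():
    import numpy as np
    import matplotlib.pyplot as plt
    labels = np.zeros((10, 10), dtype=np.uint8)
    labels[2:5, 2:5] = 1   # first region of interest
    labels[6:9, 6:9] = 2   # second region of interest
    fig, ax = plt.subplots()
    im = show_label_array(ax, labels, cmap='Paired')
    fig.colorbar(im, ax=ax)
    plt.show()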
| {
"repo_name": "ericdill/xray-vision",
"path": "xray_vision/mpl_plotting/roi.py",
"copies": "1",
"size": "1066",
"license": "bsd-3-clause",
"hash": -603948614657692400,
"line_mean": 23.7906976744,
"line_max": 67,
"alpha_frac": 0.58630394,
"autogenerated": false,
"ratio": 3.99250936329588,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.507881330329588,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division, print_function
import datetime as dt
import numpy as np
import pandas as pd
from caar.configparser_read import SENSOR_DEVICE_ID, SENSOR_LOCATION_ID
from future import standard_library
standard_library.install_aliases()
def days_of_data_by_id(df):
"""Returns pandas DataFrame with ID as index and the number of calendar
days of data as values.
Args:
df (pandas DataFrame): DataFrame as created by **history** module.
Returns:
days_data_df (pandas DataFrame): DataFrame with count
('Days') for each ID.
"""
time_level = _get_time_level_of_df_multiindex(df)
days_data_df = (df.groupby([df.index.get_level_values(level=0),
pd.TimeGrouper('D', level=time_level)])
.count()
.groupby(level=0)
.count())
days_data_df.columns = ['Days']
return days_data_df
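# Illustrative sketch (the index names and values are assumptions, not from
# this module): a toy MultiIndex DataFrame of the shape produced by the
# **history** module, showing what days_of_data_by_id returns.
def _example_days_of_data_by_id():
    times = pd.date_range('2016-01-01', periods=4, freq='12H')
    index = pd.MultiIndex.from_product([[101], times],
                                       names=['DeviceId', 'Time'])
    df = pd.DataFrame({'Temp': [70, 71, 72, 73]}, index=index)
    # Device 101 has data on two calendar days, so 'Days' == 2
    return days_of_data_by_id(df)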
def consecutive_days_of_observations(sensor_id, devices_file, cycles_df,
sensors_df, geospatial_df=None,
include_first_and_last_days=False):
"""
Returns a pandas DataFrame with a row for each date range indicating the
number of consecutive days of data across all DataFrames given as
arguments. The starting and ending day of each date range are also given.
Only days in which all data types have one or more observations are included.
Args:
sensor_id (int or str): The ID of the device.
devices_file(str): Path of devices file.
cycles_df (pandas DataFrame): DataFrame as created by **history** module.
sensors_df (pandas DataFrame): DataFrame as created by **history** module.
        geospatial_df (Optional[pandas DataFrame]): DataFrame as created by **history** module.
        include_first_and_last_days (Optional[bool]): If True, include the first and last days of data, which may be only partially covered. Defaults to False.
Returns:
consecutive_days_df (pandas DataFrame): DataFrame with 'First Day',
'Last Day', and count ('Consecutive Days') for each set of consecutive
days, for the specified ID.
"""
obs_counts = daily_cycle_sensor_and_geospatial_obs_counts(sensor_id, devices_file,
cycles_df, sensors_df,
geospatial_df=geospatial_df)
streaks = []
if not include_first_and_last_days and len(obs_counts.index) < 3:
return None
elif not include_first_and_last_days:
first_day_in_streak = obs_counts.index[1] # Second day because first may be partial
last_day = obs_counts.index[-2]
else:
first_day_in_streak = obs_counts.index[0]
last_day = obs_counts.index[-1]
day = first_day_in_streak
while day <= last_day:
day += pd.Timedelta(days=1)
if day in obs_counts.index and day <= last_day:
# Streak will include this day
continue
else:
last_day_in_streak = day - pd.Timedelta(days=1)
total_days = (last_day_in_streak - first_day_in_streak +
pd.Timedelta(days=1)) / pd.Timedelta(days=1)
first_day_dt, last_day_dt = tuple(dt.date(d.year, d.month, d.day)
for d in [first_day_in_streak,
last_day_in_streak])
streaks.append((sensor_id, first_day_dt, last_day_dt,
np.int64(total_days)))
if last_day_in_streak < last_day:
first_day_in_streak = last_day_in_streak + pd.Timedelta(days=1)
while first_day_in_streak not in obs_counts.index:
first_day_in_streak += pd.Timedelta(days=1)
day = first_day_in_streak
    streaks_arr = np.array(streaks)
    streaks_arr = streaks_arr[streaks_arr[:, 1].argsort()]
streaks_df = pd.DataFrame(data=streaks_arr,
columns=['ID', 'First day', 'Last day',
'Consecutive days'])
return streaks_df
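# Hypothetical usage sketch (the sensor ID and devices-file path are
# assumptions, not taken from this module): summarizing consecutive days of
# overlapping observations for one sensor, using DataFrames produced by the
# **history** module.
def _example_consecutive_days(cycles_df, sensors_df):
    return consecutive_days_of_observations(
        101, 'devices.csv', cycles_df, sensors_df,
        geospatial_df=None, include_first_and_last_days=False)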
def daily_cycle_sensor_and_geospatial_obs_counts(sensor_id, devices_file, cycles_df, sensors_df,
geospatial_df=None):
"""Returns a pandas DataFrame with the count of observations of each type
of data given in the arguments (cycles, sensor observations, geospatial
observations), by day. Only days in which all data types have one or more
observations are included.
Args:
sensor_id (int or str): The ID of the device.
devices_file(str): Path of devices file.
cycles_df (pandas DataFrame): DataFrame as created by **history** module.
sensors_df (pandas DataFrame): DataFrame as created by **history** module.
geospatial_df (Optional[pandas DataFrame]): DataFrame as created by **history** module.
Returns:
daily_obs_df (pandas DataFrame): DataFrame with index of the date, and
values of 'Cycles_obs', 'Sensors_obs', and 'Geospatial_obs'.
"""
cycles = _slice_by_single_index(cycles_df, id_index=sensor_id)
sensor = _slice_by_single_index(sensors_df, id_index=sensor_id)
# Get df's with number of observation by day
dfs = [daily_data_points_by_id(df) for df in [cycles, sensor]]
geospatial_data = True if isinstance(geospatial_df, pd.DataFrame) else False
if geospatial_data:
location_id = location_id_of_sensor(sensor_id, devices_file)
geospatial_records = _slice_by_single_index(geospatial_df,
id_index=location_id)
dfs.append(daily_data_points_by_id(geospatial_records))
# Get df's with number of observation by day, for each of 3 types of data
if geospatial_data:
cycles, sensor, geospatial = (df.set_index(df.index.droplevel())
for df in dfs)
else:
cycles, sensor = (df.set_index(df.index.droplevel()) for df in dfs)
cycles_sensors = pd.merge(cycles, sensor, left_index=True,
right_index=True, how='inner')
cycle_end_time = _get_time_label_of_data(cycles_df)
    cycles_sensors = cycles_sensors.rename(
        columns={cycle_end_time: 'Cycles_obs'})
if geospatial_data:
return pd.merge(cycles_sensors, geospatial, left_index=True,
right_index=True)
else:
return cycles_sensors
def daily_data_points_by_id(df, devid=None):
"""Returns a pandas DataFrame with MultiIndex of ID and day,
and the count of non-null raw data points per id and day as values.
Args:
df (pandas DataFrame): DataFrame as created by **history** module.
devid (Optional[int or str]): The ID of a device.
Returns:
daily_obs_df (pandas DataFrame): DataFrame indexed by date, and
with counts of observations as values.
"""
# 1) Groups the DataFrame by the primary ID and by time.
# 2) Gives count of records within groups.
if devid is not None:
df = _slice_by_single_index(df, id_index=devid)
time_level = _get_time_level_of_df_multiindex(df)
daily_df = (df.groupby([df.index.get_level_values(level=0),
pd.TimeGrouper('D', level=time_level)])
.count())
return daily_df
def df_select_ids(df, id_or_ids):
"""Returns pandas DataFrame that is restricted to a particular ID or IDs
(device ID, or location ID in the case of geospatial data).
Args:
df (pandas DataFrame): DataFrame that has been created by a function in the **history** or **histsummary** modules (it must have a numeric ID as the first or only index column).
id_or_ids (int or str, list of ints or strs, or tuple): A tuple should have the form (min_ID, max_ID)
    Returns:
        select_id_df (pandas DataFrame)
"""
select_id_df = _slice_by_single_index(df, id_index=id_or_ids)
return select_id_df
def df_select_datetime_range(df, start_time, end_time):
"""Returns pandas DataFrame within a datetime range (slice). If end_time is specified as None, the range will have no upper datetime limit.
Args:
df (pandas DataFrame): DataFrame that has been created by a function in the **history** or **histsummary** modules (it must have a numeric ID as the first or only index column).
start_time (str or datetime.datetime): Datetime.
end_time (str or datetime.datetime): Datetime.
Returns:
dt_range_df (pandas DataFrame)
"""
min_max_tup = (start_time, end_time)
dt_range_df = _slice_by_single_index(df, time_index=min_max_tup)
return dt_range_df
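# Illustrative sketch (the ID and date range are assumptions): chaining the
# two selection helpers above to narrow a history DataFrame to one device
# and one month.
def _example_select_device_and_month(df):
    subset = df_select_ids(df, 101)
    return df_select_datetime_range(subset, '2016-01-01', '2016-01-31')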
def _slice_by_single_index(df, id_index=None, middle_index=None, time_index=None):
slice_kwargs = {'id_index': id_index, 'time_index': time_index}
if _has_single_index(df):
sliced_df = _slice_by_one_index_in_single_index(df, **slice_kwargs)
elif _has_double_index(df):
sliced_df = _slice_by_one_index_in_double_index(df, **slice_kwargs)
elif _has_triple_index(df):
slice_kwargs['middle_index'] = middle_index
sliced_df = _slice_by_one_index_in_triple_index(df, **slice_kwargs)
else:
raise ValueError('Multiindex of DataFrame does not have two or three '
'index columns as expected.')
return sliced_df
def _has_single_index(df):
if len(df.index.names) == 1:
return True
else:
return False
def _has_double_index(df):
if len(df.index.names) == 2:
return True
else:
return False
def _has_triple_index(df):
if len(df.index.names) == 3:
return True
else:
return False
def _slice_by_one_index_in_triple_index(df, id_index=None, middle_index=None,
time_index=None):
index = [index for index in [id_index, middle_index, time_index] if index]
if len(index) > 1:
raise ValueError('More than one index slice has been chosen. '
'This is not yet supported by this function.')
idx = pd.IndexSlice
if id_index:
idx_arg = _slice_by_id_in_triple_index(id_index)
elif time_index:
idx_arg = _slice_by_time_in_triple_index(time_index)
elif middle_index:
idx_arg = idx[:, middle_index, :]
else:
idx_arg = idx[:, :, :]
sliced_by_one = pd.DataFrame(df.loc[idx_arg, :])
sliced_by_one.sort_index(inplace=True, sort_remaining=True)
return sliced_by_one
def _slice_by_id_in_single_index(id_or_ids):
idx = pd.IndexSlice
if isinstance(id_or_ids, tuple) or isinstance(id_or_ids, list):
min_id, max_id = id_or_ids[0], id_or_ids[1]
idx_arg = idx[min_id:max_id + 1]
else:
idx_arg = idx[id_or_ids]
return idx_arg
def _slice_by_id_in_double_index(id_or_ids):
idx = pd.IndexSlice
if isinstance(id_or_ids, tuple) or isinstance(id_or_ids, list):
min_id, max_id = id_or_ids[0], id_or_ids[1]
idx_arg = idx[min_id:max_id + 1, :]
else:
idx_arg = idx[id_or_ids, :]
return idx_arg
def _slice_by_id_in_triple_index(id_or_ids):
idx = pd.IndexSlice
if isinstance(id_or_ids, tuple) or isinstance(id_or_ids, list):
min_id, max_id = id_or_ids[0], id_or_ids[1]
idx_arg = idx[min_id:max_id + 1, :, :]
else:
idx_arg = idx[id_or_ids, :, :]
return idx_arg
def _slice_by_time_in_single_index(time_index):
idx = pd.IndexSlice
min_time, max_time = time_index[0], time_index[1]
if max_time is not None:
idx_arg = idx[min_time:max_time]
else:
idx_arg = idx[min_time:]
return idx_arg
def _slice_by_time_in_double_index(time_index):
idx = pd.IndexSlice
min_time, max_time = time_index[0], time_index[1]
if max_time is not None:
idx_arg = idx[:, min_time:max_time]
else:
idx_arg = idx[:, min_time:]
return idx_arg
def _slice_by_time_in_triple_index(time_index):
idx = pd.IndexSlice
min_time, max_time = time_index[0], time_index[1]
if max_time is not None:
idx_arg = idx[:, :, min_time:max_time]
else:
idx_arg = idx[:, :, min_time:]
return idx_arg
def _slice_by_one_index_in_single_index(df, id_index=None, time_index=None):
if id_index:
idx_arg = _slice_by_id_in_single_index(id_index)
elif time_index:
idx_arg = _slice_by_time_in_single_index(time_index)
else:
idx = pd.IndexSlice
idx_arg = idx[:]
sliced_by_one = pd.DataFrame(df.loc[idx_arg, :])
    sliced_by_one.sort_index(inplace=True, sort_remaining=True)
return sliced_by_one
def _slice_by_one_index_in_double_index(df, id_index=None, time_index=None):
index = [index for index in [id_index, time_index] if index]
if len(index) > 1:
raise ValueError('More than one index slice has been chosen. '
'This is not yet supported by this function.')
if id_index:
idx_arg = _slice_by_id_in_double_index(id_index)
elif time_index:
idx_arg = _slice_by_time_in_double_index(time_index)
else:
idx = pd.IndexSlice
idx_arg = idx[:, :]
sliced_by_one = pd.DataFrame(df.loc[idx_arg, :])
sliced_by_one.sort_index(inplace=True, sort_remaining=True)
return sliced_by_one
def _sort_by_timestamps(df):
time_label = _get_time_label_of_data(df)
df.sort_values(time_label, inplace=True)
return df
def count_of_data_points_for_each_id(df):
"""Returns dict with IDs as keys and total number (int) of observations of data as values, based on the DataFrame (df) passed as an argument.
Args:
df (pandas DataFrame): DataFrame as created by **history** module.
Returns:
counts_by_id (dict): Dict of key-value pairs, in which IDs are keys.
"""
return (df
.groupby(level=0)
.count()
.to_dict())
def count_of_data_points_for_select_id(df, id):
"""Returns number of observations for the specified device or location
within a DataFrame.
Args:
df (pandas DataFrame): DataFrame as created by **history** module.
id (int or str): ID of device or location.
Returns:
data_points (int): Number of observations for the given ID in the DataFrame.
"""
idx = pd.IndexSlice
return df.loc[idx[id, :], :].count()
def location_id_of_sensor(sensor_id, devices_file):
"""Returns location ID for a device, based on device ID.
Args:
sensor_id (int or str): Device ID.
devices_file (str): Devices file.
Returns:
location_id (int): Location ID.
"""
device_df = pd.read_csv(devices_file,
usecols=[str(SENSOR_DEVICE_ID),
str(SENSOR_LOCATION_ID)],
index_col=0)
idx = pd.IndexSlice
return device_df.loc[idx[sensor_id, SENSOR_LOCATION_ID]]
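# Illustrative note (the concrete column names are an assumption; they are
# defined by SENSOR_DEVICE_ID and SENSOR_LOCATION_ID in the project config):
# the devices file is a CSV mapping each device ID to a location ID, e.g.
#     device_id,location_id
#     101,9001
# so location_id_of_sensor(101, 'devices.csv') would return 9001.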
def _get_id_index_column_label(df):
return df.index.names[0]
def _get_time_index(df):
time_level = _get_time_level_of_df_multiindex(df)
timestamps = (df
.index
.get_level_values(time_level))
return timestamps
def _get_time_index_column_label(df):
time_level = _get_time_level_of_df_multiindex(df)
return df.index.names[time_level]
def _get_time_level_of_df_multiindex(df):
for i in range(len(df.index._levels)):
if type(df.index._levels[i][0]) == pd.Timestamp:
time_level = i
return time_level
def _get_time_label_of_data(df):
for i in range(len(df.columns)):
if type(df.iloc[0, i]) == pd.Timestamp:
return df.columns[i]
return None
def _get_time_column_of_data(df):
time_label = _get_time_label_of_data(df)
for i, label in enumerate(df.columns):
if label == time_label:
break
return i
def _get_label_of_first_data_column(df):
return df.columns[0]
def _get_labels_of_data_columns(df):
col_labels = []
for col in df.columns:
col_labels.append(col)
return col_labels
def _get_column_of_data_label(df, label):
for i, col_label in enumerate(df.columns):
if col_label == label:
break
if df.iloc[0, i].__class__ != str:
return [i]
else:
        msg = ('The column {} contains strings instead of numeric '
               'values. You may want to change it to Categorical '
               'in pandas, if it represents a category.'.format(label))
        raise ValueError(msg)
def _sliced_by_id_or_ids_and_time_index(df, id_or_ids, start, end):
if id_or_ids:
sliced_by_id = _slice_by_single_index(df, id_index=id_or_ids)
else:
sliced_by_id = df
sliced_by_dt = _slice_by_single_index(sliced_by_id, time_index=(start, end))
return sliced_by_dt
def squared_avg_daily_data_points_per_id(df):
""" Returns DataFrame grouped by the primary id (DeviceId or
LocationId) and by day. The value column has the count of data points
per day.
"""
time_index_level = _get_time_level_of_df_multiindex(df)
grp_daily_by_id = df.groupby([df.index.get_level_values(level=0),
pd.TimeGrouper('D', level=time_index_level)])
return (grp_daily_by_id
.count()
.groupby(level=0)
.mean()
.applymap(np.square))
def counts_by_primary_id_squared(df):
"""Returns dict with IDs as keys and the number of data observations
in the DataFrame argument as values.
Args:
df (pandas DataFrame):
Returns:
counts_by_id (dict): Dict.
"""
return (df
.groupby(level=0)
.count()
.apply(lambda x: x ** 2)
.to_dict())
def first_full_day_of_sensors_obs(df):
earliest_minute = df.index.get_level_values(level=4).min()
    # Advance by a Timedelta so month/year boundaries are handled correctly
    next_day = earliest_minute + pd.Timedelta(days=1)
    first_full_day = dt.datetime(next_day.year, next_day.month,
                                 next_day.day, hour=0, minute=0)
return first_full_day
def last_full_day_of_sensors_obs(df):
last_minute = df.index.get_level_values(level=4).max()
    # Step back by a Timedelta so month/year boundaries are handled correctly
    prev_day = last_minute - pd.Timedelta(days=1)
    last_full_day = dt.datetime(prev_day.year, prev_day.month,
                                prev_day.day, hour=0, minute=0)
return last_full_day
def first_and_last_days_cycling(df):
return (first_full_day_of_sensors_obs(df),
last_full_day_of_sensors_obs(df))
def number_of_days(df):
"""Determines number of days between first and last day of data for df."""
first_day, last_day = first_and_last_days_df(df)
return (last_day - first_day)/np.timedelta64(1, 'D')
def start_of_first_full_day_df(df):
""""Returns datetime.datetime value of the very beginning of the first
full day for which data is given in a pandas DataFrame. The DataFrame
must have a MultiIndex in which the time level of the index
contains timestamps."""
time_index_level = _get_time_level_of_df_multiindex(df)
earliest_timestamp = (df
.index
.get_level_values(level=time_index_level)
.min())
earliest_full_day = earliest_timestamp + pd.Timedelta(days=1)
start_earliest_full_day = dt.datetime(earliest_full_day.year,
earliest_full_day.month,
earliest_full_day.day, hour=0,
minute=0)
return start_earliest_full_day
def start_of_last_full_day_df(df):
time_index_level = _get_time_level_of_df_multiindex(df)
last_timestamp = (df
.index
.get_level_values(level=time_index_level)
.max())
last_full_day = last_timestamp - pd.Timedelta(days=1)
start_last_full_day = dt.datetime(last_full_day.year, last_full_day.month,
last_full_day.day, hour=0, minute=0)
return start_last_full_day
def first_and_last_days_df(df):
return (start_of_first_full_day_df(df),
start_of_last_full_day_df(df))
def number_of_intervals_in_date_range(first_day, last_day, frequency='m'):
"""Returns number of intervals of specified frequency for date
range from first to last full days of data.
Default frequency is in minutes ('m').
"""
total_days = last_day - first_day + pd.Timedelta(days=1)
intervals = int(total_days/np.timedelta64(1, frequency))
return intervals
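# Worked example (dates assumed): with first_day = pd.Timestamp('2016-01-01')
# and last_day = pd.Timestamp('2016-01-03'), total_days is 3 days, so at the
# default minute frequency the function returns 3 * 24 * 60 = 4320 intervals.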
def count_observations_by_sensor_id(df):
"""Returns the total number of readings for each
sensor within the DataFrame.
"""
device_id_label = _get_id_index_column_label(df)
data_field_labels = _get_labels_of_data_columns(df)
count_by_id_sorted = (df.groupby(level=device_id_label, sort=False)
.count()
.sort_values(data_field_labels, inplace=True,
ascending=False))
count_by_id_arr = np.zeros((len(count_by_id_sorted), 2), dtype=np.uint32)
for i, row in enumerate(count_by_id_sorted.iterrows()):
count_by_id_arr[i, :] = (row[0], row[1][0])
return count_by_id_arr
def count_observations_in_intervals_for_sensor_id(df, sensor_id, interval='D'):
"""Returns the count of inside temperature readings for a device by
interval (defaults to daily).
Args:
df (pandas DataFrame): DataFrame as created by **history** module.
sensor_id (int): ID of device.
interval (str): interval (pandas format). Defaults to daily.
Returns:
count_temps_df (pandas DataFrame): pandas DataFrame with the interval
and the count of observations by interval.
"""
idx = pd.IndexSlice
first_data_field_label = _get_label_of_first_data_column(df)
id_field_label = _get_id_index_column_label(df)
count_temps_per_day = (df.loc[idx[sensor_id, :], [first_data_field_label]]
.reset_index(level=id_field_label)
.groupby(id_field_label)
.resample(interval)
.count())
return count_temps_per_day
| {
"repo_name": "nickpowersys/CaaR",
"path": "caar/histsummary.py",
"copies": "1",
"size": "22218",
"license": "bsd-3-clause",
"hash": 5927691366379370000,
"line_mean": 33.6614664587,
"line_max": 185,
"alpha_frac": 0.6023044378,
"autogenerated": false,
"ratio": 3.5077360277865486,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9608332457517511,
"avg_score": 0.00034160161380758893,
"num_lines": 641
} |
from __future__ import absolute_import, division, print_function
import datetime
import json
__author__ = 'jgarman'
def split_process_id(guid):
if type(guid) == int:
return guid, 1
return guid[:36], guid[37:]
def get_process_id(proc):
old_style_id = proc.get('id', None)
if old_style_id and old_style_id != '':
return int(old_style_id)
else:
new_style_id = proc.get('unique_id', None)
if not new_style_id:
return None
return new_style_id
def get_parent_process_id(proc):
old_style_id = proc.get('parent_unique_id', None)
if old_style_id and old_style_id != '':
return old_style_id
else:
new_style_id = proc.get('parent_id', None)
if not new_style_id:
return None
return int(new_style_id)
def json_encode(d):
def default(o):
if type(o) is datetime.date or type(o) is datetime.datetime:
return o.strftime("%Y-%m-%d %H:%M:%S.%f%z")
return json.dumps(d, default=default)
def replace_sensor_in_guid(guid, new_id):
# first eight characters of the GUID is the sensor ID
return '%08x-%s' % (new_id, guid[9:])
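# Worked example (GUID value assumed): with new_id = 42 the first eight hex
# characters of the GUID are replaced by the new sensor id:
#   replace_sensor_in_guid('00000001-0000-11e5-b4fb-005056a343b8', 42)
#   -> '0000002a-0000-11e5-b4fb-005056a343b8'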
def update_sensor_id_refs(proc, new_id):
# this function will mutate proc in-place
proc['sensor_id'] = new_id
parent_unique_id = proc.get('parent_unique_id', None)
if parent_unique_id:
new_parent_id = replace_sensor_in_guid(parent_unique_id, new_id)
proc['parent_unique_id'] = new_parent_id
unique_id = proc.get('unique_id', None)
if unique_id:
new_unique_id = replace_sensor_in_guid(unique_id, new_id)
proc['unique_id'] = new_unique_id
return proc
def update_feed_id_refs(feed_data, new_id):
# this function will mutate feed in-place
feed_data['feed_id'] = new_id
(_, doc_id) = feed_data['unique_id'].split(':')
feed_data['unique_id'] = '%s:%s' % (new_id, doc_id)
return feed_data
| {
"repo_name": "carbonblack/cb-event-duplicator",
"path": "cbopensource/tools/eventduplicator/utils.py",
"copies": "1",
"size": "1950",
"license": "mit",
"hash": 7475460054564106000,
"line_mean": 25,
"line_max": 72,
"alpha_frac": 0.6071794872,
"autogenerated": false,
"ratio": 3.125,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9232179487200001,
"avg_score": 0,
"num_lines": 75
} |
from __future__ import absolute_import, division, print_function
import docker
import logging
import os
import sys
import random
import time
import json
from ..container_manager import (
create_model_container_label, parse_model_container_label,
ContainerManager, CLIPPER_DOCKER_LABEL, CLIPPER_MODEL_CONTAINER_LABEL,
CLIPPER_QUERY_FRONTEND_CONTAINER_LABEL,
CLIPPER_MGMT_FRONTEND_CONTAINER_LABEL, CLIPPER_INTERNAL_RPC_PORT,
CLIPPER_INTERNAL_QUERY_PORT, CLIPPER_INTERNAL_MANAGEMENT_PORT,
CLIPPER_INTERNAL_METRIC_PORT)
from ..exceptions import ClipperException
from requests.exceptions import ConnectionError
from .docker_metric_utils import *
logger = logging.getLogger(__name__)
class DockerContainerManager(ContainerManager):
def __init__(self,
docker_ip_address="localhost",
clipper_query_port=1337,
clipper_management_port=1338,
clipper_rpc_port=7000,
redis_ip=None,
redis_port=6379,
prometheus_port=9090,
docker_network="clipper_network",
extra_container_kwargs={}):
"""
Parameters
----------
docker_ip_address : str, optional
The public hostname or IP address at which the Clipper Docker
containers can be accessed via their exposed ports. This should almost always
be "localhost". Only change if you know what you're doing!
clipper_query_port : int, optional
The port on which the query frontend should listen for incoming prediction requests.
clipper_management_port : int, optional
The port on which the management frontend should expose the management REST API.
clipper_rpc_port : int, optional
The port to start the Clipper RPC service on.
redis_ip : str, optional
The address of a running Redis cluster. If set to None, Clipper will start
a Redis container for you.
redis_port : int, optional
The Redis port. If ``redis_ip`` is set to None, Clipper will start Redis on this port.
If ``redis_ip`` is provided, Clipper will connect to Redis on this port.
docker_network : str, optional
The docker network to attach the containers to. You can read more about Docker
networking in the
`Docker User Guide <https://docs.docker.com/engine/userguide/networking/>`_.
extra_container_kwargs : dict
Any additional keyword arguments to pass to the call to
:py:meth:`docker.client.containers.run`.
"""
self.public_hostname = docker_ip_address
self.clipper_query_port = clipper_query_port
self.clipper_management_port = clipper_management_port
self.clipper_rpc_port = clipper_rpc_port
self.redis_ip = redis_ip
if redis_ip is None:
self.external_redis = False
else:
self.external_redis = True
self.redis_port = redis_port
self.prometheus_port = prometheus_port
        if docker_network == "host":
raise ClipperException(
"DockerContainerManager does not support running Clipper on the "
"\"host\" docker network. Please pick a different network name"
)
self.docker_network = docker_network
self.docker_client = docker.from_env()
self.extra_container_kwargs = extra_container_kwargs.copy()
# Merge Clipper-specific labels with any user-provided labels
if "labels" in self.extra_container_kwargs:
self.common_labels = self.extra_container_kwargs.pop("labels")
self.common_labels.update({CLIPPER_DOCKER_LABEL: ""})
else:
self.common_labels = {CLIPPER_DOCKER_LABEL: ""}
container_args = {
"network": self.docker_network,
"detach": True,
}
self.extra_container_kwargs.update(container_args)
def start_clipper(self, query_frontend_image, mgmt_frontend_image,
cache_size):
try:
self.docker_client.networks.create(
self.docker_network, check_duplicate=True)
except docker.errors.APIError:
logger.debug(
"{nw} network already exists".format(nw=self.docker_network))
except ConnectionError:
msg = "Unable to Connect to Docker. Please Check if Docker is running."
raise ClipperException(msg)
if not self.external_redis:
logger.info("Starting managed Redis instance in Docker")
redis_container = self.docker_client.containers.run(
'redis:alpine',
"redis-server --port %s" % self.redis_port,
name="redis-{}".format(random.randint(
0, 100000)), # generate a random name
ports={'%s/tcp' % self.redis_port: self.redis_port},
labels=self.common_labels.copy(),
**self.extra_container_kwargs)
self.redis_ip = redis_container.name
mgmt_cmd = "--redis_ip={redis_ip} --redis_port={redis_port}".format(
redis_ip=self.redis_ip, redis_port=self.redis_port)
mgmt_labels = self.common_labels.copy()
mgmt_labels[CLIPPER_MGMT_FRONTEND_CONTAINER_LABEL] = ""
self.docker_client.containers.run(
mgmt_frontend_image,
mgmt_cmd,
name="mgmt_frontend-{}".format(random.randint(
0, 100000)), # generate a random name
ports={
'%s/tcp' % CLIPPER_INTERNAL_MANAGEMENT_PORT:
self.clipper_management_port
},
labels=mgmt_labels,
**self.extra_container_kwargs)
query_cmd = ("--redis_ip={redis_ip} --redis_port={redis_port} "
"--prediction_cache_size={cache_size}").format(
redis_ip=self.redis_ip,
redis_port=self.redis_port,
cache_size=cache_size)
query_labels = self.common_labels.copy()
query_labels[CLIPPER_QUERY_FRONTEND_CONTAINER_LABEL] = ""
query_container_id = random.randint(0, 100000)
query_name = "query_frontend-{}".format(query_container_id)
self.docker_client.containers.run(
query_frontend_image,
query_cmd,
name=query_name,
ports={
'%s/tcp' % CLIPPER_INTERNAL_QUERY_PORT:
self.clipper_query_port,
'%s/tcp' % CLIPPER_INTERNAL_RPC_PORT: self.clipper_rpc_port
},
labels=query_labels,
**self.extra_container_kwargs)
# Metric Section
query_frontend_metric_name = "query_frontend_exporter-{}".format(
query_container_id)
run_query_frontend_metric_image(
query_frontend_metric_name, self.docker_client, query_name,
self.common_labels, self.extra_container_kwargs)
setup_metric_config(query_frontend_metric_name,
CLIPPER_INTERNAL_METRIC_PORT)
run_metric_image(self.docker_client, self.common_labels,
self.prometheus_port, self.extra_container_kwargs)
self.connect()
def connect(self):
# No extra connection steps to take on connection
return
def deploy_model(self, name, version, input_type, image, num_replicas=1):
# Parameters
# ----------
# image : str
        #     The fully specified Docker image to deploy. If using a custom
# registry, the registry name must be prepended to the image. For example,
# "localhost:5000/my_model_name:my_model_version" or
# "quay.io/my_namespace/my_model_name:my_model_version"
self.set_num_replicas(name, version, input_type, image, num_replicas)
def _get_replicas(self, name, version):
containers = self.docker_client.containers.list(
filters={
"label":
"{key}={val}".format(
key=CLIPPER_MODEL_CONTAINER_LABEL,
val=create_model_container_label(name, version))
})
return containers
def get_num_replicas(self, name, version):
return len(self._get_replicas(name, version))
def _add_replica(self, name, version, input_type, image):
containers = self.docker_client.containers.list(
filters={
"label": CLIPPER_QUERY_FRONTEND_CONTAINER_LABEL
})
if len(containers) < 1:
logger.warning("No Clipper query frontend found.")
raise ClipperException(
"No Clipper query frontend to attach model container to")
query_frontend_hostname = containers[0].name
env_vars = {
"CLIPPER_MODEL_NAME": name,
"CLIPPER_MODEL_VERSION": version,
# NOTE: assumes this container being launched on same machine
# in same docker network as the query frontend
"CLIPPER_IP": query_frontend_hostname,
"CLIPPER_INPUT_TYPE": input_type,
}
model_container_label = create_model_container_label(name, version)
labels = self.common_labels.copy()
labels[CLIPPER_MODEL_CONTAINER_LABEL] = model_container_label
model_container_name = model_container_label + '-{}'.format(
random.randint(0, 100000))
self.docker_client.containers.run(
image,
name=model_container_name,
environment=env_vars,
labels=labels,
**self.extra_container_kwargs)
# Metric Section
add_to_metric_config(model_container_name,
CLIPPER_INTERNAL_METRIC_PORT)
# Return model_container_name so we can check if it's up and running later
return model_container_name
def set_num_replicas(self, name, version, input_type, image, num_replicas):
current_replicas = self._get_replicas(name, version)
if len(current_replicas) < num_replicas:
num_missing = num_replicas - len(current_replicas)
logger.info(
"Found {cur} replicas for {name}:{version}. Adding {missing}".
format(
cur=len(current_replicas),
name=name,
version=version,
missing=(num_missing)))
model_container_names = []
for _ in range(num_missing):
container_name = self._add_replica(name, version, input_type,
image)
model_container_names.append(container_name)
for name in model_container_names:
container = self.docker_client.containers.get(name)
while container.attrs.get("State").get("Status") != "running" or \
self.docker_client.api.inspect_container(name).get("State").get("Health").get("Status") != "healthy":
time.sleep(3)
elif len(current_replicas) > num_replicas:
num_extra = len(current_replicas) - num_replicas
logger.info(
"Found {cur} replicas for {name}:{version}. Removing {extra}".
format(
cur=len(current_replicas),
name=name,
version=version,
extra=(num_extra)))
while len(current_replicas) > num_replicas:
cur_container = current_replicas.pop()
cur_container.stop()
# Metric Section
delete_from_metric_config(cur_container.name)
def get_logs(self, logging_dir):
containers = self.docker_client.containers.list(
filters={
"label": CLIPPER_DOCKER_LABEL
})
logging_dir = os.path.abspath(os.path.expanduser(logging_dir))
log_files = []
if not os.path.exists(logging_dir):
os.makedirs(logging_dir)
logger.info("Created logging directory: %s" % logging_dir)
for c in containers:
log_file_name = "image_{image}:container_{id}.log".format(
image=c.image.short_id, id=c.short_id)
log_file = os.path.join(logging_dir, log_file_name)
if sys.version_info < (3, 0):
with open(log_file, "w") as lf:
lf.write(c.logs(stdout=True, stderr=True))
else:
with open(log_file, "wb") as lf:
lf.write(c.logs(stdout=True, stderr=True))
log_files.append(log_file)
return log_files
def stop_models(self, models):
containers = self.docker_client.containers.list(
filters={
"label": CLIPPER_MODEL_CONTAINER_LABEL
})
for c in containers:
c_name, c_version = parse_model_container_label(
c.labels[CLIPPER_MODEL_CONTAINER_LABEL])
if c_name in models and c_version in models[c_name]:
c.stop()
def stop_all_model_containers(self):
containers = self.docker_client.containers.list(
filters={
"label": CLIPPER_MODEL_CONTAINER_LABEL
})
for c in containers:
c.stop()
def stop_all(self):
containers = self.docker_client.containers.list(
filters={
"label": CLIPPER_DOCKER_LABEL
})
for c in containers:
c.stop()
def get_admin_addr(self):
return "{host}:{port}".format(
host=self.public_hostname, port=self.clipper_management_port)
def get_query_addr(self):
return "{host}:{port}".format(
host=self.public_hostname, port=self.clipper_query_port)
| {
"repo_name": "dcrankshaw/clipper",
"path": "clipper_admin/clipper_admin/docker/docker_container_manager.py",
"copies": "1",
"size": "13987",
"license": "apache-2.0",
"hash": -8323022854963453000,
"line_mean": 41.1295180723,
"line_max": 125,
"alpha_frac": 0.5790376778,
"autogenerated": false,
"ratio": 4.214221150949081,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0005047595322145403,
"num_lines": 332
} |
from __future__ import absolute_import, division, print_function
import errno
import logging
import math
import socket
import scapy.config
import scapy.layers.l2
import scapy.route
from scapy.all import *
from lan_sync_controller.config_loader import SETTINGS
LOG = logging.getLogger(__name__)
def long2net(arg):
"""Convert long to netmask"""
if (arg <= 0 or arg >= 0xFFFFFFFF):
raise ValueError("illegal netmask value", hex(arg))
return 32 - int(round(math.log(0xFFFFFFFF - arg, 2)))
def to_CIDR_notation(bytes_network, bytes_netmask):
network = scapy.utils.ltoa(bytes_network)
netmask = long2net(bytes_netmask)
net = "%s/%s" % (network, netmask)
if netmask < 16:
LOG.warning("%s is too big. skipping" % net)
return None
return net
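# Worked example (assumed values, not part of the original module): the raw
# long values scapy stores for 192.168.0.0 / 255.255.255.0 convert as follows.
#   long2net(0xFFFFFF00)                      # -> 24
#   to_CIDR_notation(3232235520, 0xFFFFFF00)  # -> '192.168.0.0/24'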
def scan_and_get_neighbors(net, interface, timeout=1):
"""Get list interfaces, then scan in each network
and get available neighbors. Actually, it will ping`
to each ip in network, then wait for reply (received packet)
:param net: (string)
:param interface: (string)
:param timeout(integer)
"""
LOG.info('arping %s on %s' % (net, interface))
try:
ans, unans = scapy.layers.l2.arping(net, iface=interface,
timeout=timeout,
verbose=True)
neighbors = []
for s, r in ans.res:
neighbors.append(r.sprintf('%ARP.psrc%'))
return neighbors
except socket.error as e:
if e.errno == errno.EPERM:
LOG.error('%s. Did you run as root?' % (e.strerror))
else:
raise
def scan_udp_port(dst_ip, dst_port, dst_timeout=1):
"""Scan UDP port with specific ip address and port
This host run code will be source host, define destination
host and port that you want to scan.
:param dst_ip: (string) destination ip address
:param dst_port: (integer) specific port
:param dst_timeout: (integer)
"""
udp_scan_resp = sr1(IP(dst=dst_ip) / UDP(dport=dst_port),
timeout=dst_timeout)
    if udp_scan_resp is None:
retrans = []
for count in range(0, 3):
retrans.append(sr1(IP(dst=dst_ip) / UDP(dport=dst_port),
timeout=dst_timeout))
for item in retrans:
            if item is not None:
scan_udp_port(dst_ip, dst_port, dst_timeout)
return 'Open|Filtered'
elif udp_scan_resp.haslayer(UDP):
return 'Open'
elif udp_scan_resp.haslayer(ICMP):
if int(udp_scan_resp.getlayer(ICMP).type) == 3 and \
int(udp_scan_resp.getlayer(ICMP).code) == 3:
return 'Closed'
elif int(udp_scan_resp.getlayer(ICMP).type) == 3 and \
int(udp_scan_resp.getlayer(ICMP).code) in [1, 2, 9, 10, 13]:
return 'Filtered'
else:
return 'CHECK'
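# Minimal usage sketch (hypothetical neighbor address and port, not part of
# the original module): treat 'Open' and 'Open|Filtered' as reachable.
#   state = scan_udp_port('192.168.1.12', 12345)
#   if state in ('Open', 'Open|Filtered'):
#       pass  # candidate peer for syncing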
class NeighborsDetector(object):
def __init__(self):
self.port = SETTINGS['default-port']
def get_all_neighbors(self):
"""Get All Available Neighbors in LAN"""
result = {}
for network, netmask, _, interface, address in \
scapy.config.conf.route.routes:
# skip loopback network and default gw
if network == 0 or interface == 'lo' or \
address == '127.0.0.1' or address == '0.0.0.0':
continue
if netmask <= 0 or netmask == 0xFFFFFFFF:
continue
net = to_CIDR_notation(network, netmask)
if interface != scapy.config.conf.iface:
                msg = ('Skipping %s because scapy currently does not '
                       'support arping on non-primary network '
                       'interfaces' % net)
LOG.warning(msg)
continue
if net:
result[interface] = scan_and_get_neighbors(net, interface)
return result
def detect_valid_hosts(self):
"""Detect valid host, which open a given port"""
neighbors = self.get_all_neighbors()
valid_host = []
for neighbor in neighbors.values():
for _n_ip in neighbor:
# If the given host opens port, get it.
if 'Open' in scan_udp_port(_n_ip, self.port):
                    LOG.info('Valid host found: %s' % _n_ip)
valid_host.append(_n_ip)
return valid_host
| {
"repo_name": "KDBk/lan_sync_controller",
"path": "lan_sync_controller/discovery.py",
"copies": "2",
"size": "4551",
"license": "mit",
"hash": -37818799514327320,
"line_mean": 33.2180451128,
"line_max": 76,
"alpha_frac": 0.5647110525,
"autogenerated": false,
"ratio": 3.8147527242246437,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5379463776724643,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division, print_function
import falcon
import json
import Queue
import sys
import logging
from process import Processor
#sys.path.append('.')
# logging.basicConfig(level=logging.DEBUG,
# format='%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s',
# datefmt='%d %b %Y %H:%M:%S')
logging.basicConfig(filename='log_restapi.log', filemode='w',
level=logging.DEBUG,
format='[%(levelname)s] %(message)s [%(filename)s][line:%(lineno)d] %(asctime)s ',
datefmt='%d %b %Y %H:%M:%S')
class EventListener:
"""
    A queue to store incoming messages, which are passed to the processor
    for processing.
"""
def __init__(self):
self.queue = Queue.Queue()
self.processor = Processor('catch_upload processor', self.queue)
self.processor.start()
def on_get(self, req, resp):
resp.status = falcon.HTTP_200
resp.body = ('\n Congratulations! You GET /event successfully!\n\n')
def on_post(self, req, resp):
try:
raw_json = req.stream.read()
logging.debug('req:%s' % raw_json)
except:
raise falcon.HTTPBadRequest('bad req',
'when read from req, please check if the req is correct.')
try:
result_json = json.loads(raw_json, encoding='utf-8')
logging.debug('result json:%s' % result_json)
logging.info('start to run process....')
self.queue.put(result_json)
except:
raise falcon.HTTPError(falcon.HTTP_400, 'malformed json')
resp.status = falcon.HTTP_202
resp.body = json.dumps(result_json, encoding='utf-8')
app = falcon.API()
event_listener = EventListener()
app.add_route('/event', event_listener)
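# Rough usage sketch (hypothetical host, port and payload, not part of the
# original file): serve the WSGI app and POST an event to it.
#   gunicorn restapi:app -b 0.0.0.0:8000
#   curl -X POST http://localhost:8000/event -d '{"camera": "cam-01"}'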
| {
"repo_name": "ddxgz/smartcity-demo",
"path": "surveillance/restapi.py",
"copies": "1",
"size": "1844",
"license": "mit",
"hash": -4525888911884386300,
"line_mean": 31.9285714286,
"line_max": 98,
"alpha_frac": 0.5959869848,
"autogenerated": false,
"ratio": 3.6587301587301586,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9728252849097114,
"avg_score": 0.005292858886608887,
"num_lines": 56
} |
from __future__ import absolute_import, division, print_function
import functools
from guitool_ibeis.__PYQT__ import QtCore, QtGui # NOQA
from guitool_ibeis.__PYQT__.QtCore import Qt # NOQA
import utool as ut
from utool._internal import meta_util_six
ut.noinject(__name__, '[guitool_ibeis.decorators]', DEBUG=False)
DEBUG = False
signal_ = QtCore.pyqtSignal
# SLOT DECORATOR
def slot_(*types): # This is called at wrap time to get args
"""
    wrapper around the pyqtSlot decorator that keeps the original function info
"""
def pyqtSlotWrapper(func):
#printDBG('[GUITOOL._SLOT] Wrapping: %r' % func.__name__)
funcname = meta_util_six.get_funcname(func)
@QtCore.pyqtSlot(*types, name=funcname)
@ut.ignores_exc_tb
def slot_wrapper(self, *args, **kwargs):
result = func(self, *args, **kwargs)
return result
slot_wrapper = functools.update_wrapper(slot_wrapper, func)
return slot_wrapper
return pyqtSlotWrapper
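# Minimal usage sketch (hypothetical widget class, not part of the original
# module): the decorated method becomes a Qt slot named after the function.
#   class MyWidget(QtGui.QWidget):
#       @slot_(str)
#       def on_name_changed(self, name):
#           print(name)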
def checks_qt_error(func):
"""
    Decorator which reports qt errors that would otherwise be silent.
    Useful if sys.excepthook has not been overridden; since we do override
    it, this is mostly redundant.
"""
@functools.wraps(func)
def checkqterr_wrapper(self, *args, **kwargs):
try:
result = func(self, *args, **kwargs)
except Exception as ex:
funcname = meta_util_six.get_funcname(func)
msg = 'caught exception in %r' % (funcname,)
ut.printex(ex, msg, tb=True, pad_stdout=True)
raise
return result
return checkqterr_wrapper
| {
"repo_name": "Erotemic/guitool",
"path": "guitool_ibeis/guitool_decorators.py",
"copies": "1",
"size": "1613",
"license": "apache-2.0",
"hash": 1356741661874354700,
"line_mean": 32.6041666667,
"line_max": 79,
"alpha_frac": 0.6460012399,
"autogenerated": false,
"ratio": 3.665909090909091,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9808161909091919,
"avg_score": 0.0007496843434343434,
"num_lines": 48
} |
from __future__ import (absolute_import, division, print_function)
import glob
import os
import datetime
from addie.utilities.file_handler import FileHandler
class LogbookHandler(object):
last_files = []
def __init__(self, parent=None, max_number_of_log_files=10):
self.parent = parent
self.max_number_of_log_files = max_number_of_log_files
self.retrieve_log_files()
self.display_log_files()
def retrieve_log_files(self):
_number_of_log_files = self.max_number_of_log_files
# get list of files that start by log
list_log_files = glob.glob(self.parent.current_folder + "/logs/log.*")
if list_log_files == []:
return
# sort files by time stamp
list_log_files.sort(key=lambda x: os.path.getmtime(x))
# last x files
if len(list_log_files) > _number_of_log_files:
self.last_files = list_log_files[-_number_of_log_files: -1]
else:
self.last_files = list_log_files
def _get_text(self, filename=None):
_file_handler = FileHandler(filename=filename)
_file_handler.retrieve_contain()
return _file_handler.file_contain
def display_log_files(self):
if self.parent.job_monitor_interface.ui.pause_refresh_logbook.isChecked():
return
list_files = self.last_files[::-1]
if self.parent.previous_list_of_log_files == []:
self.parent.previous_list_of_log_files = list_files
else:
if self.parent.previous_list_of_log_files == list_files:
return
if len(list_files) > 0:
for _index, _file in enumerate(list_files):
_title = 'LOG FILE => {}'.format(_file)
_text = self._get_text(filename=_file)
_end = '#####################'
if _index == 0:
self.parent.job_monitor_interface.ui.logbook_text.setText(_title)
else:
self.parent.job_monitor_interface.ui.logbook_text.append(_title)
self.parent.job_monitor_interface.ui.logbook_text.append(_text)
self.parent.job_monitor_interface.ui.logbook_text.append(_end)
_time_format = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
self.parent.job_monitor_interface.ui.last_logbook_time.setText(_time_format)
else:
_time = str(datetime.datetime.now())
self.parent.job_monitor_interface.ui.logbook_text.setText("{}: No Log Files Located !".format(_time))
| {
"repo_name": "neutrons/FastGR",
"path": "addie/utilities/logbook_handler.py",
"copies": "1",
"size": "2589",
"license": "mit",
"hash": 1177743869549419300,
"line_mean": 34.4657534247,
"line_max": 113,
"alpha_frac": 0.5901892623,
"autogenerated": false,
"ratio": 3.6671388101983005,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.47573280724983,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division, print_function
import hashlib
import json
import logging
import subprocess
import tempfile
import time
import requests
from requests.utils import urlparse
__all__ = ['Kubernetes', "get_endpoint"]
logger = logging.getLogger(__name__)
resource_endpoints = {
"daemonsets":
"apis/extensions/v1beta1/namespaces/{namespace}/daemonsets",
"deployments":
"apis/extensions/v1beta1/namespaces/{namespace}/deployments",
"horizontalpodautoscalers":
"apis/extensions/v1beta1/namespaces/{namespace}/horizontalpodautoscalers",
"ingresses":
"apis/extensions/v1beta1/namespaces/{namespace}/ingresses",
"jobs":
"apis/extensions/v1beta1/namespaces/{namespace}/jobs",
"namespaces":
"api/v1/namespaces",
"replicasets":
"apis/extensions/v1beta1/namespaces/{namespace}/replicasets",
"persistentvolumes":
"api/v1/namespaces/{namespace}/persistentvolumes",
"persistentvolumeclaims":
"api/v1/namespaces/{namespace}/persistentvolumeclaims",
"services":
"api/v1/namespaces/{namespace}/services",
"serviceaccounts":
"api/v1/namespaces/{namespace}/serviceaccounts",
"secrets":
"api/v1/namespaces/{namespace}/secrets",
"configmaps":
"api/v1/namespaces/{namespace}/configmaps",
"replicationcontrollers":
"api/v1/namespaces/{namespace}/replicationcontrollers",
"pods":
"api/v1/namespaces/{namespace}/pods",
"statefulset":
"apis/apps/v1beta1/namespaces/{namespace}/statefulsets",
"storageclass":
"apis/storage.k8s.io/v1beta1/statefulsets", }
resources_alias = {
"ds": "daemonsets",
"hpa": "horizontalpodautoscalers",
"ing": "ingresses",
"ingress": "ingresses",
"ns": "namespaces",
"sc": "storageclasses",
"sfs": "statefulsets",
"po": "pods",
"pv": "persistentvolumes",
"pvc": "persistentvolumeclaims",
"rc": "replicationcontrollers",
"svc": "services"}
ANNOTATIONS = {
'protected': 'resource.appr/protected',
'hash': 'resource.appr/hash',
'version': 'package.appr/version',
'parent': 'package.appr/parent',
'rand': 'resource.appr/rand',
'update-mode': 'resource.appr/update-mode',
'package': 'package.appr/package'}
def get_endpoint(kind):
name = None
if kind in resource_endpoints:
name = kind
elif kind in resources_alias:
name = resources_alias[kind]
elif kind + "s" in resource_endpoints:
name = kind + "s"
else:
return 'unknown'
return resource_endpoints[name]
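# Worked example (hypothetical namespace, not part of the original module):
# aliases resolve through resources_alias before the endpoint lookup.
#   get_endpoint('svc')
#   # -> 'api/v1/namespaces/{namespace}/services'
#   get_endpoint('svc').format(namespace='default')
#   # -> 'api/v1/namespaces/default/services'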
class Kubernetes(object):
def __init__(self, namespace=None, endpoint=None, body=None, proxy=None):
self.proxy = None
if endpoint is not None and endpoint[0] == "/":
endpoint = endpoint[1:-1]
self.endpoint = endpoint
self.body = body
self.obj = None
self.protected = False
self._resource_load()
self.kind = self.obj['kind'].lower()
self.name = self.obj['metadata']['name']
self.force_rotate = ANNOTATIONS['rand'] in self.obj['metadata'].get('annotations', {})
self.namespace = self._namespace(namespace)
self.result = None
if proxy:
self.proxy = urlparse(proxy)
def _resource_load(self):
self.obj = json.loads(self.body)
if 'annotations' in self.obj['metadata']:
if (ANNOTATIONS['protected'] in self.obj['metadata']['annotations'] and
self.obj['metadata']['annotations'][ANNOTATIONS['protected']] == 'true'):
self.protected = True
def _gethash(self, src):
# Copy rand value
if (src is not None and ANNOTATIONS['rand'] in src['metadata'].get('annotations', {}) and
ANNOTATIONS['rand'] not in self.obj['metadata']['annotations']):
self.obj['metadata']['annotations'][ANNOTATIONS['rand']] = src['metadata'][
'annotations'][ANNOTATIONS['rand']]
# TODO(ant31) it should hash before the custom annotations
if ANNOTATIONS['hash'] in self.obj['metadata'].get('annotations', {}):
if self.obj['metadata']['annotations'][ANNOTATIONS['hash']] is None:
sha = hashlib.sha256(json.dumps(self.obj, sort_keys=True)).hexdigest()
self.obj['metadata']['annotations'][ANNOTATIONS['hash']] = sha
return self.obj['metadata']['annotations'][ANNOTATIONS['hash']]
else:
return None
def _namespace(self, namespace=None):
if namespace:
return namespace
elif 'namespace' in self.obj['metadata']:
return self.obj['metadata']['namespace']
else:
return 'default'
def create(self, force=False, dry=False, strategy='update'):
"""
          - Check if a resource with this name exists
          - if it exists, check whether the appr hash is the same
            - if it is not, delete the resource and recreate it
          - if force == True, delete the resource and recreate it
          - if it does not exist, create it
"""
force = force or self.force_rotate
r = self.get()
if r is not None:
rhash = r['metadata'].get('annotations', {}).get(ANNOTATIONS['hash'], None)
objhash = self._gethash(r)
f = tempfile.NamedTemporaryFile()
method = "apply"
if self.proxy:
method = "create"
strategy = "replace"
cmd = [method, '-f', f.name]
f.write(json.dumps(self.obj))
f.flush()
if r is None:
self._call(cmd, dry=dry)
return 'created'
elif (objhash is None or rhash == objhash) and force is False:
return 'ok'
elif rhash != objhash or force is True:
if self.protected:
return 'protected'
if strategy == 'replace':
self.delete(dry=dry)
action = "replaced"
elif strategy == "update":
action = "updated"
else:
raise ValueError("Unknown action %s" % action)
self._call(cmd, dry=dry)
return action
def get(self):
cmd = ['get', self.kind, self.name, '-o', 'json']
try:
self.result = json.loads(self._call(cmd))
return self.result
except RuntimeError:
return None
except (requests.exceptions.HTTPError) as e:
if e.response.status_code == 404:
return None
else:
raise e
def delete(self, dry=False, **kwargs):
cmd = ['delete', self.kind, self.name]
if self.protected:
return 'protected'
r = self.get()
if r is not None:
self._call(cmd, dry=dry)
return 'deleted'
else:
return 'absent'
def wait(self, retries=3, seconds=1):
r = 1
time.sleep(seconds)
obj = self.get()
while (r < retries and obj is None):
r += 1
time.sleep(seconds)
obj = self.get()
return obj
def exists(self):
r = self.get()
if r is None:
return False
else:
return True
def _call(self, cmd, dry=False):
command = ['kubectl'] + cmd + ["--namespace", self.namespace]
if not dry:
if self.proxy is not None:
return self._request(cmd[0])
else:
try:
return subprocess.check_output(command, stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as e:
raise RuntimeError("Kubernetes failed to create %s (%s): "
"%s" % (self.name, self.kind, e.output))
else:
return True
def _request(self, method):
if method == 'create':
headers = {'Content-Type': 'application/json'}
method = 'post'
url = "%s/%s" % (self.proxy.geturl(), self.endpoint)
return requests.post(url, data=self.body, headers=headers)
else:
url = "%s/%s/%s" % (self.proxy.geturl(), self.endpoint, self.name)
query = getattr(requests, method)
r = query(url)
r.raise_for_status()
return r.content
| {
"repo_name": "app-registry/appr",
"path": "appr/platforms/kubernetes.py",
"copies": "2",
"size": "8429",
"license": "apache-2.0",
"hash": 4946481432892303000,
"line_mean": 33.2642276423,
"line_max": 97,
"alpha_frac": 0.5682761893,
"autogenerated": false,
"ratio": 4.089762251334304,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0005116359372869304,
"num_lines": 246
} |
from __future__ import absolute_import, division, print_function
import inspect
from functools import wraps
from collections import OrderedDict
from contextlib import contextmanager
import copy
import tensorflow as tf
from tfutils.crossdevice_batchnorm import crossgpu_batch_norm, CRTPUBatchNormalization
import numpy as np
def initializer(kind='xavier', *args, **kwargs):
if kind == 'xavier':
init = tf.contrib.layers.xavier_initializer(*args, **kwargs)
elif kind == 'normal':
init = normal_initializer
else:
init = getattr(tf, kind + '_initializer')(*args, **kwargs)
return init
def normal_initializer(shape, dtype=None, partition_info=None):
'''
Used for EfficientNets
'''
H, W, _, C_out = shape
fan_out = int(H * W * C_out)
return tf.random_normal(
shape, mean=0.0, stddev=np.sqrt(2.0 / fan_out), dtype=dtype)
def groupnorm(inputs, G=32, data_format='channels_last', weight_decay=0.0, epsilon=1e-5, trainable=True, gamma_init=1, beta_init=0):
'''
Like LayerNorm, z-scores features along the channel dimension only.
However, it only normalizes within G groups of C/G channels each.
Optionally applies learnable scale/shift parameters.
'''
assert len(inputs.shape.as_list()) == 4, "Applies only to conv2D layers"
if data_format == 'channels_first':
inputs = tf.transpose(inputs, [0,2,3,1])
elif data_format == 'channels_last':
pass
else:
raise ValueError("data_format must be 'channels_first' or 'channels_last'")
B,H,W,C = inputs.shape.as_list()
assert C % G == 0, "num groups G must divide C"
CpG = C // G
inputs = tf.reshape(inputs, [B,H,W,CpG,G])
mean, var = tf.nn.moments(inputs, axes=[1,2,3], keep_dims=True)
inputs = tf.div(inputs - mean, tf.sqrt(var + epsilon))
inputs = tf.reshape(inputs, [B,H,W,C])
if trainable:
gamma = tf.get_variable("groupnorm_scale", shape=[1,1,1,C], dtype=tf.float32,
initializer=initializer("constant", float(gamma_init)))
# regularizer=tf.contrib.layers.l2_regularizer(weight_decay))
beta = tf.get_variable("groupnorm_shift", shape=[1,1,1,C], dtype=tf.float32,
initializer=initializer("constant", float(beta_init)))
# regularizer=tf.contrib.layers.l2_regularizer(weight_decay))
else:
gamma = tf.constant(gamma_init, dtype=tf.float32)
beta = tf.constant(beta_init, dtype=tf.float32)
inputs = gamma*inputs + beta
if data_format == 'channels_first':
inputs = tf.transpose(inputs, [0,3,1,2])
print("applied group norm to", inputs.name.split('/')[:-1])
return inputs
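# Minimal shape sketch (assumed sizes, not part of the original module):
#   x = tf.zeros([8, 56, 56, 64])
#   y = groupnorm(x, G=32)   # normalizes 32 groups of 64 // 32 = 2 channels
#   # y.shape == [8, 56, 56, 64]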
def batchnorm_corr(inputs, is_training, data_format='channels_last',
decay = 0.9, epsilon = 1e-5, init_zero=None, constant_init=None,
activation=None, time_suffix=None, bn_trainable=True,
use_crossgpu_bn=False, num_dev=None, use_crtpu_bn=False):
if time_suffix is not None:
bn_op_name = "post_conv_BN_" + time_suffix
reuse_flag = tf.AUTO_REUSE # create bn variables per timestep if they do not exist
else:
bn_op_name = "post_conv_BN"
reuse_flag = None
# if activation is none, should use zeros; else ones
if constant_init is None:
if init_zero is None:
init_zero = True if activation is None else False
if init_zero:
gamma_init = tf.zeros_initializer()
else:
gamma_init = tf.ones_initializer()
else:
gamma_init = tf.constant_initializer(constant_init)
if use_crossgpu_bn:
output = crossgpu_batch_norm(inputs=inputs,
decay=decay,
epsilon=epsilon,
is_training=is_training,
data_format=data_format,
trainable=bn_trainable,
gamma_initializer=gamma_init,
scope=bn_op_name,
reuse=reuse_flag,
num_dev=num_dev)
elif use_crtpu_bn:
axis = 1 if data_format == 'channels_first' else 3
crtpu_bn_func = CRTPUBatchNormalization(axis=axis,
momentum=decay,
epsilon=epsilon,
center=True,
scale=True,
trainable=bn_trainable,
gamma_initializer=gamma_init,
name=bn_op_name,
_reuse=reuse_flag,
_scope=bn_op_name)
output = crtpu_bn_func(inputs, training=is_training)
else:
axis = 1 if data_format == 'channels_first' else 3
output = tf.layers.batch_normalization(inputs=inputs,
axis=axis,
momentum=decay,
epsilon=epsilon,
center=True,
scale=True,
training=is_training,
trainable=bn_trainable,
fused=True,
gamma_initializer=gamma_init,
name=bn_op_name,
reuse=reuse_flag)
return output
def conv(inp,
out_depth,
ksize=[3,3],
strides=[1,1,1,1],
data_format='channels_last',
padding='SAME',
kernel_init='xavier',
kernel_init_kwargs=None,
use_bias=True,
bias=0,
weight_decay=None,
activation='relu',
batch_norm=False,
group_norm=False,
num_groups=32,
is_training=False,
batch_norm_decay=0.9,
batch_norm_epsilon=1e-5,
batch_norm_gamma_init=None,
init_zero=None,
dropout=None,
dropout_seed=0,
time_sep=False,
time_suffix=None,
bn_trainable=True,
crossdevice_bn_kwargs={},
name='conv'
):
# assert out_shape is not None
if time_sep:
assert time_suffix is not None
if batch_norm or group_norm:
use_bias = False
if weight_decay is None:
weight_decay = 0.
if isinstance(ksize, int):
ksize = [ksize, ksize]
if isinstance(strides, int):
strides = [1, strides, strides, 1]
if kernel_init_kwargs is None:
kernel_init_kwargs = {}
in_depth = inp.get_shape().as_list()[-1]
if out_depth is None:
out_depth = in_depth
# weights
init = initializer(kernel_init, **kernel_init_kwargs)
kernel = tf.get_variable(initializer=init,
shape=[ksize[0], ksize[1], in_depth, out_depth],
dtype=tf.float32,
regularizer=tf.contrib.layers.l2_regularizer(weight_decay),
name='weights')
if use_bias:
init = initializer(kind='constant', value=bias)
biases = tf.get_variable(initializer=init,
shape=[out_depth],
dtype=tf.float32,
regularizer=tf.contrib.layers.l2_regularizer(weight_decay),
name='bias')
# ops
if dropout is not None:
inp = tf.nn.dropout(inp, keep_prob=dropout, seed=dropout_seed, name='dropout')
conv = tf.nn.conv2d(inp, kernel,
strides=strides,
padding=padding)
if use_bias:
output = tf.nn.bias_add(conv, biases, name=name)
else:
output = tf.identity(conv, name=name)
if batch_norm:
output = batchnorm_corr(inputs=output,
is_training=is_training,
data_format=data_format,
decay = batch_norm_decay,
epsilon = batch_norm_epsilon,
constant_init=batch_norm_gamma_init,
init_zero=init_zero,
activation=activation,
time_suffix=time_suffix,
bn_trainable=bn_trainable,
**crossdevice_bn_kwargs)
elif group_norm:
output = groupnorm(inputs=output,
G=num_groups,
data_format=data_format,
weight_decay=weight_decay,
gamma_init=(0.0 if init_zero else 1.0),
epsilon=batch_norm_epsilon)
if activation is not None:
output = getattr(tf.nn, activation)(output, name=activation)
return output
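# Minimal call sketch (assumed NHWC input, not part of the original module):
#   x = tf.zeros([8, 32, 32, 3])
#   with tf.variable_scope('conv1'):
#       y = conv(x, 64, ksize=3, strides=2, batch_norm=True, is_training=True)
#   # y.shape == [8, 16, 16, 64]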
def conv_bnf(inp,
out_depth,
ksize=[3,3],
strides=[1,1,1,1],
padding='SAME',
kernel_init='xavier',
kernel_init_kwargs=None,
bias=0,
weight_decay=None,
activation='relu6',
batch_norm=True,
is_training=True,
batch_norm_decay=0.9,
batch_norm_epsilon=1e-5,
init_zero=None,
data_format='channels_last',
time_sep=False,
time_suffix=None,
bn_trainable=True,
crossdevice_bn_kwargs={},
name='conv_bnf'
):
# assert out_shape is not None
if time_sep:
assert time_suffix is not None
if weight_decay is None:
weight_decay = 0.
if isinstance(ksize, int):
ksize = [ksize, ksize]
if isinstance(strides, int):
strides = [1, strides, strides, 1]
if kernel_init_kwargs is None:
kernel_init_kwargs = {}
in_depth = inp.get_shape().as_list()[-1]
# weights
init = initializer(kernel_init, **kernel_init_kwargs)
kernel = tf.get_variable(initializer=init,
shape=[ksize[0], ksize[1], in_depth, out_depth],
dtype=tf.float32,
regularizer=tf.contrib.layers.l2_regularizer(weight_decay),
name='weights')
# ops
conv = tf.nn.conv2d(inp, kernel,
strides=strides,
padding=padding)
if batch_norm:
# if activation is none, should use zeros; else ones
        output = batchnorm_corr(inputs=conv,
is_training=is_training,
data_format=data_format,
decay = batch_norm_decay,
epsilon = batch_norm_epsilon,
init_zero=init_zero,
activation=activation,
time_suffix=time_suffix,
bn_trainable=bn_trainable,
**crossdevice_bn_kwargs)
else:
init = initializer(kind='constant', value=bias)
biases = tf.get_variable(initializer=init,
shape=[out_depth],
dtype=tf.float32,
regularizer=tf.contrib.layers.l2_regularizer(weight_decay),
name='bias')
output = tf.nn.bias_add(conv, biases, name=name)
if activation is not None:
output = getattr(tf.nn, activation)(output, name=activation)
return output
def depthsep_conv(inp,
out_depth,
multiplier=1,
ksize=3,
strides=1,
dep_padding='SAME',
sep_padding='SAME',
batch_norm = True,
is_training=True,
name='depthsep_conv',
*args,
**kwargs
):
with tf.variable_scope('depthwise_conv'):
d_out = depth_conv(inp, multiplier = multiplier,
ksize = ksize,
strides = strides,
padding = dep_padding,
batch_norm = batch_norm,
is_training = is_training,
*args, **kwargs)
with tf.variable_scope('pointwise_conv'):
# we batch norm first according to mobilenet paper
p_out = conv_bnf(d_out, out_depth = out_depth,
ksize = 1,
strides = 1,
padding = sep_padding,
batch_norm = batch_norm,
is_training = is_training,
*args, **kwargs)
return p_out
def depth_conv(inp,
multiplier=1,
out_depth=None,
ksize=3,
strides=1,
padding='SAME',
kernel_init='xavier',
kernel_init_kwargs=None,
activation='relu6',
weight_decay=None,
batch_norm = False,
group_norm=False,
num_groups=32,
use_bias=False,
is_training=True,
batch_norm_decay=0.9,
batch_norm_epsilon=1e-5,
batch_norm_gamma_init=None,
init_zero=None,
data_format='channels_last',
time_sep=False,
time_suffix=None,
bn_trainable=True,
crossdevice_bn_kwargs={},
name='depth_conv'
):
# assert out_shape is not None
if time_sep:
assert time_suffix is not None
if weight_decay is None:
weight_decay = 0.
if isinstance(ksize, int):
ksize = [ksize, ksize]
if isinstance(strides, int):
strides = [1, strides, strides, 1]
if kernel_init_kwargs is None:
kernel_init_kwargs = {}
in_depth = inp.get_shape().as_list()[-1]
out_depth = multiplier * in_depth
# weights
init = initializer(kernel_init, **kernel_init_kwargs)
kernel = tf.get_variable(initializer=init,
shape=[ksize[0], ksize[1], in_depth, multiplier],
dtype=tf.float32,
regularizer=tf.contrib.layers.l2_regularizer(weight_decay),
name='weights')
output = tf.nn.depthwise_conv2d(inp, kernel,
strides=strides,
padding=padding)
if batch_norm:
output = batchnorm_corr(inputs=output,
is_training=is_training,
data_format=data_format,
decay = batch_norm_decay,
epsilon = batch_norm_epsilon,
constant_init=batch_norm_gamma_init,
init_zero=init_zero,
activation=activation,
time_suffix=time_suffix,
bn_trainable=bn_trainable,
**crossdevice_bn_kwargs)
elif group_norm:
output = groupnorm(inputs=output,
G=num_groups,
data_format=data_format,
weight_decay=weight_decay,
gamma_init=(0.0 if init_zero else 1.0),
epsilon=batch_norm_epsilon)
elif use_bias:
init = initializer(kind='constant', value=1.0)
biases = tf.get_variable(initializer=init,
shape=[out_depth],
dtype=tf.float32,
regularizer=tf.contrib.layers.l2_regularizer(weight_decay),
name='bias')
output = tf.nn.bias_add(output, biases, name=name)
if activation is not None:
output = getattr(tf.nn, activation)(output, name=activation)
return output
def fc(inp,
out_depth,
kernel_init='xavier',
kernel_init_kwargs=None,
use_bias=True,
bias=1,
weight_decay=None,
activation='relu',
batch_norm=False,
is_training=False,
batch_norm_decay=0.9,
batch_norm_epsilon=1e-5,
init_zero=None,
dropout=None,
dropout_seed=0,
time_sep=False,
time_suffix=None,
bn_trainable=True,
crossdevice_bn_kwargs={},
name='fc'):
if batch_norm:
use_bias = False
if weight_decay is None:
weight_decay = 0.
# assert out_shape is not None
if kernel_init_kwargs is None:
kernel_init_kwargs = {}
resh = tf.reshape(inp, [inp.get_shape().as_list()[0], -1], name='reshape')
in_depth = resh.get_shape().as_list()[-1]
# weights
init = initializer(kernel_init, **kernel_init_kwargs)
kernel = tf.get_variable(initializer=init,
shape=[in_depth, out_depth],
dtype=tf.float32,
regularizer=tf.contrib.layers.l2_regularizer(weight_decay),
name='weights')
if use_bias:
init = initializer(kind='constant', value=bias)
biases = tf.get_variable(initializer=init,
shape=[out_depth],
dtype=tf.float32,
regularizer=tf.contrib.layers.l2_regularizer(weight_decay),
name='bias')
# ops
if dropout is not None:
resh = tf.nn.dropout(resh, keep_prob=dropout, seed=dropout_seed, name='dropout')
fcm = tf.matmul(resh, kernel)
if use_bias:
output = tf.nn.bias_add(fcm, biases, name=name)
else:
output = tf.identity(fcm, name=name)
if activation is not None:
output = getattr(tf.nn, activation)(output, name=activation)
if batch_norm:
# if activation is none, should use zeros; else ones
if init_zero is None:
init_zero = True if activation is None else False
if init_zero:
gamma_init = tf.zeros_initializer()
else:
gamma_init = tf.ones_initializer()
if time_suffix is not None:
bn_op_name = "post_conv_BN_" + time_suffix
reuse_flag = tf.AUTO_REUSE # create bn variables per timestep if they do not exist
else:
bn_op_name = "post_conv_BN"
reuse_flag = None
use_crossgpu_bn = crossdevice_bn_kwargs.get('use_crossgpu_bn', False)
use_crtpu_bn = crossdevice_bn_kwargs.get('use_crtpu_bn', False)
if use_crossgpu_bn:
cg_bn_kw = copy.deepcopy(crossdevice_bn_kwargs)
cg_bn_kw.pop('use_crossgpu_bn', False)
cg_bn_kw.pop('use_crtpu_bn', False)
            output = crossgpu_batch_norm(inputs=output,
decay=batch_norm_decay,
epsilon=batch_norm_epsilon,
training=is_training,
trainable=bn_trainable,
gamma_initializer=gamma_init,
scope=bn_op_name,
reuse=reuse_flag,
**cg_bn_kw)
elif use_crtpu_bn:
crtpu_bn_func = CRTPUBatchNormalization(axis=-1,
momentum=batch_norm_decay,
epsilon=batch_norm_epsilon,
center=True,
scale=True,
trainable=bn_trainable,
gamma_initializer=gamma_init,
name=bn_op_name,
_reuse=reuse_flag,
_scope=bn_op_name)
output = crtpu_bn_func(output, training=is_training)
else:
output = tf.layers.batch_normalization(inputs=output,
axis=-1,
momentum=batch_norm_decay,
epsilon=batch_norm_epsilon,
center=True,
scale=True,
training=is_training,
trainable=bn_trainable,
fused=True,
gamma_initializer=gamma_init,
name=bn_op_name,
reuse=reuse_flag)
return output
def global_pool(inp, kind='avg', keep_dims=False, name=None):
if kind not in ['max', 'avg']:
raise ValueError('Only global avg or max pool is allowed, but'
'you requested {}.'.format(kind))
if name is None:
name = 'global_{}_pool'.format(kind)
h, w = inp.get_shape().as_list()[1:3]
out = getattr(tf.nn, kind + '_pool')(inp,
ksize=[1,h,w,1],
strides=[1,1,1,1],
padding='VALID')
if keep_dims:
output = tf.identity(out, name=name)
else:
output = tf.reshape(out, [out.get_shape().as_list()[0], -1], name=name)
return output
def avg_pool2d(inp, kernel_size, stride=2, padding='VALID', name=None):
if name is None:
name = 'avg_pool2d'
output = tf.contrib.layers.avg_pool2d(inp, kernel_size=kernel_size, stride=stride, padding=padding)
return output
class ConvNet(object):
INTERNAL_FUNC = ['arg_scope', '_func_wrapper', '_val2list', 'layer',
'_reuse_scope_name', '__call__', '_get_func']
CUSTOM_FUNC = [conv, fc, global_pool, conv_bnf, depthsep_conv, depth_conv, avg_pool2d]
def __init__(self, defaults=None, name=None):
"""
A quick convolutional neural network constructor
        This is a wrapper over many tf.nn functions for quick construction of
a standard convolutional neural network that uses 2d convolutions, pooling
and fully-connected layers, and most other tf.nn methods.
It also stores layers and their parameters easily accessible per
tfutils' approach of saving everything.
Kwargs:
- defaults
            Default kwargs values for functions. Complementary to `arg_scope`.
- name (default: '')
If '', then the existing scope is used.
"""
self._defaults = defaults if defaults is not None else {}
self.name = name
self.state = None
self.output = None
self._layer = None
self.layers = OrderedDict()
self.params = OrderedDict()
self._scope_initialized = False
def __getattribute__(self, attr):
attrs = object.__getattribute__(self, '__dict__')
internal_func = object.__getattribute__(self, 'INTERNAL_FUNC')
if attr in attrs: # is it an attribute?
return attrs[attr]
elif attr in internal_func: # is it one of the internal functions?
return object.__getattribute__(self, attr)
else:
func = self._get_func(attr)
return self._func_wrapper(func)
def _get_func(self, attr):
custom_func = object.__getattribute__(self, 'CUSTOM_FUNC')
custom_func_names = [f.__name__ for f in custom_func]
if attr in custom_func_names: # is it one of the custom functions?
func = custom_func[custom_func_names.index(attr)]
else:
func = getattr(tf.nn, attr) # ok, so it is a tf.nn function
return func
def _func_wrapper(self, func):
"""
A wrapper on top of *any* function that is called.
- Pops `inp` and `layer` from kwargs,
- All args are turned into kwargs
- Default values from arg_scope are set
- Sets the name in kwargs to func.__name__ if not specified
- Expands `strides` from an int or list inputs for
all functions and expands `ksize` for pool functions.
If `layer` is not None, a new scope is created, else the existing scope
is reused.
Finally, all params are stored.
"""
@wraps(func)
def wrapper(*args, **kwargs):
kwargs['func_name'] = func.__name__
# convert args to kwargs
varnames = inspect.getargspec(func).args
for i, arg in enumerate(args):
kwargs[varnames[i+1]] = arg # skip the first (inputs)
layer = kwargs.pop('layer', self._layer)
if layer not in self.params:
self.params[layer] = OrderedDict()
# update kwargs with default values defined by user
if func.__name__ in self._defaults:
kwargs.update(self._defaults[func.__name__])
if 'name' not in kwargs:
fname = func.__name__
                if fname in self.params[layer]:
                    i = 1
                    while fname + '_{}'.format(i) in self.params[layer]:
                        i += 1
                    fname += '_{}'.format(i)
kwargs['name'] = fname
spec = ['avg_pool', 'max_pool', 'max_pool_with_argmax']
if 'ksize' in kwargs and func.__name__ in spec:
kwargs['ksize'] = self._val2list(kwargs['ksize'])
if 'strides' in kwargs:
kwargs['strides'] = self._val2list(kwargs['strides'])
self.params[layer][kwargs['name']] = kwargs
return wrapper
def __call__(self, inp=None):
output = inp
for layer, params in self.params.items():
with tf.variable_scope(layer):
for func_name, kwargs in params.items():
with tf.variable_scope(func_name):
output = kwargs.get('inp', output)
if output is None:
raise ValueError('Layer {} function {} got None as input'.format(layer, func_name))
kw = {k:v for k,v in kwargs.items() if k not in ['func_name', 'inp']}
func = self._get_func(kwargs['func_name'])
output = tf.identity(func(output, **kw), name='output')
self.layers[layer] = tf.identity(output, name='output')
self.output = output
return output
def _val2list(self, value):
if isinstance(value, int):
out = [1, value, value, 1]
elif len(value) == 2:
out = [1, value[0], value[1], 1]
else:
out = value
return out
@contextmanager
def arg_scope(self, defaults):
"""
Sets the arg_scope.
Pass a dict of {<func_name>: {<arg_name>: <arg_value>, ...}, ...}. These
values will then override the default values for the specified functions
whenever that function is called.
"""
self._defaults = defaults
yield
self._defaults = {}
@contextmanager
def layer(self, name):
"""
Sets the scope. Can be used with `with`.
"""
if name is None or name == '':
raise ValueError('Layer name cannot be None or an empty string')
self._layer = name
yield
def _reuse_scope_name(self, name):
graph = tf.get_default_graph()
if graph._name_stack is not None and graph._name_stack != '':
name = graph._name_stack + '/' + name + '/' # this will reuse the already-created scope
else:
name += '/'
return name
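# Minimal usage sketch (assumed input shape, not part of the original module):
# layers are declared first, then the graph is built by calling the instance.
#   m = ConvNet()
#   m.conv(64, 3, 1, layer='conv1')
#   m.max_pool(2, 2, padding='SAME', layer='conv1')
#   m.fc(10, activation=None, layer='fc')
#   out = m(tf.zeros([8, 32, 32, 3]))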
def mnist(train=True, seed=0):
m = ConvNet()
with m.arg_scope({'fc': {'kernel_init': 'truncated_normal',
'kernel_init_kwargs': {'stddev': .01, 'seed': seed},
'dropout': None, 'batch_norm': False}}):
m.fc(128, layer='hidden1')
m.fc(32, layer='hidden2')
m.fc(10, activation=None, layer='softmax_linear')
return m
def alexnet(train=True, norm=True, seed=0, **kwargs):
defaults = {'conv': {'batch_norm': False,
'kernel_init': 'xavier',
'kernel_init_kwargs': {'seed': seed}},
'weight_decay': .0005,
'max_pool': {'padding': 'SAME'},
'fc': {'batch_norm': False,
'kernel_init': 'truncated_normal',
'kernel_init_kwargs': {'stddev': .01, 'seed': seed},
'weight_decay': .0005,
'dropout_seed': 0}}
m = ConvNet(defaults=defaults)
dropout = .5 if train else None
m.conv(96, 11, 4, padding='VALID', layer='conv1')
if norm:
m.lrn(depth_radius=5, bias=1, alpha=.0001, beta=.75, layer='conv1')
m.max_pool(3, 2, layer='conv1')
m.conv(256, 5, 1, layer='conv2')
if norm:
m.lrn(depth_radius=5, bias=1, alpha=.0001, beta=.75, layer='conv2')
m.max_pool(3, 2, layer='conv2')
m.conv(384, 3, 1, layer='conv3')
m.conv(384, 3, 1, layer='conv4')
m.conv(256, 3, 1, layer='conv5')
m.max_pool(3, 2, layer='conv5')
m.fc(4096, dropout=dropout, bias=.1, layer='fc6')
m.fc(4096, dropout=dropout, bias=.1, layer='fc7')
m.fc(1000, activation=None, dropout=None, bias=0, layer='fc8')
return m
def mnist_tfutils(inputs, train=True, **kwargs):
m = mnist(train=train)
return m(inputs['images']), m.params
def alexnet_tfutils(inputs, **kwargs):
m = alexnet(**kwargs)
return m(inputs['images']), m.params
| {
"repo_name": "neuroailab/tfutils",
"path": "tfutils/model_tool_old.py",
"copies": "1",
"size": "30720",
"license": "mit",
"hash": 3235465711150839000,
"line_mean": 37.0198019802,
"line_max": 132,
"alpha_frac": 0.4926106771,
"autogenerated": false,
"ratio": 4.328589544878118,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.009653807465298416,
"num_lines": 808
} |
from __future__ import absolute_import, division, print_function
import inspect
import pprint
import sys
import traceback
from inspect import CO_VARARGS, CO_VARKEYWORDS
import attr
import re
from weakref import ref
from _pytest.compat import _PY2, _PY3, PY35, safe_str
from six import text_type
import py
import six
builtin_repr = repr
if _PY3:
from traceback import format_exception_only
else:
from ._py2traceback import format_exception_only
class Code(object):
""" wrapper around Python code objects """
def __init__(self, rawcode):
if not hasattr(rawcode, "co_filename"):
rawcode = getrawcode(rawcode)
try:
self.filename = rawcode.co_filename
self.firstlineno = rawcode.co_firstlineno - 1
self.name = rawcode.co_name
except AttributeError:
raise TypeError("not a code object: %r" % (rawcode,))
self.raw = rawcode
def __eq__(self, other):
return self.raw == other.raw
__hash__ = None
def __ne__(self, other):
return not self == other
@property
def path(self):
""" return a path object pointing to source code (note that it
might not point to an actually existing file). """
try:
p = py.path.local(self.raw.co_filename)
# maybe don't try this checking
if not p.check():
raise OSError("py.path check failed.")
except OSError:
# XXX maybe try harder like the weird logic
# in the standard lib [linecache.updatecache] does?
p = self.raw.co_filename
return p
@property
def fullsource(self):
""" return a _pytest._code.Source object for the full source file of the code
"""
from _pytest._code import source
full, _ = source.findsource(self.raw)
return full
def source(self):
""" return a _pytest._code.Source object for the code object's source only
"""
# return source only for that part of code
import _pytest._code
return _pytest._code.Source(self.raw)
def getargs(self, var=False):
""" return a tuple with the argument names for the code object
if 'var' is set True also return the names of the variable and
keyword arguments when present
"""
        # handy shortcut for getting args
raw = self.raw
argcount = raw.co_argcount
if var:
argcount += raw.co_flags & CO_VARARGS
argcount += raw.co_flags & CO_VARKEYWORDS
return raw.co_varnames[:argcount]
class Frame(object):
"""Wrapper around a Python frame holding f_locals and f_globals
in which expressions can be evaluated."""
def __init__(self, frame):
self.lineno = frame.f_lineno - 1
self.f_globals = frame.f_globals
self.f_locals = frame.f_locals
self.raw = frame
self.code = Code(frame.f_code)
@property
def statement(self):
""" statement this frame is at """
import _pytest._code
if self.code.fullsource is None:
return _pytest._code.Source("")
return self.code.fullsource.getstatement(self.lineno)
def eval(self, code, **vars):
""" evaluate 'code' in the frame
'vars' are optional additional local variables
returns the result of the evaluation
"""
f_locals = self.f_locals.copy()
f_locals.update(vars)
return eval(code, self.f_globals, f_locals)
def exec_(self, code, **vars):
""" exec 'code' in the frame
        'vars' are optional additional local variables
"""
f_locals = self.f_locals.copy()
f_locals.update(vars)
six.exec_(code, self.f_globals, f_locals)
def repr(self, object):
""" return a 'safe' (non-recursive, one-line) string repr for 'object'
"""
return py.io.saferepr(object)
def is_true(self, object):
return object
def getargs(self, var=False):
""" return a list of tuples (name, value) for all arguments
if 'var' is set True also include the variable and keyword
arguments when present
"""
retval = []
for arg in self.code.getargs(var):
try:
retval.append((arg, self.f_locals[arg]))
except KeyError:
pass # this can occur when using Psyco
return retval
class TracebackEntry(object):
""" a single entry in a traceback """
_repr_style = None
exprinfo = None
def __init__(self, rawentry, excinfo=None):
self._excinfo = excinfo
self._rawentry = rawentry
self.lineno = rawentry.tb_lineno - 1
def set_repr_style(self, mode):
assert mode in ("short", "long")
self._repr_style = mode
@property
def frame(self):
import _pytest._code
return _pytest._code.Frame(self._rawentry.tb_frame)
@property
def relline(self):
return self.lineno - self.frame.code.firstlineno
def __repr__(self):
return "<TracebackEntry %s:%d>" % (self.frame.code.path, self.lineno + 1)
@property
def statement(self):
""" _pytest._code.Source object for the current statement """
source = self.frame.code.fullsource
return source.getstatement(self.lineno)
@property
def path(self):
""" path to the source code """
return self.frame.code.path
def getlocals(self):
return self.frame.f_locals
    locals = property(getlocals, None, None, "locals of underlying frame")
def getfirstlinesource(self):
# on Jython this firstlineno can be -1 apparently
return max(self.frame.code.firstlineno, 0)
def getsource(self, astcache=None):
""" return failing source code. """
# we use the passed in astcache to not reparse asttrees
# within exception info printing
from _pytest._code.source import getstatementrange_ast
source = self.frame.code.fullsource
if source is None:
return None
key = astnode = None
if astcache is not None:
key = self.frame.code.path
if key is not None:
astnode = astcache.get(key, None)
start = self.getfirstlinesource()
try:
astnode, _, end = getstatementrange_ast(
self.lineno, source, astnode=astnode
)
except SyntaxError:
end = self.lineno + 1
else:
if key is not None:
astcache[key] = astnode
return source[start:end]
source = property(getsource)
def ishidden(self):
""" return True if the current frame has a var __tracebackhide__
resolving to True
If __tracebackhide__ is a callable, it gets called with the
ExceptionInfo instance and can decide whether to hide the traceback.
mostly for internal use
"""
try:
tbh = self.frame.f_locals["__tracebackhide__"]
except KeyError:
try:
tbh = self.frame.f_globals["__tracebackhide__"]
except KeyError:
return False
if callable(tbh):
return tbh(None if self._excinfo is None else self._excinfo())
else:
return tbh
def __str__(self):
try:
fn = str(self.path)
except py.error.Error:
fn = "???"
name = self.frame.code.name
try:
line = str(self.statement).lstrip()
except KeyboardInterrupt:
raise
except: # noqa
line = "???"
return " File %r:%d in %s\n %s\n" % (fn, self.lineno + 1, name, line)
def name(self):
return self.frame.code.raw.co_name
    name = property(name, None, None, "co_name of underlying code")
class Traceback(list):
""" Traceback objects encapsulate and offer higher level
access to Traceback entries.
"""
Entry = TracebackEntry
def __init__(self, tb, excinfo=None):
""" initialize from given python traceback object and ExceptionInfo """
self._excinfo = excinfo
if hasattr(tb, "tb_next"):
def f(cur):
while cur is not None:
yield self.Entry(cur, excinfo=excinfo)
cur = cur.tb_next
list.__init__(self, f(tb))
else:
list.__init__(self, tb)
def cut(self, path=None, lineno=None, firstlineno=None, excludepath=None):
""" return a Traceback instance wrapping part of this Traceback
        by providing any combination of path, lineno and firstlineno, the
first frame to start the to-be-returned traceback is determined
this allows cutting the first part of a Traceback instance e.g.
for formatting reasons (removing some uninteresting bits that deal
with handling of the exception/traceback)
"""
for x in self:
code = x.frame.code
codepath = code.path
if (
(path is None or codepath == path)
and (
excludepath is None
or not hasattr(codepath, "relto")
or not codepath.relto(excludepath)
)
and (lineno is None or x.lineno == lineno)
and (firstlineno is None or x.frame.code.firstlineno == firstlineno)
):
return Traceback(x._rawentry, self._excinfo)
return self
def __getitem__(self, key):
val = super(Traceback, self).__getitem__(key)
if isinstance(key, type(slice(0))):
val = self.__class__(val)
return val
def filter(self, fn=lambda x: not x.ishidden()):
""" return a Traceback instance with certain items removed
fn is a function that gets a single argument, a TracebackEntry
instance, and should return True when the item should be added
to the Traceback, False when not
by default this removes all the TracebackEntries which are hidden
(see ishidden() above)
"""
return Traceback(filter(fn, self), self._excinfo)
def getcrashentry(self):
""" return last non-hidden traceback entry that lead
to the exception of a traceback.
"""
for i in range(-1, -len(self) - 1, -1):
entry = self[i]
if not entry.ishidden():
return entry
return self[-1]
def recursionindex(self):
""" return the index of the frame/TracebackEntry where recursion
originates if appropriate, None if no recursion occurred
"""
cache = {}
for i, entry in enumerate(self):
# id for the code.raw is needed to work around
# the strange metaprogramming in the decorator lib from pypi
# which generates code objects that have hash/value equality
# XXX needs a test
key = entry.frame.code.path, id(entry.frame.code.raw), entry.lineno
# print "checking for recursion at", key
values = cache.setdefault(key, [])
if values:
f = entry.frame
loc = f.f_locals
for otherloc in values:
if f.is_true(
f.eval(
co_equal,
__recursioncache_locals_1=loc,
__recursioncache_locals_2=otherloc,
)
):
return i
values.append(entry.frame.f_locals)
return None
co_equal = compile(
"__recursioncache_locals_1 == __recursioncache_locals_2", "?", "eval"
)
class ExceptionInfo(object):
""" wraps sys.exc_info() objects and offers
help for navigating the traceback.
"""
_striptext = ""
_assert_start_repr = (
"AssertionError(u'assert " if _PY2 else "AssertionError('assert "
)
def __init__(self, tup=None, exprinfo=None):
import _pytest._code
if tup is None:
tup = sys.exc_info()
if exprinfo is None and isinstance(tup[1], AssertionError):
exprinfo = getattr(tup[1], "msg", None)
if exprinfo is None:
exprinfo = py.io.saferepr(tup[1])
if exprinfo and exprinfo.startswith(self._assert_start_repr):
self._striptext = "AssertionError: "
self._excinfo = tup
#: the exception class
self.type = tup[0]
#: the exception instance
self.value = tup[1]
#: the exception raw traceback
self.tb = tup[2]
#: the exception type name
self.typename = self.type.__name__
#: the exception traceback (_pytest._code.Traceback instance)
self.traceback = _pytest._code.Traceback(self.tb, excinfo=ref(self))
def __repr__(self):
return "<ExceptionInfo %s tblen=%d>" % (self.typename, len(self.traceback))
def exconly(self, tryshort=False):
""" return the exception as a string
when 'tryshort' resolves to True, and the exception is a
_pytest._code._AssertionError, only the actual exception part of
the exception representation is returned (so 'AssertionError: ' is
removed from the beginning)
"""
lines = format_exception_only(self.type, self.value)
text = "".join(lines)
text = text.rstrip()
if tryshort:
if text.startswith(self._striptext):
text = text[len(self._striptext) :]
return text
def errisinstance(self, exc):
""" return True if the exception is an instance of exc """
return isinstance(self.value, exc)
def _getreprcrash(self):
exconly = self.exconly(tryshort=True)
entry = self.traceback.getcrashentry()
path, lineno = entry.frame.code.raw.co_filename, entry.lineno
return ReprFileLocation(path, lineno + 1, exconly)
def getrepr(
self,
showlocals=False,
style="long",
abspath=False,
tbfilter=True,
funcargs=False,
truncate_locals=True,
):
""" return str()able representation of this exception info.
showlocals: show locals per traceback entry
style: long|short|no|native traceback style
tbfilter: hide entries (where __tracebackhide__ is true)
in case of style==native, tbfilter and showlocals is ignored.
"""
if style == "native":
return ReprExceptionInfo(
ReprTracebackNative(
traceback.format_exception(
self.type, self.value, self.traceback[0]._rawentry
)
),
self._getreprcrash(),
)
fmt = FormattedExcinfo(
showlocals=showlocals,
style=style,
abspath=abspath,
tbfilter=tbfilter,
funcargs=funcargs,
truncate_locals=truncate_locals,
)
return fmt.repr_excinfo(self)
def __str__(self):
entry = self.traceback[-1]
loc = ReprFileLocation(entry.path, entry.lineno + 1, self.exconly())
return str(loc)
def __unicode__(self):
entry = self.traceback[-1]
loc = ReprFileLocation(entry.path, entry.lineno + 1, self.exconly())
return text_type(loc)
def match(self, regexp):
"""
Match the regular expression 'regexp' on the string representation of
the exception. If it matches then True is returned (so that it is
possible to write 'assert excinfo.match()'). If it doesn't match an
AssertionError is raised.
"""
__tracebackhide__ = True
if not re.search(regexp, str(self.value)):
assert 0, "Pattern '{!s}' not found in '{!s}'".format(regexp, self.value)
return True
@attr.s
class FormattedExcinfo(object):
""" presenting information about failing Functions and Generators. """
# for traceback entries
flow_marker = ">"
fail_marker = "E"
showlocals = attr.ib(default=False)
style = attr.ib(default="long")
abspath = attr.ib(default=True)
tbfilter = attr.ib(default=True)
funcargs = attr.ib(default=False)
truncate_locals = attr.ib(default=True)
astcache = attr.ib(default=attr.Factory(dict), init=False, repr=False)
def _getindent(self, source):
# figure out indent for given source
try:
s = str(source.getstatement(len(source) - 1))
except KeyboardInterrupt:
raise
except: # noqa
try:
s = str(source[-1])
except KeyboardInterrupt:
raise
except: # noqa
return 0
return 4 + (len(s) - len(s.lstrip()))
def _getentrysource(self, entry):
source = entry.getsource(self.astcache)
if source is not None:
source = source.deindent()
return source
def _saferepr(self, obj):
return py.io.saferepr(obj)
def repr_args(self, entry):
if self.funcargs:
args = []
for argname, argvalue in entry.frame.getargs(var=True):
args.append((argname, self._saferepr(argvalue)))
return ReprFuncArgs(args)
def get_source(self, source, line_index=-1, excinfo=None, short=False):
""" return formatted and marked up source lines. """
import _pytest._code
lines = []
if source is None or line_index >= len(source.lines):
source = _pytest._code.Source("???")
line_index = 0
if line_index < 0:
line_index += len(source)
space_prefix = " "
if short:
lines.append(space_prefix + source.lines[line_index].strip())
else:
for line in source.lines[:line_index]:
lines.append(space_prefix + line)
lines.append(self.flow_marker + " " + source.lines[line_index])
for line in source.lines[line_index + 1 :]:
lines.append(space_prefix + line)
if excinfo is not None:
indent = 4 if short else self._getindent(source)
lines.extend(self.get_exconly(excinfo, indent=indent, markall=True))
return lines
def get_exconly(self, excinfo, indent=4, markall=False):
lines = []
indent = " " * indent
# get the real exception information out
exlines = excinfo.exconly(tryshort=True).split("\n")
failindent = self.fail_marker + indent[1:]
for line in exlines:
lines.append(failindent + line)
if not markall:
failindent = indent
return lines
def repr_locals(self, locals):
if self.showlocals:
lines = []
keys = [loc for loc in locals if loc[0] != "@"]
keys.sort()
for name in keys:
value = locals[name]
if name == "__builtins__":
lines.append("__builtins__ = <builtins>")
else:
# This formatting could all be handled by the
# _repr() function, which is only reprlib.Repr in
# disguise, so is very configurable.
if self.truncate_locals:
str_repr = self._saferepr(value)
else:
str_repr = pprint.pformat(value)
# if len(str_repr) < 70 or not isinstance(value,
# (list, tuple, dict)):
lines.append("%-10s = %s" % (name, str_repr))
# else:
# self._line("%-10s =\\" % (name,))
# # XXX
# pprint.pprint(value, stream=self.excinfowriter)
return ReprLocals(lines)
def repr_traceback_entry(self, entry, excinfo=None):
import _pytest._code
source = self._getentrysource(entry)
if source is None:
source = _pytest._code.Source("???")
line_index = 0
else:
# entry.getfirstlinesource() can be -1, should be 0 on jython
line_index = entry.lineno - max(entry.getfirstlinesource(), 0)
lines = []
style = entry._repr_style
if style is None:
style = self.style
if style in ("short", "long"):
short = style == "short"
reprargs = self.repr_args(entry) if not short else None
s = self.get_source(source, line_index, excinfo, short=short)
lines.extend(s)
if short:
message = "in %s" % (entry.name)
else:
message = excinfo and excinfo.typename or ""
path = self._makepath(entry.path)
filelocrepr = ReprFileLocation(path, entry.lineno + 1, message)
localsrepr = None
if not short:
localsrepr = self.repr_locals(entry.locals)
return ReprEntry(lines, reprargs, localsrepr, filelocrepr, style)
if excinfo:
lines.extend(self.get_exconly(excinfo, indent=4))
return ReprEntry(lines, None, None, None, style)
def _makepath(self, path):
if not self.abspath:
try:
np = py.path.local().bestrelpath(path)
except OSError:
return path
if len(np) < len(str(path)):
path = np
return path
def repr_traceback(self, excinfo):
traceback = excinfo.traceback
if self.tbfilter:
traceback = traceback.filter()
if is_recursion_error(excinfo):
traceback, extraline = self._truncate_recursive_traceback(traceback)
else:
extraline = None
last = traceback[-1]
entries = []
for index, entry in enumerate(traceback):
einfo = (last == entry) and excinfo or None
reprentry = self.repr_traceback_entry(entry, einfo)
entries.append(reprentry)
return ReprTraceback(entries, extraline, style=self.style)
def _truncate_recursive_traceback(self, traceback):
"""
Truncate the given recursive traceback trying to find the starting point
of the recursion.
The detection is done by going through each traceback entry and finding the
        point at which the locals of the frame are equal to the locals of a previous frame (see ``recursionindex()``).
Handle the situation where the recursion process might raise an exception (for example
comparing numpy arrays using equality raises a TypeError), in which case we do our best to
warn the user of the error and show a limited traceback.
"""
try:
recursionindex = traceback.recursionindex()
except Exception as e:
max_frames = 10
extraline = (
"!!! Recursion error detected, but an error occurred locating the origin of recursion.\n"
" The following exception happened when comparing locals in the stack frame:\n"
" {exc_type}: {exc_msg}\n"
" Displaying first and last {max_frames} stack frames out of {total}."
).format(
exc_type=type(e).__name__,
exc_msg=safe_str(e),
max_frames=max_frames,
total=len(traceback),
)
traceback = traceback[:max_frames] + traceback[-max_frames:]
else:
if recursionindex is not None:
extraline = "!!! Recursion detected (same locals & position)"
traceback = traceback[: recursionindex + 1]
else:
extraline = None
return traceback, extraline
def repr_excinfo(self, excinfo):
if _PY2:
reprtraceback = self.repr_traceback(excinfo)
reprcrash = excinfo._getreprcrash()
return ReprExceptionInfo(reprtraceback, reprcrash)
else:
repr_chain = []
e = excinfo.value
descr = None
seen = set()
while e is not None and id(e) not in seen:
seen.add(id(e))
if excinfo:
reprtraceback = self.repr_traceback(excinfo)
reprcrash = excinfo._getreprcrash()
else:
# fallback to native repr if the exception doesn't have a traceback:
# ExceptionInfo objects require a full traceback to work
reprtraceback = ReprTracebackNative(
traceback.format_exception(type(e), e, None)
)
reprcrash = None
repr_chain += [(reprtraceback, reprcrash, descr)]
if e.__cause__ is not None:
e = e.__cause__
excinfo = (
ExceptionInfo((type(e), e, e.__traceback__))
if e.__traceback__
else None
)
descr = "The above exception was the direct cause of the following exception:"
elif e.__context__ is not None and not e.__suppress_context__:
e = e.__context__
excinfo = (
ExceptionInfo((type(e), e, e.__traceback__))
if e.__traceback__
else None
)
descr = "During handling of the above exception, another exception occurred:"
else:
e = None
repr_chain.reverse()
return ExceptionChainRepr(repr_chain)
class TerminalRepr(object):
def __str__(self):
s = self.__unicode__()
if _PY2:
s = s.encode("utf-8")
return s
def __unicode__(self):
# FYI this is called from pytest-xdist's serialization of exception
# information.
io = py.io.TextIO()
tw = py.io.TerminalWriter(file=io)
self.toterminal(tw)
return io.getvalue().strip()
def __repr__(self):
return "<%s instance at %0x>" % (self.__class__, id(self))
class ExceptionRepr(TerminalRepr):
def __init__(self):
self.sections = []
def addsection(self, name, content, sep="-"):
self.sections.append((name, content, sep))
def toterminal(self, tw):
for name, content, sep in self.sections:
tw.sep(sep, name)
tw.line(content)
class ExceptionChainRepr(ExceptionRepr):
def __init__(self, chain):
super(ExceptionChainRepr, self).__init__()
self.chain = chain
# reprcrash and reprtraceback of the outermost (the newest) exception
# in the chain
self.reprtraceback = chain[-1][0]
self.reprcrash = chain[-1][1]
def toterminal(self, tw):
for element in self.chain:
element[0].toterminal(tw)
if element[2] is not None:
tw.line("")
tw.line(element[2], yellow=True)
super(ExceptionChainRepr, self).toterminal(tw)
class ReprExceptionInfo(ExceptionRepr):
def __init__(self, reprtraceback, reprcrash):
super(ReprExceptionInfo, self).__init__()
self.reprtraceback = reprtraceback
self.reprcrash = reprcrash
def toterminal(self, tw):
self.reprtraceback.toterminal(tw)
super(ReprExceptionInfo, self).toterminal(tw)
class ReprTraceback(TerminalRepr):
entrysep = "_ "
def __init__(self, reprentries, extraline, style):
self.reprentries = reprentries
self.extraline = extraline
self.style = style
def toterminal(self, tw):
# the entries might have different styles
for i, entry in enumerate(self.reprentries):
if entry.style == "long":
tw.line("")
entry.toterminal(tw)
if i < len(self.reprentries) - 1:
next_entry = self.reprentries[i + 1]
if (
entry.style == "long"
or entry.style == "short"
and next_entry.style == "long"
):
tw.sep(self.entrysep)
if self.extraline:
tw.line(self.extraline)
class ReprTracebackNative(ReprTraceback):
def __init__(self, tblines):
self.style = "native"
self.reprentries = [ReprEntryNative(tblines)]
self.extraline = None
class ReprEntryNative(TerminalRepr):
style = "native"
def __init__(self, tblines):
self.lines = tblines
def toterminal(self, tw):
tw.write("".join(self.lines))
class ReprEntry(TerminalRepr):
localssep = "_ "
def __init__(self, lines, reprfuncargs, reprlocals, filelocrepr, style):
self.lines = lines
self.reprfuncargs = reprfuncargs
self.reprlocals = reprlocals
self.reprfileloc = filelocrepr
self.style = style
def toterminal(self, tw):
if self.style == "short":
self.reprfileloc.toterminal(tw)
for line in self.lines:
red = line.startswith("E ")
tw.line(line, bold=True, red=red)
# tw.line("")
return
if self.reprfuncargs:
self.reprfuncargs.toterminal(tw)
for line in self.lines:
red = line.startswith("E ")
tw.line(line, bold=True, red=red)
if self.reprlocals:
# tw.sep(self.localssep, "Locals")
tw.line("")
self.reprlocals.toterminal(tw)
if self.reprfileloc:
if self.lines:
tw.line("")
self.reprfileloc.toterminal(tw)
def __str__(self):
return "%s\n%s\n%s" % ("\n".join(self.lines), self.reprlocals, self.reprfileloc)
class ReprFileLocation(TerminalRepr):
def __init__(self, path, lineno, message):
self.path = str(path)
self.lineno = lineno
self.message = message
def toterminal(self, tw):
# filename and lineno output for each entry,
        # using an output format that most editors understand
msg = self.message
i = msg.find("\n")
if i != -1:
msg = msg[:i]
tw.write(self.path, bold=True, red=True)
tw.line(":%s: %s" % (self.lineno, msg))
class ReprLocals(TerminalRepr):
def __init__(self, lines):
self.lines = lines
def toterminal(self, tw):
for line in self.lines:
tw.line(line)
class ReprFuncArgs(TerminalRepr):
def __init__(self, args):
self.args = args
def toterminal(self, tw):
if self.args:
linesofar = ""
for name, value in self.args:
ns = "%s = %s" % (safe_str(name), safe_str(value))
if len(ns) + len(linesofar) + 2 > tw.fullwidth:
if linesofar:
tw.line(linesofar)
linesofar = ns
else:
if linesofar:
linesofar += ", " + ns
else:
linesofar = ns
if linesofar:
tw.line(linesofar)
tw.line("")
def getrawcode(obj, trycall=True):
""" return code object for given function. """
try:
return obj.__code__
except AttributeError:
obj = getattr(obj, "im_func", obj)
obj = getattr(obj, "func_code", obj)
obj = getattr(obj, "f_code", obj)
obj = getattr(obj, "__code__", obj)
if trycall and not hasattr(obj, "co_firstlineno"):
if hasattr(obj, "__call__") and not inspect.isclass(obj):
x = getrawcode(obj.__call__, trycall=False)
if hasattr(x, "co_firstlineno"):
return x
return obj
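# Illustrative usage sketch (not part of the original module): getrawcode()
# unwraps im_func/func_code/__call__ indirection until it reaches an object
# carrying co_firstlineno, so plain functions, bound methods and callable
# instances all resolve to a code object.
def _example_getrawcode():
    class Greeter(object):
        def __call__(self):
            return "hello"
    code = getrawcode(Greeter())
    # For the callable instance this is Greeter.__call__.__code__.
    return code.co_firstlineno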
if PY35: # RecursionError introduced in 3.5
def is_recursion_error(excinfo):
return excinfo.errisinstance(RecursionError) # noqa
else:
def is_recursion_error(excinfo):
if not excinfo.errisinstance(RuntimeError):
return False
try:
return "maximum recursion depth exceeded" in str(excinfo.value)
except UnicodeError:
return False
| {
"repo_name": "davidszotten/pytest",
"path": "src/_pytest/_code/code.py",
"copies": "1",
"size": "32803",
"license": "mit",
"hash": 163445906813698780,
"line_mean": 32.4383282365,
"line_max": 117,
"alpha_frac": 0.552327531,
"autogenerated": false,
"ratio": 4.29246270609788,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0002778081218052594,
"num_lines": 981
} |
from __future__ import absolute_import, division, print_function
import json
from flask.json import jsonify
import numpy as np
from keras.preprocessing import image
import keras.backend as K
from contextlib import contextmanager
from quiver_engine.imagenet_utils import preprocess_input, decode_imagenet_predictions
from os import path
def validate_launch(html_base_dir):
print('Starting webserver from:', html_base_dir)
assert path.exists(path.join(html_base_dir, 'quiverboard')), 'Quiverboard must be a ' \
'subdirectory of {}'.format(html_base_dir)
assert path.exists(path.join(html_base_dir, 'quiverboard', 'dist')), 'Dist must be a ' \
'subdirectory of quiverboard'
assert path.exists(
path.join(html_base_dir, 'quiverboard', 'dist', 'index.html')), 'Index.html missing'
def get_evaluation_context():
return get_evaluation_context_getter()()
def get_evaluation_context_getter():
if K.backend() == 'tensorflow':
import tensorflow as tf
return tf.get_default_graph().as_default
if K.backend() == 'theano':
return contextmanager(lambda: (yield))
def get_input_config(model):
'''
returns a tuple (inputDimensions, numChannels)
'''
return (
model.get_input_shape_at(0)[2:4],
model.get_input_shape_at(0)[1]
) if K.image_dim_ordering() == 'th' else (
#tf ordering
model.get_input_shape_at(0)[1:3],
model.get_input_shape_at(0)[3]
)
def decode_predictions(preds, classes, top):
if not classes:
print("Warning! you didn't pass your own set of classes for the model therefore imagenet classes are used")
return decode_imagenet_predictions(preds, top)
if len(preds.shape) != 2 or preds.shape[1] != len(classes):
raise ValueError('you need to provide same number of classes as model prediction output ' + \
'model returns %s predictions, while there are %s classes' % (
preds.shape[1], len(classes)))
results = []
for pred in preds:
top_indices = pred.argsort()[-top:][::-1]
result = [("", classes[i], pred[i]) for i in top_indices]
results.append(result)
return results
# util function to convert a tensor into a valid image
def deprocess_image(x):
# normalize tensor: center on 0., ensure std is 0.1
x -= x.mean()
x /= (x.std() + 1e-5)
x *= 0.1
# clip to [0, 1]
x += 0.5
x = np.clip(x, 0, 1)
return x
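# Illustrative usage sketch (not part of the original module): deprocess_image()
# recentres and rescales an arbitrary float tensor so that values within a few
# standard deviations land in [0, 1], which makes it safe to display or save.
def _example_deprocess_image():
    x = np.random.randn(64, 64, 3) * 3.0 + 7.0  # arbitrary mean and spread
    img = deprocess_image(x)
    assert 0.0 <= img.min() and img.max() <= 1.0
    return img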
def load_img_scaled(input_path, target_shape, grayscale=False):
return np.expand_dims(
image.img_to_array(image.load_img(input_path, target_size=target_shape, grayscale=grayscale)) / 255.0,
axis=0
)
def load_img(input_path, target_shape, grayscale=False, mean=None, std=None):
img = image.load_img(input_path, target_size=target_shape,
grayscale=grayscale)
img_arr = np.expand_dims(image.img_to_array(img), axis=0)
if not grayscale:
img_arr = preprocess_input(img_arr, mean=mean, std=std)
return img_arr
def get_jsonable_obj(obj):
return json.loads(get_json(obj))
def get_json(obj):
return json.dumps(obj, default=get_json_type)
def safe_jsonify(obj):
return jsonify(get_jsonable_obj(obj))
def get_json_type(obj):
# if obj is any numpy type
if type(obj).__module__ == np.__name__:
return obj.item()
# if obj is a python 'type'
if type(obj).__name__ == type.__name__:
return obj.__name__
raise TypeError('Not JSON Serializable')
| {
"repo_name": "keplr-io/quiver",
"path": "quiver_engine/util.py",
"copies": "1",
"size": "3698",
"license": "mit",
"hash": 341728210986956740,
"line_mean": 32.9266055046,
"line_max": 115,
"alpha_frac": 0.6165494862,
"autogenerated": false,
"ratio": 3.579864472410455,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9673435246184053,
"avg_score": 0.004595742485280245,
"num_lines": 109
} |
from __future__ import absolute_import, division, print_function
import json
from flask.json import jsonify
import numpy as np
import keras.backend as K
from contextlib import contextmanager
from quiver_engine.imagenet_utils import preprocess_input, decode_imagenet_predictions
from os import path
def validate_launch(html_base_dir):
print('Starting webserver from:', html_base_dir)
assert path.exists(path.join(html_base_dir, 'quiverboard')), 'Quiverboard must be a ' \
'subdirectory of {}'.format(html_base_dir)
assert path.exists(path.join(html_base_dir, 'quiverboard', 'dist')), 'Dist must be a ' \
'subdirectory of quiverboard'
assert path.exists(
path.join(html_base_dir, 'quiverboard', 'dist', 'index.html')), 'Index.html missing'
def get_evaluation_context():
return get_evaluation_context_getter()()
def get_evaluation_context_getter():
if K.backend() == 'tensorflow':
import tensorflow as tf
return tf.get_default_graph().as_default
if K.backend() == 'theano':
return contextmanager(lambda: (yield))
def get_input_config(model):
'''
    returns a tuple (number of timesteps, number of channels)
'''
return (
model.get_input_shape_at(0)[2],
model.get_input_shape_at(0)[1]
) if K.image_dim_ordering() == 'th' else (
#tf ordering
model.get_input_shape_at(0)[1],
model.get_input_shape_at(0)[2]
)
def decode_predictions(preds, classes, top):
    if not classes:
        print("Warning! you didn't pass your own set of classes for the model")
        return preds
    # Number of classes
    nK = len(classes)
if len(preds.shape) != 2 or preds.shape[1] != len(classes):
raise ValueError('you need to provide same number of classes as model prediction output ' + \
'model returns %s predictions, while there are %s classes' % (
preds.shape[1], len(classes)))
results = []
for pred in preds:
if nK >= 10:
class_indices = pred.argsort()[-top:][::-1]
else:
class_indices = range(nK)
result = [("", classes[i], pred[i]) for i in class_indices]
results.append(result)
return results
def load_sig(input_path):
return np.load(input_path[:-3]+'npy')
def get_jsonable_obj(obj):
return json.loads(get_json(obj))
def get_json(obj):
return json.dumps(obj, default=get_json_type)
def safe_jsonify(obj):
return jsonify(get_jsonable_obj(obj))
def get_json_type(obj):
# if obj is any numpy type
if type(obj).__module__ == np.__name__:
return obj.item()
# if obj is a python 'type'
if type(obj).__name__ == type.__name__:
return obj.__name__
raise TypeError('Not JSON Serializable')
| {
"repo_name": "wmkouw/quiver-time",
"path": "quiver_engine/util.py",
"copies": "1",
"size": "2959",
"license": "mit",
"hash": -6418581242657055000,
"line_mean": 31.8777777778,
"line_max": 113,
"alpha_frac": 0.5985130112,
"autogenerated": false,
"ratio": 3.7694267515923565,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9834973458184377,
"avg_score": 0.006593260921595733,
"num_lines": 90
} |
from __future__ import absolute_import, division, print_function
import json
import numpy as np
from keras.preprocessing import image
import keras.backend as K
from contextlib import contextmanager
from quiver_engine.imagenet_utils import preprocess_input, decode_imagenet_predictions
def get_evaluation_context():
return get_evaluation_context_getter()()
def get_evaluation_context_getter():
if K.backend() == 'tensorflow':
import tensorflow as tf
return tf.get_default_graph().as_default
if K.backend() == 'theano':
return contextmanager(lambda: (yield))
def get_input_config(model):
'''
returns a tuple (inputDimensions, numChannels)
'''
return (
model.get_input_shape_at(0)[2:4],
model.get_input_shape_at(0)[1]
) if K.image_dim_ordering() == 'th' else (
#tf ordering
model.get_input_shape_at(0)[1:3],
model.get_input_shape_at(0)[3]
)
def decode_predictions(preds, classes, top):
if not classes:
print("Warning! you didn't pass your own set of classes for the model therefore imagenet classes are used")
return decode_imagenet_predictions(preds, top)
if len(preds.shape) != 2 or preds.shape[1] != len(classes):
raise ValueError('you need to provide same number of classes as model prediction output ' + \
'model returns %s predictions, while there are %s classes' % (
preds.shape[1], len(classes)))
results = []
for pred in preds:
top_indices = pred.argsort()[-top:][::-1]
result = [("", classes[i], pred[i]) for i in top_indices]
results.append(result)
return results
# util function to convert a tensor into a valid image
def deprocess_image(x):
# normalize tensor: center on 0., ensure std is 0.1
x -= x.mean()
x /= (x.std() + 1e-5)
x *= 0.1
# clip to [0, 1]
x += 0.5
x = np.clip(x, 0, 1)
return x
def load_img_scaled(input_path, target_shape, grayscale=False):
return np.expand_dims(
image.img_to_array(image.load_img(input_path, target_size=target_shape, grayscale=grayscale)) / 255.0,
axis=0
)
def load_img(input_path, target_shape, grayscale=False):
img = image.load_img(input_path, target_size=target_shape, grayscale=grayscale)
img_arr = np.expand_dims(image.img_to_array(img), axis=0)
if not grayscale:
img_arr = preprocess_input(img_arr)
return img_arr
def get_json(obj):
return json.dumps(obj, default=get_json_type)
def get_json_type(obj):
# if obj is any numpy type
if type(obj).__module__ == np.__name__:
return obj.item()
# if obj is a python 'type'
if type(obj).__name__ == type.__name__:
return obj.__name__
raise TypeError('Not JSON Serializable')
| {
"repo_name": "jakebian/quiver",
"path": "quiver_engine/util.py",
"copies": "1",
"size": "2835",
"license": "mit",
"hash": 2508923479440446000,
"line_mean": 30.1538461538,
"line_max": 115,
"alpha_frac": 0.6335097002,
"autogenerated": false,
"ratio": 3.513011152416357,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9629404363161329,
"avg_score": 0.0034232978910055723,
"num_lines": 91
} |
from __future__ import (absolute_import, division, print_function)
import json
import os
import sys
import getopt
import subprocess
import re
import copy
import hashlib
def parseCmakeBoolean(var):
rejected_strings = ['false','off','no']
    if var.lower() in rejected_strings:
        return False
    else:
        return True
def getBranchName(directory):
"""Returns the name of the current git branch"""
return subprocess.check_output(["git","rev-parse","--abbrev-ref","HEAD"],cwd=directory).strip()
def getRemotes(directory):
"""Returns list of remote git repositories"""
gitRemoteOutput = subprocess.check_output(['git','remote','-v'],cwd=directory)
remotes = []
for line in gitRemoteOutput.splitlines():
if '(fetch)' in line:
splitLine = line.split();
remotes.append({'name': splitLine[0].strip(), 'url': splitLine[1].strip()})
return remotes
def gitLogValue(format,directory):
"""Returns git log value specified by format"""
return subprocess.check_output(["git","log","-1","--pretty=format:%"+format],cwd=directory).strip()
def getAllFilesWithExtension(directory,extension):
"""Recursively return a list of all files in directory with specified extension"""
filesWithExtension = []
for root, dirs, files in os.walk(directory):
for file in files:
if file.endswith(extension):
filesWithExtension.append(os.path.realpath(os.path.join(root, file)))
return filesWithExtension
def getSourcePathFromGcovFile(gcovFilename):
"""Return the source path corresponding to a .gcov file"""
print("filename: " +gcovFilename)
gcovPath,gcovFilenameWithExtension = os.path.split(gcovFilename)
srcFilename = re.sub(".gcov$","",gcovFilenameWithExtension)
return re.sub("#","/",srcFilename)
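# Worked example (illustrative): gcov -p mangles path separators into "#", so a
# file named "src#lib#parser.cpp.gcov" maps back to the source path
# "src/lib/parser.cpp".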
def main(argv):
arguments = ['COVERAGE_SRCS_FILE=','COVERALLS_OUTPUT_FILE=','COV_PATH=','PROJECT_ROOT=','TRAVISCI=']
COVERAGE_SRCS_FILE=None
COVERALLS_OUTPUT_FILE=None
COV_PATH=None
PROJECT_ROOT=None
TRAVISCI=None
optlist, args = getopt.getopt(argv,'',arguments)
for o, a in optlist:
if o == "--COVERAGE_SRCS_FILE":
COVERAGE_SRCS_FILE=a
elif o == "--COVERALLS_OUTPUT_FILE":
COVERALLS_OUTPUT_FILE=a
elif o == "--COV_PATH":
COV_PATH=a
elif o == "--PROJECT_ROOT":
PROJECT_ROOT=a
elif o == "--TRAVISCI":
TRAVISCI=a
else:
assert False, "unhandled option"
if COVERAGE_SRCS_FILE == None:
assert False, "COVERAGE_SRCS_FILE is not defined"
if COVERALLS_OUTPUT_FILE==None:
assert False, "COVERALLS_OUTPUT_FILE is not defined"
if COV_PATH==None:
assert False, "COV_PATH is not defined"
if PROJECT_ROOT==None:
assert False, "PROJECT_ROOT is not defined"
gcdaAllFiles = getAllFilesWithExtension(COV_PATH,".gcda")
for gcdaFile in gcdaAllFiles:
gcdaDirectory = os.path.dirname(gcdaFile)
subprocess.check_call(["gcov","-p","-o",gcdaDirectory,gcdaFile],cwd=COV_PATH)
gcovAllFiles = getAllFilesWithExtension(COV_PATH,".gcov")
sourcesToCheck = [line.strip() for line in open(COVERAGE_SRCS_FILE, 'r')]
gcovCheckedFiles = []
uncheckedSources = sourcesToCheck
for gcovFile in gcovAllFiles:
sourceWithPath = getSourcePathFromGcovFile(gcovFile)
if sourceWithPath in sourcesToCheck:
print("YES: ",sourceWithPath.strip()," WAS FOUND")
gcovCheckedFiles.append(gcovFile)
uncheckedSources.remove(sourceWithPath)
else:
print("NO: ",sourceWithPath.strip()," WAS NOT FOUND")
coverageList = []
for gcovFilename in gcovCheckedFiles:
fileCoverage = {}
#get name for json file
sourceWithPath = getSourcePathFromGcovFile(gcovFilename)
fileCoverage['name'] = os.path.relpath(sourceWithPath,PROJECT_ROOT)
print("Generating JSON file for "+fileCoverage['name'])
fileCoverage['source_digest'] = hashlib.md5(open(sourceWithPath, 'rb').read()).hexdigest()
lineCoverage = []
gcovFile = open(gcovFilename,'r')
for line in gcovFile:
line = [i.strip() for i in line.split(':')]
lineNumber = int(line[1])
if lineNumber != 0:
if line[0] == '#####':
lineCoverage.append(0)
elif line[0] == '=====':
lineCoverage.append(0)
elif line[0] == '-':
lineCoverage.append(None)
else:
lineCoverage.append(int(line[0]))
                if lineNumber != len(lineCoverage):
                    raise RuntimeError('line_number does not match len(array)')
gcovFile.close()
fileCoverage['coverage'] = lineCoverage
coverageList.append(copy.deepcopy(fileCoverage))
for uncheckedFilename in uncheckedSources:
fileCoverage = {}
fileCoverage['name'] = os.path.relpath(uncheckedFilename,PROJECT_ROOT)
fileCoverage['source_digest'] = hashlib.md5(open(uncheckedFilename, 'rb').read()).hexdigest()
lineCoverage = []
uncheckedFile = open(uncheckedFilename,'r')
for line in uncheckedFile:
if line.strip() == "":
lineCoverage.append(None)
else:
lineCoverage.append(0)
uncheckedFile.close()
fileCoverage['coverage'] = lineCoverage
coverageList.append(copy.deepcopy(fileCoverage))
coverallsOutput = {}
coverallsOutput['source_files'] = coverageList
if parseCmakeBoolean(TRAVISCI):
print("Generating for travis-ci")
coverallsOutput['service_name'] = 'travis-ci'
coverallsOutput['service_job_id'] = os.environ.get('TRAVIS_JOB_ID')
else:
print("Generating for other")
coverallsOutput['repo_token'] = os.environ.get('COVERALLS_REPO_TOKEN')
head = {'id':gitLogValue('H',PROJECT_ROOT),'author_name':gitLogValue('an',PROJECT_ROOT), \
'author_email':gitLogValue('ae',PROJECT_ROOT),'committer_name':gitLogValue('cn',PROJECT_ROOT), \
'committer_email':gitLogValue('ce',PROJECT_ROOT), 'message':gitLogValue('B',PROJECT_ROOT)}
gitDict = {'head':head,'branch':getBranchName(PROJECT_ROOT),'remotes':getRemotes(COV_PATH)}
coverallsOutput['git'] = gitDict
with open(COVERALLS_OUTPUT_FILE, 'w') as outfile:
json.dump(coverallsOutput,outfile,indent=4)
if __name__ == "__main__":
main(sys.argv[1:])
| {
"repo_name": "peterfpeterson/morebin",
"path": "cmake/CoverallsGenerateGcov.py",
"copies": "1",
"size": "6601",
"license": "mit",
"hash": -3994749552231509500,
"line_mean": 38.0591715976,
"line_max": 112,
"alpha_frac": 0.6297530677,
"autogenerated": false,
"ratio": 3.756972111553785,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.982151462786323,
"avg_score": 0.013042110278110964,
"num_lines": 169
} |
from __future__ import absolute_import, division, print_function
import json
template_structure = '''
{{
"Description": "Create instances ready for IBEIS IA CodeDeploy.",
"AWSTemplateFormatVersion": "2010-09-09",
"Parameters": {{
"TagKey": {{
"Description": "The tag key that identifies this as a target for deployments.",
"Type": "String",
"Default": "Name",
"AllowedPattern": "{{pattern}}",
"ConstraintDescription": "Can contain only ASCII characters."
}},
"TagValue": {{
"Description": "The tag value that identifies this as a target for deployments.",
"Type": "String",
"Default": "IBEIS-IA-CodeDeploy",
"AllowedPattern": "{{pattern}}",
"ConstraintDescription": "Can contain only ASCII characters."
}},
"KeyPairName": {{
"Description": "Name of an existing Amazon EC2 key pair to enable SSH or RDP access to the instances.",
"Type": "AWS::EC2::KeyPair::KeyName",
"Default": "shared-ibeis-team-key"
}},
"InstanceType": {{
"Description": "Amazon EC2 instance type.",
"Type": "String",
"Default": "c3.large",
"ConstraintDescription": "Must be a valid Amazon EC2 instance type.",
"AllowedValues": [
"t1.micro",
"m3.medium",
"m3.large",
"m3.xlarge",
"c3.large",
"c3.xlarge"
]
}},
"InstanceCount": {{
"Description": "Number of Amazon EC2 instances.",
"Type": "Number",
"Default": "1",
"ConstraintDescription": "Must be a number between 1 and {number}.",
"AllowedValues": [
{template_number_str}
]
}}
}},
"Mappings": {{
"RegionOS2AMI": {{
"us-east-1": {{
"Linux": "ami-b2e3c6d8"
}},
"us-west-1": {{
"Linux": "ami-42116522"
}},
"us-west-2": {{
"Linux": "ami-9dbea4fc"
}},
"eu-west-1": {{
"Linux": "ami-be5cf7cd"
}},
"eu-central-1": {{
"Linux": "ami-d0574ebc"
}},
"ap-northeast-1": {{
"Linux": "ami-d91428b7"
}},
"ap-southeast-1": {{
"Linux": "ami-a2c10dc1"
}},
"ap-southeast-2": {{
"Linux": "ami-530b2e30"
}}
}}
}},
"Conditions": {{
{template_condition_str}
}},
"Resources": {{
"WaitHandle": {{
"Type": "AWS::CloudFormation::WaitConditionHandle"
}},
"WaitCondition": {{
"Type": "AWS::CloudFormation::WaitCondition",
"Properties": {{
"Count": {{
"Ref": "InstanceCount"
}},
"Handle": {{
"Ref": "WaitHandle"
}},
"Timeout": "900"
}}
}},
"SecurityGroup": {{
"Type": "AWS::EC2::SecurityGroup",
"Properties": {{
"GroupDescription": "Enable HTTP access via ports 80, 5000 and SSH access.",
"SecurityGroupIngress": [{{
"IpProtocol": "tcp",
"FromPort": "80",
"ToPort": "80",
"CidrIp": "0.0.0.0/0"
}}, {{
"IpProtocol": "tcp",
"FromPort": "22",
"ToPort": "22",
"CidrIp": "0.0.0.0/0"
}}, {{
"IpProtocol": "tcp",
"FromPort": "5000",
"ToPort": "5000",
"CidrIp": "0.0.0.0/0"
}}]
}}
}},
"CodeDeployTrustRole": {{
"Type": "AWS::IAM::Role",
"Properties": {{
"AssumeRolePolicyDocument": {{
"Statement": [
{{
"Sid": "1",
"Effect": "Allow",
"Principal": {{
"Service": [
"codedeploy.us-east-1.amazonaws.com",
"codedeploy.us-west-1.amazonaws.com",
"codedeploy.us-west-2.amazonaws.com",
"codedeploy.eu-west-1.amazonaws.com",
"codedeploy.eu-central-1.amazonaws.com",
"codedeploy.ap-northeast-1.amazonaws.com",
"codedeploy.ap-southeast-1.amazonaws.com",
"codedeploy.ap-southeast-2.amazonaws.com"
]
}},
"Action": "sts:AssumeRole"
}}
]
}},
"Path": "/"
}}
}},
"CodeDeployRolePolicies": {{
"Type": "AWS::IAM::Policy",
"Properties": {{
"PolicyName": "CodeDeployPolicy",
"PolicyDocument": {{
"Statement": [{{
"Effect": "Allow",
"Resource": [
"*"
],
"Action": [
"ec2:Describe*"
]
}}, {{
"Effect": "Allow",
"Resource": [
"*"
],
"Action": [
"autoscaling:CompleteLifecycleAction",
"autoscaling:DeleteLifecycleHook",
"autoscaling:DescribeLifecycleHooks",
"autoscaling:DescribeAutoScalingGroups",
"autoscaling:PutLifecycleHook",
"autoscaling:RecordLifecycleActionHeartbeat"
]
}}, {{
"Effect": "Allow",
"Resource": [
"*"
],
"Action": [
"Tag:getResources",
"Tag:getTags",
"Tag:getTagsForResource",
"Tag:getTagsForResourceList"
]
}}]
}},
"Roles": [{{
"Ref": "CodeDeployTrustRole"
}}]
}}
}},
"InstanceRole": {{
"Type": "AWS::IAM::Role",
"Properties": {{
"AssumeRolePolicyDocument": {{
"Statement": [{{
"Effect": "Allow",
"Principal": {{
"Service": [
"ec2.amazonaws.com"
]
}},
"Action": [
"sts:AssumeRole"
]
}}]
}},
"Path": "/"
}}
}},
"InstanceRolePolicies": {{
"Type": "AWS::IAM::Policy",
"Properties": {{
"PolicyName": "InstanceRole",
"PolicyDocument": {{
"Statement": [{{
"Effect": "Allow",
"Action": [
"autoscaling:Describe*",
"cloudformation:Describe*",
"cloudformation:GetTemplate",
"s3:Get*",
"s3:List*"
],
"Resource": "*"
}}]
}},
"Roles": [{{
"Ref": "InstanceRole"
}}]
}}
}},
"InstanceRoleInstanceProfile": {{
"Type": "AWS::IAM::InstanceProfile",
"Properties": {{
"Path": "/",
"Roles": [{{
"Ref": "InstanceRole"
}}]
}}
}},
{template_instance_str}
}},
"Outputs": {{
"CodeDeployTrustRoleARN": {{
"Value": {{
"Fn::GetAtt": [
"CodeDeployTrustRole",
"Arn"
]
}}
}}
}}
}}
'''
template_instance = '''
"EC2Instance{index}": {{
"Condition": "LaunchEC2Instance{index}",
"Type": "AWS::EC2::Instance",
"Metadata": {{
"AWS::CloudFormation::Init": {{
"services": {{
"sysvint": {{
"codedeploy-agent": {{
"enabled": "true",
"ensureRunning": "true"
}}
}}
}}
}}
}},
"Properties": {{
"ImageId": {{
"Fn::FindInMap": [
"RegionOS2AMI", {{
"Ref": "AWS::Region"
}},
"Linux"
]
}},
"InstanceType": {{
"Ref": "InstanceType"
}},
"SecurityGroups": [{{
"Ref": "SecurityGroup"
}}],
"UserData": {{
"Fn::Base64": {{
"Fn::Join": [
"", [
"#!/bin/bash\\n",
"cd /home/ubuntu/\\n",
"echo \\"'", {{ "Ref": "WaitHandle" }}, "' '", {{ "Ref": "AWS::Region" }}, "' '", {{ "Ref": "AWS::StackId" }}, "' 'EC2Instance{index}'\\" > init.config\\n",
"sudo curl -O https://raw.githubusercontent.com/bluemellophone/ibeis_aws_codedeploy/master/scripts/ibeis-ia-cloudformation.sh\\n",
"chmod +x ./ibeis-ia-cloudformation.sh\\n",
"sudo ./ibeis-ia-cloudformation.sh '", {{ "Ref": "WaitHandle" }}, "' '", {{ "Ref": "AWS::Region" }}, "' '", {{ "Ref": "AWS::StackId" }}, "' 'EC2Instance{index}' > init.log\\n"
]
]
}}
}},
"KeyName": {{
"Ref": "KeyPairName"
}},
"Tags": [{{
"Key": {{
"Ref": "TagKey"
}},
"Value": {{
"Ref": "TagValue"
}}
}}],
"IamInstanceProfile": {{
"Ref": "InstanceRoleInstanceProfile"
}}
}}
}}
'''
template_condition = '''
"LaunchEC2Instance{index}": {template_condition_extend}
'''
template_condition_base = '''
{{
"Fn::Equals": [
"{number}", {{
"Ref": "InstanceCount"
}}
]
}}
'''
template_condition_recursive = '''
{{
"Fn::Or": [{{
"Fn::Equals": [
"{number}", {{
"Ref": "InstanceCount"
}}
]
}},
{template_condition_extend}
]
}}
'''
if __name__ == '__main__':
def template_condition_extend_(number, total):
if number >= total - 1:
return template_condition_base.format(
number=number + 1
)
else:
return template_condition_recursive.format(
number=number + 1,
template_condition_extend=template_condition_extend_(
number + 1,
total
)
)
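    # Worked example (illustrative): for a total of 3 instances,
    # template_condition_extend_(0, 3) nests Fn::Or clauses so that condition
    # "LaunchEC2Instance1" holds whenever InstanceCount is 1, 2 or 3:
    #   {"Fn::Or": [{"Fn::Equals": ["1", {"Ref": "InstanceCount"}]},
    #               {"Fn::Or": [{"Fn::Equals": ["2", {"Ref": "InstanceCount"}]},
    #                           {"Fn::Equals": ["3", {"Ref": "InstanceCount"}]}]}]}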
config = {
'number' : 20,
}
# Compile numbers
template_number_str = ','.join(
[
'"%d"' % (index + 1, )
for index in range(config.get('number'))
]
)
# Compile instances
template_instance_str = ','.join(
[
template_instance.format(
index=index + 1
)
for index in range(config.get('number'))
]
)
# Compile conditions
template_condition_str = ','.join(
[
template_condition.format(
index=index + 1,
template_condition_extend=template_condition_extend_(
index,
config.get('number')
)
)
for index in range(config.get('number'))
]
)
# Compile full template
template_str = template_structure.format(
template_number_str=template_number_str,
template_condition_str=template_condition_str,
template_instance_str=template_instance_str,
**config
)
# Save to file
with open('ibeis-ia-cloudformation.new.json', 'w') as output_file:
parsed = json.loads(template_str)
json_str = json.dumps(
parsed,
sort_keys=True,
indent=2,
separators=(',', ': ')
)
json_str = json_str.replace('{pattern}', '[\\\\x20-\\\\x7E]*')
output_file.write(json_str)
| {
"repo_name": "bluemellophone/ibeis_aws_codedeploy",
"path": "scripts/ibeis-ia-cloudformation.py",
"copies": "1",
"size": "13334",
"license": "apache-2.0",
"hash": 2344867382397354000,
"line_mean": 30.7476190476,
"line_max": 199,
"alpha_frac": 0.3661316934,
"autogenerated": false,
"ratio": 4.625043357613597,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5491175051013598,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division, print_function
import logging; _L = logging.getLogger('openaddr.cache')
from .compat import standard_library
import ogr
import os
import errno
import socket
import mimetypes
import shutil
import re
import simplejson as json
from os import mkdir
from hashlib import md5
from os.path import join, basename, exists, abspath, splitext
from urllib.parse import urlparse
from subprocess import check_output
from tempfile import mkstemp
from hashlib import sha1
from shutil import move
from time import time
import requests
import requests_ftp
requests_ftp.monkeypatch_session()
# HTTP timeout in seconds, used in various calls to requests.get() and requests.post()
_http_timeout = 180
from .compat import csvopen, csvDictWriter
from .conform import X_FIELDNAME, Y_FIELDNAME, GEOM_FIELDNAME
def mkdirsp(path):
try:
os.makedirs(path)
except OSError as exc:
if exc.errno == errno.EEXIST and os.path.isdir(path):
pass
else:
raise
def request(method, url, **kwargs):
try:
_L.debug("Requesting %s with args %s", url, kwargs.get('params') or kwargs.get('data'))
return requests.request(method, url, timeout=_http_timeout, **kwargs)
except requests.exceptions.SSLError as e:
_L.warning("Retrying %s without SSL verification", url)
return requests.request(method, url, timeout=_http_timeout, verify=False, **kwargs)
class CacheResult:
cache = None
fingerprint = None
version = None
elapsed = None
def __init__(self, cache, fingerprint, version, elapsed):
self.cache = cache
self.fingerprint = fingerprint
self.version = version
self.elapsed = elapsed
@staticmethod
def empty():
return CacheResult(None, None, None, None)
def todict(self):
return dict(cache=self.cache, fingerprint=self.fingerprint, version=self.version)
def compare_cache_details(filepath, resultdir, data):
''' Compare cache file with known source data, return cache and fingerprint.
Checks if fresh data is already cached, returns a new file path if not.
'''
if not exists(filepath):
raise Exception('cached file {} is missing'.format(filepath))
fingerprint = md5()
with open(filepath, 'rb') as file:
for line in file:
fingerprint.update(line)
# Determine if anything needs to be done at all.
if urlparse(data.get('cache', '')).scheme == 'http' and 'fingerprint' in data:
if fingerprint.hexdigest() == data['fingerprint']:
return data['cache'], data['fingerprint']
cache_name = basename(filepath)
if not exists(resultdir):
mkdir(resultdir)
move(filepath, join(resultdir, cache_name))
data_cache = 'file://' + join(abspath(resultdir), cache_name)
return data_cache, fingerprint.hexdigest()
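# Illustrative usage sketch (not part of the original module): when the MD5 of
# a freshly downloaded file matches the fingerprint already recorded for an
# HTTP cache URL, the existing cache entry is reused; otherwise the file is
# moved into resultdir and a new file:// cache URL plus fingerprint is
# returned. The tmpdir argument below is assumed to be an existing scratch
# directory.
def _example_compare_cache_details(tmpdir):
    filepath = join(tmpdir, 'addresses.csv')
    with open(filepath, 'w') as file:
        file.write('LON,LAT,NUMBER,STREET\n')
    return compare_cache_details(filepath, join(tmpdir, 'out'), {})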
class DownloadError(Exception):
pass
class DownloadTask(object):
def __init__(self, source_prefix):
self.source_prefix = source_prefix
@classmethod
def from_type_string(clz, type_string, source_prefix=None):
if type_string.lower() == 'http':
return URLDownloadTask(source_prefix)
elif type_string.lower() == 'ftp':
return URLDownloadTask(source_prefix)
elif type_string.lower() == 'esri':
return EsriRestDownloadTask(source_prefix)
else:
raise KeyError("I don't know how to extract for type {}".format(type_string))
def download(self, source_urls, workdir):
raise NotImplementedError()
def guess_url_file_extension(url):
''' Get a filename extension for a URL using various hints.
'''
scheme, _, path, _, query, _ = urlparse(url)
mimetypes.add_type('application/x-zip-compressed', '.zip', False)
_, likely_ext = os.path.splitext(path)
bad_extensions = '', '.cgi', '.php', '.aspx', '.asp', '.do'
if not query and likely_ext not in bad_extensions:
#
# Trust simple URLs without meaningless filename extensions.
#
_L.debug(u'URL says "{}" for {}'.format(likely_ext, url))
path_ext = likely_ext
else:
#
# Get a dictionary of headers and a few bytes of content from the URL.
#
if scheme in ('http', 'https'):
response = request('GET', url, stream=True)
content_chunk = next(response.iter_content(99))
headers = response.headers
response.close()
elif scheme in ('file', ''):
headers = dict()
with open(path) as file:
content_chunk = file.read(99)
else:
raise ValueError('Unknown scheme "{}": {}'.format(scheme, url))
path_ext = False
# Guess path extension from Content-Type header
if 'content-type' in headers:
content_type = headers['content-type'].split(';')[0]
_L.debug('Content-Type says "{}" for {}'.format(content_type, url))
path_ext = mimetypes.guess_extension(content_type, False)
#
# Uh-oh, see if Content-Disposition disagrees with Content-Type.
# Socrata recently started using Content-Disposition instead
# of normal response headers so it's no longer easy to identify
# file type.
#
if 'content-disposition' in headers:
pattern = r'attachment; filename=("?)(?P<filename>[^;]+)\1'
match = re.match(pattern, headers['content-disposition'], re.I)
if match:
_, attachment_ext = splitext(match.group('filename'))
if path_ext == attachment_ext:
_L.debug('Content-Disposition agrees: "{}"'.format(match.group('filename')))
else:
_L.debug('Content-Disposition disagrees: "{}"'.format(match.group('filename')))
path_ext = False
if not path_ext:
#
# Headers didn't clearly define a known extension.
# Instead, shell out to `file` to peek at the content.
#
mime_type = get_content_mimetype(content_chunk)
_L.debug('file says "{}" for {}'.format(mime_type, url))
path_ext = mimetypes.guess_extension(mime_type, False)
return path_ext
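# Illustrative usage sketch (not part of the original module): simple URLs with
# a meaningful filename extension are trusted outright and never fetched; only
# URLs with query strings or extension-less paths fall back to Content-Type,
# Content-Disposition and `file --mime-type` sniffing.
def _example_guess_url_file_extension():
    # No query string and a trustworthy extension, so no request is made.
    return guess_url_file_extension('https://example.com/data/addresses.zip')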
def get_content_mimetype(chunk):
''' Get a mime-type for a short length of file content.
'''
handle, file = mkstemp()
os.write(handle, chunk)
os.close(handle)
mime_type = check_output(('file', '--mime-type', '-b', file)).strip()
os.remove(file)
return mime_type.decode('utf-8')
class URLDownloadTask(DownloadTask):
USER_AGENT = 'openaddresses-extract/1.0 (https://github.com/openaddresses/openaddresses)'
CHUNK = 16 * 1024
def get_file_path(self, url, dir_path):
''' Return a local file path in a directory for a URL.
May need to fill in a filename extension based on HTTP Content-Type.
'''
scheme, host, path, _, _, _ = urlparse(url)
path_base, _ = os.path.splitext(path)
if self.source_prefix is None:
# With no source prefix like "us-ca-oakland" use the name as given.
name_base = os.path.basename(path_base)
else:
# With a source prefix, create a safe and unique filename with a hash.
hash = sha1((host + path_base).encode('utf-8'))
name_base = u'{}-{}'.format(self.source_prefix, hash.hexdigest()[:8])
path_ext = guess_url_file_extension(url)
_L.debug(u'Guessed {}{} for {}'.format(name_base, path_ext, url))
return os.path.join(dir_path, name_base + path_ext)
def download(self, source_urls, workdir):
output_files = []
download_path = os.path.join(workdir, 'http')
mkdirsp(download_path)
for source_url in source_urls:
file_path = self.get_file_path(source_url, download_path)
# FIXME: For URLs with file:// scheme, simply copy the file
# to the expected location so that os.path.exists() returns True.
# Instead, implement a FileDownloadTask class?
scheme, _, path, _, _, _ = urlparse(source_url)
if scheme == 'file':
shutil.copy(path, file_path)
if os.path.exists(file_path):
output_files.append(file_path)
_L.debug("File exists %s", file_path)
continue
_L.info("Requesting %s", source_url)
headers = {'User-Agent': self.USER_AGENT}
try:
resp = request('GET', source_url, headers=headers, stream=True)
except Exception as e:
raise DownloadError("Could not connect to URL", e)
if resp.status_code in range(400, 499):
raise DownloadError('{} response from {}'.format(resp.status_code, source_url))
size = 0
with open(file_path, 'wb') as fp:
for chunk in resp.iter_content(self.CHUNK):
size += len(chunk)
fp.write(chunk)
output_files.append(file_path)
_L.info("Downloaded %s bytes for file %s", size, file_path)
return output_files
class EsriRestDownloadTask(DownloadTask):
USER_AGENT = 'openaddresses-extract/1.0 (https://github.com/openaddresses/openaddresses)'
def build_ogr_geometry(self, geom_type, esri_feature):
if 'geometry' not in esri_feature:
raise TypeError("No geometry for feature")
if geom_type == 'esriGeometryPoint':
geom = ogr.Geometry(ogr.wkbPoint)
geom.AddPoint(esri_feature['geometry']['x'], esri_feature['geometry']['y'])
elif geom_type == 'esriGeometryMultipoint':
geom = ogr.Geometry(ogr.wkbMultiPoint)
for point in esri_feature['geometry']['points']:
pt = ogr.Geometry(ogr.wkbPoint)
pt.AddPoint(point[0], point[1])
geom.AddGeometry(pt)
elif geom_type == 'esriGeometryPolygon':
geom = ogr.Geometry(ogr.wkbPolygon)
for esri_ring in esri_feature['geometry']['rings']:
ring = ogr.Geometry(ogr.wkbLinearRing)
for esri_pt in esri_ring:
ring.AddPoint(esri_pt[0], esri_pt[1])
geom.AddGeometry(ring)
elif geom_type == 'esriGeometryPolyline':
geom = ogr.Geometry(ogr.wkbMultiLineString)
for esri_ring in esri_feature['geometry']['rings']:
line = ogr.Geometry(ogr.wkbLineString)
for esri_pt in esri_ring:
line.AddPoint(esri_pt[0], esri_pt[1])
geom.AddGeometry(line)
else:
raise KeyError("Don't know how to convert esri geometry type {}".format(geom_type))
return geom
def get_file_path(self, url, dir_path):
''' Return a local file path in a directory for a URL.
'''
_, host, path, _, _, _ = urlparse(url)
hash, path_ext = sha1((host + path).encode('utf-8')), '.csv'
# With no source prefix like "us-ca-oakland" use the host as a hint.
name_base = '{}-{}'.format(self.source_prefix or host, hash.hexdigest()[:8])
_L.debug('Downloading {} to {}{}'.format(url, name_base, path_ext))
return os.path.join(dir_path, name_base + path_ext)
def download(self, source_urls, workdir):
output_files = []
download_path = os.path.join(workdir, 'esri')
mkdirsp(download_path)
for source_url in source_urls:
size = 0
file_path = self.get_file_path(source_url, download_path)
if os.path.exists(file_path):
output_files.append(file_path)
_L.debug("File exists %s", file_path)
continue
headers = {'User-Agent': self.USER_AGENT}
# Get the fields
query_args = {
'f': 'json'
}
response = request('GET', source_url, params=query_args, headers=headers)
if response.status_code != 200:
raise DownloadError('Could not retrieve field names from ESRI source: HTTP {} {}'.format(
response.status_code,
response.text
))
try:
metadata = response.json()
except:
_L.error("Could not parse response from {} as JSON:\n\n{}".format(
response.request.url,
response.text,
))
raise
error = metadata.get('error')
if error:
raise DownloadError("Problem querying ESRI field names: {}" .format(error['message']))
if not metadata.get('fields'):
raise DownloadError("No fields available in the source")
field_names = [f['name'] for f in metadata['fields']]
if X_FIELDNAME not in field_names:
field_names.append(X_FIELDNAME)
if Y_FIELDNAME not in field_names:
field_names.append(Y_FIELDNAME)
if GEOM_FIELDNAME not in field_names:
field_names.append(GEOM_FIELDNAME)
query_url = source_url + '/query'
# Get the count of rows in the layer
query_args = {
'where': '1=1',
'returnCountOnly': 'true',
'f': 'json',
}
response = request('GET', query_url, params=query_args, headers=headers)
if response.status_code != 200:
raise DownloadError('Could not retrieve row count from ESRI source: HTTP {} {}'.format(
response.status_code,
response.text
))
row_count = response.json().get('count')
page_size = metadata.get('maxRecordCount', 500)
if page_size > 1000:
page_size = 1000
_L.info("Source has {} rows".format(row_count))
page_args = []
if metadata.get('supportsPagination') or \
(metadata.get('advancedQueryCapabilities') and metadata['advancedQueryCapabilities']['supportsPagination']):
# If the layer supports pagination, we can use resultOffset/resultRecordCount to paginate
for offset in range(0, row_count, page_size):
page_args.append({
'resultOffset': offset,
'resultRecordCount': page_size,
'where': '1=1',
'geometryPrecision': 7,
'returnGeometry': 'true',
'outSR': 4326,
'outFields': '*',
'f': 'json',
})
_L.info("Built {} requests using resultOffset method".format(len(page_args)))
else:
# If not, we can still use the `where` argument to paginate
if metadata.get('supportsStatistics'):
# If the layer supports statistics, we can request maximum and minimum object ID
# to help build the pages
# Find the OID field
oid_field_name = metadata.get('objectIdField')
if not oid_field_name:
for f in metadata['fields']:
if f['type'] == 'esriFieldTypeOID':
oid_field_name = f['name']
break
if not oid_field_name:
raise DownloadError("Could not find object ID field name")
# Find the min and max values for the OID field
query_args = {
'f': 'json',
'outFields': '',
'outStatistics': json.dumps([
dict(statisticType='min', onStatisticField=oid_field_name, outStatisticFieldName='THE_MIN'),
dict(statisticType='max', onStatisticField=oid_field_name, outStatisticFieldName='THE_MAX'),
])
}
response = request('GET', query_url, params=query_args, headers=headers)
if response.status_code != 200:
raise DownloadError('Could not retrieve min/max oid values from ESRI source: HTTP {} {}'.format(
response.status_code,
response.text
))
resp_attrs = response.json()['features'][0]['attributes']
oid_min = resp_attrs['THE_MIN']
oid_max = resp_attrs['THE_MAX']
for page_min in range(oid_min - 1, oid_max, page_size):
page_max = min(page_min + page_size, oid_max)
page_args.append({
'where': '{} > {} AND {} <= {}'.format(
oid_field_name,
page_min,
oid_field_name,
page_max,
),
'geometryPrecision': 7,
'returnGeometry': 'true',
'outSR': 4326,
'outFields': '*',
'f': 'json',
})
_L.info("Built {} requests using OID where clause method".format(len(page_args)))
else:
# If the layer does not support statistics, we can request
# all the individual IDs and page through them one chunk at
# a time.
# Get all the OIDs
query_args = {
'where': '1=1', # So we get everything
'returnIdsOnly': 'true',
'f': 'json',
}
response = request('GET', query_url, params=query_args, headers=headers)
if response.status_code != 200:
raise DownloadError('Could not retrieve object IDs from ESRI source: HTTP {} {}'.format(
response.status_code,
response.text
))
try:
oids = response.json().get('objectIds', [])
except:
_L.error("Could not parse response from {} as JSON:\n\n{}".format(
response.request.url,
response.text,
))
raise
for i in range(0, len(oids), 100):
oid_chunk = oids[i:i+100]
page_args.append({
'objectIds': ','.join(map(str, oid_chunk)),
'geometryPrecision': 7,
'returnGeometry': 'true',
'outSR': 4326,
'outFields': '*',
'f': 'json',
})
_L.info("Built {} requests using OID enumeration method".format(len(page_args)))
with csvopen(file_path, 'w', encoding='utf-8') as f:
writer = csvDictWriter(f, fieldnames=field_names, encoding='utf-8')
writer.writeheader()
due = time() + 7200
for query_args in page_args:
if time() > due:
raise RuntimeError('Ran out of time caching Esri features')
try:
response = request('POST', query_url, headers=headers, data=query_args)
_L.debug("Requesting %s", response.url)
if response.status_code != 200:
raise DownloadError('Could not retrieve this chunk of objects from ESRI source: HTTP {} {}'.format(
response.status_code,
response.text
))
data = response.json()
except socket.timeout as e:
raise DownloadError("Timeout when connecting to URL", e)
except ValueError as e:
raise DownloadError("Could not parse JSON", e)
except Exception as e:
raise DownloadError("Could not connect to URL", e)
finally:
# Wipe out whatever we had written out so far
f.truncate()
error = data.get('error')
if error:
raise DownloadError("Problem querying ESRI dataset with args {}. Server said: {}".format(query_args, error['message']))
geometry_type = data.get('geometryType')
features = data.get('features')
for feature in features:
try:
ogr_geom = self.build_ogr_geometry(geometry_type, feature)
row = feature.get('attributes', {})
row[GEOM_FIELDNAME] = ogr_geom.ExportToWkt()
try:
centroid = ogr_geom.Centroid()
except RuntimeError as e:
if 'Invalid number of points in LinearRing found' not in str(e):
raise
xmin, xmax, ymin, ymax = ogr_geom.GetEnvelope()
row[X_FIELDNAME] = round(xmin/2 + xmax/2, 7)
row[Y_FIELDNAME] = round(ymin/2 + ymax/2, 7)
else:
row[X_FIELDNAME] = round(centroid.GetX(), 7)
row[Y_FIELDNAME] = round(centroid.GetY(), 7)
writer.writerow(row)
size += 1
except TypeError:
_L.debug("Skipping a geometry", exc_info=True)
_L.info("Downloaded %s ESRI features for file %s", size, file_path)
output_files.append(file_path)
return output_files
| {
"repo_name": "slibby/machine",
"path": "openaddr/cache.py",
"copies": "1",
"size": "22824",
"license": "isc",
"hash": -5345387154597378000,
"line_mean": 39.0421052632,
"line_max": 143,
"alpha_frac": 0.5164300736,
"autogenerated": false,
"ratio": 4.549332270281044,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5565762343881046,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division, print_function
import logging; _L = logging.getLogger('openaddr.cache')
import os
import errno
import math
import mimetypes
import shutil
import re
import csv
import simplejson as json
from os import mkdir
from hashlib import md5
from os.path import join, basename, exists, abspath, splitext
from urllib.parse import urlparse
from subprocess import check_output
from tempfile import mkstemp
from hashlib import sha1
from shutil import move
from shapely.geometry import shape
from esridump import EsriDumper
from esridump.errors import EsriDownloadError
import requests
# HTTP timeout in seconds, used in various calls to requests.get() and requests.post()
_http_timeout = 180
from .conform import X_FIELDNAME, Y_FIELDNAME, GEOM_FIELDNAME, attrib_types
from . import util
def mkdirsp(path):
try:
os.makedirs(path)
except OSError as exc:
if exc.errno == errno.EEXIST and os.path.isdir(path):
pass
else:
raise
def traverse(item):
"Iterates over nested iterables"
if isinstance(item, list):
for i in item:
for j in traverse(i):
yield j
else:
yield item
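# Worked example (illustrative): list(traverse([1, [2, [3]], 4])) == [1, 2, 3, 4]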
def request(method, url, **kwargs):
if urlparse(url).scheme == 'ftp':
if method != 'GET':
raise NotImplementedError("Don't know how to {} with {}".format(method, url))
return util.request_ftp_file(url)
try:
_L.debug("Requesting %s with args %s", url, kwargs.get('params') or kwargs.get('data'))
return requests.request(method, url, timeout=_http_timeout, **kwargs)
except requests.exceptions.SSLError as e:
_L.warning("Retrying %s without SSL verification", url)
return requests.request(method, url, timeout=_http_timeout, verify=False, **kwargs)
class CacheResult:
cache = None
fingerprint = None
version = None
elapsed = None
def __init__(self, cache, fingerprint, version, elapsed):
self.cache = cache
self.fingerprint = fingerprint
self.version = version
self.elapsed = elapsed
@staticmethod
def empty():
return CacheResult(None, None, None, None)
def todict(self):
return dict(cache=self.cache, fingerprint=self.fingerprint, version=self.version)
def compare_cache_details(filepath, resultdir, data):
''' Compare cache file with known source data, return cache and fingerprint.
Checks if fresh data is already cached, returns a new file path if not.
'''
if not exists(filepath):
raise Exception('cached file {} is missing'.format(filepath))
fingerprint = md5()
with open(filepath, 'rb') as file:
for line in file:
fingerprint.update(line)
# Determine if anything needs to be done at all.
if urlparse(data.get('cache', '')).scheme == 'http' and 'fingerprint' in data:
if fingerprint.hexdigest() == data['fingerprint']:
return data['cache'], data['fingerprint']
cache_name = basename(filepath)
if not exists(resultdir):
mkdir(resultdir)
move(filepath, join(resultdir, cache_name))
data_cache = 'file://' + join(abspath(resultdir), cache_name)
return data_cache, fingerprint.hexdigest()
class DownloadError(Exception):
pass
class DownloadTask(object):
def __init__(self, source_prefix, params={}, headers={}):
'''
params: Additional query parameters, used by EsriRestDownloadTask.
headers: Additional HTTP headers.
'''
self.source_prefix = source_prefix
self.headers = {
'User-Agent': 'openaddresses-extract/1.0 (https://github.com/openaddresses/openaddresses)',
}
self.headers.update(dict(**headers))
self.query_params = dict(**params)
@classmethod
def from_protocol_string(clz, protocol_string, source_prefix=None):
if protocol_string.lower() == 'http':
return URLDownloadTask(source_prefix)
elif protocol_string.lower() == 'ftp':
return URLDownloadTask(source_prefix)
elif protocol_string.lower() == 'esri':
return EsriRestDownloadTask(source_prefix)
else:
raise KeyError("I don't know how to extract for protocol {}".format(protocol_string))
def download(self, source_urls, workdir, conform):
raise NotImplementedError()
def guess_url_file_extension(url):
''' Get a filename extension for a URL using various hints.
'''
scheme, _, path, _, query, _ = urlparse(url)
mimetypes.add_type('application/x-zip-compressed', '.zip', False)
mimetypes.add_type('application/vnd.geo+json', '.json', False)
_, likely_ext = os.path.splitext(path)
bad_extensions = '', '.cgi', '.php', '.aspx', '.asp', '.do'
if not query and likely_ext not in bad_extensions:
#
# Trust simple URLs without meaningless filename extensions.
#
_L.debug(u'URL says "{}" for {}'.format(likely_ext, url))
path_ext = likely_ext
else:
#
# Get a dictionary of headers and a few bytes of content from the URL.
#
if scheme in ('http', 'https'):
response = request('GET', url, stream=True)
content_chunk = next(response.iter_content(99))
headers = response.headers
response.close()
elif scheme in ('file', ''):
headers = dict()
with open(path) as file:
content_chunk = file.read(99)
else:
raise ValueError('Unknown scheme "{}": {}'.format(scheme, url))
path_ext = False
# Guess path extension from Content-Type header
if 'content-type' in headers:
content_type = headers['content-type'].split(';')[0]
_L.debug('Content-Type says "{}" for {}'.format(content_type, url))
path_ext = mimetypes.guess_extension(content_type, False)
#
# Uh-oh, see if Content-Disposition disagrees with Content-Type.
# Socrata recently started using Content-Disposition instead
# of normal response headers so it's no longer easy to identify
# file type.
#
if 'content-disposition' in headers:
pattern = r'attachment; filename=("?)(?P<filename>[^;]+)\1'
match = re.match(pattern, headers['content-disposition'], re.I)
if match:
_, attachment_ext = splitext(match.group('filename'))
if path_ext == attachment_ext:
_L.debug('Content-Disposition agrees: "{}"'.format(match.group('filename')))
else:
_L.debug('Content-Disposition disagrees: "{}"'.format(match.group('filename')))
path_ext = False
if not path_ext:
#
# Headers didn't clearly define a known extension.
# Instead, shell out to `file` to peek at the content.
#
mime_type = get_content_mimetype(content_chunk)
_L.debug('file says "{}" for {}'.format(mime_type, url))
path_ext = mimetypes.guess_extension(mime_type, False)
return path_ext
def get_content_mimetype(chunk):
''' Get a mime-type for a short length of file content.
'''
handle, file = mkstemp()
os.write(handle, chunk)
os.close(handle)
mime_type = check_output(('file', '--mime-type', '-b', file)).strip()
os.remove(file)
return mime_type.decode('utf-8')
class URLDownloadTask(DownloadTask):
CHUNK = 16 * 1024
def get_file_path(self, url, dir_path):
''' Return a local file path in a directory for a URL.
May need to fill in a filename extension based on HTTP Content-Type.
'''
scheme, host, path, _, _, _ = urlparse(url)
path_base, _ = os.path.splitext(path)
if self.source_prefix is None:
# With no source prefix like "us-ca-oakland" use the name as given.
name_base = os.path.basename(path_base)
else:
# With a source prefix, create a safe and unique filename with a hash.
hash = sha1((host + path_base).encode('utf-8'))
name_base = u'{}-{}'.format(self.source_prefix, hash.hexdigest()[:8])
path_ext = guess_url_file_extension(url)
_L.debug(u'Guessed {}{} for {}'.format(name_base, path_ext, url))
return os.path.join(dir_path, name_base + path_ext)
def download(self, source_urls, workdir, conform=None):
output_files = []
download_path = os.path.join(workdir, 'http')
mkdirsp(download_path)
for source_url in source_urls:
file_path = self.get_file_path(source_url, download_path)
# FIXME: For URLs with file:// scheme, simply copy the file
# to the expected location so that os.path.exists() returns True.
# Instead, implement a FileDownloadTask class?
scheme, _, path, _, _, _ = urlparse(source_url)
if scheme == 'file':
shutil.copy(path, file_path)
if os.path.exists(file_path):
output_files.append(file_path)
_L.debug("File exists %s", file_path)
continue
try:
resp = request('GET', source_url, headers=self.headers, stream=True)
except Exception as e:
raise DownloadError("Could not connect to URL", e)
            if 400 <= resp.status_code < 500:
raise DownloadError('{} response from {}'.format(resp.status_code, source_url))
size = 0
with open(file_path, 'wb') as fp:
for chunk in resp.iter_content(self.CHUNK):
size += len(chunk)
fp.write(chunk)
output_files.append(file_path)
_L.info("Downloaded %s bytes for file %s", size, file_path)
return output_files
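# Illustrative sketch of direct use; the prefix, URL and directory are assumptions:
#
#   task = URLDownloadTask('us-xx-example')
#   files = task.download(['http://example.com/addresses.csv'], '/tmp/work')
#   # Files land under /tmp/work/http; a second call with the same URL is a
#   # no-op because the target file already exists on disk.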
class EsriRestDownloadTask(DownloadTask):
def get_file_path(self, url, dir_path):
''' Return a local file path in a directory for a URL.
'''
_, host, path, _, _, _ = urlparse(url)
hash, path_ext = sha1((host + path).encode('utf-8')), '.csv'
# With no source prefix like "us-ca-oakland" use the host as a hint.
name_base = '{}-{}'.format(self.source_prefix or host, hash.hexdigest()[:8])
_L.debug('Downloading {} to {}{}'.format(url, name_base, path_ext))
return os.path.join(dir_path, name_base + path_ext)
@classmethod
def fields_from_conform_function(cls, v):
fxn = v.get('function')
if fxn:
if fxn in ('join', 'format'):
return set(v['fields'])
elif fxn == 'chain':
fields = set()
user_vars = set([v['variable']])
for func in v['functions']:
if isinstance(func, dict) and 'function' in func:
fields |= cls.fields_from_conform_function(func) - user_vars
return fields
else:
return set([v.get('field')])
@classmethod
def field_names_to_request(cls, conform):
''' Return list of fieldnames to request based on conform, or None.
'''
if not conform:
return None
fields = set()
for k, v in conform.items():
if k in attrib_types:
if isinstance(v, dict):
# It's a function of some sort?
if 'function' in v:
fields |= cls.fields_from_conform_function(v)
elif isinstance(v, list):
# It's a list of field names
fields |= set(v)
else:
fields.add(v)
if fields:
return list(filter(None, sorted(fields)))
else:
return None
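    # Illustrative sketch; the conform fragment below is an assumption:
    #
    #   EsriRestDownloadTask.field_names_to_request({
    #       'number': 'SITUS_NUM',
    #       'street': {'function': 'join', 'fields': ['ST_NAME', 'ST_TYPE']}})
    #   # -> ['SITUS_NUM', 'ST_NAME', 'ST_TYPE'], provided both keys appear in
    #   #    attrib_types; unknown keys are ignored, and with no usable fields
    #   #    the method returns None.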
def download(self, source_urls, workdir, conform=None):
output_files = []
download_path = os.path.join(workdir, 'esri')
mkdirsp(download_path)
query_fields = EsriRestDownloadTask.field_names_to_request(conform)
for source_url in source_urls:
size = 0
file_path = self.get_file_path(source_url, download_path)
if os.path.exists(file_path):
output_files.append(file_path)
_L.debug("File exists %s", file_path)
continue
downloader = EsriDumper(source_url, parent_logger=_L, timeout=300)
metadata = downloader.get_metadata()
if query_fields is None:
field_names = [f['name'] for f in metadata['fields']]
else:
field_names = query_fields[:]
if X_FIELDNAME not in field_names:
field_names.append(X_FIELDNAME)
if Y_FIELDNAME not in field_names:
field_names.append(Y_FIELDNAME)
if GEOM_FIELDNAME not in field_names:
field_names.append(GEOM_FIELDNAME)
# Get the count of rows in the layer
try:
row_count = downloader.get_feature_count()
_L.info("Source has {} rows".format(row_count))
except EsriDownloadError:
_L.info("Source doesn't support count")
with open(file_path, 'w', encoding='utf-8') as f:
writer = csv.DictWriter(f, fieldnames=field_names)
writer.writeheader()
for feature in downloader:
try:
geom = feature.get('geometry') or {}
row = feature.get('properties') or {}
if not geom:
raise TypeError("No geometry parsed")
if any((isinstance(g, float) and math.isnan(g)) for g in traverse(geom)):
raise TypeError("Geometry has NaN coordinates")
shp = shape(feature['geometry'])
row[GEOM_FIELDNAME] = shp.wkt
try:
centroid = shp.centroid
except RuntimeError as e:
if 'Invalid number of points in LinearRing found' not in str(e):
raise
                            # Shapely bounds are ordered (minx, miny, maxx, maxy).
                            xmin, ymin, xmax, ymax = shp.bounds
row[X_FIELDNAME] = round(xmin/2 + xmax/2, 7)
row[Y_FIELDNAME] = round(ymin/2 + ymax/2, 7)
else:
if centroid.is_empty:
raise TypeError(json.dumps(feature['geometry']))
row[X_FIELDNAME] = round(centroid.x, 7)
row[Y_FIELDNAME] = round(centroid.y, 7)
writer.writerow({fn: row.get(fn) for fn in field_names})
size += 1
except TypeError:
_L.debug("Skipping a geometry", exc_info=True)
_L.info("Downloaded %s ESRI features for file %s", size, file_path)
output_files.append(file_path)
return output_files
| {
"repo_name": "openaddresses/machine",
"path": "openaddr/cache.py",
"copies": "1",
"size": "15360",
"license": "isc",
"hash": -1887406637697649000,
"line_mean": 35.1411764706,
"line_max": 103,
"alpha_frac": 0.5629557292,
"autogenerated": false,
"ratio": 4.207066557107642,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0016212461409942742,
"num_lines": 425
} |
from __future__ import absolute_import, division, print_function
import logging; _L = logging.getLogger('openaddr')
from .compat import standard_library
from tempfile import mkdtemp, mkstemp
from os.path import realpath, join, basename, splitext, exists, dirname, abspath, relpath
from shutil import copy, move, rmtree
from os import mkdir, environ, close, utime, remove
from urllib.parse import urlparse
from datetime import datetime, date
from calendar import timegm
import json, io, zipfile
from osgeo import ogr
from requests import get
from boto import connect_s3
from dateutil.parser import parse
from .sample import sample_geojson
from .cache import (
CacheResult,
compare_cache_details,
DownloadTask,
URLDownloadTask,
)
from .conform import (
ConformResult,
DecompressionTask,
ExcerptDataTask,
ConvertToCsvTask,
elaborate_filenames,
)
with open(join(dirname(__file__), 'VERSION')) as file:
__version__ = file.read().strip()
# Deprecated location for sources from old batch mode.
SOURCES_DIR = '/var/opt/openaddresses'
class S3:
_bucket = None
def __init__(self, key, secret, bucketname):
self._key, self._secret = key, secret
self.bucketname = bucketname
def _make_bucket(self):
if not self._bucket:
connection = connect_s3(self._key, self._secret)
self._bucket = connection.get_bucket(self.bucketname)
def get_key(self, name):
self._make_bucket()
return self._bucket.get_key(name)
def new_key(self, name):
self._make_bucket()
return self._bucket.new_key(name)
def cache(srcjson, destdir, extras):
''' Python wrapper for openaddress-cache.
Return a CacheResult object:
cache: URL of cached data, possibly with file:// schema
fingerprint: md5 hash of data,
version: data version as date?
elapsed: elapsed time as timedelta object
output: subprocess output as string
Creates and destroys a subdirectory in destdir.
'''
start = datetime.now()
source, _ = splitext(basename(srcjson))
workdir = mkdtemp(prefix='cache-', dir=destdir)
with open(srcjson, 'r') as src_file:
data = json.load(src_file)
data.update(extras)
    #
    # Gather the list of source URLs and hand them to the right download task.
    #
source_urls = data.get('data')
if not isinstance(source_urls, list):
source_urls = [source_urls]
task = DownloadTask.from_type_string(data.get('type'), source)
downloaded_files = task.download(source_urls, workdir)
# FIXME: I wrote the download stuff to assume multiple files because
# sometimes a Shapefile fileset is splayed across multiple files instead
# of zipped up nicely. When the downloader downloads multiple files,
# we should zip them together before uploading to S3 instead of picking
# the first one only.
filepath_to_upload = abspath(downloaded_files[0])
#
# Find the cached data and hold on to it.
#
resultdir = join(destdir, 'cached')
data['cache'], data['fingerprint'] \
= compare_cache_details(filepath_to_upload, resultdir, data)
rmtree(workdir)
return CacheResult(data.get('cache', None),
data.get('fingerprint', None),
data.get('version', None),
datetime.now() - start)
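# Illustrative sketch; the paths are assumptions:
#
#   result = cache('sources/us-xx-example.json', '/tmp/out', dict())
#   # result is a CacheResult whose cache, fingerprint and version come from
#   # the source JSON after downloading; result.elapsed is a timedelta.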
def conform(srcjson, destdir, extras):
''' Python wrapper for openaddresses-conform.
Return a ConformResult object:
processed: URL of processed data CSV
path: local path to CSV of processed data
geometry_type: typically Point or Polygon
elapsed: elapsed time as timedelta object
output: subprocess output as string
Creates and destroys a subdirectory in destdir.
'''
start = datetime.now()
source, _ = splitext(basename(srcjson))
workdir = mkdtemp(prefix='conform-', dir=destdir)
with open(srcjson, 'r') as src_file:
data = json.load(src_file)
data.update(extras)
#
# The cached data will be a local path.
#
scheme, _, cache_path, _, _, _ = urlparse(extras.get('cache', ''))
if scheme == 'file':
copy(cache_path, workdir)
source_urls = data.get('cache')
if not isinstance(source_urls, list):
source_urls = [source_urls]
task1 = URLDownloadTask(source)
downloaded_path = task1.download(source_urls, workdir)
_L.info("Downloaded to %s", downloaded_path)
task2 = DecompressionTask.from_type_string(data.get('compression'))
names = elaborate_filenames(data.get('conform', {}).get('file', None))
decompressed_paths = task2.decompress(downloaded_path, workdir, names)
_L.info("Decompressed to %d files", len(decompressed_paths))
task3 = ExcerptDataTask()
try:
conform = data.get('conform', {})
data_sample, geometry_type = task3.excerpt(decompressed_paths, workdir, conform)
_L.info("Sampled %d records", len(data_sample))
except Exception as e:
_L.warning("Error doing excerpt; skipping", exc_info=True)
data_sample = None
geometry_type = None
task4 = ConvertToCsvTask()
try:
csv_path, addr_count = task4.convert(data, decompressed_paths, workdir)
_L.info("Converted to %s with %d addresses", csv_path, addr_count)
except Exception as e:
_L.warning("Error doing conform; skipping", exc_info=True)
csv_path, addr_count = None, 0
out_path = None
if csv_path is not None and exists(csv_path):
move(csv_path, join(destdir, 'out.csv'))
out_path = realpath(join(destdir, 'out.csv'))
rmtree(workdir)
return ConformResult(data.get('processed', None),
data_sample,
data.get('website'),
data.get('license'),
geometry_type,
addr_count,
out_path,
datetime.now() - start)
def package_output(source, processed_path, website, license):
''' Write a zip archive to temp dir with processed data and optional .vrt.
'''
_, ext = splitext(processed_path)
handle, zip_path = mkstemp(suffix='.zip')
close(handle)
zip_file = zipfile.ZipFile(zip_path, mode='w', compression=zipfile.ZIP_DEFLATED)
template = join(dirname(__file__), 'templates', 'README.txt')
with io.open(template, encoding='utf8') as file:
content = file.read().format(website=website, license=license, date=date.today())
zip_file.writestr('README.txt', content.encode('utf8'))
if ext == '.csv':
# Add virtual format to make CSV readable by QGIS, OGR, etc.
# More information: http://www.gdal.org/drv_vrt.html
template = join(dirname(__file__), 'templates', 'conform-result.vrt')
with io.open(template, encoding='utf8') as file:
content = file.read().format(source=basename(source))
zip_file.writestr(source + '.vrt', content.encode('utf8'))
zip_file.write(processed_path, source + ext)
zip_file.close()
return zip_path
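# Illustrative sketch; the arguments are assumptions:
#
#   zip_path = package_output('us-xx-example', '/tmp/out.csv',
#                             'http://example.com', 'Public Domain')
#   # The archive contains README.txt, us-xx-example.csv and, because the
#   # input is a CSV, a us-xx-example.vrt wrapper readable by OGR and QGIS.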
def iterate_local_processed_files(runs):
''' Yield a stream of local processed result files for a list of runs.
'''
key = lambda run: run.datetime_tz or date(1970, 1, 1)
for run in sorted(runs, key=key, reverse=True):
source_base, _ = splitext(relpath(run.source_path, 'sources'))
processed_url = run.state and run.state.get('processed')
run_state = run.state
if not processed_url:
continue
try:
filename = download_processed_file(processed_url)
        except Exception:
_L.error('Failed to download {}'.format(processed_url))
continue
else:
yield (source_base, filename, run_state)
if filename and exists(filename):
remove(filename)
def download_processed_file(url):
''' Download a URL to a local temporary file, return its path.
Local file will have an appropriate timestamp and extension.
'''
_, ext = splitext(urlparse(url).path)
handle, filename = mkstemp(prefix='processed-', suffix=ext)
close(handle)
response = get(url, stream=True, timeout=5)
with open(filename, 'wb') as file:
for chunk in response.iter_content(chunk_size=8192):
file.write(chunk)
last_modified = response.headers.get('Last-Modified')
timestamp = timegm(parse(last_modified).utctimetuple())
utime(filename, (timestamp, timestamp))
return filename
| {
"repo_name": "slibby/machine",
"path": "openaddr/__init__.py",
"copies": "1",
"size": "8718",
"license": "isc",
"hash": 4279637605105907000,
"line_mean": 32.4022988506,
"line_max": 89,
"alpha_frac": 0.6264051388,
"autogenerated": false,
"ratio": 3.9663330300272976,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5092738168827298,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division, print_function
import logging; _L = logging.getLogger('openaddr')
from tempfile import mkdtemp, mkstemp
from os.path import realpath, join, splitext, exists, dirname, abspath, relpath
from shutil import copy, move, rmtree
from os import close, utime, remove
from urllib.parse import urlparse
from datetime import datetime, date
from calendar import timegm
import requests
from boto.s3.connection import S3Connection
from dateutil.parser import parse
from .cache import (
CacheResult,
compare_cache_details,
DownloadTask,
URLDownloadTask,
)
from .conform import (
ConformResult,
DecompressionTask,
ExcerptDataTask,
ConvertToCsvTask,
elaborate_filenames,
conform_license,
conform_attribution,
conform_sharealike,
)
with open(join(dirname(__file__), 'VERSION')) as file:
__version__ = file.read().strip()
class S3:
_bucket = None
def __init__(self, key, secret, bucketname):
self._key, self._secret = key, secret
self.bucketname = bucketname
def _make_bucket(self):
if not self._bucket:
# see https://github.com/boto/boto/issues/2836#issuecomment-67896932
kwargs = dict(calling_format='boto.s3.connection.OrdinaryCallingFormat')
connection = S3Connection(self._key, self._secret, **kwargs)
self._bucket = connection.get_bucket(self.bucketname)
@property
def bucket(self):
self._make_bucket()
return self._bucket
def get_key(self, name):
return self.bucket.get_key(name)
def new_key(self, name):
return self.bucket.new_key(name)
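# Illustrative sketch; the credentials and bucket name are assumptions:
#
#   s3 = S3('ACCESS-KEY', 'SECRET-KEY', 'data.example.com')
#   key = s3.new_key('runs/123/us-xx-example.zip')
#   # The boto connection and bucket are created lazily on first key access.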
class LocalProcessedResult:
def __init__(self, source_base, filename, run_state, code_version):
for attr in ('attribution_name', 'attribution_flag', 'website', 'license'):
assert hasattr(run_state, attr), 'Run state should have {} property'.format(attr)
self.source_base = source_base
self.filename = filename
self.run_state = run_state
self.code_version = code_version
def cache(data_source_name, data_source, destdir, extras):
''' Python wrapper for openaddress-cache.
Return a CacheResult object:
cache: URL of cached data, possibly with file:// schema
fingerprint: md5 hash of data,
version: data version as date?
elapsed: elapsed time as timedelta object
output: subprocess output as string
Creates and destroys a subdirectory in destdir.
'''
start = datetime.now()
workdir = mkdtemp(prefix='cache-', dir=destdir)
data_source.update(extras)
source_urls = data_source.get('data')
if not isinstance(source_urls, list):
source_urls = [source_urls]
protocol_string = data_source.get('protocol')
task = DownloadTask.from_protocol_string(protocol_string, data_source_name)
downloaded_files = task.download(source_urls, workdir, data_source.get('conform'))
# FIXME: I wrote the download stuff to assume multiple files because
# sometimes a Shapefile fileset is splayed across multiple files instead
# of zipped up nicely. When the downloader downloads multiple files,
# we should zip them together before uploading to S3 instead of picking
# the first one only.
filepath_to_upload = abspath(downloaded_files[0])
#
# Find the cached data and hold on to it.
#
resultdir = join(destdir, 'cached')
data_source['cache'], data_source['fingerprint'] \
= compare_cache_details(filepath_to_upload, resultdir, data_source)
rmtree(workdir)
return CacheResult(data_source.get('cache', None),
data_source.get('fingerprint', None),
data_source.get('version', None),
datetime.now() - start)
def conform(data_source_name, data_source, destdir, extras):
''' Python wrapper for openaddresses-conform.
Return a ConformResult object:
processed: URL of processed data CSV
path: local path to CSV of processed data
geometry_type: typically Point or Polygon
elapsed: elapsed time as timedelta object
output: subprocess output as string
Creates and destroys a subdirectory in destdir.
'''
start = datetime.now()
workdir = mkdtemp(prefix='conform-', dir=destdir)
data_source.update(extras)
#
# The cached data will be a local path.
#
scheme, _, cache_path, _, _, _ = urlparse(extras.get('cache', ''))
if scheme == 'file':
copy(cache_path, workdir)
source_urls = data_source.get('cache')
if not isinstance(source_urls, list):
source_urls = [source_urls]
task1 = URLDownloadTask(data_source_name)
downloaded_path = task1.download(source_urls, workdir)
_L.info("Downloaded to %s", downloaded_path)
task2 = DecompressionTask.from_format_string(data_source.get('compression'))
names = elaborate_filenames(data_source.get('conform', {}).get('file', None))
decompressed_paths = task2.decompress(downloaded_path, workdir, names)
_L.info("Decompressed to %d files", len(decompressed_paths))
task3 = ExcerptDataTask()
try:
conform = data_source.get('conform', {})
data_sample, geometry_type = task3.excerpt(decompressed_paths, workdir, conform)
_L.info("Sampled %d records", len(data_sample))
except Exception as e:
_L.warning("Error doing excerpt; skipping", exc_info=True)
data_sample = None
geometry_type = None
task4 = ConvertToCsvTask()
try:
csv_path, addr_count = task4.convert(data_source, decompressed_paths, workdir)
if addr_count > 0:
_L.info("Converted to %s with %d addresses", csv_path, addr_count)
else:
_L.warning('Found no addresses in source data')
csv_path = None
except Exception as e:
_L.warning("Error doing conform; skipping", exc_info=True)
csv_path, addr_count = None, 0
out_path = None
if csv_path is not None and exists(csv_path):
move(csv_path, join(destdir, 'out.csv'))
out_path = realpath(join(destdir, 'out.csv'))
rmtree(workdir)
sharealike_flag = conform_sharealike(data_source.get('license'))
attr_flag, attr_name = conform_attribution(data_source.get('license'), data_source.get('attribution'))
return ConformResult(data_source.get('processed', None),
data_sample,
data_source.get('website'),
conform_license(data_source.get('license')),
geometry_type,
addr_count,
out_path,
datetime.now() - start,
sharealike_flag,
attr_flag,
attr_name)
def iterate_local_processed_files(runs, sort_on='datetime_tz'):
''' Yield a stream of local processed result files for a list of runs.
Used in ci.collect and dotmap processes.
'''
if sort_on == 'source_path':
reverse, key = False, lambda run: run.source_path
else:
reverse, key = True, lambda run: run.datetime_tz or date(1970, 1, 1)
for run in sorted(runs, key=key, reverse=reverse):
source_base, _ = splitext(relpath(run.source_path, 'sources'))
processed_url = run.state and run.state.processed
run_state = run.state
if not processed_url:
continue
try:
filename = download_processed_file(processed_url)
except requests.exceptions.HTTPError as e:
if e.response.status_code == 404:
continue
else:
_L.error('HTTP {} while downloading {}: {}'.format(e.response.status_code, processed_url, e))
continue
except Exception as e:
_L.error('Failed to download {}: {}'.format(processed_url, e))
continue
yield LocalProcessedResult(source_base, filename, run_state, run.code_version)
if filename and exists(filename):
remove(filename)
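# Illustrative sketch; `runs` stands in for the run records used upstream and
# `handle` is a placeholder for caller code:
#
#   for result in iterate_local_processed_files(runs, sort_on='source_path'):
#       handle(result.source_base, result.filename)
#   # Each yielded LocalProcessedResult wraps a temporary download that is
#   # removed once the generator resumes.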
def download_processed_file(url):
''' Download a URL to a local temporary file, return its path.
Local file will have an appropriate timestamp and extension.
'''
urlparts = urlparse(url)
_, ext = splitext(urlparts.path)
handle, filename = mkstemp(prefix='processed-', suffix=ext)
close(handle)
if urlparts.hostname.endswith('s3.amazonaws.com'):
# Use boto directly if it's an S3 URL
if urlparts.hostname == 's3.amazonaws.com':
# Bucket and key are in the path part of the URL
bucket, key = urlparts.path[1:].split('/', 1)
else:
# Bucket is part of the domain, path is the key
bucket = urlparts.hostname[:-17]
key = urlparts.path[1:]
s3 = S3(None, None, bucket)
k = s3.get_key(key)
k.get_contents_to_filename(filename)
last_modified = datetime.strptime(k.last_modified, '%a, %d %b %Y %H:%M:%S %Z')
timestamp = timegm(last_modified.utctimetuple())
else:
for i in range(3):
# Otherwise just download via HTTP
response = requests.get(url, stream=True, timeout=5)
if response.status_code == 200:
break
elif response.status_code == 404:
response.raise_for_status()
else:
# Retry
continue
# Raise an exception if we failed after retries
response.raise_for_status()
with open(filename, 'wb') as file:
for chunk in response.iter_content(chunk_size=8192):
file.write(chunk)
last_modified = response.headers.get('Last-Modified')
timestamp = timegm(parse(last_modified).utctimetuple())
utime(filename, (timestamp, timestamp))
return filename
| {
"repo_name": "openaddresses/machine",
"path": "openaddr/__init__.py",
"copies": "1",
"size": "10021",
"license": "isc",
"hash": -7747096749304520000,
"line_mean": 33.9163763066,
"line_max": 109,
"alpha_frac": 0.6232910887,
"autogenerated": false,
"ratio": 4.003595685177786,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5126886773877787,
"avg_score": null,
"num_lines": null
} |
from __future__ import absolute_import, division, print_function
import logging; _L = logging.getLogger('openaddr.process_one')
from .compat import standard_library
from urllib.parse import urlparse
from os.path import join, basename, dirname, exists, splitext, relpath
from shutil import copy, move, rmtree
from argparse import ArgumentParser
from os import mkdir, rmdir, close, chmod
from _thread import get_ident
import tempfile, json, csv
from . import cache, conform, CacheResult, ConformResult
from .compat import csvopen, csvwriter
class SourceSaysSkip(RuntimeError): pass
def process(source, destination, extras=dict()):
''' Process a single source and destination, return path to JSON state file.
Creates a new directory and files under destination.
'''
temp_dir = tempfile.mkdtemp(prefix='process_one-', dir=destination)
temp_src = join(temp_dir, basename(source))
copy(source, temp_src)
log_handler = get_log_handler(temp_dir)
logging.getLogger('openaddr').addHandler(log_handler)
cache_result, conform_result = CacheResult.empty(), ConformResult.empty()
skipped_source = False
try:
with open(temp_src) as file:
if json.load(file).get('skip', None):
raise SourceSaysSkip()
# Cache source data.
cache_result = cache(temp_src, temp_dir, extras)
if not cache_result.cache:
_L.warning('Nothing cached')
else:
_L.info(u'Cached data in {}'.format(cache_result.cache))
# Conform cached source data.
conform_result = conform(temp_src, temp_dir, cache_result.todict())
if not conform_result.path:
_L.warning('Nothing processed')
else:
_L.info('Processed data in {}'.format(conform_result.path))
except SourceSaysSkip as e:
_L.info('Source says to skip in process_one.process()')
skipped_source = True
except Exception:
_L.warning('Error in process_one.process()', exc_info=True)
finally:
# Make sure this gets done no matter what
logging.getLogger('openaddr').removeHandler(log_handler)
# Write output
state_path = write_state(source, skipped_source, destination, log_handler,
cache_result, conform_result, temp_dir)
log_handler.close()
rmtree(temp_dir)
return state_path
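# Illustrative sketch; the paths are assumptions:
#
#   state_path = process('sources/us-xx-example.json', '/tmp/run')
#   # Produces /tmp/run/us-xx-example/index.json (plus index.txt, output.txt
#   # and any cached or processed files) and returns the index.json path.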
class LogFilter:
''' Logging filter object to match only record in the current thread.
'''
def __init__(self):
# Seems to work as unique ID with multiprocessing.Process() as well as threading.Thread()
self.thread_id = get_ident()
def filter(self, record):
return record.thread == self.thread_id
def get_log_handler(directory):
''' Create a new file handler for the current thread and return it.
'''
handle, filename = tempfile.mkstemp(dir=directory, suffix='.log')
close(handle)
chmod(filename, 0o644)
handler = logging.FileHandler(filename)
handler.setFormatter(logging.Formatter(u'%(asctime)s %(levelname)08s: %(message)s'))
handler.setLevel(logging.DEBUG)
handler.addFilter(LogFilter())
return handler
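# Illustrative sketch; the directory is an assumption:
#
#   handler = get_log_handler('/tmp/logs')
#   logging.getLogger('openaddr').addHandler(handler)
#   # Only records emitted from the creating thread pass the LogFilter, so
#   # parallel workers keep separate per-run log files.
#   logging.getLogger('openaddr').removeHandler(handler)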
def write_state(source, skipped, destination, log_handler, cache_result,
conform_result, temp_dir):
    ''' Write index.txt and index.json state files for one source run.
    Returns the path of the JSON state file.
    '''
source_id, _ = splitext(basename(source))
statedir = join(destination, source_id)
if not exists(statedir):
mkdir(statedir)
if cache_result.cache:
scheme, _, cache_path1, _, _, _ = urlparse(cache_result.cache)
if scheme in ('file', ''):
cache_path2 = join(statedir, 'cache{1}'.format(*splitext(cache_path1)))
copy(cache_path1, cache_path2)
state_cache = relpath(cache_path2, statedir)
else:
state_cache = cache_result.cache
else:
state_cache = None
if conform_result.path:
_, _, processed_path1, _, _, _ = urlparse(conform_result.path)
processed_path2 = join(statedir, 'out{1}'.format(*splitext(processed_path1)))
copy(processed_path1, processed_path2)
# Write the sample data to a sample.json file
if conform_result.sample:
sample_path = join(statedir, 'sample.json')
with open(sample_path, 'w') as sample_file:
json.dump(conform_result.sample, sample_file, indent=2)
log_handler.flush()
output_path = join(statedir, 'output.txt')
copy(log_handler.stream.name, output_path)
state = [
('source', basename(source)),
('skipped', bool(skipped)),
('cache', state_cache),
('sample', conform_result.sample and relpath(sample_path, statedir)),
('website', conform_result.website),
('license', conform_result.license),
('geometry type', conform_result.geometry_type),
('address count', conform_result.address_count),
('version', cache_result.version),
('fingerprint', cache_result.fingerprint),
('cache time', cache_result.elapsed and str(cache_result.elapsed)),
('processed', conform_result.path and relpath(processed_path2, statedir)),
('process time', conform_result.elapsed and str(conform_result.elapsed)),
('output', relpath(output_path, statedir))
]
with csvopen(join(statedir, 'index.txt'), 'w', encoding='utf8') as file:
out = csvwriter(file, dialect='excel-tab', encoding='utf8')
for row in zip(*state):
out.writerow(row)
with open(join(statedir, 'index.json'), 'w') as file:
json.dump(list(zip(*state)), file, indent=2)
_L.info(u'Wrote to state: {}'.format(file.name))
return file.name
parser = ArgumentParser(description='Run one source file locally, prints output path.')
parser.add_argument('source', help='Required source file name.')
parser.add_argument('destination', help='Required output directory name.')
parser.add_argument('-l', '--logfile', help='Optional log file name.')
parser.add_argument('-v', '--verbose', help='Turn on verbose logging',
action='store_const', dest='loglevel',
const=logging.DEBUG, default=logging.INFO)
parser.add_argument('-q', '--quiet', help='Turn off most logging',
action='store_const', dest='loglevel',
const=logging.WARNING, default=logging.INFO)
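# Illustrative command-line sketch, assuming the package is importable; the
# source and output paths are assumptions:
#
#   python -m openaddr.process_one sources/us-xx-example.json /tmp/run -v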
def main():
    ''' Parse arguments, process a single source, and print the state path.
    '''
from .jobs import setup_logger
args = parser.parse_args()
setup_logger(logfile=args.logfile, log_level=args.loglevel)
try:
file_path = process(args.source, args.destination)
except Exception as e:
_L.error(e, exc_info=True)
return 1
else:
print(file_path)
return 0
if __name__ == '__main__':
exit(main())
| {
"repo_name": "slibby/machine",
"path": "openaddr/process_one.py",
"copies": "1",
"size": "6888",
"license": "isc",
"hash": -5071988435065862000,
"line_mean": 34.3230769231,
"line_max": 97,
"alpha_frac": 0.6289198606,
"autogenerated": false,
"ratio": 3.970028818443804,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5098948679043803,
"avg_score": null,
"num_lines": null
} |