Unnamed: 0 (int64, 0 to 10k) | function (stringlengths 79 to 138k) | label (stringclasses, 20 values) | info (stringlengths 42 to 261)
---|---|---|---|
2,300 | @contextlib.contextmanager
def print_output():
try:
yield
except __HOLE__ as e:
if STATUS:
print(STATUS)
raise
except Exception as e:
logging.exception('The plugin %s has failed with an unhandled '
'exception', sys.argv[0])
status_err(traceback.format_exc(), force_print=True, exception=e)
else:
if STATUS:
print(STATUS)
for metric in METRICS:
print(metric) | SystemExit | dataset/ETHPy150Open rcbops/rpc-openstack/maas/plugins/maas_common.py/print_output |
2,301 | def parse_cpu_spec(spec):
"""Parse a CPU set specification.
:param spec: cpu set string eg "1-4,^3,6"
Each element in the list is either a single
CPU number, a range of CPU numbers, or a
caret followed by a CPU number to be excluded
from a previous range.
:returns: a set of CPU indexes
"""
cpuset_ids = set()
cpuset_reject_ids = set()
for rule in spec.split(','):
rule = rule.strip()
# Handle multi ','
if len(rule) < 1:
continue
# Note the count limit in the .split() call
range_parts = rule.split('-', 1)
if len(range_parts) > 1:
reject = False
if range_parts[0] and range_parts[0][0] == '^':
reject = True
range_parts[0] = str(range_parts[0][1:])
# So, this was a range; start by converting the parts to ints
try:
start, end = [int(p.strip()) for p in range_parts]
except ValueError:
raise exception.Invalid(_("Invalid range expression %r")
% rule)
# Make sure it's a valid range
if start > end:
raise exception.Invalid(_("Invalid range expression %r")
% rule)
# Add available CPU ids to set
if not reject:
cpuset_ids |= set(range(start, end + 1))
else:
cpuset_reject_ids |= set(range(start, end + 1))
elif rule[0] == '^':
# Not a range, the rule is an exclusion rule; convert to int
try:
cpuset_reject_ids.add(int(rule[1:].strip()))
except __HOLE__:
raise exception.Invalid(_("Invalid exclusion "
"expression %r") % rule)
else:
# OK, a single CPU to include; convert to int
try:
cpuset_ids.add(int(rule))
except ValueError:
raise exception.Invalid(_("Invalid inclusion "
"expression %r") % rule)
# Use sets to handle the exclusion rules for us
cpuset_ids -= cpuset_reject_ids
return cpuset_ids | ValueError | dataset/ETHPy150Open BU-NU-CLOUD-SP16/Trusted-Platform-Module-nova/nova/virt/hardware.py/parse_cpu_spec |
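Row 2,301 above documents the CPU-set grammar ("1-4,^3,6": single IDs, dash ranges, and caret-prefixed exclusions). As a rough standalone illustration of that grammar only, not the nova implementation (which also validates ranges and raises exception.Invalid), here is a minimal sketch; the helper name is made up:

```python
# Illustrative only: a simplified re-implementation of the "1-4,^3,6" grammar
# from the docstring above; error handling and range validation are omitted.
def parse_cpu_spec_sketch(spec):
    include, exclude = set(), set()
    for rule in filter(None, (r.strip() for r in spec.split(','))):
        target = exclude if rule.startswith('^') else include
        rule = rule.lstrip('^')
        if '-' in rule:
            start, end = (int(p) for p in rule.split('-', 1))
            target |= set(range(start, end + 1))
        else:
            target.add(int(rule))
    return include - exclude

print(parse_cpu_spec_sketch("1-4,^3,6"))  # -> {1, 2, 4, 6}
```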
2,302 | def get_number_of_serial_ports(flavor, image_meta):
"""Get the number of serial consoles from the flavor or image
:param flavor: Flavor object to read extra specs from
:param image_meta: nova.objects.ImageMeta object instance
If flavor extra specs is not set, then any image meta value is permitted.
If flavor extra specs *is* set, then this provides the default serial
port count. The image meta is permitted to override the extra specs, but
*only* with a lower value. ie
- flavor hw:serial_port_count=4
VM gets 4 serial ports
- flavor hw:serial_port_count=4 and image hw_serial_port_count=2
VM gets 2 serial ports
- image hw_serial_port_count=6
VM gets 6 serial ports
- flavor hw:serial_port_count=4 and image hw_serial_port_count=6
Abort guest boot - forbidden to exceed flavor value
:returns: number of serial ports
"""
def get_number(obj, property):
num_ports = obj.get(property)
if num_ports is not None:
try:
num_ports = int(num_ports)
except __HOLE__:
raise exception.ImageSerialPortNumberInvalid(
num_ports=num_ports, property=property)
return num_ports
flavor_num_ports = get_number(flavor.extra_specs, "hw:serial_port_count")
image_num_ports = image_meta.properties.get("hw_serial_port_count", None)
if (flavor_num_ports and image_num_ports) is not None:
if image_num_ports > flavor_num_ports:
raise exception.ImageSerialPortNumberExceedFlavorValue()
return image_num_ports
return flavor_num_ports or image_num_ports or 1 | ValueError | dataset/ETHPy150Open BU-NU-CLOUD-SP16/Trusted-Platform-Module-nova/nova/virt/hardware.py/get_number_of_serial_ports |
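Row 2,302 above documents the precedence between the flavor extra spec hw:serial_port_count and the image property hw_serial_port_count: the flavor sets the default, the image may only lower it, and 1 is the fallback. A minimal sketch of just that documented decision logic, with a made-up helper name and a plain ValueError standing in for the nova exception classes:

```python
def serial_port_count_sketch(flavor_count=None, image_count=None):
    # Mirrors the docstring's rules, not the exact nova code path.
    if flavor_count is not None and image_count is not None:
        if image_count > flavor_count:
            raise ValueError("image value may not exceed flavor value")
        return image_count
    return flavor_count or image_count or 1

assert serial_port_count_sketch(4) == 4          # flavor only
assert serial_port_count_sketch(4, 2) == 2       # image may lower it
assert serial_port_count_sketch(None, 6) == 6    # image only
assert serial_port_count_sketch() == 1           # neither set
```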
2,303 | def _numa_get_pagesize_constraints(flavor, image_meta):
"""Return the requested memory page size
:param flavor: a Flavor object to read extra specs from
:param image_meta: nova.objects.ImageMeta object instance
:raises: MemoryPagesSizeInvalid or MemoryPageSizeForbidden
:returns: a page size requested or MEMPAGES_*
"""
def check_and_return_pages_size(request):
if request == "any":
return MEMPAGES_ANY
elif request == "large":
return MEMPAGES_LARGE
elif request == "small":
return MEMPAGES_SMALL
else:
try:
request = int(request)
except __HOLE__:
try:
request = strutils.string_to_bytes(
request, return_int=True) / units.Ki
except ValueError:
request = 0
if request <= 0:
raise exception.MemoryPageSizeInvalid(pagesize=request)
return request
flavor_request = flavor.get('extra_specs', {}).get("hw:mem_page_size", "")
image_request = image_meta.properties.get("hw_mem_page_size", "")
if not flavor_request and image_request:
raise exception.MemoryPageSizeForbidden(
pagesize=image_request,
against="<empty>")
if not flavor_request:
# Nothing was specified for hugepages,
# so let the default processing run.
return None
pagesize = check_and_return_pages_size(flavor_request)
if image_request and (pagesize in (MEMPAGES_ANY, MEMPAGES_LARGE)):
return check_and_return_pages_size(image_request)
elif image_request:
raise exception.MemoryPageSizeForbidden(
pagesize=image_request,
against=flavor_request)
return pagesize | ValueError | dataset/ETHPy150Open BU-NU-CLOUD-SP16/Trusted-Platform-Module-nova/nova/virt/hardware.py/_numa_get_pagesize_constraints |
2,304 | def host_topology_and_format_from_host(host):
"""Convenience method for getting the numa_topology out of hosts
Since we may get a host as either a dict, a db object, or an actual
ComputeNode object, or an instance of HostState class, this makes sure we
get back either None, or an instance of objects.NUMATopology class.
:returns: A two-tuple, first element is the topology itself or None, second
is a boolean set to True if topology was in JSON format.
"""
was_json = False
try:
host_numa_topology = host.get('numa_topology')
except __HOLE__:
host_numa_topology = host.numa_topology
if host_numa_topology is not None and isinstance(
host_numa_topology, six.string_types):
was_json = True
host_numa_topology = (objects.NUMATopology.obj_from_db_obj(
host_numa_topology))
return host_numa_topology, was_json
# TODO(ndipanov): Remove when all code paths are using objects | AttributeError | dataset/ETHPy150Open BU-NU-CLOUD-SP16/Trusted-Platform-Module-nova/nova/virt/hardware.py/host_topology_and_format_from_host |
2,305 | def _process_bulk_chunk(client, bulk_actions, raise_on_exception=True, raise_on_error=True, **kwargs):
"""
Send a bulk request to elasticsearch and process the output.
"""
# if raise on error is set, we need to collect errors per chunk before raising them
errors = []
try:
# send the actual request
resp = client.bulk('\n'.join(bulk_actions) + '\n', **kwargs)
except TransportError as e:
# default behavior - just propagate exception
if raise_on_exception:
raise e
# if we are not propagating, mark all actions in current chunk as failed
err_message = str(e)
exc_errors = []
# deserialize the data back, this is expensive but only run on
# errors if raise_on_exception is false, so shouldn't be a real
# issue
bulk_data = map(client.transport.serializer.loads, bulk_actions)
while True:
try:
# collect all the information about failed actions
action = next(bulk_data)
op_type, action = action.popitem()
info = {"error": err_message, "status": e.status_code, "exception": e}
if op_type != 'delete':
info['data'] = next(bulk_data)
info.update(action)
exc_errors.append({op_type: info})
except __HOLE__:
break
# emulate standard behavior for failed actions
if raise_on_error:
raise BulkIndexError('%i document(s) failed to index.' % len(exc_errors), exc_errors)
else:
for err in exc_errors:
yield False, err
return
# go through request-response pairs and detect failures
for op_type, item in map(methodcaller('popitem'), resp['items']):
ok = 200 <= item.get('status', 500) < 300
if not ok and raise_on_error:
errors.append({op_type: item})
if ok or not errors:
# if we are not just recording all errors to be able to raise
# them all at once, yield items individually
yield ok, {op_type: item}
if errors:
raise BulkIndexError('%i document(s) failed to index.' % len(errors), errors) | StopIteration | dataset/ETHPy150Open elastic/elasticsearch-py/elasticsearch/helpers/__init__.py/_process_bulk_chunk |
2,306 | def __init__(self, text=None, filename=None, exclude=None):
"""
Source can be provided as `text`, the text itself, or `filename`, from
which the text will be read. Excluded lines are those that match
`exclude`, a regex.
"""
assert text or filename, "CodeParser needs either text or filename"
self.filename = filename or "<code>"
self.text = text
if not self.text:
try:
sourcef = open_source(self.filename)
try:
self.text = sourcef.read()
finally:
sourcef.close()
except __HOLE__:
_, err, _ = sys.exc_info()
raise NoSource(
"No source for code: '%s': %s" % (self.filename, err)
)
# Scrap the BOM if it exists.
if self.text and ord(self.text[0]) == 0xfeff:
self.text = self.text[1:]
self.exclude = exclude
self.show_tokens = False
# The text lines of the parsed code.
self.lines = self.text.split('\n')
# The line numbers of excluded lines of code.
self.excluded = set()
# The line numbers of docstring lines.
self.docstrings = set()
# The line numbers of class definitions.
self.classdefs = set()
# A dict mapping line numbers to (lo,hi) for multi-line statements.
self.multiline = {}
# The line numbers that start statements.
self.statement_starts = set()
# Lazily-created ByteParser
self._byte_parser = None | IOError | dataset/ETHPy150Open RoseOu/flasky/venv/lib/python2.7/site-packages/coverage/parser.py/CodeParser.__init__ |
2,307 | def _opcode_set(*names):
"""Return a set of opcodes by the names in `names`."""
s = set()
for name in names:
try:
s.add(_opcode(name))
except __HOLE__:
pass
return s
# Opcodes that leave the code object. | KeyError | dataset/ETHPy150Open RoseOu/flasky/venv/lib/python2.7/site-packages/coverage/parser.py/_opcode_set |
2,308 | @transform( thresholdFoldChange, suffix(".foldchange"), ".shared.foldchange")
def sharedIntervalsFoldChangeThreshold(infile, outfile):
'''identify shared intervals between datasets at different foldchange thresholds'''
# Open foldchange file and read
fc_file = open(infile, "r")
foldchange = []
for line in fc_file:
threshold, interval_count = line.split()
foldchange.append(threshold)
in_track = P.snip( os.path.basename( infile ), ".foldchange" )
in_dir = os.path.dirname(infile)
out_dir = in_dir.replace("macs","fc")
try: os.mkdir( out_dir )
except __HOLE__: pass
if os.path.exists(outfile):
statement = '''rm %(outfile)s'''
P.run()
# for each foldchange
for fc in foldchange:
in_bed = "foldchange/" + in_track.replace("-","_") + ".fc" + str(fc) + ".bed"
# For each track
for track in TRACKS:
if (str(track) != in_track):
compare_bed = "foldchange/" + str(track).replace("-","_") + ".fc" + str(fc) + ".bed"
statement = '''echo %(track)s %(fc)s >> %(outfile)s; intersectBed -a %(in_bed)s -b %(compare_bed)s -u | wc -l >> %(outfile)s; '''
P.run()
statement = '''sed -i '{N;s/\\n/\\t/}' %(outfile)s; sed -i '{s/ /\\t/g}' %(outfile)s; '''
P.run()
############################################################ | OSError | dataset/ETHPy150Open CGATOxford/cgat/obsolete/pipeline_proj012_chipseq.py/sharedIntervalsFoldChangeThreshold |
2,309 | def fromMessage(klass, message, op_endpoint=UNUSED):
"""Construct me from an OpenID Message.
@param message: The OpenID associate request
@type message: openid.message.Message
@returntype: L{AssociateRequest}
"""
if message.isOpenID1():
session_type = message.getArg(OPENID1_NS, 'session_type')
if session_type == 'no-encryption':
oidutil.log('Received OpenID 1 request with a no-encryption '
'association session type. Continuing anyway.')
elif not session_type:
session_type = 'no-encryption'
else:
session_type = message.getArg(OPENID2_NS, 'session_type')
if session_type is None:
raise ProtocolError(message,
text="session_type missing from request")
try:
session_class = klass.session_classes[session_type]
except KeyError:
raise ProtocolError(message,
"Unknown session type %r" % (session_type,))
try:
session = session_class.fromMessage(message)
except __HOLE__, why:
raise ProtocolError(message, 'Error parsing %s session: %s' %
(session_class.session_type, why[0]))
assoc_type = message.getArg(OPENID_NS, 'assoc_type', 'HMAC-SHA1')
if assoc_type not in session.allowed_assoc_types:
fmt = 'Session type %s does not support association type %s'
raise ProtocolError(message, fmt % (session_type, assoc_type))
self = klass(session, assoc_type)
self.message = message
self.namespace = message.getOpenIDNamespace()
return self | ValueError | dataset/ETHPy150Open CollabQ/CollabQ/openid/server/server.py/AssociateRequest.fromMessage |
2,310 | def verify(self, assoc_handle, message):
"""Verify that the signature for some data is valid.
@param assoc_handle: The handle of the association used to sign the
data.
@type assoc_handle: str
@param message: The signed message to verify
@type message: openid.message.Message
@returns: C{True} if the signature is valid, C{False} if not.
@returntype: bool
"""
assoc = self.getAssociation(assoc_handle, dumb=True)
if not assoc:
oidutil.log("failed to get assoc with handle %r to verify "
"message %r"
% (assoc_handle, message))
return False
try:
valid = assoc.checkMessageSignature(message)
except __HOLE__, ex:
oidutil.log("Error in verifying %s with %s: %s" % (message,
assoc,
ex))
return False
return valid | ValueError | dataset/ETHPy150Open CollabQ/CollabQ/openid/server/server.py/Signatory.verify |
2,311 | def get_page():
""" Get current page
Get the current page from the query string `page=x`;
if no page is given, return `1` instead.
"""
try:
page = request.args.get('page', 1)
return int(page)
except __HOLE__:
return 1 | ValueError | dataset/ETHPy150Open DoubleCiti/daimaduan.com/daimaduan/utils/pagination.py/get_page |
2,312 | def post_template_populate(self):
current_post = getattr(self.request, get_setting('CURRENT_POST_IDENTIFIER'), None)
if current_post and self.request.user.has_perm('djangocms_blog.change_post'): # pragma: no cover # NOQA
# removing page meta menu, if present, to avoid confusion
try: # pragma: no cover
import djangocms_page_meta # NOQA
menu = self.request.toolbar.get_or_create_menu('page')
pagemeta = menu.get_or_create_menu('pagemeta', 'meta')
menu.remove_item(pagemeta)
except __HOLE__:
pass
# removing page tags menu, if present, to avoid confusion
try: # pragma: no cover
import djangocms_page_tags # NOQA
menu = self.request.toolbar.get_or_create_menu('page')
pagetags = menu.get_or_create_menu('pagetags', 'tags')
menu.remove_item(pagetags)
except ImportError:
pass
self.add_publish_button() | ImportError | dataset/ETHPy150Open nephila/djangocms-blog/djangocms_blog/cms_toolbar.py/BlogToolbar.post_template_populate |
2,313 | def _split_mod_var_names(resource_name):
""" Return (module_name, class_name) pair from given string. """
try:
dot_index = resource_name.rindex('.')
except __HOLE__:
# no dot found
return '', resource_name
return resource_name[:dot_index], resource_name[dot_index + 1:] | ValueError | dataset/ETHPy150Open wuher/devil/devil/perm/management.py/_split_mod_var_names |
2,314 | def _instantiate_resource(item):
try:
res = item()
except __HOLE__:
return None
else:
return res if _is_resource_obj(res) else None | TypeError | dataset/ETHPy150Open wuher/devil/devil/perm/management.py/_instantiate_resource |
2,315 | def get_resources():
from django.conf import settings
try:
acl_resources = settings.ACL_RESOURCES
except __HOLE__:
# ACL_RESOURCES is not specified in settings
return []
else:
return _handle_list(acl_resources) | AttributeError | dataset/ETHPy150Open wuher/devil/devil/perm/management.py/get_resources |
2,316 | def GetApplication():
'''Return app environment as: ARCMAP, ARCGIS_PRO, OTHER'''
global app_found
if app_found != 'NOT_SET':
return app_found
try:
from arcpy import mp
except __HOLE__:
try:
from arcpy import mapping
mxd = arcpy.mapping.MapDocument("CURRENT")
app_found = "ARCMAP"
return app_found
except:
app_found = "OTHER"
return app_found
try:
aprx = arcpy.mp.ArcGISProject('CURRENT')
app_found = "ARCGIS_PRO"
return app_found
except:
app_found = "OTHER"
return app_found | ImportError | dataset/ETHPy150Open Esri/solutions-geoprocessing-toolbox/operational_graphics/toolboxes/scripts/Utilities.py/GetApplication |
2,317 | @task
def watch_docs():
"""Run build the docs when a file changes."""
try:
import sphinx_autobuild # noqa
except __HOLE__:
print('ERROR: watch task requires the sphinx_autobuild package.')
print('Install it with:')
print(' pip install sphinx-autobuild')
sys.exit(1)
docs()
run('sphinx-autobuild {} {}'.format(docs_dir, build_dir), pty=True) | ImportError | dataset/ETHPy150Open jmcarp/nplusone/tasks.py/watch_docs |
2,318 | def erase_in_display(self, type_of=0, private=False):
"""Erases display in a specific way.
:param int type_of: defines the way the line should be erased in:
* ``0`` -- Erases from cursor to end of screen, including
cursor position.
* ``1`` -- Erases from beginning of screen to cursor,
including cursor position.
* ``2`` -- Erases complete display. All lines are erased
and changed to single-width. Cursor does not move.
* ``3`` -- Erase saved lines. (Xterm) Clears the history.
:param bool private: when ``True`` character attributes are left
unchanged **not implemented**.
"""
line_offset = self.line_offset
pt_cursor_position = self.pt_cursor_position
if type_of == 3:
# Clear data buffer.
for y in list(self.data_buffer):
self.data_buffer.pop(y, None)
# Reset line_offset.
pt_cursor_position.y = 0
self.max_y = 0
else:
try:
interval = (
# a) erase from cursor to the end of the display, including
# the cursor,
range(pt_cursor_position.y + 1, line_offset + self.lines),
# b) erase from the beginning of the display to the cursor,
# including it,
range(line_offset, pt_cursor_position.y),
# c) erase the whole display.
range(line_offset, line_offset + self.lines)
)[type_of]
except __HOLE__:
return
data_buffer = self.data_buffer
for line in interval:
data_buffer[line] = defaultdict(lambda: Char(' '))
# In case of 0 or 1 we have to erase the line with the cursor.
if type_of in [0, 1]:
self.erase_in_line(type_of) | IndexError | dataset/ETHPy150Open jonathanslenders/pymux/pymux/screen.py/BetterScreen.erase_in_display |
2,319 | def select_graphic_rendition(self, *attrs):
""" Support 256 colours """
replace = {}
if not attrs:
attrs = [0]
else:
attrs = list(attrs[::-1])
while attrs:
attr = attrs.pop()
if attr in self._fg_colors:
replace["color"] = self._fg_colors[attr]
elif attr in self._bg_colors:
replace["bgcolor"] = self._bg_colors[attr]
elif attr == 1:
replace["bold"] = True
elif attr == 3:
replace["italic"] = True
elif attr == 4:
replace["underline"] = True
elif attr == 5:
replace["blink"] = True
elif attr == 6:
replace["blink"] = True # Fast blink.
elif attr == 7:
replace["reverse"] = True
elif attr == 22:
replace["bold"] = False
elif attr == 23:
replace["italic"] = False
elif attr == 24:
replace["underline"] = False
elif attr == 25:
replace["blink"] = False
elif attr == 27:
replace["reverse"] = False
elif not attr:
replace = {}
self._attrs = Attrs(color=None, bgcolor=None, bold=False,
underline=False, italic=False, blink=False, reverse=False)
elif attr in (38, 48):
n = attrs.pop()
# 256 colors.
if n == 5:
if attr == 38:
m = attrs.pop()
replace["color"] = self._256_colors.get(1024 + m)
elif attr == 48:
m = attrs.pop()
replace["bgcolor"] = self._256_colors.get(1024 + m)
# True colors.
if n == 2:
try:
color_str = '%02x%02x%02x' % (attrs.pop(), attrs.pop(), attrs.pop())
except __HOLE__:
pass
else:
if attr == 38:
replace["color"] = color_str
elif attr == 48:
replace["bgcolor"] = color_str
self._attrs = self._attrs._replace(**replace) | IndexError | dataset/ETHPy150Open jonathanslenders/pymux/pymux/screen.py/BetterScreen.select_graphic_rendition |
2,320 | def __ReadPickled(self, filename):
"""Reads a pickled object from the given file and returns it.
"""
self.__file_lock.acquire()
try:
try:
if (filename and
filename != '/dev/null' and
os.path.isfile(filename) and
os.stat(filename).st_size > 0):
return pickle.load(open(filename, 'rb'))
else:
logging.warning('Could not read datastore data from %s', filename)
except (AttributeError, LookupError, ImportError, NameError, __HOLE__,
ValueError, struct.error, pickle.PickleError), e:
raise apiproxy_errors.ApplicationError(
datastore_pb.Error.INTERNAL_ERROR,
'Could not read data from %s. Try running with the '
'--clear_datastore flag. Cause:\n%r' % (filename, e))
finally:
self.__file_lock.release()
return [] | TypeError | dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/api/datastore_file_stub.py/DatastoreFileStub.__ReadPickled |
2,321 | def __WritePickled(self, obj, filename):
"""Pickles the object and writes it to the given file.
"""
if not filename or filename == '/dev/null' or not obj:
return
descriptor, tmp_filename = tempfile.mkstemp(dir=os.path.dirname(filename))
tmpfile = os.fdopen(descriptor, 'wb')
pickler = pickle.Pickler(tmpfile, protocol=1)
pickler.fast = True
pickler.dump(obj)
tmpfile.close()
self.__file_lock.acquire()
try:
try:
os.rename(tmp_filename, filename)
except __HOLE__:
try:
os.remove(filename)
except:
pass
os.rename(tmp_filename, filename)
finally:
self.__file_lock.release() | OSError | dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/api/datastore_file_stub.py/DatastoreFileStub.__WritePickled |
2,322 | def _Get(self, key):
app_kind, _, k = self._GetEntityLocation(key)
try:
return datastore_stub_util.LoadEntity(
self.__entities_by_kind[app_kind][k].protobuf)
except __HOLE__:
pass | KeyError | dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/api/datastore_file_stub.py/DatastoreFileStub._Get |
2,323 | def _Delete(self, key):
app_kind, eg_k, k = self._GetEntityLocation(key)
self.__entities_lock.acquire()
try:
del self.__entities_by_kind[app_kind][k]
del self.__entities_by_group[eg_k][k]
if not self.__entities_by_kind[app_kind]:
del self.__entities_by_kind[app_kind]
if not self.__entities_by_group[eg_k]:
del self.__entities_by_group[eg_k]
del self.__schema_cache[app_kind]
except __HOLE__:
pass
finally:
self.__entities_lock.release() | KeyError | dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/api/datastore_file_stub.py/DatastoreFileStub._Delete |
2,324 | def _GetQueryCursor(self, query, filters, orders, index_list):
app_id = query.app()
namespace = query.name_space()
pseudo_kind = None
if query.has_kind() and query.kind() in self._pseudo_kinds:
pseudo_kind = self._pseudo_kinds[query.kind()]
self.__entities_lock.acquire()
try:
app_ns = datastore_types.EncodeAppIdNamespace(app_id, namespace)
if pseudo_kind:
(results, filters, orders) = pseudo_kind.Query(query, filters, orders)
elif query.has_kind():
results = [entity.protobuf for entity in
self.__entities_by_kind[app_ns, query.kind()].values()]
else:
results = []
for (cur_app_ns, _), entities in self.__entities_by_kind.iteritems():
if cur_app_ns == app_ns:
results.extend(entity.protobuf for entity in entities.itervalues())
except __HOLE__:
results = []
finally:
self.__entities_lock.release()
return datastore_stub_util._ExecuteQuery(results, query,
filters, orders, index_list) | KeyError | dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/api/datastore_file_stub.py/DatastoreFileStub._GetQueryCursor |
2,325 | def test_csv_file_header_index_error_in_legislators_current(self):
try:
with open(LEGISLATORS_CURRENT_CSV_FILE, 'rU') as legislators_current_data:
reader = csv.reader(legislators_current_data) # Create a regular tuple reader
for index, legislator_row in enumerate(reader):
print legislator_row[29]
self.fail('There is a value in the array after the last expected item.')
break
except __HOLE__:
pass | IndexError | dataset/ETHPy150Open wevoteeducation/WeVoteBase/import_export_theunitedstatesio/tests.py/ImportExportTheUnitedStatesIoTests.test_csv_file_header_index_error_in_legislators_current |
2,326 | def issue(server, account, domains, key_size, key_file=None, csr_file=None, output_path=None):
if not output_path or output_path == '.':
output_path = os.getcwd()
# Load key or generate
if key_file:
try:
with open(key_file, 'rb') as f:
certificate_key = load_private_key(f.read())
except (ValueError, AttributeError, TypeError, IOError) as e:
logger.error("Couldn't read certificate key.")
raise ManualeError(e)
else:
logger.info("Generating a {} bit RSA key. This might take a second.".format(key_size))
certificate_key = generate_rsa_key(key_size)
logger.info("Key generated.")
logger.info("")
# Load CSR or generate
if csr_file:
try:
with open(csr_file, 'rb') as f:
csr = export_csr_for_acme(load_csr(f.read()))
except (ValueError, AttributeError, TypeError, IOError) as e:
logger.error("Couldn't read CSR.")
raise ManualeError(e)
else:
csr = create_csr(certificate_key, domains)
acme = Acme(server, account)
try:
logger.info("Requesting certificate issuance...")
result = acme.issue_certificate(csr)
logger.info("Certificate issued.")
except __HOLE__ as e:
logger.error("Connection or service request failed. Aborting.")
raise ManualeError(e)
try:
certificate = load_der_certificate(result.certificate)
# Print some neat info
logger.info("")
logger.info(" Expires: {}".format(certificate.not_valid_after.strftime(EXPIRATION_FORMAT)))
logger.info(" SHA256: {}".format(binascii.hexlify(certificate.fingerprint(SHA256())).decode('ascii')))
logger.info("")
# Write the key, certificate and full chain
os.makedirs(output_path, exist_ok=True)
cert_path = os.path.join(output_path, domains[0] + '.crt')
chain_path = os.path.join(output_path, domains[0] + '.chain.crt')
intermediate_path = os.path.join(output_path, domains[0] + '.intermediate.crt')
key_path = os.path.join(output_path, domains[0] + '.pem')
with open(key_path, 'wb') as f:
os.chmod(key_path, 0o600)
f.write(export_private_key(certificate_key))
logger.info("Wrote key to {}".format(f.name))
with open(cert_path, 'wb') as f:
f.write(export_pem_certificate(certificate))
logger.info("Wrote certificate to {}".format(f.name))
with open(chain_path, 'wb') as f:
f.write(export_pem_certificate(certificate))
if result.intermediate:
f.write(export_pem_certificate(load_der_certificate(result.intermediate)))
logger.info("Wrote certificate with intermediate to {}".format(f.name))
if result.intermediate:
with open(intermediate_path, 'wb') as f:
f.write(export_pem_certificate(load_der_certificate(result.intermediate)))
logger.info("Wrote intermediate certificate to {}".format(f.name))
except IOError as e:
logger.error("Failed to write certificate or key. Going to print them for you instead.")
logger.error("")
for line in export_private_key(certificate_key).decode('ascii').split('\n'):
logger.error(line)
for line in export_pem_certificate(certificate).decode('ascii').split('\n'):
logger.error(line)
raise ManualeError(e) | IOError | dataset/ETHPy150Open veeti/manuale/manuale/issue.py/issue |
2,327 | def run(plotIt=True):
"""
EM: FDEM: 1D: Inversion
=======================
Here we will create and run a FDEM 1D inversion.
"""
cs, ncx, ncz, npad = 5., 25, 15, 15
hx = [(cs,ncx), (cs,npad,1.3)]
hz = [(cs,npad,-1.3), (cs,ncz), (cs,npad,1.3)]
mesh = Mesh.CylMesh([hx,1,hz], '00C')
layerz = -100.
active = mesh.vectorCCz<0.
layer = (mesh.vectorCCz<0.) & (mesh.vectorCCz>=layerz)
actMap = Maps.InjectActiveCells(mesh, active, np.log(1e-8), nC=mesh.nCz)
mapping = Maps.ExpMap(mesh) * Maps.SurjectVertical1D(mesh) * actMap
sig_half = 2e-2
sig_air = 1e-8
sig_layer = 1e-2
sigma = np.ones(mesh.nCz)*sig_air
sigma[active] = sig_half
sigma[layer] = sig_layer
mtrue = np.log(sigma[active])
if plotIt:
import matplotlib.pyplot as plt
fig, ax = plt.subplots(1,1, figsize = (3, 6))
plt.semilogx(sigma[active], mesh.vectorCCz[active])
ax.set_ylim(-500, 0)
ax.set_xlim(1e-3, 1e-1)
ax.set_xlabel('Conductivity (S/m)', fontsize = 14)
ax.set_ylabel('Depth (m)', fontsize = 14)
ax.grid(color='k', alpha=0.5, linestyle='dashed', linewidth=0.5)
rxOffset=10.
bzi = EM.FDEM.Rx(np.array([[rxOffset, 0., 1e-3]]), 'bzi')
freqs = np.logspace(1,3,10)
srcLoc = np.array([0., 0., 10.])
srcList = [EM.FDEM.Src.MagDipole([bzi],freq, srcLoc,orientation='Z') for freq in freqs]
survey = EM.FDEM.Survey(srcList)
prb = EM.FDEM.Problem_b(mesh, mapping=mapping)
try:
from pymatsolver import MumpsSolver
prb.Solver = MumpsSolver
except __HOLE__, e:
prb.Solver = SolverLU
prb.pair(survey)
std = 0.05
survey.makeSyntheticData(mtrue, std)
survey.std = std
survey.eps = np.linalg.norm(survey.dtrue)*1e-5
if plotIt:
import matplotlib.pyplot as plt
fig, ax = plt.subplots(1,1, figsize = (6, 6))
ax.semilogx(freqs,survey.dtrue[:freqs.size], 'b.-')
ax.semilogx(freqs,survey.dobs[:freqs.size], 'r.-')
ax.legend(('Noisefree', '$d^{obs}$'), fontsize = 16)
ax.set_xlabel('Time (s)', fontsize = 14)
ax.set_ylabel('$B_z$ (T)', fontsize = 16)
ax.set_xlabel('Time (s)', fontsize = 14)
ax.grid(color='k', alpha=0.5, linestyle='dashed', linewidth=0.5)
dmisfit = DataMisfit.l2_DataMisfit(survey)
regMesh = Mesh.TensorMesh([mesh.hz[mapping.maps[-1].indActive]])
reg = Regularization.Tikhonov(regMesh)
opt = Optimization.InexactGaussNewton(maxIter = 6)
invProb = InvProblem.BaseInvProblem(dmisfit, reg, opt)
# Create an inversion object
beta = Directives.BetaSchedule(coolingFactor=5, coolingRate=2)
betaest = Directives.BetaEstimate_ByEig(beta0_ratio=1e0)
inv = Inversion.BaseInversion(invProb, directiveList=[beta,betaest])
m0 = np.log(np.ones(mtrue.size)*sig_half)
reg.alpha_s = 1e-3
reg.alpha_x = 1.
prb.counter = opt.counter = Utils.Counter()
opt.LSshorten = 0.5
opt.remember('xc')
mopt = inv.run(m0)
if plotIt:
import matplotlib.pyplot as plt
fig, ax = plt.subplots(1,1, figsize = (3, 6))
plt.semilogx(sigma[active], mesh.vectorCCz[active])
plt.semilogx(np.exp(mopt), mesh.vectorCCz[active])
ax.set_ylim(-500, 0)
ax.set_xlim(1e-3, 1e-1)
ax.set_xlabel('Conductivity (S/m)', fontsize = 14)
ax.set_ylabel('Depth (m)', fontsize = 14)
ax.grid(color='k', alpha=0.5, linestyle='dashed', linewidth=0.5)
plt.legend(['$\sigma_{true}$', '$\sigma_{pred}$'],loc='best')
plt.show() | ImportError | dataset/ETHPy150Open simpeg/simpeg/SimPEG/Examples/EM_FDEM_1D_Inversion.py/run |
2,328 | def tearDown(self):
"""
Remove all remaining triggers from the reactor.
"""
while self.triggers:
trigger = self.triggers.pop()
try:
reactor.removeSystemEventTrigger(trigger)
except (ValueError, __HOLE__):
pass | KeyError | dataset/ETHPy150Open kuri65536/python-for-android/python-modules/twisted/twisted/test/test_internet.py/SystemEventTestCase.tearDown |
2,329 | def main(self, argv=None):
"""The main program controller."""
if argv is None:
argv = sys.argv
# Step 1: Determine the command and arguments.
try:
self.progName = progName = os.path.basename(argv[0])
self.command = command = optionDashesRE.sub("", argv[1])
if command == 'test':
self.testOpts = argv[2:]
else:
self.parseOpts(argv[2:])
except __HOLE__:
usage(HELP_PAGE1, "not enough command-line arguments")
# Step 2: Call the command
meths = (self.compile, self.fill, self.help, self.options,
self.test, self.version)
for meth in meths:
methName = meth.__name__
# Or meth.im_func.func_name
# Or meth.func_name (Python >= 2.1 only, sometimes works on 2.0)
methInitial = methName[0]
if command in (methName, methInitial):
sys.argv[0] += (" " + methName)
# @@MO: I don't necessarily agree sys.argv[0] should be
# modified.
meth()
return
# If none of the commands matched.
usage(HELP_PAGE1, "unknown command '%s'" % command) | IndexError | dataset/ETHPy150Open skyostil/tracy/src/generator/Cheetah/CheetahWrapper.py/CheetahWrapper.main |
2,330 | @property
def container_id(self):
"""
Find a container id
If one isn't already set, we ask docker for the container whose name is
the same as the recorded container_name
"""
if getattr(self, "_container_id", None):
return self._container_id
try:
containers = self.harpoon.docker_context.containers(all=True)
except __HOLE__:
log.warning("Failed to get a list of active docker files")
containers = []
self._container_id = None
for container in containers:
if any(self.name in container.get("Names", []) for name in (self.container_name, "/{0}".format(self.container_name))):
self._container_id = container["Id"]
break
return self._container_id | ValueError | dataset/ETHPy150Open realestate-com-au/harpoon/harpoon/option_spec/image_objs.py/Image.container_id |
2,331 | def headerData(self, section, orientation, role=Qt.DisplayRole):
"""defines which labels the view/user shall see.
Args:
section (int): the row or column number.
orientation (Qt.Orienteation): Either horizontal or vertical.
role (Qt.ItemDataRole, optional): Defaults to `Qt.DisplayRole`.
Returns
str if a header for the appropriate section is set and the requesting
role is fitting, None if not.
"""
if role != Qt.DisplayRole:
return None
if orientation == Qt.Horizontal:
try:
return self.headers[section]
except (__HOLE__, ):
return None | IndexError | dataset/ETHPy150Open datalyze-solutions/pandas-qt/pandasqt/models/ColumnDtypeModel.py/ColumnDtypeModel.headerData |
2,332 | def do_search(self, args):
try:
#add arguments
doParser = self.arg_search()
try:
doArgs = doParser.parse_args(args.split())
except __HOLE__ as e:
return
#call UForge API
printer.out("Search package '"+doArgs.pkg+"' ...")
distribution = self.api.Distributions(doArgs.id).Get()
printer.out("for OS '"+distribution.name+"', version "+distribution.version)
pkgs = self.api.Distributions(distribution.dbId).Pkgs.Getall(Query="name=="+doArgs.pkg)
pkgs = pkgs.pkgs.pkg
if pkgs is None or len(pkgs) == 0:
printer.out("No package found")
else:
table = Texttable(800)
table.set_cols_dtype(["t","t","t","t","t","t"])
table.header(["Name", "Version", "Arch", "Release", "Build date", "Size"])
pkgs = generics_utils.order_list_object_by(pkgs, "name")
for pkg in pkgs:
table.add_row([pkg.name, pkg.version, pkg.arch, pkg.release, pkg.pkgBuildDate.strftime("%Y-%m-%d %H:%M:%S"), size(pkg.size)])
print table.draw() + "\n"
printer.out("Found "+str(len(pkgs))+" packages")
except ArgumentParserError as e:
printer.out("ERROR: In Arguments: "+str(e), printer.ERROR)
self.help_search()
except Exception as e:
return handle_uforge_exception(e) | SystemExit | dataset/ETHPy150Open usharesoft/hammr/src/hammr/commands/os/os.py/Os.do_search |
2,333 | @filters("sqlite", sqla_exc.IntegrityError,
(r"^.*columns?(?P<columns>[^)]+)(is|are)\s+not\s+unique$",
r"^.*UNIQUE\s+constraint\s+failed:\s+(?P<columns>.+)$",
r"^.*PRIMARY\s+KEY\s+must\s+be\s+unique.*$"))
def _sqlite_dupe_key_error(integrity_error, match, engine_name, is_disconnect):
"""Filter for SQLite duplicate key error.
note(boris-42): In current versions of DB backends unique constraint
violation messages follow the structure:
sqlite:
1 column - (IntegrityError) column c1 is not unique
N columns - (IntegrityError) column c1, c2, ..., N are not unique
sqlite since 3.7.16:
1 column - (IntegrityError) UNIQUE constraint failed: tbl.k1
N columns - (IntegrityError) UNIQUE constraint failed: tbl.k1, tbl.k2
sqlite since 3.8.2:
(IntegrityError) PRIMARY KEY must be unique
"""
columns = []
# NOTE(ochuprykov): We can get here by last filter in which there are no
# groups. Trying to access the substring that matched by
# the group will lead to IndexError. In this case just
# pass empty list to exception.DBDuplicateEntry
try:
columns = match.group('columns')
columns = [c.split('.')[-1] for c in columns.strip().split(", ")]
except __HOLE__:
pass
raise exception.DBDuplicateEntry(columns, integrity_error) | IndexError | dataset/ETHPy150Open openstack/oslo.db/oslo_db/sqlalchemy/exc_filters.py/_sqlite_dupe_key_error |
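Row 2,333 above lists the SQLite unique-constraint message shapes matched by the @filters decorator. A small standalone demo of how the named 'columns' group (and the IndexError fallback for the group-less PRIMARY KEY pattern) behaves, using those same regexes against an illustrative 3.7.16-style message:

```python
import re

patterns = [
    r"^.*columns?(?P<columns>[^)]+)(is|are)\s+not\s+unique$",
    r"^.*UNIQUE\s+constraint\s+failed:\s+(?P<columns>.+)$",
    r"^.*PRIMARY\s+KEY\s+must\s+be\s+unique.*$",
]
msg = "(IntegrityError) UNIQUE constraint failed: tbl.k1, tbl.k2"
for pattern in patterns:
    match = re.match(pattern, msg)
    if match:
        try:
            columns = [c.split('.')[-1]
                       for c in match.group('columns').strip().split(", ")]
        except IndexError:  # pattern without a 'columns' group
            columns = []
        print(columns)  # -> ['k1', 'k2']
        break
```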
2,334 | @filters("sqlite", sqla_exc.IntegrityError,
r"(?i).*foreign key constraint failed")
@filters("postgresql", sqla_exc.IntegrityError,
r".*on table \"(?P<table>[^\"]+)\" violates "
"foreign key constraint \"(?P<constraint>[^\"]+)\".*\n"
"DETAIL: Key \((?P<key>.+)\)=\(.+\) "
"is (not present in|still referenced from) table "
"\"(?P<key_table>[^\"]+)\".")
@filters("mysql", sqla_exc.IntegrityError,
r".* u?'Cannot (add|delete) or update a (child|parent) row: "
'a foreign key constraint fails \([`"].+[`"]\.[`"](?P<table>.+)[`"], '
'CONSTRAINT [`"](?P<constraint>.+)[`"] FOREIGN KEY '
'\([`"](?P<key>.+)[`"]\) REFERENCES [`"](?P<key_table>.+)[`"] ')
def _foreign_key_error(integrity_error, match, engine_name, is_disconnect):
"""Filter for foreign key errors."""
try:
table = match.group("table")
except IndexError:
table = None
try:
constraint = match.group("constraint")
except IndexError:
constraint = None
try:
key = match.group("key")
except __HOLE__:
key = None
try:
key_table = match.group("key_table")
except IndexError:
key_table = None
raise exception.DBReferenceError(table, constraint, key, key_table,
integrity_error) | IndexError | dataset/ETHPy150Open openstack/oslo.db/oslo_db/sqlalchemy/exc_filters.py/_foreign_key_error |
2,335 | @filters("postgresql", sqla_exc.IntegrityError,
r".*new row for relation \"(?P<table>.+)\" "
"violates check constraint "
"\"(?P<check_name>.+)\"")
def _check_constraint_error(
integrity_error, match, engine_name, is_disconnect):
"""Filter for check constraint errors."""
try:
table = match.group("table")
except IndexError:
table = None
try:
check_name = match.group("check_name")
except __HOLE__:
check_name = None
raise exception.DBConstraintError(table, check_name, integrity_error) | IndexError | dataset/ETHPy150Open openstack/oslo.db/oslo_db/sqlalchemy/exc_filters.py/_check_constraint_error |
2,336 | def resolve_model_string(model_string, default_app=None):
"""
Resolve an 'app_label.model_name' string into an actual model class.
If a model class is passed in, just return that.
Raises a LookupError if a model can not be found, or ValueError if passed
something that is neither a model or a string.
"""
if isinstance(model_string, string_types):
try:
app_label, model_name = model_string.split(".")
except __HOLE__:
if default_app is not None:
# If we can't split, assume a model in current app
app_label = default_app
model_name = model_string
else:
raise ValueError("Can not resolve {0!r} into a model. Model names "
"should be in the form app_label.model_name".format(
model_string), model_string)
return apps.get_model(app_label, model_name)
elif isinstance(model_string, type) and issubclass(model_string, Model):
return model_string
else:
raise ValueError("Can not resolve {0!r} into a model".format(model_string), model_string) | ValueError | dataset/ETHPy150Open torchbox/wagtail/wagtail/wagtailcore/utils.py/resolve_model_string |
2,337 | def run_scenario(user_asked_for, data_exists_as, allow_derivation=True,
allow_integration=False, allow_prefixes_in_denominator=False,
round_result=6):
userunit = unitconv.parse_unitname(user_asked_for, fold_scale_prefix=False)
prefixclass = unitconv.prefix_class_for(userunit['scale_multiplier'])
use_unit = userunit['base_unit']
compatibles = unitconv.determine_compatible_units(
allow_derivation=allow_derivation,
allow_integration=allow_integration,
allow_prefixes_in_denominator=allow_prefixes_in_denominator,
**userunit)
try:
scale, extra_op = compatibles[data_exists_as]
except __HOLE__:
return
if round_result is not None:
scale = round(scale, round_result)
return (data_exists_as, use_unit, scale, extra_op, prefixclass) | KeyError | dataset/ETHPy150Open vimeo/graph-explorer/graph_explorer/test/test_unitconv.py/run_scenario |
2,338 | def conform_to_value(self, owner, value):
"""
When no root node has been set, we prompt the user to choose one from
the list of choices. Otherwise, we set the ``WidgyWidget`` class as
the widget we use for this field instance.
"""
self.owner = owner
if isinstance(value, Node):
self.node = value
try:
# Sometimes the WidgyWidget is wrapped in a
# RelatedFieldWidgetWrapper
self.widget.widget.node = value
except __HOLE__:
self.widget.node = value
self.queryset = None
else:
# remove the empty choice
choices = [c for c in self.choices if c[0]]
if len(choices) == 1:
self._value = choices[0][0]
self.widget = DisplayWidget(display_name=choices[0][1])
self.help_text = _('You must save before you can edit this.')
else:
self.widget = ContentTypeRadioSelect(
choices=choices,
)
try:
self.widget.widget.site = self.site
self.widget.widget.owner = owner
except AttributeError:
self.widget.site = self.site
self.widget.owner = owner | AttributeError | dataset/ETHPy150Open fusionbox/django-widgy/widgy/forms.py/WidgyFormField.conform_to_value |
2,339 | def __init__(self, *args, **kwargs):
super(WidgyFormMixin, self).__init__(*args, **kwargs)
for name, field in self.fields.items():
if isinstance(field, WidgyFormField):
try:
value = getattr(self.instance, name)
except __HOLE__:
value = None
field.conform_to_value(self.instance, value) | ObjectDoesNotExist | dataset/ETHPy150Open fusionbox/django-widgy/widgy/forms.py/WidgyFormMixin.__init__ |
2,340 | def setup_key_pair(self, context):
key_name = '%s%s' % (context.project_id, FLAGS.vpn_key_suffix)
try:
result = cloud._gen_key(context, context.user_id, key_name)
private_key = result['private_key']
key_dir = os.path.join(FLAGS.keys_path, context.user_id)
if not os.path.exists(key_dir):
os.makedirs(key_dir)
key_path = os.path.join(key_dir, '%s.pem' % key_name)
with open(key_path, 'w') as f:
f.write(private_key)
except (exception.Duplicate, os.error, __HOLE__):
pass
return key_name | IOError | dataset/ETHPy150Open nii-cloud/dodai-compute/nova/cloudpipe/pipelib.py/CloudPipe.setup_key_pair |
2,341 | def _getResolver(self, serverResponses, maximumQueries=10):
"""
Create and return a new L{root.Resolver} modified to resolve queries
against the record data represented by C{servers}.
@param serverResponses: A mapping from dns server addresses to
mappings. The inner mappings are from query two-tuples (name,
type) to dictionaries suitable for use as **arguments to
L{_respond}. See that method for details.
"""
roots = ['1.1.2.3']
resolver = Resolver(roots, maximumQueries)
def query(query, serverAddresses, timeout, filter):
msg("Query for QNAME %s at %r" % (query.name, serverAddresses))
for addr in serverAddresses:
try:
server = serverResponses[addr]
except __HOLE__:
continue
records = server[query.name.name, query.type]
return succeed(self._respond(**records))
resolver._query = query
return resolver | KeyError | dataset/ETHPy150Open twisted/twisted/twisted/names/test/test_rootresolve.py/RootResolverTests._getResolver |
2,342 | def _load_plugin_config(self, plugin):
"""Loads a JSON or YAML config for a given plugin
Keyword arguments:
plugin -- Name of plugin to load config for.
Raises:
AmbiguousConfigError -- Raised when two configs exist for plugin.
ConfigNotFoundError -- Raised when expected config isn't found.
"""
# Initialize variable to hold plugin config
json_config = False
yaml_config = False
# Attempt to load and parse JSON config file
file = os.path.join(
os.path.dirname(os.path.realpath(sys.argv[0])),
'plugins',
plugin,
'config.json'
)
try:
f = open(file, 'r')
json_config = json.load(f)
f.close()
# File did not exist or we can't open it for another reason
except IOError:
self.logger.debug(
"Can't open %s - maybe it doesn't exist?" % file
)
# Thrown by json.load() when the content isn't valid JSON
except ValueError:
self.logger.warning(
"Invalid JSON in %s, skipping it" % file
)
# Attempt to load and parse YAML config file
file = os.path.join(
os.path.dirname(os.path.realpath(sys.argv[0])),
'plugins',
plugin,
'config.yaml'
)
try:
f = open(file, 'r')
yaml_config = yaml.load(f)
f.close()
except __HOLE__:
self.logger.debug(
"Can't open %s - maybe it doesn't exist?" % file
)
except ValueError:
self.logger.warning(
"Invalid YAML in %s, skipping it" % file
)
# Loaded YAML successfully
else:
# If we already loaded JSON, this is a problem because we won't
# know which config to use.
if json_config:
raise AmbiguousConfigError(
"Found both a JSON and YAML config for plugin"
)
# No JSON config, found YAML config, return it
return yaml_config
# If neither config was found, raise an exception
if not yaml_config and not json_config:
raise ConfigNotFoundError(
"No config found for plugin: %s" % plugin
)
# Return JSON config, since YAML config wasn't found
return json_config | IOError | dataset/ETHPy150Open JohnMaguire/Cardinal/cardinal/plugins.py/PluginManager._load_plugin_config |
2,343 | def get_object_from_date_based_view(request, *args, **kwargs): # noqa
"""
Get object from generic date_based.detail view
Parameters
----------
request : instance
An instance of HttpRequest
Returns
-------
instance
An instance of model object or None
"""
import time
import datetime
from django.http import Http404
from django.db.models.fields import DateTimeField
try:
from django.utils import timezone
datetime_now = timezone.now
except ImportError:
datetime_now = datetime.datetime.now
year, month, day = kwargs['year'], kwargs['month'], kwargs['day']
month_format = kwargs.get('month_format', '%b')
day_format = kwargs.get('day_format', '%d')
date_field = kwargs['date_field']
queryset = kwargs['queryset']
object_id = kwargs.get('object_id', None)
slug = kwargs.get('slug', None)
slug_field = kwargs.get('slug_field', 'slug')
try:
tt = time.strptime(
'%s-%s-%s' % (year, month, day),
'%s-%s-%s' % ('%Y', month_format, day_format)
)
date = datetime.date(*tt[:3])
except __HOLE__:
raise Http404
model = queryset.model
if isinstance(model._meta.get_field(date_field), DateTimeField):
lookup_kwargs = {
'%s__range' % date_field: (
datetime.datetime.combine(date, datetime.time.min),
datetime.datetime.combine(date, datetime.time.max),
)}
else:
lookup_kwargs = {date_field: date}
now = datetime_now()
if date >= now.date() and not kwargs.get('allow_future', False):
lookup_kwargs['%s__lte' % date_field] = now
if object_id:
lookup_kwargs['pk'] = object_id
elif slug and slug_field:
lookup_kwargs['%s__exact' % slug_field] = slug
else:
raise AttributeError(
"Generic detail view must be called with either an "
"object_id or a slug/slug_field."
)
return get_object_or_404(queryset, **lookup_kwargs) | ValueError | dataset/ETHPy150Open lambdalisue/django-permission/src/permission/decorators/functionbase.py/get_object_from_date_based_view |
2,344 | def test_execute_failed(self):
task = Task(retries=3, async=True)
task.to_call(fail_task, 2, 7)
self.assertEqual(task.status, WAITING)
try:
self.gator.execute(task)
self.assertEqual(task.status, RETRYING)
self.gator.execute(task)
self.gator.execute(task)
self.gator.execute(task)
self.fail()
except __HOLE__:
self.assertEqual(task.retries, 0)
self.assertEqual(task.status, FAILED) | IOError | dataset/ETHPy150Open toastdriven/alligator/tests/test_gator.py/GatorTestCase.test_execute_failed |
2,345 | def test_execute_retries(self):
task = Task(retries=3, async=True)
task.to_call(eventual_success(), 2, 7)
try:
self.gator.execute(task)
except IOError:
pass
try:
self.gator.execute(task)
except __HOLE__:
pass
res = self.gator.execute(task)
self.assertEqual(res, 9)
self.assertEqual(task.retries, 1) | IOError | dataset/ETHPy150Open toastdriven/alligator/tests/test_gator.py/GatorTestCase.test_execute_retries |
2,346 | def get(self):
old = False
parse = urlparse(self.url)
if parse.netloc == "www.svtplay.se" or parse.netloc == "svtplay.se":
if parse.path[:6] != "/video" and parse.path[:6] != "/klipp":
yield ServiceError("This mode is not supported anymore. need the url with the video")
return
vid = self.find_video_id()
if vid is None:
yield ServiceError("Cant find video id for this video")
return
if re.match("^[0-9]+$", vid):
old = True
url = "http://www.svt.se/videoplayer-api/video/%s" % vid
data = self.http.request("get", url)
if data.status_code == 404:
yield ServiceError("Can't get the json file for %s" % url)
return
data = data.json()
if "live" in data:
self.options.live = data["live"]
if old:
params = {"output": "json"}
try:
dataj = self.http.request("get", self.url, params=params).json()
except ValueError:
dataj = data
old = False
else:
dataj = data
if self.options.output_auto:
self.options.service = "svtplay"
self.options.output = self.outputfilename(dataj, self.options.output, ensure_unicode(self.get_urldata()))
if self.exclude(self.options):
yield ServiceError("Excluding video")
return
if "subtitleReferences" in data:
for i in data["subtitleReferences"]:
if i["format"] == "websrt":
yield subtitle(copy.copy(self.options), "wrst", i["url"])
if old and dataj["video"]["subtitleReferences"]:
try:
suburl = dataj["video"]["subtitleReferences"][0]["url"]
except __HOLE__:
pass
if suburl and len(suburl) > 0:
yield subtitle(copy.copy(self.options), "wrst", suburl)
if self.options.force_subtitle:
return
if len(data["videoReferences"]) == 0:
yield ServiceError("Media doesn't have any associated videos (yet?)")
return
for i in data["videoReferences"]:
if i["format"] == "hls" or i["format"] == "ios":
streams = hlsparse(self.options, self.http.request("get", i["url"]), i["url"])
if streams:
for n in list(streams.keys()):
yield streams[n]
if i["format"] == "hds" or i["format"] == "flash":
match = re.search(r"\/se\/secure\/", i["url"])
if not match:
streams = hdsparse(self.options, self.http.request("get", i["url"], params={"hdcore": "3.7.0"}), i["url"])
if streams:
for n in list(streams.keys()):
yield streams[n]
if i["format"] == "dash264":
streams = dashparse(self.options, self.http.request("get", i["url"]), i["url"])
if streams:
for n in list(streams.keys()):
yield streams[n] | KeyError | dataset/ETHPy150Open spaam/svtplay-dl/lib/svtplay_dl/service/svtplay.py/Svtplay.get |
2,347 | def _lock(self):
"""Lock the entire multistore."""
self._thread_lock.acquire()
try:
self._file.open_and_lock()
except __HOLE__ as e:
if e.errno == errno.ENOSYS:
logger.warn('File system does not support locking the '
'credentials file.')
elif e.errno == errno.ENOLCK:
logger.warn('File system is out of resources for writing the '
'credentials file (is your disk full?).')
elif e.errno == errno.EDEADLK:
logger.warn('Lock contention on multistore file, opening '
'in read-only mode.')
elif e.errno == errno.EACCES:
logger.warn('Cannot access credentials file.')
else:
raise
if not self._file.is_locked():
self._read_only = True
if self._warn_on_readonly:
logger.warn('The credentials file (%s) is not writable. '
'Opening in read-only mode. Any refreshed '
'credentials will only be '
'valid for this run.', self._file.filename())
if os.path.getsize(self._file.filename()) == 0:
logger.debug('Initializing empty multistore file')
# The multistore is empty so write out an empty file.
self._data = {}
self._write()
elif not self._read_only or self._data is None:
# Only refresh the data if we are read/write or we haven't
# cached the data yet. If we are readonly, we assume it isn't
# changing out from under us and that we only have to read it
# once. This prevents us from whacking any new access keys that
# we have cached in memory but were unable to write out.
self._refresh_data_cache() | IOError | dataset/ETHPy150Open google/oauth2client/oauth2client/contrib/multistore_file.py/_MultiStore._lock |
2,348 | def _refresh_data_cache(self):
"""Refresh the contents of the multistore.
The multistore must be locked when this is called.
Raises:
NewerCredentialStoreError: Raised when a newer client has written
the store.
"""
self._data = {}
try:
raw_data = self._locked_json_read()
except Exception:
logger.warn('Credential data store could not be loaded. '
'Will ignore and overwrite.')
return
version = 0
try:
version = raw_data['file_version']
except Exception:
logger.warn('Missing version for credential data store. It may be '
'corrupt or an old version. Overwriting.')
if version > 1:
raise NewerCredentialStoreError(
'Credential file has file_version of %d. '
'Only file_version of 1 is supported.' % version)
credentials = []
try:
credentials = raw_data['data']
except (__HOLE__, KeyError):
pass
for cred_entry in credentials:
try:
key, credential = self._decode_credential_from_json(cred_entry)
self._data[key] = credential
except:
# If something goes wrong loading a credential, just ignore it
logger.info('Error decoding credential, skipping',
exc_info=True) | TypeError | dataset/ETHPy150Open google/oauth2client/oauth2client/contrib/multistore_file.py/_MultiStore._refresh_data_cache |
2,349 | def _delete_credential(self, key):
"""Delete a credential and write the multistore.
This must be called when the multistore is locked.
Args:
key: The key used to retrieve the credential
"""
try:
del self._data[key]
except __HOLE__:
pass
self._write() | KeyError | dataset/ETHPy150Open google/oauth2client/oauth2client/contrib/multistore_file.py/_MultiStore._delete_credential |
2,350 | def update_result(self, context):
outfile = os.path.join(context.output_directory, 'hwuitest.output')
with open(outfile, 'w') as wfh:
wfh.write(self.output)
context.add_artifact('hwuitest', outfile, kind='raw')
normal = re.compile(r'(?P<value>\d*)(?P<unit>\w*)')
with_pct = re.compile(r'(?P<value>\d*) \((?P<percent>.*)%\)')
count = 0
for line in self.output.splitlines():
#Filters out "Success!" and blank lines
try:
metric, value_string = [p.strip() for p in line.split(':', 1)]
except __HOLE__:
continue
# Filters out unwanted lines
if metric in IGNORED_METRICS:
continue
if metric == "Janky frames":
count += 1
match = with_pct.match(value_string).groupdict()
context.result.add_metric(metric,
match['value'],
None,
classifiers={"loop": count,
"frames": self.frames})
context.result.add_metric(metric + "_pct",
match['value'],
"%",
classifiers={"loop": count,
"frames": self.frames})
else:
match = normal.match(value_string).groupdict()
context.result.add_metric(metric,
match['value'],
match['unit'],
classifiers={"loop": count,
"frames": self.frames}) | ValueError | dataset/ETHPy150Open ARM-software/workload-automation/wlauto/workloads/hwuitest/__init__.py/HWUITest.update_result |
2,351 | def fetch():
try:
stream = _local.stream
except __HOLE__:
return ''
return stream.reset() | AttributeError | dataset/ETHPy150Open Eforcers/gae-flask-todo/lib/werkzeug/debug/console.py/ThreadedStream.fetch |
2,352 | def displayhook(obj):
try:
stream = _local.stream
except __HOLE__:
return _displayhook(obj)
# stream._write bypasses escaping as debug_repr is
# already generating HTML for us.
if obj is not None:
_local._current_ipy.locals['_'] = obj
stream._write(debug_repr(obj)) | AttributeError | dataset/ETHPy150Open Eforcers/gae-flask-todo/lib/werkzeug/debug/console.py/ThreadedStream.displayhook |
2,353 | def __getattribute__(self, name):
if name == '__members__':
return dir(sys.__stdout__)
try:
stream = _local.stream
except __HOLE__:
stream = sys.__stdout__
return getattr(stream, name) | AttributeError | dataset/ETHPy150Open Eforcers/gae-flask-todo/lib/werkzeug/debug/console.py/ThreadedStream.__getattribute__ |
2,354 | def get_source_by_code(self, code):
try:
return self._storage[id(code)]
except __HOLE__:
pass | KeyError | dataset/ETHPy150Open Eforcers/gae-flask-todo/lib/werkzeug/debug/console.py/_ConsoleLoader.get_source_by_code |
2,355 | def GetString(self, name, all=False):
"""Get the first value for a key, or None if it is not defined.
This configuration file is used first, if the key is not
defined or all = True then the defaults are also searched.
"""
try:
v = self._cache[_key(name)]
except __HOLE__:
if self.defaults:
return self.defaults.GetString(name, all = all)
v = []
if not all:
if v:
return v[0]
return None
r = []
r.extend(v)
if self.defaults:
r.extend(self.defaults.GetString(name, all = True))
return r | KeyError | dataset/ETHPy150Open android/tools_repo/git_config.py/GitConfig.GetString |
2,356 | def SetString(self, name, value):
"""Set the value(s) for a key.
Only this configuration file is modified.
The supplied value should be either a string,
or a list of strings (to store multiple values).
"""
key = _key(name)
try:
old = self._cache[key]
except __HOLE__:
old = []
if value is None:
if old:
del self._cache[key]
self._do('--unset-all', name)
elif isinstance(value, list):
if len(value) == 0:
self.SetString(name, None)
elif len(value) == 1:
self.SetString(name, value[0])
elif old != value:
self._cache[key] = list(value)
self._do('--replace-all', name, value[0])
for i in xrange(1, len(value)):
self._do('--add', name, value[i])
elif len(old) != 1 or old[0] != value:
self._cache[key] = [value]
self._do('--replace-all', name, value) | KeyError | dataset/ETHPy150Open android/tools_repo/git_config.py/GitConfig.SetString |
2,357 | def GetRemote(self, name):
"""Get the remote.$name.* configuration values as an object.
"""
try:
r = self._remotes[name]
except __HOLE__:
r = Remote(self, name)
self._remotes[r.name] = r
return r | KeyError | dataset/ETHPy150Open android/tools_repo/git_config.py/GitConfig.GetRemote |
2,358 | def GetBranch(self, name):
"""Get the branch.$name.* configuration values as an object.
"""
try:
b = self._branches[name]
except __HOLE__:
b = Branch(self, name)
self._branches[b.name] = b
return b | KeyError | dataset/ETHPy150Open android/tools_repo/git_config.py/GitConfig.GetBranch |
2,359 | def HasSection(self, section, subsection = ''):
"""Does at least one key in section.subsection exist?
"""
try:
return subsection in self._sections[section]
except __HOLE__:
return False | KeyError | dataset/ETHPy150Open android/tools_repo/git_config.py/GitConfig.HasSection |
2,360 | def _ReadPickle(self):
try:
if os.path.getmtime(self._pickle) \
<= os.path.getmtime(self.file):
os.remove(self._pickle)
return None
except OSError:
return None
try:
Trace(': unpickle %s', self.file)
fd = open(self._pickle, 'rb')
try:
return cPickle.load(fd)
finally:
fd.close()
except EOFError:
os.remove(self._pickle)
return None
except __HOLE__:
os.remove(self._pickle)
return None
except cPickle.PickleError:
os.remove(self._pickle)
return None | IOError | dataset/ETHPy150Open android/tools_repo/git_config.py/GitConfig._ReadPickle |
2,361 | def _SavePickle(self, cache):
try:
fd = open(self._pickle, 'wb')
try:
cPickle.dump(cache, fd, cPickle.HIGHEST_PROTOCOL)
finally:
fd.close()
except __HOLE__:
if os.path.exists(self._pickle):
os.remove(self._pickle)
except cPickle.PickleError:
if os.path.exists(self._pickle):
os.remove(self._pickle) | IOError | dataset/ETHPy150Open android/tools_repo/git_config.py/GitConfig._SavePickle |
2,362 | def close_ssh():
global _master_keys_lock
terminate_ssh_clients()
for p in _master_processes:
try:
os.kill(p.pid, SIGTERM)
p.wait()
except OSError:
pass
del _master_processes[:]
_master_keys.clear()
d = ssh_sock(create=False)
if d:
try:
os.rmdir(os.path.dirname(d))
except __HOLE__:
pass
# We're done with the lock, so we can delete it.
_master_keys_lock = None | OSError | dataset/ETHPy150Open android/tools_repo/git_config.py/close_ssh |
2,363 | @property
def ReviewProtocol(self):
if self._review_protocol is None:
if self.review is None:
return None
u = self.review
if not u.startswith('http:') and not u.startswith('https:'):
u = 'http://%s' % u
if u.endswith('/Gerrit'):
u = u[:len(u) - len('/Gerrit')]
if not u.endswith('/ssh_info'):
if not u.endswith('/'):
u += '/'
u += 'ssh_info'
if u in REVIEW_CACHE:
info = REVIEW_CACHE[u]
self._review_protocol = info[0]
self._review_host = info[1]
self._review_port = info[2]
else:
try:
info = urlopen(u).read()
if info == 'NOT_AVAILABLE':
raise UploadError('%s: SSH disabled' % self.review)
if '<' in info:
# Assume the server gave us some sort of HTML
# response back, like maybe a login page.
#
raise UploadError('%s: Cannot parse response' % u)
self._review_protocol = 'ssh'
self._review_host = info.split(" ")[0]
self._review_port = info.split(" ")[1]
except urllib2.URLError, e:
raise UploadError('%s: %s' % (self.review, e.reason[1]))
except __HOLE__, e:
if e.code == 404:
self._review_protocol = 'http-post'
self._review_host = None
self._review_port = None
else:
raise UploadError('Upload over ssh unavailable')
REVIEW_CACHE[u] = (
self._review_protocol,
self._review_host,
self._review_port)
return self._review_protocol | HTTPError | dataset/ETHPy150Open android/tools_repo/git_config.py/Remote.ReviewProtocol |
2,364 | @permission_required("core.manage_shop")
def manage_properties(request, product_id, template_name="manage/product/properties.html"):
"""Displays the UI for manage the properties for the product with passed
product_id.
"""
product = get_object_or_404(Product, pk=product_id)
# Generate lists of properties. For entering values.
display_configurables = False
display_filterables = False
display_displayables = False
configurables = []
filterables = []
displayables = []
product_variant_properties = []
# Configurable
if not product.is_product_with_variants():
for property_group in product.property_groups.all():
properties = []
for prop in property_group.properties.filter(configurable=True).order_by("groupspropertiesrelation"):
display_configurables = True
try:
ppv = ProductPropertyValue.objects.get(property=prop,
property_group=property_group,
product=product,
type=PROPERTY_VALUE_TYPE_DEFAULT)
except ProductPropertyValue.DoesNotExist:
ppv_id = None
ppv_value = ""
else:
ppv_id = ppv.id
ppv_value = ppv.value
# Mark selected options
options = []
for option in prop.options.all():
if str(option.id) == ppv_value:
selected = True
else:
selected = False
options.append({
"id": option.id,
"name": option.name,
"selected": selected,
})
properties.append({
"id": prop.id,
"name": prop.name,
"title": prop.title,
"type": prop.type,
"options": options,
"display_number_field": prop.type == PROPERTY_NUMBER_FIELD,
"display_text_field": prop.type == PROPERTY_TEXT_FIELD,
"display_select_field": prop.type == PROPERTY_SELECT_FIELD,
"value": ppv_value,
})
if properties:
configurables.append({
"id": property_group.id,
"name": property_group.name,
"properties": properties,
})
# Filterable
for property_group in product.property_groups.all():
properties = []
for prop in property_group.properties.filter(filterable=True).order_by("groupspropertiesrelation"):
display_filterables = True
# Try to get the value, if it already exists.
ppvs = ProductPropertyValue.objects.filter(property=prop,
property_group=property_group,
product=product,
type=PROPERTY_VALUE_TYPE_FILTER)
value_ids = [ppv.value for ppv in ppvs]
# Mark selected options
options = []
for option in prop.options.all():
if str(option.id) in value_ids:
selected = True
else:
selected = False
options.append({
"id": option.id,
"name": option.name,
"selected": selected,
})
value = ""
if prop.type == PROPERTY_SELECT_FIELD:
display_select_field = True
else:
display_select_field = False
try:
value = value_ids[0]
except IndexError:
pass
properties.append({
"id": prop.id,
"name": prop.name,
"title": prop.title,
"type": prop.type,
"options": options,
"value": value,
"display_on_product": prop.display_on_product,
"display_number_field": prop.type == PROPERTY_NUMBER_FIELD,
"display_text_field": prop.type == PROPERTY_TEXT_FIELD,
"display_select_field": display_select_field,
})
if properties:
filterables.append({
"id": property_group.id,
"name": property_group.name,
"properties": properties,
})
# Displayable
for property_group in product.property_groups.all():
properties = []
for prop in property_group.properties.filter(display_on_product=True).order_by("groupspropertiesrelation"):
display_displayables = True
# Try to get the value, if it already exists.
ppvs = ProductPropertyValue.objects.filter(property=prop,
property_group=property_group,
product=product,
type=PROPERTY_VALUE_TYPE_DISPLAY)
value_ids = [ppv.value for ppv in ppvs]
# Mark selected options
options = []
for option in prop.options.all():
if str(option.id) in value_ids:
selected = True
else:
selected = False
options.append({
"id": option.id,
"name": option.name,
"selected": selected,
})
value = ""
if prop.type == PROPERTY_SELECT_FIELD:
display_select_field = True
else:
display_select_field = False
try:
value = value_ids[0]
except __HOLE__:
pass
properties.append({
"id": prop.id,
"name": prop.name,
"title": prop.title,
"type": prop.type,
"options": options,
"value": value,
"filterable": prop.filterable,
"display_number_field": prop.type == PROPERTY_NUMBER_FIELD,
"display_text_field": prop.type == PROPERTY_TEXT_FIELD,
"display_select_field": display_select_field,
})
if properties:
displayables.append({
"id": property_group.id,
"name": property_group.name,
"properties": properties,
})
if product.is_variant():
product_variant_properties_dict = {}
qs = ProductPropertyValue.objects.filter(product=product, type=PROPERTY_VALUE_TYPE_VARIANT)
for ppv in qs:
try:
property_option = PropertyOption.objects.get(property_id=ppv.property_id, pk=ppv.value)
property_group_name = ppv.property_group.name if ppv.property_group_id else ''
group_dict = product_variant_properties_dict.setdefault(ppv.property_group_id or 0,
{'property_group_name': property_group_name,
'properties': []})
group_dict['properties'].append(property_option)
except (ProductPropertyValue.DoesNotExist, PropertyOption.DoesNotExist):
continue
groups = product_variant_properties_dict.values()
sorted_groups = sorted(groups, key=lambda group: group['property_group_name'])
for group in sorted_groups:
product_variant_properties.append(group)
# Generate list of all property groups; used for group selection
product_property_group_ids = [p.id for p in product.property_groups.all()]
shop_property_groups = []
for property_group in PropertyGroup.objects.all():
shop_property_groups.append({
"id": property_group.id,
"name": property_group.name,
"selected": property_group.id in product_property_group_ids,
})
return render_to_string(template_name, RequestContext(request, {
"product": product,
"filterables": filterables,
"display_filterables": display_filterables,
"configurables": configurables,
"display_configurables": display_configurables,
"displayables": displayables,
"display_displayables": display_displayables,
"product_property_groups": product.property_groups.all(),
"shop_property_groups": shop_property_groups,
"product_variant_properties": product_variant_properties
})) | IndexError | dataset/ETHPy150Open diefenbach/django-lfs/lfs/manage/product/properties.py/manage_properties |
2,365 | @permission_required("core.manage_shop")
@require_POST
def update_property_groups(request, product_id):
"""Updates property groups for the product with passed id.
"""
selected_group_ids = request.POST.getlist("selected-property-groups")
product = Product.objects.get(pk=product_id)
for property_group in PropertyGroup.objects.all():
# if the group is within selected groups we try to add it to the product
# otherwise we try do delete it
if str(property_group.id) in selected_group_ids:
try:
property_group.products.get(pk=product_id)
except __HOLE__:
property_group.products.add(product_id)
else:
property_group.products.remove(product_id)
product_removed_property_group.send(sender=property_group, product=product)
update_product_cache(product)
url = reverse("lfs_manage_product", kwargs={"product_id": product_id})
return HttpResponseRedirect(url) | ObjectDoesNotExist | dataset/ETHPy150Open diefenbach/django-lfs/lfs/manage/product/properties.py/update_property_groups |
2,366 | def getreal(z):
try:
return z.real
except __HOLE__:
return z | AttributeError | dataset/ETHPy150Open sunlightlabs/clearspending/completeness/statlib/matfunc.py/getreal |
2,367 | def getimag(z):
try:
return z.imag
except __HOLE__:
return 0 | AttributeError | dataset/ETHPy150Open sunlightlabs/clearspending/completeness/statlib/matfunc.py/getimag |
2,368 | def getconj(z):
try:
return z.conjugate()
except __HOLE__:
return z | AttributeError | dataset/ETHPy150Open sunlightlabs/clearspending/completeness/statlib/matfunc.py/getconj |
2,369 | def lookup(self, service, **kwargs):
service_name = service.name
try:
instances = self.registry[service_name]
for data in instances:
service.update(data.get('id'), **data)
except __HOLE__:
raise LookupFailure()
return service | KeyError | dataset/ETHPy150Open deliveryhero/lymph/lymph/discovery/static.py/StaticServiceRegistryHub.lookup |
2,370 | def test_project_duplicate_creation(self):
orig_dir = os.getcwd()
tmp_dir = tempfile.mkdtemp()
try:
os.chdir(tmp_dir)
cli = StackStrapCLI()
cli.main(['template', 'add', 'test-template', repo_url])
cli.main(['create', 'test_project_creation', 'test-template'])
try:
cli.main(['create', 'test_project_creation', 'test-template'])
raise Exception("This shouldn't be reached")
except __HOLE__ as e:
self.assertEqual(e.code, 1)
finally:
os.chdir(orig_dir)
shutil.rmtree(tmp_dir) | SystemExit | dataset/ETHPy150Open stackstrap/stackstrap/tests/test_project.py/ProjectTestCase.test_project_duplicate_creation |
2,371 | def search(request):
query_string = prepare_solr_query_string(request.GET.get('q', ""))
search_terms = query_string.split()
index_query = SearchQuerySet().models(Project)
spelling_suggestion = None
#FIXME: Workaround for https://github.com/toastdriven/django-haystack/issues/364
# Only the else part should be necessary.
if settings.HAYSTACK_SEARCH_ENGINE == 'simple':
results = index_query.auto_query(query_string)
else:
try:
qfilter = fulltext_project_search_filter(query_string)
results = index_query.filter(qfilter)
spelling_suggestion = results.spelling_suggestion(query_string)
except __HOLE__:
results = []
logger.debug("Searched for %s. Found %s results." % (query_string, len(results)))
return render_to_response("search.html",
{'query': query_string,
'terms': search_terms,
'results': results,
'spelling_suggestion': spelling_suggestion},
context_instance = RequestContext(request)) | TypeError | dataset/ETHPy150Open rvanlaar/easy-transifex/src/transifex/transifex/txcommon/views.py/search |
2,372 | def onFailure(self, failure):
"""
Clean up observers, parse the failure and errback the deferred.
@param failure: the failure protocol element. Holds details on
the error condition.
@type failure: L{domish.Element}
"""
self.xmlstream.removeObserver('/challenge', self.onChallenge)
self.xmlstream.removeObserver('/success', self.onSuccess)
try:
condition = failure.firstChildElement().name
except __HOLE__:
condition = None
self._deferred.errback(SASLAuthError(condition)) | AttributeError | dataset/ETHPy150Open kuri65536/python-for-android/python-modules/twisted/twisted/words/protocols/jabber/sasl.py/SASLInitiatingInitializer.onFailure |
2,373 | def import_class(import_str):
"""Returns a class from a string including module and class."""
mod_str, _sep, class_str = import_str.rpartition('.')
try:
__import__(mod_str)
return getattr(sys.modules[mod_str], class_str)
except (ValueError, __HOLE__):
raise ImportError('Class %s cannot be found (%s)' %
(class_str,
traceback.format_exception(*sys.exc_info()))) | AttributeError | dataset/ETHPy150Open openstack-dev/heat-cfnclient/heat_cfnclient/openstack/common/importutils.py/import_class |
2,374 | def import_object_ns(name_space, import_str, *args, **kwargs):
"""Tries to import object from default namespace.
Imports a class and return an instance of it, first by trying
to find the class in a default namespace, then failing back to
a full path if not found in the default namespace.
"""
import_value = "%s.%s" % (name_space, import_str)
try:
return import_class(import_value)(*args, **kwargs)
except __HOLE__:
return import_class(import_str)(*args, **kwargs) | ImportError | dataset/ETHPy150Open openstack-dev/heat-cfnclient/heat_cfnclient/openstack/common/importutils.py/import_object_ns |
2,375 | def try_import(import_str, default=None):
"""Try to import a module and if it fails return default."""
try:
return import_module(import_str)
except __HOLE__:
return default | ImportError | dataset/ETHPy150Open openstack-dev/heat-cfnclient/heat_cfnclient/openstack/common/importutils.py/try_import |
2,376 | def __init__(self):
try:
if settings.LDAP_MASTER_DISABLE == True: return
except __HOLE__: pass
try:
logger.debug("TLS AVAILABLE? %d" % (ldap.TLS_AVAIL))
print "LDAP SETTINGS->"+settings.LDAP_MASTER_URI
ldap.set_option(ldap.OPT_X_TLS_CACERTFILE, settings.LDAP_MASTER_CA)
#ldap.set_option(ldap.OPT_X_TLS_CERTFILE, settings.LDAP_MASTER_CERT)
#ldap.set_option(ldap.OPT_X_TLS_KEYFILE, settings.LDAP_MASTER_KEY)
ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, settings.LDAP_MASTER_REQCERT)
ldap.set_option(ldap.OPT_TIMEOUT, settings.LDAP_MASTER_TIMEOUT)
self.proxy = ldap.initialize (settings.LDAP_MASTER_URI)
if settings.AUTH_LDAP_START_TLS:
self.proxy.start_tls_s()
self.proxy.simple_bind_s(settings.LDAP_MASTER_DN, settings.LDAP_MASTER_PWD)
logger.debug ("LdapProxy.__init__: Connected to ldapserver %s as %s with CA in %s and with certificate %s and key %s" % (settings.LDAP_MASTER_URI, settings.LDAP_MASTER_DN, settings.LDAP_MASTER_CA, settings.LDAP_MASTER_CERT, settings.LDAP_MASTER_KEY))
except ldap.LDAPError, error_message:
logger.error ("LdapProxy.__init__: Failed connecting to ldapserver %s as %s with CA in %s and with certificate %s and key %s: %s" % (settings.LDAP_MASTER_URI, settings.LDAP_MASTER_DN, settings.LDAP_MASTER_CA, settings.LDAP_MASTER_CERT, settings.LDAP_MASTER_KEY, error_message))
raise
# not really possible to lock objects in de LDAP backend.
# Howver, as expedients in different islands may compete, this becomes relevant.
# Test 1: editing/saving several times a project, while fetching search results and (re)adding
# object is commented out does not cause problems. Probably repeating the delete action raises
# also a ldap.NO_SUCH_OBJECT that is silently discarded. ==> OK
# Test 2: similar, but now deleting object is commented out. After second time,
# an ldap.ALREADY_EXISTS exception is raised. Either the calling code know how to deal with this
# or user is confronted with an internal server error. ==> Good enough for now?
# Crucial is to be careful that resubmitting does not destroy just-recedntly added data. But this
# should be a concern anyway.
# Not really guarantee to be successful, but at least we try a couple of times to overcome this problem here. | AttributeError | dataset/ETHPy150Open fp7-ofelia/ocf/expedient/src/python/expedient/common/ldapproxy/models.py/LdapProxy.__init__ |
2,377 | def create_or_replace (self, dn, entry):
try:
if settings.LDAP_MASTER_DISABLE == True: return
except __HOLE__: pass
count = 0
while 1:
try:
resultid = self.proxy.search(dn, ldap.SCOPE_BASE)
try:
t, data = self.proxy.result(resultid, 1)
logger.debug("LdapProxy.create_or_replace: dn %s exists and is going to be deleted before being inserted again" % (dn))
self.proxy.delete_s(dn)
except ldap.NO_SUCH_OBJECT:
pass
logger.debug("LdapProxy.create_or_replace: adding %s [%s]" % (dn, entry))
self.proxy.add_s(dn,ldap.modlist.addModlist(entry))
break
except ldap.ALREADY_EXISTS:
count = count + 1
if count < settings.LDAP_MASTER_RETRIES:
continue
else:
logger.error ("LdapProxy: tried %d time to replace %s in LDAP directory" % (settings.LDAP_MASTER_RETRIES, dn))
raise
except ldap.LDAPError, error_message:
logger.error ("ldapproxy: create or replace %s with %s failed: %s" % (dn, entry, error_message))
raise | AttributeError | dataset/ETHPy150Open fp7-ofelia/ocf/expedient/src/python/expedient/common/ldapproxy/models.py/LdapProxy.create_or_replace |
2,378 | def delete (self, dn):
try:
if settings.LDAP_MASTER_DISABLE == True: return
except __HOLE__: pass
try:
resultid = self.proxy.search(dn, ldap.SCOPE_BASE)
try:
t, data = self.proxy.result(resultid, 1)
logger.debug("LdapProxy.delete: dn %s exists and is going to be deleted" % (dn))
self.proxy.delete_s(dn)
except ldap.NO_SUCH_OBJECT:
pass
except ldap.LDAPError, error_message:
logger.error ("ldapproxy: delete %s failed: %s" % (dn, error_message))
raise | AttributeError | dataset/ETHPy150Open fp7-ofelia/ocf/expedient/src/python/expedient/common/ldapproxy/models.py/LdapProxy.delete |
2,379 | def __init__(self, exp, queue, payload, worker=None):
excc = sys.exc_info()[0]
self._exception = excc
try:
self._traceback = traceback.format_exc()
except __HOLE__:
self._traceback = None
self._worker = worker
self._queue = queue
self._payload = payload | AttributeError | dataset/ETHPy150Open binarydud/pyres/pyres/failure/base.py/BaseBackend.__init__ |
2,380 | def search(request, template="search_results.html", extra_context=None):
"""
Display search results. Takes an optional "contenttype" GET parameter
in the form "app-name.ModelName" to limit search results to a single model.
"""
query = request.GET.get("q", "")
page = request.GET.get("page", 1)
per_page = settings.SEARCH_PER_PAGE
max_paging_links = settings.MAX_PAGING_LINKS
try:
parts = request.GET.get("type", "").split(".", 1)
search_model = apps.get_model(*parts)
search_model.objects.search # Attribute check
except (ValueError, __HOLE__, LookupError, AttributeError):
search_model = Displayable
search_type = _("Everything")
else:
search_type = search_model._meta.verbose_name_plural.capitalize()
results = search_model.objects.search(query, for_user=request.user)
paginated = paginate(results, page, per_page, max_paging_links)
context = {"query": query, "results": paginated,
"search_type": search_type}
context.update(extra_context or {})
return TemplateResponse(request, template, context) | TypeError | dataset/ETHPy150Open stephenmcd/mezzanine/mezzanine/core/views.py/search |
2,381 | @staff_member_required
def static_proxy(request):
"""
Serves TinyMCE plugins inside the inline popups and the uploadify
SWF, as these are normally static files, and will break with
cross-domain JavaScript errors if ``STATIC_URL`` is an external
host. URL for the file is passed in via querystring in the inline
popup plugin template, and we then attempt to pull out the relative
path to the file, so that we can serve it locally via Django.
"""
normalize = lambda u: ("//" + u.split("://")[-1]) if "://" in u else u
url = normalize(request.GET["u"])
host = "//" + request.get_host()
static_url = normalize(settings.STATIC_URL)
for prefix in (host, static_url, "/"):
if url.startswith(prefix):
url = url.replace(prefix, "", 1)
response = ""
(content_type, encoding) = mimetypes.guess_type(url)
if content_type is None:
content_type = "application/octet-stream"
path = finders.find(url)
if path:
if isinstance(path, (list, tuple)):
path = path[0]
if url.endswith(".htm"):
# Inject <base href="{{ STATIC_URL }}"> into TinyMCE
# plugins, since the path static files in these won't be
# on the same domain.
static_url = settings.STATIC_URL + os.path.split(url)[0] + "/"
if not urlparse(static_url).scheme:
static_url = urljoin(host, static_url)
base_tag = "<base href='%s'>" % static_url
with open(path, "r") as f:
response = f.read().replace("<head>", "<head>" + base_tag)
else:
try:
with open(path, "rb") as f:
response = f.read()
except __HOLE__:
return HttpResponseNotFound()
return HttpResponse(response, content_type=content_type) | IOError | dataset/ETHPy150Open stephenmcd/mezzanine/mezzanine/core/views.py/static_proxy |
2,382 | def __del__(self):
try:
if not self._socket.closed:
self.close()
except (__HOLE__, TypeError):
pass | AttributeError | dataset/ETHPy150Open 0rpc/zerorpc-python/zerorpc/events.py/Events.__del__ |
2,383 | def close(self):
try:
self._send.close()
except AttributeError:
pass
try:
self._recv.close()
except __HOLE__:
pass
self._socket.close() | AttributeError | dataset/ETHPy150Open 0rpc/zerorpc-python/zerorpc/events.py/Events.close |
2,384 | @cached_property
def metadata(self):
pathname = os.path.join(self.dirname, self.filename)
name_ver = '%s-%s' % (self.name, self.version)
info_dir = '%s.dist-info' % name_ver
wrapper = codecs.getreader('utf-8')
with ZipFile(pathname, 'r') as zf:
wheel_metadata = self.get_wheel_metadata(zf)
wv = wheel_metadata['Wheel-Version'].split('.', 1)
file_version = tuple([int(i) for i in wv])
if file_version < (1, 1):
fn = 'METADATA'
else:
fn = METADATA_FILENAME
try:
metadata_filename = posixpath.join(info_dir, fn)
with zf.open(metadata_filename) as bf:
wf = wrapper(bf)
result = Metadata(fileobj=wf)
except __HOLE__:
raise ValueError('Invalid wheel, because %s is '
'missing' % fn)
return result | KeyError | dataset/ETHPy150Open anzev/hedwig/build/pip/pip/_vendor/distlib/wheel.py/Wheel.metadata |
2,385 | def get_hash(self, data, hash_kind=None):
if hash_kind is None:
hash_kind = self.hash_kind
try:
hasher = getattr(hashlib, hash_kind)
except __HOLE__:
raise DistlibException('Unsupported hash algorithm: %r' % hash_kind)
result = hasher(data).digest()
result = base64.urlsafe_b64encode(result).rstrip(b'=').decode('ascii')
return hash_kind, result | AttributeError | dataset/ETHPy150Open anzev/hedwig/build/pip/pip/_vendor/distlib/wheel.py/Wheel.get_hash |
2,386 | def _get_extensions(self):
pathname = os.path.join(self.dirname, self.filename)
name_ver = '%s-%s' % (self.name, self.version)
info_dir = '%s.dist-info' % name_ver
arcname = posixpath.join(info_dir, 'EXTENSIONS')
wrapper = codecs.getreader('utf-8')
result = []
with ZipFile(pathname, 'r') as zf:
try:
with zf.open(arcname) as bf:
wf = wrapper(bf)
extensions = json.load(wf)
cache = self._get_dylib_cache()
prefix = cache.prefix_to_dir(pathname)
cache_base = os.path.join(cache.base, prefix)
if not os.path.isdir(cache_base):
os.makedirs(cache_base)
for name, relpath in extensions.items():
dest = os.path.join(cache_base, convert_path(relpath))
if not os.path.exists(dest):
extract = True
else:
file_time = os.stat(dest).st_mtime
file_time = datetime.datetime.fromtimestamp(file_time)
info = zf.getinfo(relpath)
wheel_time = datetime.datetime(*info.date_time)
extract = wheel_time > file_time
if extract:
zf.extract(relpath, cache_base)
result.append((name, dest))
except __HOLE__:
pass
return result | KeyError | dataset/ETHPy150Open anzev/hedwig/build/pip/pip/_vendor/distlib/wheel.py/Wheel._get_extensions |
2,387 | def get_namespace(self):
try:
return Namespace.get(self.kwargs['namespace_name'])
except __HOLE__:
raise Http404(
_('Namespace: %s, not found') % self.kwargs['namespace_name']
) | KeyError | dataset/ETHPy150Open mayan-edms/mayan-edms/mayan/apps/smart_settings/views.py/NamespaceDetailView.get_namespace |
2,388 | def check_bind2(self):
sp = self.pygrData.Bio.Seq.Swissprot.sp42()
hbb = sp['HBB1_TORMA']
exons = hbb.exons.keys()
assert len(exons)==1, 'number of expected annotations'
annoDB = self.pygrData.Bio.Annotation.annoDB()
exon = annoDB[1]
assert exons[0] == exon, 'test annotation comparison'
assert exons[0].pathForward is exon, 'annotation parent match'
assert exons[0].sequence == hbb[10:50], 'annotation to sequence match'
onc = sp['HBB1_ONCMY']
try:
exons = onc.exons.keys()
raise ValueError('failed to catch query with no annotations')
except __HOLE__:
pass | KeyError | dataset/ETHPy150Open cjlee112/pygr/tests/metabase_test.py/check_bind2 |
2,389 | def setUp(self):
TestBase.setUp(self)
logger.debug('accessing ensembldb.ensembl.org')
conn = sqlgraph.DBServerInfo(host='ensembldb.ensembl.org',
user='anonymous', passwd='')
try:
translationDB = sqlgraph.SQLTable(
'homo_sapiens_core_47_36i.translation', serverInfo=conn)
exonDB = sqlgraph.SQLTable('homo_sapiens_core_47_36i.exon',
serverInfo=conn)
sql_statement = '''SELECT t3.exon_id FROM
homo_sapiens_core_47_36i.translation AS tr,
homo_sapiens_core_47_36i.exon_transcript AS t1,
homo_sapiens_core_47_36i.exon_transcript AS t2,
homo_sapiens_core_47_36i.exon_transcript AS t3 WHERE tr.translation_id = %s
AND tr.transcript_id = t1.transcript_id AND t1.transcript_id =
t2.transcript_id AND t2.transcript_id = t3.transcript_id AND t1.exon_id =
tr.start_exon_id AND t2.exon_id = tr.end_exon_id AND t3.rank >= t1.rank AND
t3.rank <= t2.rank ORDER BY t3.rank
'''
translationExons = sqlgraph.GraphView(translationDB, exonDB,
sql_statement,
serverInfo=conn)
except __HOLE__:
raise SkipTest('missing MySQLdb module?')
translationExons.__doc__ = 'test saving exon graph'
self.pygrData.Bio.Ensembl.TranslationExons = translationExons
self.metabase.commit()
self.metabase.clear_cache() | ImportError | dataset/ETHPy150Open cjlee112/pygr/tests/metabase_test.py/DBServerInfo_Test.setUp |
2,390 | def test_xmlrpc(self):
"Test XMLRPC"
self.metabase.clear_cache() # force all requests to reload
self.metabase.update("http://localhost:%s" % self.server.port)
check_match(self)
check_dir(self)
check_dir_noargs(self)
check_dir_download(self)
check_dir_re(self)
check_bind(self)
check_bind2(self)
sb_hbb1 = testutil.datafile('sp_hbb1')
sp2 = seqdb.BlastDB(sb_hbb1)
sp2.__doc__ = 'another sp'
try:
self.pygrData.Bio.Seq.sp2 = sp2
self.metabase.commit()
msg = 'failed to catch bad attempt to write to XMLRPC server'
raise KeyError(msg)
except __HOLE__:
pass | ValueError | dataset/ETHPy150Open cjlee112/pygr/tests/metabase_test.py/XMLRPC_Test.test_xmlrpc |
2,391 | def GetKey():
"""Fetches rcpkey from metadata of the instance.
Returns:
RPC key in string.
"""
try:
response = urllib.urlopen(
'http://metadata/computeMetadata/v1beta1/instance/attributes/rpckey')
return response.read()
except __HOLE__:
return '' | IOError | dataset/ETHPy150Open GoogleCloudPlatform/Data-Pipeline/app/static/hadoop_scripts/rpc_daemon/__main__.py/GetKey |
2,392 | @app.route('/mapreduceasync', methods=['GET', 'POST'])
def MapReduceAsync():
"""Handler of MapReduce asynchronous request."""
app.logger.info('ACCESS URL: %s', flask.request.path)
try:
os.mkdir(MAPREDUCE_RESULT_DIR)
except __HOLE__:
# The result directory already exists. Do nothing.
pass
mapreduce_id = str(uuid.uuid4())
threading.Thread(
target=AsyncPerformMapReduce,
args=(mapreduce_id,
flask.request.values.get('input', ''),
flask.request.values.get('output', ''),
flask.request.values.get('mapper_type', 'identity'),
flask.request.values.get('mapper_url', ''),
flask.request.files.get('mapper_file', None),
int(flask.request.values.get('mapper_count', 5)),
flask.request.values.get('reducer_type', 'identity'),
flask.request.values.get('reducer_url', ''),
flask.request.files.get('reducer_file', None),
int(flask.request.values.get('reducer_count', 1)))).start()
return flask.Response(mapreduce_id, mimetype='text/plain') | OSError | dataset/ETHPy150Open GoogleCloudPlatform/Data-Pipeline/app/static/hadoop_scripts/rpc_daemon/__main__.py/MapReduceAsync |
2,393 | def load_path_attr(path):
i = path.rfind(".")
module, attr = path[:i], path[i + 1:]
try:
mod = importlib.import_module(module)
except ImportError as e:
raise ImproperlyConfigured(
"Error importing {0}: '{1}'".format(module, e)
)
try:
attr = getattr(mod, attr)
except __HOLE__:
raise ImproperlyConfigured(
"Module '{0}' does not define a '{1}'".format(module, attr)
)
return attr | AttributeError | dataset/ETHPy150Open pinax/pinax-stripe/pinax/stripe/conf.py/load_path_attr |
2,394 | def importpath(path, error_text=None):
"""
Import value by specified ``path``.
Value can represent module, class, object, attribute or method.
If ``error_text`` is not None and import will
raise ImproperlyConfigured with user friendly text.
"""
result = None
attrs = []
parts = path.split('.')
exception = None
while parts:
try:
result = __import__('.'.join(parts), {}, {}, [''])
except __HOLE__ as e:
if exception is None:
exception = e
attrs = parts[-1:] + attrs
parts = parts[:-1]
else:
break
for attr in attrs:
try:
result = getattr(result, attr)
except (AttributeError, ValueError) as e:
if error_text is not None:
raise ImproperlyConfigured('Error: %s can import "%s"' % (
error_text, path))
else:
raise exception
return result | ImportError | dataset/ETHPy150Open bashu/django-easy-maps/easy_maps/utils.py/importpath |
2,395 | def __init__( self, toklist, name=None, asList=True, modal=True ):
if self.__doinit:
self.__doinit = False
self.__name = None
self.__parent = None
self.__accumNames = {}
if isinstance(toklist, list):
self.__toklist = toklist[:]
else:
self.__toklist = [toklist]
self.__tokdict = dict()
if name:
if not modal:
self.__accumNames[name] = 0
if isinstance(name,int):
name = _ustr(name) # will always return a str, but use _ustr for consistency
self.__name = name
if not toklist in (None,'',[]):
if isinstance(toklist,str):
toklist = [ toklist ]
if asList:
if isinstance(toklist,ParseResults):
self[name] = _ParseResultsWithOffset(toklist.copy(),0)
else:
self[name] = _ParseResultsWithOffset(ParseResults(toklist[0]),0)
self[name].__name = name
else:
try:
self[name] = toklist[0]
except (__HOLE__,TypeError,IndexError):
self[name] = toklist | KeyError | dataset/ETHPy150Open ipython/ipython-py3k/IPython/external/pyparsing/_pyparsing.py/ParseResults.__init__ |
2,396 | def _normalizeParseActionArgs( f ):
"""Internal method used to decorate parse actions that take fewer than 3 arguments,
so that all parse actions can be called as f(s,l,t)."""
STAR_ARGS = 4
try:
restore = None
if isinstance(f,type):
restore = f
f = f.__init__
if not _PY3K:
codeObj = f.__code__
else:
codeObj = f.code
if codeObj.co_flags & STAR_ARGS:
return f
numargs = codeObj.co_argcount
if not _PY3K:
if hasattr(f,"im_self"):
numargs -= 1
else:
if hasattr(f,"__self__"):
numargs -= 1
if restore:
f = restore
except AttributeError:
try:
if not _PY3K:
call_im_func_code = f.__call__.__func__.__code__
else:
call_im_func_code = f.__code__
# not a function, must be a callable object, get info from the
# im_func binding of its bound __call__ method
if call_im_func_code.co_flags & STAR_ARGS:
return f
numargs = call_im_func_code.co_argcount
if not _PY3K:
if hasattr(f.__call__,"im_self"):
numargs -= 1
else:
if hasattr(f.__call__,"__self__"):
numargs -= 0
except AttributeError:
if not _PY3K:
call_func_code = f.__call__.__code__
else:
call_func_code = f.__call__.__code__
# not a bound method, get info directly from __call__ method
if call_func_code.co_flags & STAR_ARGS:
return f
numargs = call_func_code.co_argcount
if not _PY3K:
if hasattr(f.__call__,"im_self"):
numargs -= 1
else:
if hasattr(f.__call__,"__self__"):
numargs -= 1
#~ print ("adding function %s with %d args" % (f.func_name,numargs))
if numargs == 3:
return f
else:
if numargs > 3:
def tmp(s,l,t):
return f(f.__call__.__self__, s,l,t)
if numargs == 2:
def tmp(s,l,t):
return f(l,t)
elif numargs == 1:
def tmp(s,l,t):
return f(t)
else: #~ numargs == 0:
def tmp(s,l,t):
return f()
try:
tmp.__name__ = f.__name__
except (AttributeError,TypeError):
# no need for special handling if attribute doesnt exist
pass
try:
tmp.__doc__ = f.__doc__
except (AttributeError,__HOLE__):
# no need for special handling if attribute doesnt exist
pass
try:
tmp.__dict__.update(f.__dict__)
except (AttributeError,TypeError):
# no need for special handling if attribute doesnt exist
pass
return tmp | TypeError | dataset/ETHPy150Open ipython/ipython-py3k/IPython/external/pyparsing/_pyparsing.py/ParserElement._normalizeParseActionArgs |
2,397 | def _parseNoCache( self, instring, loc, doActions=True, callPreParse=True ):
debugging = ( self.debug ) #and doActions )
if debugging or self.failAction:
#~ print ("Match",self,"at loc",loc,"(%d,%d)" % ( lineno(loc,instring), col(loc,instring) ))
if (self.debugActions[0] ):
self.debugActions[0]( instring, loc, self )
if callPreParse and self.callPreparse:
preloc = self.preParse( instring, loc )
else:
preloc = loc
tokensStart = loc
try:
try:
loc,tokens = self.parseImpl( instring, preloc, doActions )
except __HOLE__:
raise ParseException( instring, len(instring), self.errmsg, self )
except ParseBaseException as err:
#~ print ("Exception raised:", err)
if self.debugActions[2]:
self.debugActions[2]( instring, tokensStart, self, err )
if self.failAction:
self.failAction( instring, tokensStart, self, err )
raise
else:
if callPreParse and self.callPreparse:
preloc = self.preParse( instring, loc )
else:
preloc = loc
tokensStart = loc
if self.mayIndexError or loc >= len(instring):
try:
loc,tokens = self.parseImpl( instring, preloc, doActions )
except IndexError:
raise ParseException( instring, len(instring), self.errmsg, self )
else:
loc,tokens = self.parseImpl( instring, preloc, doActions )
tokens = self.postParse( instring, loc, tokens )
retTokens = ParseResults( tokens, self.resultsName, asList=self.saveAsList, modal=self.modalResults )
if self.parseAction and (doActions or self.callDuringTry):
if debugging:
try:
for fn in self.parseAction:
tokens = fn( instring, tokensStart, retTokens )
if tokens is not None:
retTokens = ParseResults( tokens,
self.resultsName,
asList=self.saveAsList and isinstance(tokens,(ParseResults,list)),
modal=self.modalResults )
except ParseBaseException as err:
#~ print "Exception raised in user parse action:", err
if (self.debugActions[2] ):
self.debugActions[2]( instring, tokensStart, self, err )
raise
else:
for fn in self.parseAction:
tokens = fn( instring, tokensStart, retTokens )
if tokens is not None:
retTokens = ParseResults( tokens,
self.resultsName,
asList=self.saveAsList and isinstance(tokens,(ParseResults,list)),
modal=self.modalResults )
if debugging:
#~ print ("Matched",self,"->",retTokens.asList())
if (self.debugActions[1] ):
self.debugActions[1]( instring, tokensStart, loc, self, retTokens )
return loc, retTokens | IndexError | dataset/ETHPy150Open ipython/ipython-py3k/IPython/external/pyparsing/_pyparsing.py/ParserElement._parseNoCache |
2,398 | def parseFile( self, file_or_filename, parseAll=False ):
"""Execute the parse expression on the given file or filename.
If a filename is specified (instead of a file object),
the entire file is opened, read, and closed before parsing.
"""
try:
file_contents = file_or_filename.read()
except __HOLE__:
f = open(file_or_filename, "rb")
file_contents = f.read()
f.close()
try:
return self.parseString(file_contents, parseAll)
except ParseBaseException as exc:
# catch and re-raise exception from here, clears out pyparsing internal stack trace
raise exc | AttributeError | dataset/ETHPy150Open ipython/ipython-py3k/IPython/external/pyparsing/_pyparsing.py/ParserElement.parseFile |
2,399 | def __init__( self, matchString ):
super(Literal,self).__init__()
self.match = matchString
self.matchLen = len(matchString)
try:
self.firstMatchChar = matchString[0]
except __HOLE__:
warnings.warn("null string passed to Literal; use Empty() instead",
SyntaxWarning, stacklevel=2)
self.__class__ = Empty
self.name = '"%s"' % _ustr(self.match)
self.errmsg = "Expected " + self.name
self.mayReturnEmpty = False
#self.myException.msg = self.errmsg
self.mayIndexError = False
# Performance tuning: this routine gets called a *lot*
# if this is a single character match string and the first character matches,
# short-circuit as quickly as possible, and avoid calling startswith
#~ @profile | IndexError | dataset/ETHPy150Open ipython/ipython-py3k/IPython/external/pyparsing/_pyparsing.py/Literal.__init__ |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.