Column      Type    Stats
Unnamed: 0  int64   values 0 .. 10k
function    string  lengths 79 .. 138k
label       string  20 classes
info        string  lengths 42 .. 261
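The records below look like rows exported from the table summarized above. As a minimal sketch, this is how such a dump could be loaded and inspected with pandas, assuming it has been saved as a CSV with these four columns (the file name is hypothetical):

    import pandas as pd

    # Hypothetical export of the table described above.
    df = pd.read_csv("exception_dataset.csv")

    # 'label' holds one of 20 exception class names (TypeError, KeyError, ...);
    # 'function' holds Python source with the exception name masked as __HOLE__.
    print(df["label"].value_counts())
    print(df.loc[0, "function"][:120])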
300
def test_save_invalid_boolean_auto(self):
    self.protocol.answers.append(
        'config/names=\nfoo Boolean+Auto'
    )
    self.protocol.answers.append({'foo': '1'})
    conf = TorConfig(self.protocol)

    for value in ('auto', 'True', 'False', None):
        try:
            conf.foo = value
        except (ValueError, __HOLE__):
            pass
        else:
            self.fail("Invalid value '%s' allowed" % value)
    conf.save()
    self.assertEqual(self.protocol.sets, [])
TypeError
dataset/ETHPy150Open meejah/txtorcon/test/test_torconfig.py/ConfigTests.test_save_invalid_boolean_auto
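Each record pairs a function whose exception type has been masked as __HOLE__ with a label naming that type. A minimal sketch of restoring the original source from one record (the helper name is ours, not the dataset's):

    def unmask(function_code, label):
        """Substitute the masked exception name back into the source."""
        return function_code.replace("__HOLE__", label)

    # For record 300 above, `except (ValueError, __HOLE__)` becomes
    # `except (ValueError, TypeError)` once the TypeError label is substituted.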
301
def test_int_validator(self):
    self.protocol.answers.append('config/names=\nfoo Integer')
    self.protocol.answers.append({'foo': '123'})
    conf = TorConfig(self.protocol)

    conf.foo = 2.33
    conf.save()
    self.assertEqual(conf.foo, 2)

    conf.foo = '1'
    conf.save()
    self.assertEqual(conf.foo, 1)

    conf.foo = '-100'
    conf.save()
    self.assertEqual(conf.foo, -100)

    conf.foo = 0
    conf.save()
    self.assertEqual(conf.foo, 0)

    conf.foo = '0'
    conf.save()
    self.assertEqual(conf.foo, 0)

    for value in ('no', 'Not a value', None):
        try:
            conf.foo = value
        except (ValueError, __HOLE__):
            pass
        else:
            self.fail("No exception thrown")
TypeError
dataset/ETHPy150Open meejah/txtorcon/test/test_torconfig.py/ConfigTests.test_int_validator
302
def test_unknown_descriptor(self):
    self.protocol.answers.append('config/names=\nbing CommaList')
    self.protocol.answers.append({'bing': 'foo'})
    conf = TorConfig(self.protocol)

    try:
        conf.foo
        self.assertTrue(False)
    except __HOLE__, e:
        self.assertTrue('foo' in str(e))
KeyError
dataset/ETHPy150Open meejah/txtorcon/test/test_torconfig.py/ConfigTests.test_unknown_descriptor
303
def test_set_wrong_object(self):
    conf = TorConfig(self.protocol)
    self.assertTrue(conf.post_bootstrap.called)
    try:
        conf.log = ('this', 'is', 'a', 'tuple')
        self.fail()
    except __HOLE__, e:
        self.assertTrue('Not valid' in str(e))
ValueError
dataset/ETHPy150Open meejah/txtorcon/test/test_torconfig.py/LogTests.test_set_wrong_object
304
def test_conf_changed_invalid_values(self):
    protocol = FakeControlProtocol([])
    protocol.answers.append('config/names=\nFoo Integer\nBar Integer')
    protocol.answers.append({'Foo': '0'})
    protocol.answers.append({'Bar': '1'})
    config = TorConfig(protocol)
    # Initial value is not tested here

    try:
        protocol.events['CONF_CHANGED']('Foo=INVALID\nBar=VALUES')
    except (__HOLE__, TypeError):
        pass
    else:
        self.fail("No exception thrown")
ValueError
dataset/ETHPy150Open meejah/txtorcon/test/test_torconfig.py/EventTests.test_conf_changed_invalid_values
305
def test_hidden_service_parse_error(self):
    conf = TorConfig(FakeControlProtocol(['config/names=']))
    try:
        conf._setup_hidden_services('''FakeHiddenServiceKey=foo''')
        self.fail()
    except __HOLE__, e:
        self.assertTrue('parse' in str(e))
RuntimeError
dataset/ETHPy150Open meejah/txtorcon/test/test_torconfig.py/HiddenServiceTests.test_hidden_service_parse_error
306
def test_launch_wrong_stdout(self):
    config = TorConfig()
    try:
        launch_tor(config, None, stdout=object(), tor_binary='/bin/echo')
        self.fail("Should have thrown an error")
    except __HOLE__:
        pass
RuntimeError
dataset/ETHPy150Open meejah/txtorcon/test/test_torconfig.py/LaunchTorTests.test_launch_wrong_stdout
307
def test_wrong_blob(self):
    try:
        eph = torconfig.EphemeralHiddenService("80 localhost:80", "foo")
        self.fail("should get exception")
    except __HOLE__ as e:
        pass
RuntimeError
dataset/ETHPy150Open meejah/txtorcon/test/test_torconfig.py/EphemeralHiddenServiceTest.test_wrong_blob
308
@defer.inlineCallbacks
def test_remove_error(self):
    eph = torconfig.EphemeralHiddenService("80 127.0.0.1:80")
    eph.hostname = 'foo.onion'
    proto = Mock()
    proto.queue_command = Mock(return_value="it's not ok")
    try:
        yield eph.remove_from_tor(proto)
        self.fail("should have gotten exception")
    except __HOLE__ as e:
        pass
RuntimeError
dataset/ETHPy150Open meejah/txtorcon/test/test_torconfig.py/EphemeralHiddenServiceTest.test_remove_error
309
def list_updated ( self, values ):
    """ Handles updates to the list of legal checklist values.
    """
    sv = self.string_value
    if (len( values ) > 0) and isinstance( values[0], basestring ):
        values = [ ( x, sv( x, capitalize ) ) for x in values ]
    self.values = valid_values = [ x[0] for x in values ]
    self.names  = [ x[1] for x in values ]

    # Make sure the current value is still legal:
    modified  = False
    cur_value = parse_value( self.value )
    for i in range( len( cur_value ) - 1, -1, -1 ):
        if cur_value[i] not in valid_values:
            try:
                del cur_value[i]
                modified = True
            except __HOLE__ as e:
                logger.warn('Unable to remove non-current value [%s] from '
                            'values %s', cur_value[i], values)

    if modified:
        if isinstance( self.value, basestring ):
            cur_value = ','.join( cur_value )
        self.value = cur_value

    self.rebuild_editor()

#---------------------------------------------------------------------------
#  Rebuilds the editor after its definition is modified:
#---------------------------------------------------------------------------
TypeError
dataset/ETHPy150Open enthought/traitsui/traitsui/qt4/check_list_editor.py/SimpleEditor.list_updated
310
def arg_parser():
    """
    Parses the arguments and calls the help()
    function if any problem is found
    """
    global PRINT_PIXIE
    global PRINT_REAVER
    global USE_REAVER
    global USE_PIXIEWPS
    global WASH_TIME
    global REAVER_TIME
    global WASH_CHANNEL
    global PROMPT_APS
    global OUTPUT_FILE
    global OUTPUT
    global GET_PASSWORD
    global FOREVER
    global OVERRIDE

    H = ['-h','--help']
    binary_flags = ['-w','-t','-c','-o']
    for arg in argv[1:]:
        if arg in H:
            help()
            exit()
        elif argv[argv.index(arg)-1] in binary_flags:
            continue
        elif arg == '-q' or arg == '--quiet':
            PRINT_PIXIE = False
            PRINT_REAVER = False
        elif arg == '-r' or arg == '--use-reaver':
            USE_REAVER = True
        elif arg == '-p' or arg == '--use-pixie':
            USE_PIXIEWPS = True
        elif arg == '-w' or arg == '--wash-time':
            try:
                WASH_TIME = int(argv[argv.index(arg)+1])
            except ValueError:
                help()
        elif arg == '-t' or arg == '--time':
            try:
                REAVER_TIME = int(argv[argv.index(arg)+1])
            except ValueError:
                help()
        elif arg == '-c' or arg == '--channel':
            try:
                WASH_CHANNEL = int(argv[argv.index(arg)+1])
            except __HOLE__:
                help()
        elif arg == '-P' or arg == '--prompt':
            PROMPT_APS = True
        elif arg == '-o' or arg == '--output':
            OUTPUT = True
            OUTPUT_FILE = argv[argv.index(arg)+1]
        elif arg == '-f' or arg == '--pass':
            GET_PASSWORD = True
        elif arg == '-F' or arg == '--forever':
            FOREVER = True
        elif arg == '-O' or arg == '--override':
            OVERRIDE = True
        else:
            help()
ValueError
dataset/ETHPy150Open jgilhutton/pyxiewps_WPShack-Python/Version-1.0/pyxiewps-EN.py/arg_parser
311
def run(self, cmd, shell = False, kill_tree = True, timeout = -1):
    """
    Runs a command with a given timeout, after which it is terminated.
    Returns the stdout of the process.
    Output is a list, without strip() applied to the lines.
    """
    class Alarm(Exception):
        pass
    def alarm_handler(signum, frame):
        raise Alarm
    if timeout != -1:
        signal(SIGALRM, alarm_handler)  # Time's ticking...
        alarm(timeout)
    proc = subprocess.Popen(cmd, shell = shell, stdout = subprocess.PIPE)
    output = []
    try:
        for line in iter(proc.stdout.readline, ''):
            output.append(line)
        if timeout != -1:
            alarm(0)
    except Alarm:  # time's out! alarm is raised
        pids = [proc.pid]
        # kill the process tree related with the main process.
        if kill_tree:
            pids.extend(self.get_process_children(proc.pid))
        for pid in pids:
            try:
                kill(pid, SIGKILL)
            except __HOLE__:
                pass
        return output
    return output
OSError
dataset/ETHPy150Open jgilhutton/pyxiewps_WPShack-Python/Version-1.0/pyxiewps-EN.py/Engine.run
312
def get_iface(self):
    """
    If any monitor interfaces are found, returns the wlans.
    If more than one is found, asks the user to choose.
    If monitor mode is already enabled, returns the name.
    """
    if self.IS_MON:  # Si la interfaz esta en modo monitor devuelve el nombre 'mon'
        cmd = "ifconfig | grep mon | cut -d \' \' -f1"
        mon = subprocess.check_output(cmd, shell = True).strip()
        self.IFACE_MON = mon
        return mon
    else:
        cmd = "ifconfig | grep wlan | cut -d \' \' -f1"
        proc = subprocess.check_output(cmd, shell = True)
        ifaces = proc.strip().split('\n')
        if len(ifaces) == 1 and ifaces[0] == '':
            print ALERTA + "No wireless interfaces were found!"
            print "        Please check that a wireless device is attached to your PC;"
            print "        if you are running on a virtual machine,"
            print "        go get a USB wireless device."
            exit()
        elif len(ifaces) > 1:
            print INPUT + "Choose the W.Interface: "
            for i in ifaces:
                print str(ifaces.index(i)) + " >> " + i
            while True:  # Control the input! you bugseeker!
                try:
                    choice = int(raw_input(INPUT))
                    if choice <= len(ifaces) and choice >= 0:
                        self.IFACE = ifaces[choice]
                        return ifaces[choice]
                        break
                    else:
                        print INPUT + "Number between 0 and %s" %(len(ifaces)-1)
                # Index error handling
                except __HOLE__:
                    print ALERTA + "NUMBER between 0 and %s" %(len(ifaces)-1)
                # Integer error handling
                except KeyboardInterrupt:
                    print
                    print ALERTA + "Interrupted program!"
                    print
                    engine.exit_limpio()
        else:
            self.IFACE = ifaces[0]
            return ifaces[0]
ValueError
dataset/ETHPy150Open jgilhutton/pyxiewps_WPShack-Python/Version-1.0/pyxiewps-EN.py/Config.get_iface
313
def get_wps_aps(self):
    """
    Enumerates any WPS-active APs
    Goes to get_reaver_info
    """
    print INFO + "Enumerating WPS-active APs..."
    cmd = 'wash -i %s -P' %(c.IFACE_MON)
    if WASH_CHANNEL != '':
        cmd = cmd + ' -c %d' %WASH_CHANNEL
    lista_aps = engine.run(cmd, shell = True, timeout = WASH_TIME)
    lista_provisoria = []
    ultimo = len(lista_aps)-1
    for linea in lista_aps:            # Some wash output glitches are often found
        if '|' in linea:               # this handles these glitches
            lista_provisoria.append(linea)
    lista_aps = lista_provisoria
    if lista_aps == []:
        print
        print ALERTA + "No WPS-active APs were found."
        print
        if not FOREVER:
            engine.exit_limpio()
    else:
        for_fill = lista_aps                 #\
        essids = []                          #|
        for line in for_fill:                #|- Formats the list
            line = line.split('|')           #|
            essids.append(line[5].strip())   #|
        fill = len(max(essids))              #/
        print INFO + "The following WPS-active APs were found:"
        for linea in lista_aps:
            linea = linea.split('|')
            fill_line = fill - len(linea[5].strip())
            print '\t' + INPUT + str(linea[5].strip()) + ' '*fill_line + ' || ' + linea[0] + ' || Channel: ' + linea[1] + ' || WPS locked?: ' + linea[4]
        if USE_REAVER:
            while True:
                try:
                    if len(lista_aps) != 1 and PROMPT_APS:
                        choice = int(raw_input("%sIndex of the AP: " %INPUT))
                        provisoria = []
                        provisoria.append(lista_aps[choice])
                        lista_aps = provisoria
                        break
                    else:
                        break
                except __HOLE__:
                    print
                    engine.exit_limpio()
                    break
                except ValueError:
                    print ALERTA + "Number between 0 and %d" %ultimo
            if not OVERRIDE and path.isfile('pyxiewpsdata.txt'):
                coincidencias = []
                pin_correspondiente = []
                with open('pyxiewpsdata.txt') as f:
                    ya_sacados = f.readlines()
                if len(ya_sacados) > 1:
                    ya_sacados.reverse()      # reverts the list so it takes the newest pin
                    for target in lista_aps:  # if any pin were changed by the AP administrator
                        for line in ya_sacados[1:]:
                            if target.split('|')[5].strip() == line.strip():
                                coincidencias.append(target)
                                pin_correspondiente.append(ya_sacados[ya_sacados.index(line)-1].strip())
                for i in set(coincidencias):
                    print OPCION + "The %s pin was already found!" %i.split('|')[5].strip()
                    print '\t'+ INPUT + pin_correspondiente[coincidencias.index(i)]
                    print OPCION + "Do you want to skip this AP? [Y/n]: "
                    try:
                        choice = raw_input("%s Enter to skip: " %INPUT)
                    except KeyboardInterrupt:
                        print
                        engine.exit_limpio()
                    if choice in CHOICES_YES:
                        lista_aps.remove(i)
            for linea in lista_aps:
                args = engine.parse_wash(linea.strip())
                self.get_reaver_info(args[0],args[1],args[2])
            if not FOREVER:
                engine.exit_limpio()
            else:
                pass
KeyboardInterrupt
dataset/ETHPy150Open jgilhutton/pyxiewps_WPShack-Python/Version-1.0/pyxiewps-EN.py/Attack.get_wps_aps
314
def __new__(cls, name, bases, attrs):
    # TODO: Think of a better test to avoid processing Metadata parent class
    if bases == (object,):
        return type.__new__(cls, name, bases, attrs)

    # Save options as a dict for now (we will be editing them)
    # TODO: Is this necessary, should we bother relaying Django Meta options?
    Meta = attrs.pop('Meta', {})
    if Meta:
        Meta = Meta.__dict__.copy()

    # Remove our options from Meta, so Django won't complain
    help_text = attrs.pop('HelpText', {})
    # TODO: Is this necessary
    if help_text:
        help_text = help_text.__dict__.copy()
    options = Options(Meta, help_text)

    # Collect and sort our elements
    elements = [(key, attrs.pop(key)) for key, obj in attrs.items()
                if isinstance(obj, MetadataField)]
    elements.sort(lambda x, y: cmp(x[1].creation_counter, y[1].creation_counter))
    elements = SortedDict(elements)

    # Validation:
    # TODO: Write a test framework for seo.Metadata validation
    # Check that no group names clash with element names
    for key, members in options.groups.items():
        assert key not in elements, "Group name '%s' clashes with field name" % key
        for member in members:
            assert member in elements, "Group member '%s' is not a valid field" % member

    # Check that the names of the elements are not going to clash with a model field
    for key in elements:
        assert key not in RESERVED_FIELD_NAMES, "Field name '%s' is not allowed" % key

    # Preprocessing complete, here is the new class
    new_class = type.__new__(cls, name, bases, attrs)

    options.metadata = new_class
    new_class._meta = options

    # Some useful attributes
    options._update_from_name(name)
    options._register_elements(elements)

    try:
        for backend_name in options.backends:
            new_class._meta._add_backend(backend_registry[backend_name])
        for backend_name in options.backends:
            backend_registry[backend_name].validate(options)
    except __HOLE__:
        raise Exception('Metadata backend "%s" is not installed.' % backend_name)

    #new_class._meta._add_backend(PathBackend)
    #new_class._meta._add_backend(ModelInstanceBackend)
    #new_class._meta._add_backend(ModelBackend)
    #new_class._meta._add_backend(ViewBackend)

    registry[name] = new_class

    return new_class

# TODO: Move this function out of the way (subclasses will want to define their own attributes)
KeyError
dataset/ETHPy150Open willhardy/django-seo/rollyourown/seo/base.py/MetadataBase.__new__
315
def _get_metadata_model(name=None):
    # Find registered Metadata object
    if name is not None:
        try:
            return registry[name]
        except __HOLE__:
            if len(registry) == 1:
                valid_names = u'Try using the name "%s" or simply leaving it out altogether.' % registry.keys()[0]
            else:
                valid_names = u"Valid names are " + u", ".join(u'"%s"' % k for k in registry.keys())
            raise Exception(u"Metadata definition with name \"%s\" does not exist.\n%s" % (name, valid_names))
    else:
        assert len(registry) == 1, "You must have exactly one Metadata class, if using get_metadata() without a 'name' parameter."
        return registry.values()[0]
KeyError
dataset/ETHPy150Open willhardy/django-seo/rollyourown/seo/base.py/_get_metadata_model
316
def create_metadata_instance(metadata_class, instance):
    # If this instance is marked as handled, don't do anything
    # This typically means that the django admin will add metadata
    # using eg an inline.
    if getattr(instance, '_MetadataFormset__seo_metadata_handled', False):
        return
    metadata = None
    content_type = ContentType.objects.get_for_model(instance)

    # If this object does not define a path, don't worry about automatic update
    try:
        path = instance.get_absolute_url()
    except __HOLE__:
        return

    # Look for an existing object with this path
    language = getattr(instance, '_language', None)
    site = getattr(instance, '_site', None)
    for md in metadata_class.objects.get_instances(path, site, language):
        # If another object has the same path, remove the path.
        # It's harsh, but we need a unique path and will assume the other
        # link is outdated.
        if md._content_type != content_type or md._object_id != instance.pk:
            md._path = md._content_object.get_absolute_url()
            md.save()
            # Move on, this metadata instance isn't for us
            md = None
        else:
            # This is our instance!
            metadata = md

    # If the path-based search didn't work, look for (or create) an existing
    # instance linked to this object.
    if not metadata:
        metadata, md_created = metadata_class.objects.get_or_create(
            _content_type=content_type, _object_id=instance.pk)

    metadata._path = path
    metadata.save()
AttributeError
dataset/ETHPy150Open willhardy/django-seo/rollyourown/seo/base.py/create_metadata_instance
317
def on_data(data_list, data, version):
    name = data.get('name')
    if name == 'untrusted':
        _data = Data('untrusted')
    elif name == 'install':
        index = data.get('index')
        data_obj_list = filter(lambda d: d.index == index, version.app.data_set.all())
        try:
            _data = Data('install', index=index, text=next(data_obj_list).value)
        except __HOLE__:
            _data = Data('install', index=index, status='error-nodata')
    data_list.append(_data)
    return data_list
StopIteration
dataset/ETHPy150Open Crystalnix/omaha-server/omaha_server/omaha/builder.py/on_data
318
@classmethod
def setupClass(cls):
    global numpy
    global scipy
    try:
        import numpy
    except ImportError:
        raise SkipTest('NumPy not available.')
    try:
        import scipy
    except __HOLE__:
        raise SkipTest('SciPy not available.')
ImportError
dataset/ETHPy150Open networkx/networkx/networkx/algorithms/centrality/tests/test_subgraph.py/TestSubgraph.setupClass
319
def get_git_changeset():
    """Returns a numeric identifier of the latest git changeset.

    The result is the UTC timestamp of the changeset in YYYYMMDDHHMMSS format.
    This value isn't guaranteed to be unique, but collisions are very unlikely,
    so it's sufficient for generating the development version numbers.
    """
    import datetime
    import os
    import subprocess
    repo_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    git_log = subprocess.Popen('git log --pretty=format:%ct --quiet -1 HEAD',
                               stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                               shell=True, cwd=repo_dir, universal_newlines=True)
    timestamp = git_log.communicate()[0]
    try:
        timestamp = datetime.datetime.utcfromtimestamp(int(timestamp))
    except __HOLE__:
        return None
    return timestamp.strftime('%Y%m%d%H%M%S')
ValueError
dataset/ETHPy150Open pybee/duvet/duvet/__init__.py/get_git_changeset
320
def _decode(self, param):
    try:
        return force_text(param, strings_only=True)
    except __HOLE__:
        return '(encoded string)'
UnicodeDecodeError
dataset/ETHPy150Open graphql-python/graphene/graphene/contrib/django/debug/sql/tracking.py/NormalCursorWrapper._decode
321
def try_clean(self, value):
    try:
        # TODO: Remove usage of this in query builder.
        return self.clean(value)
    except (__HOLE__, AssertionError):
        return None
ValueError
dataset/ETHPy150Open armet/python-armet/armet/attributes/attribute.py/Attribute.try_clean
322
def _make_getter(self, segment, class_):
    # Attempt to resolve properties and simple functions by
    # accessing the class attribute directly.
    obj = getattr(class_, segment, None)
    if obj is not None:
        if hasattr(obj, '__call__'):
            return obj.__call__
        if hasattr(obj, '__get__'):
            return lambda target, x=obj.__get__: x(target, class_)

    # Check for much better hidden descriptor.
    obj = class_.__dict__.get(segment)
    if obj is not None and hasattr(obj, '__get__'):
        return lambda target, x=obj.__get__: x(target, class_)

    # Check for item access (for a dictionary).
    if hasattr(class_, '__getitem__'):
        def getter(target):
            try:
                return target[segment]
            except __HOLE__:
                return None
        return getter

    # Check for attribute access.
    if hasattr(class_, '__dict__'):
        return lambda target: target.__dict__.get(segment)

    raise RuntimeError(
        'unable to resolve attribute access for %r on %r' % (
            segment, class_))
KeyError
dataset/ETHPy150Open armet/python-armet/armet/attributes/attribute.py/Attribute._make_getter
323
def read_requirement_files(files):
    pip_requirements = []
    session = pip_download.PipSession()
    for filename in files:
        if sh.isfile(filename):
            cache_key = "f:%s:%s" % (sh.abspth(filename), sh.getsize(filename))
            with REQUIREMENT_FILE_CACHE_LOCK:
                try:
                    reqs = REQUIREMENT_FILE_CACHE[cache_key]
                except __HOLE__:
                    reqs = tuple(pip_req.parse_requirements(filename, session=session))
                    REQUIREMENT_FILE_CACHE[cache_key] = reqs
                pip_requirements.extend(reqs)
    return (pip_requirements, [req.req for req in pip_requirements])
KeyError
dataset/ETHPy150Open openstack/anvil/anvil/packaging/helpers/pip_helper.py/read_requirement_files
324
@classmethod
def read(cls, **kwargs):
    r = object_read.send(cls, **kwargs)
    try:
        return r[0][1]
    except __HOLE__:
        pass
IndexError
dataset/ETHPy150Open petermelias/flask-easymode/flask_easymode/mixins/model.py/Read.read
325
@classmethod
def read_many(cls, **kwargs):
    r = object_read.send(cls, _many=True, **kwargs)
    try:
        return r[0][1]
    except __HOLE__:
        pass
IndexError
dataset/ETHPy150Open petermelias/flask-easymode/flask_easymode/mixins/model.py/Read.read_many
326
@classmethod
def update(cls, o, **kwargs):
    r = object_updated.send(cls, o=o, **kwargs)
    try:
        return r[0][1]
    except __HOLE__:
        pass
IndexError
dataset/ETHPy150Open petermelias/flask-easymode/flask_easymode/mixins/model.py/Update.update
327
@classmethod
def delete(cls, o, **kwargs):
    r = object_deleted.send(cls, o=o, **kwargs)
    try:
        return r[0][1]
    except __HOLE__:
        pass
IndexError
dataset/ETHPy150Open petermelias/flask-easymode/flask_easymode/mixins/model.py/Delete.delete
328
@classmethod
def load(cls, conditions, **kwargs):
    r = object_injected.send(cls, conditions=conditions, **kwargs)
    try:
        return r[0][1]
    except __HOLE__:
        pass
IndexError
dataset/ETHPy150Open petermelias/flask-easymode/flask_easymode/mixins/model.py/Injectable.load
329
def _set_syslog_handler(self, log, cfg, fmt, name):
    # setup format
    if not cfg.syslog_prefix:
        prefix = cfg.proc_name.replace(":", ".")
    else:
        prefix = cfg.syslog_prefix

    prefix = "gunicorn.%s.%s" % (prefix, name)

    # set format
    fmt = logging.Formatter(r"%s: %s" % (prefix, fmt))

    # syslog facility
    try:
        facility = SYSLOG_FACILITIES[cfg.syslog_facility.lower()]
    except __HOLE__:
        raise RuntimeError("unknown facility name")

    # parse syslog address
    socktype, addr = parse_syslog_address(cfg.syslog_addr)

    # finally setup the syslog handler
    if sys.version_info >= (2, 7):
        h = logging.handlers.SysLogHandler(address=addr,
                facility=facility, socktype=socktype)
    else:
        # socktype is only supported in 2.7 and sup
        # fix issue #541
        h = logging.handlers.SysLogHandler(address=addr, facility=facility)

    h.setFormatter(fmt)
    h._gunicorn = True
    log.addHandler(h)
KeyError
dataset/ETHPy150Open chalasr/Flask-P2P/venv/lib/python2.7/site-packages/gunicorn/glogging.py/Logger._set_syslog_handler
330
@dec.skipif(_ctypes_missing, msg="Ctypes library could not be found")
def setUp(self):
    if sys.platform == 'win32':
        if sys.version_info < (3, 5):
            file = ctypes.util.find_msvcrt()
        else:
            file = 'api-ms-win-crt-math-l1-1-0.dll'
    elif sys.platform == 'darwin':
        file = 'libm.dylib'
    else:
        file = 'libm.so'
    try:
        self.lib = ctypes.CDLL(file)
    except __HOLE__:
        # This test doesn't work on some Linux platforms (Fedora for
        # example) that put an ld script in libm.so - see gh-5370
        self.skipTest("Ctypes can't import libm.so")

    restype = ctypes.c_double
    argtypes = (ctypes.c_double,)
    for name in ['sin', 'cos', 'tan']:
        func = getattr(self.lib, name)
        func.restype = restype
        func.argtypes = argtypes
OSError
dataset/ETHPy150Open scipy/scipy/scipy/integrate/tests/test_quadpack.py/TestCtypesQuad.setUp
331
@staticmethod
def factory():
    """Static method to discover and import the shaper to use.

    Discover the platform on which Atcd is running and import the
    shaping backend for this platform.

    Returns:
        The shaping backend class

    Raises:
        NotImplementedError: the shaping backend class couldn't be imported
    """
    os_name = os.uname()[0]
    klass = 'Atcd{0}Shaper'.format(os_name)
    # If not imported yet, try to import
    try:
        if klass not in globals():
            from_module_import_class(
                'atcd.backends.{0}'.format(os_name.lower()), klass
            )
    except __HOLE__:
        raise NotImplementedError('{0} is not implemented!'.format(klass))
    except ImportError:
        raise NotImplementedError(
            '{0} backend is not implemented!'.format(os_name.lower())
        )
    return globals()[klass]
AttributeError
dataset/ETHPy150Open facebook/augmented-traffic-control/atc/atcd/atcd/AtcdThriftHandlerTask.py/AtcdThriftHandlerTask.factory
332
def _del_mapping(self, id, ip):
    """Removes mappings from IP to id and id to IP.

    Also remove the mapping from IP to TrafficControl configs.
    """
    try:
        del self._id_to_ip_map[id]
        del self._ip_to_id_map[ip]
        del self._current_shapings[ip]
    except __HOLE__:
        self.logger.exception('Unable to remove key from dict')
KeyError
dataset/ETHPy150Open facebook/augmented-traffic-control/atc/atcd/atcd/AtcdThriftHandlerTask.py/AtcdThriftHandlerTask._del_mapping
333
def _pcap_file_size(self, filename):
    try:
        return int(os.path.getsize(self._pcap_full_path(filename)))
    except __HOLE__:
        return 0
OSError
dataset/ETHPy150Open facebook/augmented-traffic-control/atc/atcd/atcd/AtcdThriftHandlerTask.py/AtcdThriftHandlerTask._pcap_file_size
334
def sizeof_fmt_django(x):
    try:
        value = int(x)
    except __HOLE__:
        return x
    return sizeof_fmt(value)
ValueError
dataset/ETHPy150Open GambitResearch/suponoff/suponoff/templatetags/suponoff.py/sizeof_fmt_django
335
@login_required
@app.route('/search/')
def search():
    try:
        page = int(request.args.get('p', 1))
    except (__HOLE__, ValueError):
        page = 1

    query = request.args.get('q')
    has_search = bool(app.config['SEARCH_ENGINE'])

    if query:
        if uuid_re.match(query):
            # Forward to message if it exists
            try:
                event = Event.objects.get(query)
            except Event.DoesNotExist:
                pass
            else:
                return redirect(event.get_absolute_url())
        elif not has_search:
            return render_template('sentry/invalid_message_id.html')
        else:
            # TODO:
            # event_list = get_search_query_set(query)
            raise NotImplementedError
    else:
        event_list = Group.objects.none()

    sort = request.args.get('sort')
    if sort == 'date':
        event_list = event_list.order_by('-last_seen')
    elif sort == 'new':
        event_list = event_list.order_by('-first_seen')
    else:
        sort = 'relevance'

    return render_template('sentry/search.html', **{
        'event_list': event_list,
        'query': query,
        'sort': sort,
        'request': request,
        'page': page,
    })
TypeError
dataset/ETHPy150Open dcramer/sentry-old/sentry/web/views.py/search
336
@login_required
@app.route('/')
def index():
    filter_list = list(filters.all())

    try:
        page = int(request.args.get('p', 1))
    except (__HOLE__, ValueError):
        page = 1

    event_list = Group.objects.all()

    sort = request.args.get('sort')
    if sort == 'date':
        event_list = event_list.order_by('-last_seen')
    elif sort == 'new':
        event_list = event_list.order_by('-first_seen')
    elif sort == 'count':
        event_list = event_list.order_by('-count')
    else:
        sort = 'priority'
        event_list = event_list.order_by('-score')

    any_filter = False
    # for filter_ in filters:
    #     if not filter_.is_set():
    #         continue
    #     any_filter = True
    #     event_list = filter_.get_query_set(event_list)

    today = datetime.datetime.now()

    has_realtime = page == 1

    return render_template('sentry/index.html', **{
        'has_realtime': has_realtime,
        'event_list': event_list,
        'today': today,
        'sort': sort,
        'any_filter': any_filter,
        'request': request,
        'filter_list': filter_list,
    })
TypeError
dataset/ETHPy150Open dcramer/sentry-old/sentry/web/views.py/index
337
@login_required
@app.route('/group/<group_id>/<path:slug>')
def group_plugin_action(group_id, slug):
    group = get_object_or_404(Group, pk=group_id)

    try:
        cls = GroupActionProvider.plugins[slug]
    except __HOLE__:
        abort(404, 'Plugin not found')
    response = cls(group_id)(request, group)
    if response:
        return response
    return redirect(request.environ.get('HTTP_REFERER') or url_for('index'))
KeyError
dataset/ETHPy150Open dcramer/sentry-old/sentry/web/views.py/group_plugin_action
338
def _parse(self, is_source, lang_rules):
    """
    Parse an INI file and create a stringset with all entries in the file.
    """
    content = self.content
    self.jformat = JoomlaIniVersion.create(self.content)
    self._find_linesep(content)
    comment = ""
    buf = ''
    for line in self._iter_by_line(content):
        # Skip empty lines and comments
        if not line or line.startswith(self.comment_chars):
            if is_source:
                buf += line + self.linesep
                if line.startswith(self.comment_chars):
                    comment = line[1:] + self.linesep
                else:
                    comment = ""
            continue
        try:
            source, trans = line.split('=', 1)
        except __HOLE__:
            # Maybe abort instead of skipping?
            logger.warning('Could not parse line "%s". Skipping...' % line)
            continue
        escaped_trans = self.jformat.get_translation(trans)
        if isinstance(self.jformat, JoomlaIniNew):
            trans = trans[1:-1]
        context = ""  # We use empty context

        if is_source:
            if not trans.strip():
                buf += line + self.linesep
                continue
            source_len = len(source)
            new_line = line[:source_len] + re.sub(
                re.escape(trans),
                "%(hash)s_tr" % {'hash': hash_tag(source, context)},
                line[source_len:]
            )
            buf += new_line + self.linesep
        elif not SourceEntity.objects.filter(resource=self.resource, string=source).exists()\
                or not escaped_trans.strip():
            # ignore keys with no translation
            context = ""
            continue
        self._add_translation_string(source, self._unescape(escaped_trans),
                                     context=context, comment=comment)
        comment = ""
    return buf[:buf.rfind(self.linesep)]
ValueError
dataset/ETHPy150Open rvanlaar/easy-transifex/src/transifex/transifex/resources/formats/joomla.py/JoomlaINIHandler._parse
339
def screenshot_init( sizes, keys ):
    """
    Replace curses_display.Screen and raw_display.Screen class with
    HtmlGenerator.

    Call this function before executing an application that uses
    curses_display.Screen to have that code use HtmlGenerator instead.

    sizes -- list of ( columns, rows ) tuples to be returned by each call
             to HtmlGenerator.get_cols_rows()
    keys -- list of lists of keys to be returned by each call to
            HtmlGenerator.get_input()

    Lists of keys may include "window resize" to force the application to
    call get_cols_rows and read a new screen size.

    For example, the following call will prepare an application to:
     1. start in 80x25 with its first call to get_cols_rows()
     2. take a screenshot when it calls draw_screen(..)
     3. simulate 5 "down" keys from get_input()
     4. take a screenshot when it calls draw_screen(..)
     5. simulate keys "a", "b", "c" and a "window resize"
     6. resize to 20x10 on its second call to get_cols_rows()
     7. take a screenshot when it calls draw_screen(..)
     8. simulate a "Q" keypress to quit the application

    screenshot_init( [ (80,25), (20,10) ],
        [ ["down"]*5, ["a","b","c","window resize"], ["Q"] ] )
    """
    try:
        for (row,col) in sizes:
            assert type(row) == int
            assert row>0 and col>0
    except (AssertionError, ValueError):
        raise Exception, "sizes must be in the form [ (col1,row1), (col2,row2), ...]"

    try:
        for l in keys:
            assert type(l) == list
            for k in l:
                assert type(k) == str
    except (__HOLE__, ValueError):
        raise Exception, "keys must be in the form [ [keyA1, keyA2, ..], [keyB1, ..], ...]"

    import curses_display
    curses_display.Screen = HtmlGenerator
    import raw_display
    raw_display.Screen = HtmlGenerator

    HtmlGenerator.sizes = sizes
    HtmlGenerator.keys = keys
AssertionError
dataset/ETHPy150Open AnyMesh/anyMesh-Python/example/urwid/html_fragment.py/screenshot_init
340
@webapi_check_login_required
def get(self, request, watched_obj_id, *args, **kwargs):
    try:
        q = self.get_queryset(request, *args, **kwargs)
        obj = self.get_watched_object(q, watched_obj_id, *args, **kwargs)
    except __HOLE__:
        return DOES_NOT_EXIST

    return HttpResponseRedirect(
        self.watched_resource.get_href(obj, request, *args, **kwargs))
ObjectDoesNotExist
dataset/ETHPy150Open reviewboard/reviewboard/reviewboard/webapi/resources/base_watched_object.py/BaseWatchedObjectResource.get
341
@webapi_check_local_site
@webapi_login_required
@webapi_response_errors(DOES_NOT_EXIST, NOT_LOGGED_IN, PERMISSION_DENIED)
@webapi_request_fields(required={
    'object_id': {
        'type': six.text_type,
        'description': 'The ID of the object to watch.',
    },
})
def create(self, request, object_id, *args, **kwargs):
    try:
        obj_kwargs = kwargs.copy()
        obj_kwargs[self.watched_resource.uri_object_key] = object_id
        obj = self.watched_resource.get_object(request, *args, **obj_kwargs)
        user = resources.user.get_object(request, *args, **kwargs)
    except __HOLE__:
        return DOES_NOT_EXIST

    if not resources.user.has_modify_permissions(request, user,
                                                 *args, **kwargs):
        return self.get_no_access_error(request)

    profile, profile_is_new = \
        Profile.objects.get_or_create(user=request.user)
    star = getattr(profile, self.star_function)
    star(obj)

    return 201, {
        self.item_result_key: obj,
    }
ObjectDoesNotExist
dataset/ETHPy150Open reviewboard/reviewboard/reviewboard/webapi/resources/base_watched_object.py/BaseWatchedObjectResource.create
342
@webapi_check_local_site
@webapi_login_required
def delete(self, request, watched_obj_id, *args, **kwargs):
    try:
        obj_kwargs = kwargs.copy()
        obj_kwargs[self.watched_resource.uri_object_key] = watched_obj_id
        obj = self.watched_resource.get_object(request, *args, **obj_kwargs)
        user = resources.user.get_object(request, *args, **kwargs)
    except __HOLE__:
        return DOES_NOT_EXIST

    if not resources.user.has_modify_permissions(request, user,
                                                 *args, **kwargs):
        return self.get_no_access_error(request)

    profile, profile_is_new = \
        Profile.objects.get_or_create(user=request.user)

    if not profile_is_new:
        unstar = getattr(profile, self.unstar_function)
        unstar(obj)

    return 204, {}
ObjectDoesNotExist
dataset/ETHPy150Open reviewboard/reviewboard/reviewboard/webapi/resources/base_watched_object.py/BaseWatchedObjectResource.delete
343
def render_GET(self, request):
    n = request.site.getNode()
    verifier = request.site.getVerifier()

    use_key = request.getHeader('x-verify-key')
    is_verified = True

    try:
        signature = request.args['signature'].pop()
        timestamp = request.args['timestamp'].pop()
        if verifier:
            is_verified = verifier.verify(use_key, timestamp, signature)
    except __HOLE__:
        pass

    if not is_verified:
        return resource.ForbiddenResource("Signature invalid")

    def generic_handler(msg, subId, request):
        request.setHeader('content-type', 'application/json')
        request.write(msg.toJson())
        request.write("\r\n")
        if request.clientproto == "HTTP/1.0":
            request.finish()

    for ev in request.args['event']:
        n.subscribe(ev, generic_handler, request)

    return server.NOT_DONE_YET
KeyError
dataset/ETHPy150Open selfsk/nodeset.core/src/nodeset/core/web.py/NodeSetSubscribe.render_GET
344
def render_POST(self, request):
    node = request.site.getNode()
    verifier = request.site.getVerifier()

    msg = request.args['message'].pop()
    ev = request.args['event'].pop()

    # by default all messages are verified
    is_verified = True

    try:
        signature = request.args['signature'].pop()
        if verifier:
            is_verified = verifier.verify(ev, msg, signature)
    except __HOLE__:
        pass

    if is_verified:
        node.publish(ev, msgClass=message.NodeMessage,
                     json=msg).addCallback(lambda _: request.finish())

    request.write("\r\n")
    return server.NOT_DONE_YET
KeyError
dataset/ETHPy150Open selfsk/nodeset.core/src/nodeset/core/web.py/NodeSetPublish.render_POST
345
def render(self, context):
    commands = []
    identity = get_identity(context, 'mixpanel')
    if identity is not None:
        if isinstance(identity, dict):
            commands.append(IDENTIFY_CODE
                            % identity.get('id', identity.get('username')))
            commands.append(IDENTIFY_PROPERTIES
                            % json.dumps(identity, sort_keys=True))
        else:
            commands.append(IDENTIFY_CODE % identity)
    try:
        name, properties = context[EVENT_CONTEXT_KEY]
        commands.append(EVENT_CODE % {'name': name,
                'properties': json.dumps(properties, sort_keys=True)})
    except __HOLE__:
        pass
    html = TRACKING_CODE % {'token': self._token,
                            'commands': " ".join(commands)}
    if is_internal_ip(context, 'MIXPANEL'):
        html = disable_html(html, 'Mixpanel')
    return mark_safe(html)
KeyError
dataset/ETHPy150Open jcassee/django-analytical/analytical/templatetags/mixpanel.py/MixpanelNode.render
346
def import_all_submodules(name):
    for app_config in apps.get_app_configs():
        app_module = app_config.module
        try:
            importlib.import_module('%s.%s' % (app_module.__name__, name))
        except __HOLE__:
            if module_has_submodule(app_module, name):
                raise
ImportError
dataset/ETHPy150Open thread/django-lightweight-queue/django_lightweight_queue/utils.py/import_all_submodules
347
def handle_app(self, app, **options):
    directory = os.getcwd()
    app_name = app.__name__.split('.')[-2]
    project_dir = os.path.join(directory, app_name)
    if not os.path.exists(project_dir):
        try:
            os.mkdir(project_dir)
        except __HOLE__, e:
            raise CommandError(e)

    copy_template('command_template', project_dir,
                  options.get('command_name'),
                  '%sCommand' % options.get('base_command'))
OSError
dataset/ETHPy150Open mozilla/inventory/vendor-local/src/django-extensions/build/lib/django_extensions/management/commands/create_command.py/Command.handle_app
348
def copy_template(template_name, copy_to, command_name, base_command):
    """copies the specified template directory to the copy_to location"""
    import django_extensions
    import re
    import shutil

    template_dir = os.path.join(django_extensions.__path__[0], 'conf', template_name)

    handle_method = "handle(self, *args, **options)"
    if base_command == 'AppCommand':
        handle_method = "handle_app(self, app, **options)"
    elif base_command == 'LabelCommand':
        handle_method = "handle_label(self, label, **options)"
    elif base_command == 'NoArgsCommand':
        handle_method = "handle_noargs(self, **options)"

    # walks the template structure and copies it
    for d, subdirs, files in os.walk(template_dir):
        relative_dir = d[len(template_dir) + 1:]
        if relative_dir and not os.path.exists(os.path.join(copy_to, relative_dir)):
            os.mkdir(os.path.join(copy_to, relative_dir))
        for i, subdir in enumerate(subdirs):
            if subdir.startswith('.'):
                del subdirs[i]
        for f in files:
            if f.endswith('.pyc') or f.startswith('.DS_Store'):
                continue
            path_old = os.path.join(d, f)
            path_new = os.path.join(copy_to, relative_dir, f.replace('sample', command_name))
            if os.path.exists(path_new):
                path_new = os.path.join(copy_to, relative_dir, f)
                if os.path.exists(path_new):
                    continue
            path_new = path_new.rstrip(".tmpl")
            fp_old = open(path_old, 'r')
            fp_new = open(path_new, 'w')
            fp_new.write(fp_old.read().replace('{{ command_name }}', command_name).replace('{{ base_command }}', base_command).replace('{{ handle_method }}', handle_method))
            fp_old.close()
            fp_new.close()
            try:
                shutil.copymode(path_old, path_new)
                _make_writeable(path_new)
            except __HOLE__:
                sys.stderr.write(style.NOTICE("Notice: Couldn't set permission bits on %s. You're probably using an uncommon filesystem setup. No problem.\n" % path_new))
OSError
dataset/ETHPy150Open mozilla/inventory/vendor-local/src/django-extensions/build/lib/django_extensions/management/commands/create_command.py/copy_template
349
def readrc():
    """Attempts to acquire a config from $HOME/.hbclirc

    Failure to parse valid JSON will cause sys.exit(1)
    """
    path = os.environ['HOME'] + "/.hbclirc"
    if os.path.isfile(path) is False:
        print("Config file %s is not present, please create one with `config` first." % (path))
        sys.exit(1)

    f = open(path, 'r')
    config = f.read()
    f.close()

    try:
        config = json.loads(config)
        assert "user" in config
        assert "pass" in config
        assert "url" in config
        return config
    except (__HOLE__, AssertionError) as e:
        print("Unable to parse %s, is your config saved? Error: %s" % (path, e))
        sys.exit(1)
ValueError
dataset/ETHPy150Open emccode/heliosburn/heliosburn/hbcli/hbcli.py/readrc
350
def check_getitem(self, obj):
    # Be careful to index all dimensions, since we don't support
    # partial indexing yet.
    def yield_indices(obj):
        try:
            shape = obj.shape
        except AttributeError:
            shape = len(obj),
        for tup in np.ndindex(shape):
            # Simple 1d buffer-providing objects usually don't support
            # tuple indexing.
            if len(tup) == 1:
                yield tup[0]
            else:
                yield tup

    for i in yield_indices(obj):
        try:
            expected = obj[i]
        except (__HOLE__, TypeError):
            if isinstance(obj, memoryview):
                # The memoryview object doesn't support all codes yet,
                # fall back on the underlying object.
                expected = obj.obj[i]
            else:
                raise
        self.assertPreciseEqual(getitem_usecase(obj, i), expected)
NotImplementedError
dataset/ETHPy150Open numba/numba/numba/tests/test_buffer_protocol.py/TestBufferProtocol.check_getitem
351
def _compare_indexes_and_uniques(schema, tname, object_filters, conn_table,
                                 metadata_table, diffs, autogen_context, inspector):

    is_create_table = conn_table is None

    # 1a. get raw indexes and unique constraints from metadata ...
    metadata_unique_constraints = set(uq for uq in metadata_table.constraints
                                      if isinstance(uq, sa_schema.UniqueConstraint))
    metadata_indexes = set(metadata_table.indexes)

    # 1b. ... and from connection
    if conn_table is not None and hasattr(inspector, "get_unique_constraints"):
        try:
            conn_uniques = inspector.get_unique_constraints(tname)
        except (__HOLE__, NoSuchTableError):
            conn_uniques = []
    else:
        conn_uniques = []
    try:
        conn_indexes = inspector.get_indexes(tname)
    except NoSuchTableError:
        conn_indexes = []

    # 2. convert conn-level objects from raw inspector records
    # into schema objects
    conn_uniques = set(_make_unique_constraint(uq_def, conn_table)
                       for uq_def in conn_uniques)
    conn_indexes = set(_make_index(ix, conn_table) for ix in conn_indexes)

    # 3. give the dialect a chance to omit indexes and constraints that
    # we know are either added implicitly by the DB or that the DB
    # can't accurately report on
    autogen_context['context'].impl.\
        correct_for_autogen_constraints(
            conn_uniques, conn_indexes,
            metadata_unique_constraints,
            metadata_indexes
        )

    # 4. organize the constraints into "signature" collections, the
    # _constraint_sig() objects provide a consistent facade over both
    # Index and UniqueConstraint so we can easily work with them
    # interchangeably
    metadata_unique_constraints = set(_uq_constraint_sig(uq)
                                      for uq in metadata_unique_constraints)
    metadata_indexes = set(_ix_constraint_sig(ix) for ix in metadata_indexes)

    conn_unique_constraints = set(_uq_constraint_sig(uq) for uq in conn_uniques)
    conn_indexes = set(_ix_constraint_sig(ix) for ix in conn_indexes)

    # 5. index things by name, for those objects that have names
    metadata_names = dict(
        (c.name, c) for c in
        metadata_unique_constraints.union(metadata_indexes)
        if c.name is not None)

    conn_uniques_by_name = dict((c.name, c) for c in conn_unique_constraints)
    conn_indexes_by_name = dict((c.name, c) for c in conn_indexes)

    conn_names = dict((c.name, c) for c in
                      conn_unique_constraints.union(conn_indexes)
                      if c.name is not None)

    doubled_constraints = dict(
        (name, (conn_uniques_by_name[name], conn_indexes_by_name[name]))
        for name in set(conn_uniques_by_name).intersection(conn_indexes_by_name)
    )

    # 6. index things by "column signature", to help with unnamed unique
    # constraints.
    conn_uniques_by_sig = dict((uq.sig, uq) for uq in conn_unique_constraints)
    metadata_uniques_by_sig = dict(
        (uq.sig, uq) for uq in metadata_unique_constraints)
    metadata_indexes_by_sig = dict(
        (ix.sig, ix) for ix in metadata_indexes)
    unnamed_metadata_uniques = dict((uq.sig, uq) for uq in
                                    metadata_unique_constraints
                                    if uq.name is None)

    # assumptions:
    # 1. a unique constraint or an index from the connection *always*
    #    has a name.
    # 2. an index on the metadata side *always* has a name.
    # 3. a unique constraint on the metadata side *might* have a name.
    # 4. The backend may double up indexes as unique constraints and
    #    vice versa (e.g. MySQL, Postgresql)

    def obj_added(obj):
        if obj.is_index:
            diffs.append(("add_index", obj.const))
            log.info("Detected added index '%s' on %s",
                     obj.name, ', '.join(["'%s'" % obj.column_names]))
        else:
            if is_create_table:
                # unique constraints are created inline with table defs
                return
            diffs.append(("add_constraint", obj.const))
            log.info("Detected added unique constraint '%s' on %s",
                     obj.name, ', '.join(["'%s'" % obj.column_names]))

    def obj_removed(obj):
        if obj.is_index:
            diffs.append(("remove_index", obj.const))
            log.info("Detected removed index '%s' on '%s'", obj.name, tname)
        else:
            diffs.append(("remove_constraint", obj.const))
            log.info("Detected removed unique constraint '%s' on '%s'",
                     obj.name, tname)

    def obj_changed(old, new, msg):
        if old.is_index:
            log.info("Detected changed index '%s' on '%s':%s",
                     old.name, tname, ', '.join(msg))
            diffs.append(("remove_index", old.const))
            diffs.append(("add_index", new.const))
        else:
            log.info("Detected changed unique constraint '%s' on '%s':%s",
                     old.name, tname, ', '.join(msg))
            diffs.append(("remove_constraint", old.const))
            diffs.append(("add_constraint", new.const))

    for added_name in sorted(set(metadata_names).difference(conn_names)):
        obj = metadata_names[added_name]
        obj_added(obj)

    for existing_name in sorted(set(metadata_names).intersection(conn_names)):
        metadata_obj = metadata_names[existing_name]

        if existing_name in doubled_constraints:
            conn_uq, conn_idx = doubled_constraints[existing_name]
            if metadata_obj.is_index:
                conn_obj = conn_idx
            else:
                conn_obj = conn_uq
        else:
            conn_obj = conn_names[existing_name]

        if conn_obj.is_index != metadata_obj.is_index:
            obj_removed(conn_obj)
            obj_added(metadata_obj)
        else:
            msg = []
            if conn_obj.is_unique != metadata_obj.is_unique:
                msg.append(' unique=%r to unique=%r' % (
                    conn_obj.is_unique, metadata_obj.is_unique
                ))
            if conn_obj.sig != metadata_obj.sig:
                msg.append(' columns %r to %r' % (
                    conn_obj.sig, metadata_obj.sig
                ))

            if msg:
                obj_changed(conn_obj, metadata_obj, msg)

    for removed_name in sorted(set(conn_names).difference(metadata_names)):
        conn_obj = conn_names[removed_name]
        if not conn_obj.is_index and conn_obj.sig in unnamed_metadata_uniques:
            continue
        elif removed_name in doubled_constraints:
            if conn_obj.sig not in metadata_indexes_by_sig and \
                    conn_obj.sig not in metadata_uniques_by_sig:
                conn_uq, conn_idx = doubled_constraints[removed_name]
                obj_removed(conn_uq)
                obj_removed(conn_idx)
        else:
            obj_removed(conn_obj)

    for uq_sig in unnamed_metadata_uniques:
        if uq_sig not in conn_uniques_by_sig:
            obj_added(unnamed_metadata_uniques[uq_sig])
NotImplementedError
dataset/ETHPy150Open RoseOu/flasky/venv/lib/python2.7/site-packages/alembic/autogenerate/compare.py/_compare_indexes_and_uniques
352
def _make_directory(self, path):
    try:
        os.makedirs(path)
    except __HOLE__ as exc:
        if exc.errno == errno.EEXIST and os.path.isdir(path):
            pass
        else:
            raise
OSError
dataset/ETHPy150Open sdispater/orator/orator/commands/seeds/make_command.py/SeedersMakeCommand._make_directory
353
def _CommandsStart(unused_argv):
    """Main initialization.

    Calls __main__.main(), and then the command indicated by the first
    non-flag argument, or 'help' if no argument was given. (The command
    to execute if no flag is given can be changed via SetDefaultCommand).

    Only non-flag arguments are passed to main(). If main does not call
    sys.exit, the return value of the command is used as the exit status.
    """
    # The following is supposed to return after registering additional commands
    try:
        sys.modules['__main__'].main(GetCommandArgv())
    # If sys.exit was called, return with error code.
    except __HOLE__, e:
        sys.exit(e.code)
    except Exception, error:
        traceback.print_exc()  # Print a backtrace to stderr.
        ShortHelpAndExit('\nFATAL error in main: %s' % error)

    if len(GetCommandArgv()) > 1:
        command = GetCommand(command_required=True)
    else:
        command = GetCommandByName(_cmd_default)

    if command is None:
        ShortHelpAndExit("FATAL Command '%s' unknown" % _cmd_default)

    sys.exit(command.CommandRun(GetCommandArgv()))
SystemExit
dataset/ETHPy150Open google/google-apputils/google/apputils/appcommands.py/_CommandsStart
354
def wrap(self):
    try:
        os.remove(self.outputs[0].abspath(self.env))
    except __HOLE__:
        pass
    return old(self)
OSError
dataset/ETHPy150Open appcelerator-archive/poc-nodejs-desktop/Resources/nodejs/builds/linux/node/lib/node/wafadmin/Tools/ar.py/wrap
355
def test(self, options):
    if not options.capabilities.intersection(self.capabilities):
        return

    options.log.info('Testing %s.', self)
    if options.pretend:
        return

    module = None
    try:
        module = self.get_module(options.test_root)
    except __HOLE__:
        options.log.warning('No test exists for %s', self)
    except Exception:
        options.log.exception('Cannot load test for %s', self)
    if not module:
        return

    module_interactive = options.interactive

    if hasattr(module, '__noninteractive') and \
       getattr(module, '__noninteractive'):
        module_interactive = False

    if options.regression_check and \
       os.path.exists(self.get_regression_image_filename()):
        result = RegressionCheckTestResult(
            self, options.regression_tolerance)
        module_interactive = False
    elif options.regression_capture:
        result = RegressionCaptureTestResult(self)
    else:
        result = StandardTestResult(self)

    print ("Running Test: %s" % self)
    if module.__doc__:
        print module.__doc__
    print '-' * 78

    if module_interactive:
        raw_input('Press return to begin test...')
    suite = unittest.TestLoader().loadTestsFromModule(module)

    options.log.info('Begin unit tests for %s', self)
    suite(result)
    for failure in result.failures:
        options.log.error('Failure in %s', self)
        options.log.error(failure[1])
    for error in result.errors:
        options.log.error('Error in %s', self)
        options.log.error(error[1])
    options.log.info('%d tests run', result.testsRun)

    if (module_interactive and
            len(result.failures) == 0 and
            len(result.errors) == 0):
        print module.__doc__
        user_result = raw_input('[P]assed test, [F]ailed test: ')
        if user_result and user_result[0] in ('F', 'f'):
            print 'Enter failure description: '
            description = raw_input('> ')
            options.log.error('User marked fail for %s', self)
            options.log.error(description)
        else:
            options.log.info('User marked pass for %s', self)
            result.setUserPass()
IOError
dataset/ETHPy150Open ardekantur/pyglet/tests/test.py/TestCase.test
356
def main():
    capabilities = ['GENERIC']
    platform_capabilities = {
        'linux2': 'X11',
        'win32': 'WIN',
        'cygwin': 'WIN',
        'darwin': 'OSX'
    }
    if sys.platform in platform_capabilities:
        capabilities.append(platform_capabilities[sys.platform])

    script_root = os.path.dirname(__file__)
    plan_filename = os.path.normpath(os.path.join(script_root, 'plan.txt'))
    test_root = script_root

    op = optparse.OptionParser()
    op.usage = 'test.py [options] [components]'
    op.add_option('--plan', help='test plan file', default=plan_filename)
    op.add_option('--test-root', default=script_root,
                  help='directory containing test cases')
    op.add_option('--capabilities', help='selected test capabilities',
                  default=','.join(capabilities))
    op.add_option('--log-level', help='verbosity of logging',
                  default=10, type='int')
    op.add_option('--log-file', help='log to FILE', metavar='FILE',
                  default='pyglet.%d.log')
    op.add_option('--regression-path', metavar='DIR', default=regressions_path,
                  help='locate regression images in DIR')
    op.add_option('--regression-tolerance', type='int', default=2,
                  help='tolerance for comparing regression images')
    op.add_option('--regression-check', action='store_true',
                  help='enable image regression checks')
    op.add_option('--regression-capture', action='store_true',
                  help='enable image regression capture')
    op.add_option('--no-interactive', action='store_false', default=True,
                  dest='interactive', help='disable interactive prompting')
    op.add_option('--developer', action='store_true',
                  help='add DEVELOPER capability')
    op.add_option('--pretend', action='store_true',
                  help='print selected test cases only')

    options, args = op.parse_args()

    options.capabilities = set(options.capabilities.split(','))
    if options.developer:
        options.capabilities.add('DEVELOPER')

    if options.regression_capture:
        try:
            os.makedirs(regressions_path)
        except __HOLE__:
            pass

    if '%d' in options.log_file:
        i = 1
        while os.path.exists(options.log_file % i):
            i += 1
        options.log_file = options.log_file % i

    logging.basicConfig(filename=options.log_file, level=options.log_level)
    options.log = logging.getLogger()
    options.log.info('Beginning test at %s', time.ctime())
    options.log.info('Capabilities are: %s', ', '.join(options.capabilities))
    options.log.info('sys.platform = %s', sys.platform)
    options.log.info('Reading test plan from %s', options.plan)
    plan = TestPlan.from_file(options.plan)

    errors = False
    if args:
        components = []
        for arg in args:
            try:
                component = plan.names[arg]
                components.append(component)
            except KeyError:
                options.log.error('Unknown test case or section "%s"', arg)
                errors = True
    else:
        components = [plan.root]

    if not errors:
        print '-' * 78
        for component in components:
            component.test(options)
            print '-' * 78
OSError
dataset/ETHPy150Open ardekantur/pyglet/tests/test.py/main
357
def end_graphics():
    global _root_window, _canvas, _mouse_enabled
    try:
        try:
            sleep(1)
            if _root_window is not None:
                _root_window.destroy()
        except __HOLE__ as e:
            print 'Ending graphics raised an exception:', e
    finally:
        _root_window = None
        _canvas = None
        _mouse_enabled = 0
        _clear_keys()
SystemExit
dataset/ETHPy150Open rlpy/rlpy/rlpy/Domains/PacmanPackage/graphicsUtils.py/end_graphics
358
def ProcessClient(self, client):
    now = rdfvalue.RDFDatetime().Now()

    ping = client.Get(client.Schema.PING)
    if ping:
        for label in self.GetClientLabelsList(client):
            time_ago = now - ping
            pos = bisect.bisect(self._bins, time_ago.microseconds)

            # If clients are older than the last bin forget them.
            try:
                self._ValuesForLabel(label)[pos] += 1
            except __HOLE__:
                pass
IndexError
dataset/ETHPy150Open google/grr/grr/lib/flows/cron/system.py/LastAccessStats.ProcessClient
359
def run(self):
    errors = 0  # How many uncaught exceptions in a row we got.
    while self._run_state.is_running():
        try:
            try:
                line = self.__rewrite_tsdb_line(self.__queue.get(True, 5))
            except Empty:
                continue
            # It is important that we check is_running before we act upon any element
            # returned by the queue.  See the 'stop' method for details.
            if not self._run_state.is_running():
                continue
            self.__logger.info(line, metric_log_for_monitor=self.__monitor)
            while True:
                try:
                    line = self.__rewrite_tsdb_line(self.__queue.get(False))
                except Empty:
                    break
                if not self._run_state.is_running():
                    continue
                self.__logger.info(line, metric_log_for_monitor=self.__monitor)

            errors = 0  # We managed to do a successful iteration.
        except (ArithmeticError, EOFError, EnvironmentError, LookupError,
                __HOLE__):
            errors += 1
            if errors > self.__max_uncaught_exceptions:
                raise
            self.__error_logger.exception('Uncaught exception in SenderThread, ignoring')
            self._run_state.sleep_but_awaken_if_stopped(1)
            continue
ValueError
dataset/ETHPy150Open scalyr/scalyr-agent-2/scalyr_agent/builtin_monitors/linux_system_metrics.py/WriterThread.run
360
def __getitem__(self, element_name):
    '''
    Return the condition matching the given element name.

    Raises AttributeError if there is no matching condition.
    '''
    try:
        return next(item for item in self.conditions if item.element_name == element_name)
    except __HOLE__:
        raise AttributeError('No matching condition')
StopIteration
dataset/ETHPy150Open bodylabs/drf-to-s3/drf_to_s3/models.py/Policy.__getitem__
361
def cleanup_temp_files(turret, files):
    for f in files:
        try:
            os.remove(f)
        except __HOLE__ as e:
            print("Error while packaging turret %s" % turret['name'])
            print("Error: %s" % e)
IOError
dataset/ETHPy150Open TheGhouls/oct/oct/utilities/pack.py/cleanup_temp_files
362
def write_temp_files(turret, files):
    writed_files = []
    for f in files:
        try:
            with open(f['filename'], 'w') as fd:
                fd.write(f['content'])
            writed_files.append(f['filename'])
        except __HOLE__ as e:
            print("Error while packaging turret %s" % turret['name'])
            print("Error: %s" % e)
    return writed_files
IOError
dataset/ETHPy150Open TheGhouls/oct/oct/utilities/pack.py/write_temp_files
363
def query_class(QueryClass, Database):
    """
    Returns a custom django.db.models.sql.query.Query subclass that is
    appropriate for Oracle.

    The 'Database' module (cx_Oracle) is passed in here so that all the setup
    required to import it only needs to be done by the calling module.
    """
    global _classes
    try:
        return _classes[QueryClass]
    except __HOLE__:
        pass

    class OracleQuery(QueryClass):
        def __reduce__(self):
            """
            Enable pickling for this class (normal pickling handling doesn't
            work as Python can only pickle module-level classes by default).
            """
            if hasattr(QueryClass, '__getstate__'):
                assert hasattr(QueryClass, '__setstate__')
                data = self.__getstate__()
            else:
                data = self.__dict__
            return (unpickle_query_class, (QueryClass,), data)

        def resolve_columns(self, row, fields=()):
            # If this query has limit/offset information, then we expect the
            # first column to be an extra "_RN" column that we need to throw
            # away.
            if self.high_mark is not None or self.low_mark:
                rn_offset = 1
            else:
                rn_offset = 0
            index_start = rn_offset + len(self.extra_select.keys())
            values = [self.convert_values(v, None)
                      for v in row[rn_offset:index_start]]
            for value, field in map(None, row[index_start:], fields):
                values.append(self.convert_values(value, field))
            return values

        def convert_values(self, value, field):
            from django.db.models.fields import DateField, DateTimeField, \
                 TimeField, BooleanField, NullBooleanField, DecimalField, Field
            if isinstance(value, Database.LOB):
                value = value.read()
            # Oracle stores empty strings as null. We need to undo this in
            # order to adhere to the Django convention of using the empty
            # string instead of null, but only if the field accepts the
            # empty string.
            if value is None and isinstance(field, Field) and field.empty_strings_allowed:
                value = u''
            # Convert 1 or 0 to True or False
            elif value in (1, 0) and isinstance(field, (BooleanField, NullBooleanField)):
                value = bool(value)
            # Convert floats to decimals
            elif value is not None and isinstance(field, DecimalField):
                value = util.typecast_decimal(field.format_number(value))
            # cx_Oracle always returns datetime.datetime objects for
            # DATE and TIMESTAMP columns, but Django wants to see a
            # python datetime.date, .time, or .datetime.  We use the type
            # of the Field to determine which to cast to, but it's not
            # always available.
            # As a workaround, we cast to date if all the time-related
            # values are 0, or to time if the date is 1/1/1900.
            # This could be cleaned a bit by adding a method to the Field
            # classes to normalize values from the database (the to_python
            # method is used for validation and isn't what we want here).
            elif isinstance(value, Database.Timestamp):
                # In Python 2.3, the cx_Oracle driver returns its own
                # Timestamp object that we must convert to a datetime class.
                if not isinstance(value, datetime.datetime):
                    value = datetime.datetime(value.year, value.month, value.day,
                                              value.hour, value.minute,
                                              value.second, value.fsecond)
                if isinstance(field, DateTimeField):
                    # DateTimeField subclasses DateField so must be checked
                    # first.
                    pass
                elif isinstance(field, DateField):
                    value = value.date()
                elif isinstance(field, TimeField) or (value.year == 1900 and value.month == value.day == 1):
                    value = value.time()
                elif value.hour == value.minute == value.second == value.microsecond == 0:
                    value = value.date()
            return value

        def as_sql(self, with_limits=True, with_col_aliases=False):
            """
            Creates the SQL for this query. Returns the SQL string and list
            of parameters.  This is overridden from the original Query class
            to handle the additional SQL Oracle requires to emulate LIMIT
            and OFFSET.

            If 'with_limits' is False, any limit/offset information is not
            included in the query.
            """
            # The `do_offset` flag indicates whether we need to construct
            # the SQL needed to use limit/offset with Oracle.
            do_offset = with_limits and (self.high_mark is not None
                                         or self.low_mark)
            if not do_offset:
                sql, params = super(OracleQuery, self).as_sql(with_limits=False,
                        with_col_aliases=with_col_aliases)
            else:
                sql, params = super(OracleQuery, self).as_sql(with_limits=False,
                        with_col_aliases=True)

                # Wrap the base query in an outer SELECT * with boundaries on
                # the "_RN" column.  This is the canonical way to emulate LIMIT
                # and OFFSET on Oracle.
                high_where = ''
                if self.high_mark is not None:
                    high_where = 'WHERE ROWNUM <= %d' % (self.high_mark,)
                sql = 'SELECT * FROM (SELECT ROWNUM AS "_RN", "_SUB".* FROM (%s) "_SUB" %s) WHERE "_RN" > %d' % (sql, high_where, self.low_mark)

            return sql, params

    _classes[QueryClass] = OracleQuery
    return OracleQuery
KeyError
dataset/ETHPy150Open dcramer/django-compositepks/django/db/backends/oracle/query.py/query_class
364
def make_xy(self, on, off):
    x = []
    y = []
    for o in on:
        try:
            x.append(self.extractor(*o).reshape(1, -1))
            y.append(1)
        except ValueError:
            pass
    for o in off:
        try:
            x.append(self.extractor(*o).reshape(1, -1))
            y.append(0)
        except __HOLE__:
            pass

    x = np.vstack(x)
    y = np.hstack(y)

    # sklearn doesn't like non-finite values, which
    # occasionally popup
    if not np.isfinite(x).all():
        warnings.warn("Non-finite values in feature vectors. Fixing")
        x = np.nan_to_num(x)

    assert x.shape[0] == y.shape[0]
    assert x.ndim == 2
    assert y.ndim == 1

    return x, y
ValueError
dataset/ETHPy150Open ChrisBeaumont/brut/bubbly/model.py/Model.make_xy
365
def predict(self, params):
    if not hasattr(params[0], '__len__'):
        params = [params]

    x, y = self.make_xy(params, [])
    try:
        return self.classifier.predict(x)
    except __HOLE__:  # not yet fit
        # having an empty model predict 1
        # makes it convenient to generate
        # initial false positives
        return np.ones(len(params), dtype=np.int)
ValueError
dataset/ETHPy150Open ChrisBeaumont/brut/bubbly/model.py/Model.predict
366
def decision_function(self, x):
    """
    Compute the decision function for a list of stamp descriptions

    Parameters
    ----------
    x : List of stamp description tuples

    Returns
    -------
    An ndarray of the decision function for each feature extracted from x
    """
    result = np.empty(len(x)) * np.nan
    for i, ex in enumerate(x):
        try:
            X, _ = self.make_xy([ex], [])
        except __HOLE__ as e:
            continue
        X = X.reshape(1, -1)
        df = self.classifier.decision_function(X).ravel()
        result[i] = df
    return result
ValueError
dataset/ETHPy150Open ChrisBeaumont/brut/bubbly/model.py/Model.decision_function
367
def from_xml(self, xmlnode):
    """Initialize Delay object from an XML node.

    :Parameters:
        - `xmlnode`: the jabber:x:delay XML element.
    :Types:
        - `xmlnode`: `libxml2.xmlNode`"""
    if xmlnode.type != "element":
        raise ValueError("XML node is not a jabber:x:delay element (not an element)")
    ns = get_node_ns_uri(xmlnode)
    if ns and ns != DELAY_NS or xmlnode.name != "x":
        raise ValueError("XML node is not a jabber:x:delay element")
    stamp = xmlnode.prop("stamp")
    if stamp.endswith("Z"):
        stamp = stamp[:-1]
    if "-" in stamp:
        stamp = stamp.split("-", 1)[0]
    try:
        tm = time.strptime(stamp, "%Y%m%dT%H:%M:%S")
    except __HOLE__:
        raise BadRequestProtocolError("Bad timestamp")
    tm = tm[0:8] + (0,)
    self.timestamp = datetime.datetime.fromtimestamp(time.mktime(tm))
    delay_from = from_utf8(xmlnode.prop("from"))
    if delay_from:
        try:
            self.delay_from = JID(delay_from)
        except JIDError:
            raise JIDMalformedProtocolError("Bad JID in the jabber:x:delay 'from' attribute")
    else:
        self.delay_from = None
    self.reason = from_utf8(xmlnode.getContent())
ValueError
dataset/ETHPy150Open kuri65536/python-for-android/python3-alpha/python-libs/pyxmpp2/ext/delay.py/Delay.from_xml
368
def put_file(self, key, file, *args, **kwargs):
    bufsize = 1024 * 1024
    phash = self.hashfunc()

    if not key:
        if isinstance(file, str):
            with open(file, 'rb') as source:
                while True:
                    buf = source.read(bufsize)
                    phash.update(buf)
                    if len(buf) < bufsize:
                        break

                return self._dstore.put_file(
                    self._template.format(phash.hexdigest()),
                    file, *args, **kwargs)
        else:
            tmpfile = tempfile.NamedTemporaryFile(delete=False)
            try:
                while True:
                    buf = file.read(bufsize)
                    phash.update(buf)
                    tmpfile.write(buf)
                    if len(buf) < bufsize:
                        break

                tmpfile.close()
                return self._dstore.put_file(
                    self._template.format(phash.hexdigest()),
                    tmpfile.name, *args, **kwargs
                )
            finally:
                try:
                    os.unlink(tmpfile.name)
                except __HOLE__ as e:
                    if 2 == e.errno:
                        pass  # file already gone
                    else:
                        raise

    return self._dstore.put_file(key, file, *args, **kwargs)
OSError
dataset/ETHPy150Open mbr/simplekv/simplekv/idgen.py/HashDecorator.put_file
369
@mock.patch("bentomakerlib.bentomaker.pprint", lambda color, s, fout=None: None) def test_simple(self): errors = ( (UsageException, 2), (ParseError, 2), (ConvertionError, 2), (CommandExecutionFailure, 2), (bento.errors.ConfigurationError, 2), (bento.errors.BuildError, 2), (bento.errors.InvalidPackage, 2), (Exception, 1), ) for klass, error_code in errors: old_main = bentomakerlib.bentomaker.main bentomakerlib.bentomaker.main = lambda argv: raise_function(klass) try: try: noexc_main() except __HOLE__: e = extract_exception() self.assertEqual(e.code, error_code, "Expected error code %d for exception type(%r)" % \ (error_code, klass)) finally: bentomakerlib.bentomaker.main = old_main
SystemExit
dataset/ETHPy150Open cournape/Bento/bentomakerlib/tests/test_bentomaker.py/TestBentomakerError.test_simple
370
@signalcommand
def handle_noargs(self, **options):
    if not settings.DEBUG:
        raise CommandError('Only available in debug mode')

    try:
        from django.contrib.auth import get_user_model  # Django 1.5
    except __HOLE__:
        from django_extensions.future_1_5 import get_user_model

    if options.get('prompt_passwd', False):
        from getpass import getpass
        passwd = getpass('Password: ')
        if not passwd:
            raise CommandError('You must enter a valid password')
    else:
        passwd = options.get('default_passwd', DEFAULT_FAKE_PASSWORD)

    User = get_user_model()
    user = User()
    user.set_password(passwd)
    count = User.objects.all().update(password=user.password)

    print('Reset %d passwords' % count)
ImportError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/django-extensions-1.5.0/django_extensions/management/commands/set_fake_passwords.py/Command.handle_noargs
371
@staticmethod
def fork_process(name, port=9199, config=""):
    cmd = "juba" + name
    args = [cmd, "--rpc-port", str(port), "--configpath", config, "--thread", "100", "--datadir", "."]
    try:
        if TestUtil.check_server(port):
            raise Exception('Another server is already running')
        proc = subprocess.Popen(args,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE,
                                close_fds=True)  # use PIPE to suppress log messages of server processes
        try:
            TestUtil.wait_server(port, proc)
        except Exception:
            proc.kill()
            raise
        if proc.poll():
            stderr = proc.stderr.read()
            raise Exception('Cannot run server process: \n' + stderr)
        return proc
    except __HOLE__ as error:
        print('Unable to fork. Error: {0} ({1})'.format(error.errno, error.strerror))
        raise error
OSError
dataset/ETHPy150Open jubatus/jubatus-python-client/test/jubatus_test/test_util.py/TestUtil.fork_process
372
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def update(self):
    """Get the latest data from aREST device."""
    try:
        if self._pin is None:
            response = requests.get(self._resource, timeout=10)
            self.data = response.json()['variables']
        else:
            try:
                if str(self._pin[0]) == 'A':
                    response = requests.get('{}/analog/{}'.format(
                        self._resource, self._pin[1:]), timeout=10)
                    self.data = {'value': response.json()['return_value']}
                else:
                    _LOGGER.error("Wrong pin naming. "
                                  "Please check your configuration file.")
            except __HOLE__:
                response = requests.get('{}/digital/{}'.format(
                    self._resource, self._pin), timeout=10)
                self.data = {'value': response.json()['return_value']}
    except requests.exceptions.ConnectionError:
        _LOGGER.error("No route to device %s. Is device offline?",
                      self._resource)
        self.data = {'error': 'error fetching'}
TypeError
dataset/ETHPy150Open home-assistant/home-assistant/homeassistant/components/sensor/arest.py/ArestData.update
373
def _split_geo_point(self, geo_point):
    """splits the geo point into lat and lon"""
    try:
        return geo_point.split(',')
    except (__HOLE__, ValueError):
        m = 'Expected a "lat,long" formatted string; received %s (a %s).'
        raise exceptions.ValidationError(m % (geo_point, typename(geo_point)))
AttributeError
dataset/ETHPy150Open madisona/django-google-maps/django_google_maps/fields.py/GeoPt._split_geo_point
374
def _validate_geo_range(self, geo_part, range_val):
    try:
        geo_part = float(geo_part)
        if abs(geo_part) > range_val:
            m = 'Must be between -%s and %s; received %s'
            raise exceptions.ValidationError(m % (range_val, range_val, geo_part))
    except (__HOLE__, ValueError):
        raise exceptions.ValidationError(
            'Expected float, received %s (a %s).' % (geo_part, typename(geo_part))
        )
    return geo_part
TypeError
dataset/ETHPy150Open madisona/django-google-maps/django_google_maps/fields.py/GeoPt._validate_geo_range
375
def test_get_urls_throws_type_error(self):
    with self.assertRaises(TypeError):
        try:
            admin_instance = ModelAdmin2(BigThing, Admin2)
            admin_instance.views = [views.AdminView(None, None, None)]
            admin_instance.get_urls()
        except __HOLE__ as e:
            message = u"Cannot instantiate admin view " \
                '"ModelAdmin2.None". The error that got raised was: ' \
                "'NoneType' object has no attribute 'as_view'"
            self.assertEqual(e.args[0], message)
            raise
TypeError
dataset/ETHPy150Open pydanny/django-admin2/djadmin2/tests/test_types.py/ModelAdminTest.test_get_urls_throws_type_error
376
@staticmethod
def generate_sender():
    """
        Generate a sender for a new form
    """
    try:
        user_id = current.auth.user.id
    except __HOLE__:
        return ""
    return "%s/%d" % (current.xml.domain, user_id)

# -------------------------------------------------------------------------
AttributeError
dataset/ETHPy150Open sahana/eden/modules/s3db/cap.py/S3CAPModel.generate_sender
377
@staticmethod
def list_string_represent(string, fmt=lambda v: v):
    try:
        if isinstance(string, list):
            return ", ".join([fmt(i) for i in string])
        elif isinstance(string, basestring):
            return ", ".join([fmt(i) for i in string[1:-1].split("|")])
    except __HOLE__:
        return current.messages.UNKNOWN_OPT
    return ""

# -------------------------------------------------------------------------
IndexError
dataset/ETHPy150Open sahana/eden/modules/s3db/cap.py/S3CAPModel.list_string_represent
378
def cap_gis_location_xml_post_parse(element, record):
    """
    UNUSED - done in XSLT

    Convert CAP polygon representation to WKT; extract circle lat lon.
    Latitude and longitude in CAP are expressed as signed decimal values in
    coordinate pairs:
        latitude,longitude
    The circle text consists of:
        latitude,longitude radius
    where the radius is in km.
    Polygon text consists of a space separated sequence of at least 4
    coordinate pairs where the first and last are the same.
        lat1,lon1 lat2,lon2 lat3,lon3 ... lat1,lon1
    """

    # @ToDo: Extract altitude and ceiling from the enclosing <area>, and
    # compute an elevation value to apply to all enclosed gis_locations.

    cap_polygons = element.xpath("cap_polygon")
    if cap_polygons:
        cap_polygon_text = cap_polygons[0].text
        # CAP polygons and WKT have opposite separator conventions:
        # CAP has spaces between coordinate pairs and within pairs the
        # coordinates are separated by comma, and vice versa for WKT.
        # Unfortunately, CAP and WKT (as we use it) also have opposite
        # orders of lat and lon. CAP has lat lon, WKT has lon lat.
        # Both close the polygon by repeating the first point.
        cap_points_text = cap_polygon_text.split()
        cap_points = [cpoint.split(",") for cpoint in cap_points_text]
        # @ToDo: Should we try interpreting all the points as decimal numbers,
        # and failing validation if they're wrong?
        wkt_points = ["%s %s" % (cpoint[1], cpoint[0]) for cpoint in cap_points]
        wkt_polygon_text = "POLYGON ((%s))" % ", ".join(wkt_points)
        record.wkt = wkt_polygon_text
        return

    cap_circle_values = element.xpath("resource[@name='gis_location_tag']/data[@field='tag' and text()='cap_circle']/../data[@field='value']")
    if cap_circle_values:
        cap_circle_text = cap_circle_values[0].text
        coords, radius = cap_circle_text.split()
        lat, lon = coords.split(",")
        try:
            # If any of these fail to interpret as numbers, the circle was
            # badly formatted. For now, we don't try to fail validation,
            # but just don't set the lat, lon.
            lat = float(lat)
            lon = float(lon)
            radius = float(radius)
        except __HOLE__:
            return
        record.lat = lat
        record.lon = lon
        # Add a bounding box for the given radius, if it is not zero.
        if radius > 0.0:
            bbox = current.gis.get_bounds_from_radius(lat, lon, radius)
            record.lat_min = bbox["lat_min"]
            record.lon_min = bbox["lon_min"]
            record.lat_max = bbox["lat_max"]
            record.lon_max = bbox["lon_max"]

# =============================================================================
ValueError
dataset/ETHPy150Open sahana/eden/modules/s3db/cap.py/cap_gis_location_xml_post_parse
379
def cap_gis_location_xml_post_render(element, record):
    """
    UNUSED - done in XSLT

    Convert Eden WKT polygon (and eventually circle) representation to
    CAP format and provide them in the rendered s3xml.

    Not all internal formats have a parallel in CAP, but an effort is made
    to provide a resonable substitute:
    Polygons are supported.
    Circles that were read in from CAP (and thus carry the original CAP
    circle data) are supported.
    Multipolygons are currently rendered as their bounding box.
    Points are rendered as zero radius circles.

    Latitude and longitude in CAP are expressed as signed decimal values in
    coordinate pairs:
        latitude,longitude
    The circle text consists of:
        latitude,longitude radius
    where the radius is in km.
    Polygon text consists of a space separated sequence of at least 4
    coordinate pairs where the first and last are the same.
        lat1,lon1 lat2,lon2 lat3,lon3 ... lat1,lon1
    """

    # @ToDo: Can we rely on gis_feature_type == 3 to tell if the location is a
    # polygon, or is it better to look for POLYGON in the wkt? For now, check
    # both.
    # @ToDo: CAP does not support multipolygons. Do we want to extract their
    # outer polygon if passed MULTIPOLYGON wkt? For now, these are exported
    # with their bounding box as the polygon.
    # @ToDo: What if a point (gis_feature_type == 1) that is not a CAP circle
    # has a non-point bounding box? Should it be rendered as a polygon for
    # the bounding box?

    try:
        from lxml import etree
    except:
        # This won't fail, since we're in the middle of processing xml.
        return

    SubElement = etree.SubElement

    s3xml = current.xml
    TAG = s3xml.TAG
    RESOURCE = TAG["resource"]
    DATA = TAG["data"]
    ATTRIBUTE = s3xml.ATTRIBUTE
    NAME = ATTRIBUTE["name"]
    FIELD = ATTRIBUTE["field"]
    VALUE = ATTRIBUTE["value"]

    loc_tablename = "gis_location"
    tag_tablename = "gis_location_tag"
    tag_fieldname = "tag"
    val_fieldname = "value"
    polygon_tag = "cap_polygon"
    circle_tag = "cap_circle"
    fallback_polygon_tag = "cap_polygon_fallback"
    fallback_circle_tag = "cap_circle_fallback"

    def __cap_gis_location_add_polygon(element, cap_polygon_text, fallback=False):
        """
        Helper for cap_gis_location_xml_post_render that adds the CAP polygon
        data to the current element in a gis_location_tag element.
        """
        # Make a gis_location_tag.
        tag_resource = SubElement(element, RESOURCE)
        tag_resource.set(NAME, tag_tablename)
        tag_field = SubElement(tag_resource, DATA)
        # Add tag and value children.
        tag_field.set(FIELD, tag_fieldname)
        if fallback:
            tag_field.text = fallback_polygon_tag
        else:
            tag_field.text = polygon_tag
        val_field = SubElement(tag_resource, DATA)
        val_field.set(FIELD, val_fieldname)
        val_field.text = cap_polygon_text

    def __cap_gis_location_add_circle(element, lat, lon, radius, fallback=False):
        """
        Helper for cap_gis_location_xml_post_render that adds CAP circle
        data to the current element in a gis_location_tag element.
        """
        # Make a gis_location_tag.
        tag_resource = SubElement(element, RESOURCE)
        tag_resource.set(NAME, tag_tablename)
        tag_field = SubElement(tag_resource, DATA)
        # Add tag and value children.
        tag_field.set(FIELD, tag_fieldname)
        if fallback:
            tag_field.text = fallback_circle_tag
        else:
            tag_field.text = circle_tag
        val_field = SubElement(tag_resource, DATA)
        val_field.set(FIELD, val_fieldname)
        # Construct a CAP circle string: latitude,longitude radius
        cap_circle_text = "%s,%s %s" % (lat, lon, radius)
        val_field.text = cap_circle_text

    # Sort out the geometry case by wkt, CAP tags, gis_feature_type, bounds,...
    # Check the two cases for CAP-specific locations first, as those will have
    # definite export values. For others, we'll attempt to produce either a
    # circle or polygon: Locations with a bounding box will get a box polygon,
    # points will get a zero-radius circle.

    # Currently wkt is stripped out of gis_location records right here:
    # https://github.com/flavour/eden/blob/master/modules/s3/s3resource.py#L1332
    # https://github.com/flavour/eden/blob/master/modules/s3/s3resource.py#L1426
    # https://github.com/flavour/eden/blob/master/modules/s3/s3resource.py#L3152
    # Until we provide a way to configure that choice, this will not work for
    # polygons.
    wkt = record.get("wkt", None)

    # WKT POLYGON: Although there is no WKT spec, according to every reference
    # that deals with nested polygons, the outer, enclosing, polygon must be
    # listed first. Hence, we extract only the first polygon, as CAP has no
    # provision for nesting.
    if wkt and wkt.startswith("POLYGON"):
        # ToDo: Is it sufficient to test for adjacent (( to find the start of
        # the polygon, or might there be whitespace between them?
        start = wkt.find("((")
        end = wkt.find(")")
        if start >= 0 and end >= 0:
            polygon_text = wkt[start + 2 : end]
            points_text = polygon_text.split(",")
            points = [p.split() for p in points_text]
            cap_points_text = ["%s,%s" % (point[1], point[0]) for point in points]
            cap_polygon_text = " ".join(cap_points_text)
            __cap_gis_location_add_polygon(element, cap_polygon_text)
            return
        # Fall through if the wkt string was mal-formed.

    # CAP circle stored in a gis_location_tag with tag = cap_circle.
    # If there is a cap_circle tag, we don't need to do anything further, as
    # export.xsl will use it. However, we don't know if there is a cap_circle
    # tag...
    #
    # @ToDo: The export calls xml_post_render after processing a resource's
    # fields, but before its components are added as children in the xml tree.
    # If this were delayed til after the components were added, we could look
    # there for the cap_circle gis_location_tag record. Since xml_post_parse
    # isn't in use yet (except for this), maybe we could look at moving it til
    # after the components?
    #
    # For now, with the xml_post_render before components: We could do a db
    # query to check for a real cap_circle tag record, and not bother with
    # creating fallbacks from bounding box or point...but we don't have to.
    # Instead, just go ahead and add the fallbacks under different tag names,
    # and let the export.xsl sort them out. This only wastes a little time
    # compared to a db query.

    # ToDo: MULTIPOLYGON -- Can stitch together the outer polygons in the
    # multipolygon, but would need to assure all were the same handedness.

    # The remaining cases are for locations that don't have either polygon wkt
    # or a cap_circle tag.

    # Bounding box: Make a four-vertex polygon from the bounding box.
    # This is a fallback, as if there is a circle tag, we'll use that.
    lon_min = record.get("lon_min", None)
    lon_max = record.get("lon_max", None)
    lat_min = record.get("lat_min", None)
    lat_max = record.get("lat_max", None)
    if lon_min and lon_max and lat_min and lat_max and \
       (lon_min != lon_max) and (lat_min != lat_max):
        # Although there is no WKT requirement, arrange the points in
        # counterclockwise order. Recall format is:
        # lat1,lon1 lat2,lon2 ... latN,lonN, lat1,lon1
        cap_polygon_text = \
            "%(lat_min)s,%(lon_min)s %(lat_min)s,%(lon_max)s %(lat_max)s,%(lon_max)s %(lat_max)s,%(lon_min)s %(lat_min)s,%(lon_min)s" \
            % {"lon_min": lon_min,
               "lon_max": lon_max,
               "lat_min": lat_min,
               "lat_max": lat_max}
        __cap_gis_location_add_polygon(element, cap_polygon_text, fallback=True)
        return

    # WKT POINT or location with lat, lon: This can be rendered as a
    # zero-radius circle.
    # Q: Do we put bounding boxes around POINT locations, and are they
    # meaningful?
    lat = record.get("lat", None)
    lon = record.get("lon", None)
    if not lat or not lon:
        # Look for POINT.
        if wkt and wkt.startswith("POINT"):
            start = wkt.find("(")
            end = wkt.find(")")
            if start >= 0 and end >= 0:
                point_text = wkt[start + 2 : end]
                point = point_text.split()
                try:
                    lon = float(point[0])
                    lat = float(point[1])
                except __HOLE__:
                    pass
    if lat and lon:
        # Add a (fallback) circle with zero radius.
        __cap_gis_location_add_circle(element, lat, lon, 0, True)
        return

    # ToDo: Other WKT.

    # Did not find anything to use. Presumably the area has a text description.
    return

# =============================================================================
ValueError
dataset/ETHPy150Open sahana/eden/modules/s3db/cap.py/cap_gis_location_xml_post_render
380
def _to_time_inst(msg, rostype, inst=None):
    # Create an instance if we haven't been provided with one
    if rostype == "time" and msg == "now":
        return rospy.get_rostime()

    if inst is None:
        if rostype == "time":
            inst = rospy.rostime.Time()
        elif rostype == "duration":
            inst = rospy.rostime.Duration()
        else:
            return None

    # Copy across the fields
    for field in ["secs", "nsecs"]:
        try:
            if field in msg:
                setattr(inst, field, msg[field])
        except __HOLE__:
            continue

    return inst
TypeError
dataset/ETHPy150Open RobotWebTools/rosbridge_suite/rosbridge_library/src/rosbridge_library/internal/message_conversion.py/_to_time_inst
381
def from_pix(x):
    v = 0
    try:
        v = int(float(x.replace('px', '')))
    except __HOLE__:
        log.error('error parsing px', exc_info=True)
    return v
ValueError
dataset/ETHPy150Open dddomodossola/remi/remi/gui.py/from_pix
382
def remove_class(self, cls):
    try:
        self._classes.remove(cls)
    except __HOLE__:
        pass
ValueError
dataset/ETHPy150Open dddomodossola/remi/remi/gui.py/Tag.remove_class
383
def set_size(self, width, height):
    """Set the widget size.

    Args:
        width (int or str): An optional width for the widget (es. width=10 or width='10px' or width='10%').
        height (int or str): An optional height for the widget (es. height=10 or height='10px' or height='10%').
    """
    if width is not None:
        try:
            width = to_pix(int(width))
        except ValueError:
            # now we know w has 'px or % in it'
            pass
        self.style['width'] = width

    if height is not None:
        try:
            height = to_pix(int(height))
        except __HOLE__:
            # now we know w has 'px or % in it'
            pass
        self.style['height'] = height
ValueError
dataset/ETHPy150Open dddomodossola/remi/remi/gui.py/Widget.set_size
384
def set_enabled(self, enabled):
    """
    Enables or disables the Button.

    Args:
        enabled (bool): If true te button is enabled and so the user can press it.
    """
    if enabled:
        try:
            del self.attributes['disabled']
        except __HOLE__:
            pass
    else:
        self.attributes['disabled'] = None
KeyError
dataset/ETHPy150Open dddomodossola/remi/remi/gui.py/Button.set_enabled
385
def set_enabled(self, enabled):
    if enabled:
        try:
            del self.attributes['disabled']
        except __HOLE__:
            pass
    else:
        self.attributes['disabled'] = None
KeyError
dataset/ETHPy150Open dddomodossola/remi/remi/gui.py/Input.set_enabled
386
def set_read_only(self, readonly):
    if readonly:
        self.attributes['readonly'] = None
    else:
        try:
            del self.attributes['readonly']
        except __HOLE__:
            pass
KeyError
dataset/ETHPy150Open dddomodossola/remi/remi/gui.py/Input.set_read_only
387
def populate_folder_items(self, directory):
    def _sort_files(a, b):
        if os.path.isfile(a) and os.path.isdir(b):
            return 1
        elif os.path.isfile(b) and os.path.isdir(a):
            return -1
        else:
            try:
                if a[0] == '.':
                    a = a[1:]
                if b[0] == '.':
                    b = b[1:]
                return a.lower() > b.lower()
            except (IndexError, __HOLE__):
                return a > b

    log.debug("FileFolderNavigator - populate_folder_items")

    l = os.listdir(directory)
    l.sort(key=cmp_to_key(_sort_files))

    # used to restore a valid path after a wrong edit in the path editor
    self.lastValidPath = directory
    # we remove the container avoiding graphic update adding items
    # this speeds up the navigation
    self.remove_child(self.itemContainer)
    # creation of a new instance of a itemContainer
    self.itemContainer = Widget()
    self.itemContainer.set_layout_orientation(Widget.LAYOUT_VERTICAL)
    self.itemContainer.style['overflow-y'] = 'scroll'
    self.itemContainer.style['overflow-x'] = 'hidden'
    self.itemContainer.style['height'] = '300px'
    self.itemContainer.style['display'] = 'block'

    for i in l:
        full_path = os.path.join(directory, i)
        is_folder = not os.path.isfile(full_path)
        if (not is_folder) and (not self.allow_file_selection):
            continue
        fi = FileFolderItem(i, is_folder)
        fi.style['display'] = 'block'
        fi.set_on_click_listener(self, 'on_folder_item_click')  # navigation purpose
        fi.set_on_selection_listener(self, 'on_folder_item_selected')  # selection purpose
        self.folderItems.append(fi)
        self.itemContainer.append(fi)
    self.append(self.itemContainer, key='items')  # replace the old widget
ValueError
dataset/ETHPy150Open dddomodossola/remi/remi/gui.py/FileFolderNavigator.populate_folder_items
388
def flatten(inlist, type=type, ltype=(list, tuple), maxint=sys.maxint):
    """Flatten out a list, code developed by myself and modified
    by Tim Peters, then by me again :)"""
    try:
        # for every possible index
        for ind in xrange(maxint):
            # while that index currently holds a list
            while isinstance(inlist[ind], ltype):
                # expand that list into the index (and subsequent indicies)
                inlist[ind:ind+1] = list(inlist[ind])
            #ind = ind+1
    except __HOLE__:
        pass
    return inlist
IndexError
dataset/ETHPy150Open correl/Transmission-XBMC/resources/lib/basictypes/latebind.py/flatten
389
def find_datasets(apps):
    """
    Return a list of DataSet classes found on the received list of ``apps``.
    Since it's a search, ImportErrors are ignored.
    """
    fixtures = []
    for app in apps:
        try:
            fixtures.extend(get_datasets(app))
        except __HOLE__:
            pass
    return fixtures
ImportError
dataset/ETHPy150Open henriquebastos/django-fixturapp/src/fixturapp/management/commands/__init__.py/find_datasets
390
def _parse_signature(func):
    """Return a signature object for the function."""
    if hasattr(func, 'im_func'):
        func = func.im_func

    # if we have a cached validator for this function, return it
    parse = _signature_cache.get(func)
    if parse is not None:
        return parse

    # inspect the function signature and collect all the information
    positional, vararg_var, kwarg_var, defaults = inspect.getargspec(func)
    defaults = defaults or ()
    arg_count = len(positional)
    arguments = []
    for idx, name in enumerate(positional):
        if isinstance(name, list):
            raise TypeError('cannot parse functions that unpack tuples '
                            'in the function signature')
        try:
            default = defaults[idx - arg_count]
        except __HOLE__:
            param = (name, False, None)
        else:
            param = (name, True, default)
        arguments.append(param)
    arguments = tuple(arguments)

    def parse(args, kwargs):
        new_args = []
        missing = []
        extra = {}

        # consume as many arguments as positional as possible
        for idx, (name, has_default, default) in enumerate(arguments):
            try:
                new_args.append(args[idx])
            except IndexError:
                try:
                    new_args.append(kwargs.pop(name))
                except KeyError:
                    if has_default:
                        new_args.append(default)
                    else:
                        missing.append(name)
            else:
                if name in kwargs:
                    extra[name] = kwargs.pop(name)

        # handle extra arguments
        extra_positional = args[arg_count:]
        if vararg_var is not None:
            new_args.extend(extra_positional)
            extra_positional = ()
        if kwargs and kwarg_var is None:
            extra.update(kwargs)
            kwargs = {}

        return new_args, kwargs, missing, extra, extra_positional, \
               arguments, vararg_var, kwarg_var
    _signature_cache[func] = parse
    return parse
IndexError
dataset/ETHPy150Open GoogleCloudPlatform/appengine-flask-skeleton/lib/werkzeug/_internal.py/_parse_signature
391
def __get__(self, obj, type=None):
    if obj is None:
        return self
    storage = self.lookup(obj)
    if self.name not in storage:
        return self.default
    rv = storage[self.name]
    if self.load_func is not None:
        try:
            rv = self.load_func(rv)
        except (ValueError, __HOLE__):
            rv = self.default
    return rv
TypeError
dataset/ETHPy150Open GoogleCloudPlatform/appengine-flask-skeleton/lib/werkzeug/_internal.py/_DictAccessorProperty.__get__
392
def write_dist_info(self, site_pkgs):
    """Write dist-info folder, according to PEP 376"""
    metadata = common.make_metadata(self.module, self.ini_info)
    dist_info = pathlib.Path(site_pkgs) / '{}-{}.dist-info'.format(
        metadata.name, metadata.version)
    try:
        dist_info.mkdir()
    except FileExistsError:
        shutil.rmtree(str(dist_info))
        dist_info.mkdir()

    with (dist_info / 'METADATA').open('w', encoding='utf-8') as f:
        metadata.write_metadata_file(f)
    self.installed_files.append(dist_info / 'METADATA')

    with (dist_info / 'INSTALLER').open('w') as f:
        f.write('flit')
    self.installed_files.append(dist_info / 'INSTALLER')

    # We only handle explicitly requested installations
    with (dist_info / 'REQUESTED').open('w'):
        pass
    self.installed_files.append(dist_info / 'REQUESTED')

    if self.ini_info['entry_points_file'] is not None:
        shutil.copy(str(self.ini_info['entry_points_file']),
                    str(dist_info / 'entry_points.txt')
                    )
        self.installed_files.append(dist_info / 'entry_points.txt')

    with (dist_info / 'RECORD').open('w', encoding='utf-8') as f:
        cf = csv.writer(f)
        for path in self.installed_files:
            path = pathlib.Path(path)
            if path.is_symlink() or path.suffix in {'.pyc', '.pyo'}:
                hash, size = '', ''
            else:
                hash = 'sha256=' + common.hash_file(path)
                size = path.stat().st_size
            try:
                path = path.relative_to(site_pkgs)
            except __HOLE__:
                pass
            cf.writerow((path, hash, size))

        cf.writerow(((dist_info / 'RECORD').relative_to(site_pkgs), '', ''))
ValueError
dataset/ETHPy150Open takluyver/flit/flit/install.py/Installer.write_dist_info
393
def typeset(table, field):
    """
    Return a set containing all Python types found for values in the given
    field. E.g.::

        >>> import petl as etl
        >>> table = [['foo', 'bar', 'baz'],
        ...          ['A', 1, '2'],
        ...          ['B', u'2', '3.4'],
        ...          [u'B', u'3', '7.8', True],
        ...          ['D', u'xyz', 9.0],
        ...          ['E', 42]]
        >>> sorted(etl.typeset(table, 'foo'))
        ['str']
        >>> sorted(etl.typeset(table, 'bar'))
        ['int', 'str']
        >>> sorted(etl.typeset(table, 'baz'))
        ['NoneType', 'float', 'str']

    The `field` argument can be a field name or index (starting from zero).

    """

    s = set()
    for v in values(table, field):
        try:
            s.add(type(v).__name__)
        except __HOLE__:
            pass  # ignore short rows
    return s
IndexError
dataset/ETHPy150Open alimanfoo/petl/petl/util/misc.py/typeset
394
def __getattr__(self, name):
    try:
        return self[name]
    except __HOLE__:
        raise AttributeError(name)
KeyError
dataset/ETHPy150Open tevino/mongu/mongu.py/ObjectDict.__getattr__
395
def parse_special_header(self, linenum, info):
    """
    Parses part of a diff beginning at the specified line number, trying
    to find a special diff header. This usually occurs before the standard
    diff header.

    The line number returned is the line after the special header,
    which can be multiple lines long.
    """
    if linenum + 1 < len(self.lines) and \
       self.lines[linenum].startswith(b"Index: ") and \
       self.lines[linenum + 1] == self.INDEX_SEP:
        # This is an Index: header, which is common in CVS and Subversion,
        # amongst other systems.
        try:
            info['index'] = self.lines[linenum].split(None, 1)[1]
        except __HOLE__:
            raise DiffParserError("Malformed Index line", linenum)
        linenum += 2

    return linenum
ValueError
dataset/ETHPy150Open reviewboard/reviewboard/reviewboard/diffviewer/parser.py/DiffParser.parse_special_header
396
def parse_diff_header(self, linenum, info):
    """
    Parses part of a diff beginning at the specified line number, trying
    to find a standard diff header.

    The line number returned is the line after the special header,
    which can be multiple lines long.
    """
    if linenum + 1 < len(self.lines) and \
       ((self.lines[linenum].startswith(b'--- ') and
         self.lines[linenum + 1].startswith(b'+++ ')) or
        (self.lines[linenum].startswith(b'*** ') and
         self.lines[linenum + 1].startswith(b'--- ') and
         not self.lines[linenum].endswith(b" ****"))):
        # This is a unified or context diff header. Parse the
        # file and extra info.
        try:
            info['origFile'], info['origInfo'] = \
                self.parse_filename_header(self.lines[linenum][4:],
                                           linenum)
            linenum += 1

            info['newFile'], info['newInfo'] = \
                self.parse_filename_header(self.lines[linenum][4:],
                                           linenum)
            linenum += 1
        except __HOLE__:
            raise DiffParserError("The diff file is missing revision " +
                                  "information", linenum)

    return linenum
ValueError
dataset/ETHPy150Open reviewboard/reviewboard/reviewboard/diffviewer/parser.py/DiffParser.parse_diff_header
397
def _get_version(name, results):
    '''
    ``pkg search`` will return all packages for which the pattern is a
    match. Narrow this down and return the package version, or None if no
    exact match.
    '''
    for line in salt.utils.itertools.split(results, '\n'):
        if not line:
            continue
        try:
            pkgname, pkgver = line.rsplit('-', 1)
        except __HOLE__:
            continue
        if pkgname == name:
            return pkgver
    return None
ValueError
dataset/ETHPy150Open saltstack/salt/salt/modules/pkgng.py/_get_version
398
def list_pkgs(versions_as_list=False, jail=None, chroot=None,
              with_origin=False, **kwargs):
    '''
    List the packages currently installed as a dict::

        {'<package_name>': '<version>'}

    jail
        List the packages in the specified jail

    chroot
        List the packages in the specified chroot (ignored if ``jail`` is
        specified)

    with_origin : False
        Return a nested dictionary containing both the origin name and
        version for each installed package.

        .. versionadded:: 2014.1.0

    CLI Example:

    .. code-block:: bash

        salt '*' pkg.list_pkgs
        salt '*' pkg.list_pkgs jail=<jail name or id>
        salt '*' pkg.list_pkgs chroot=/path/to/chroot
    '''
    # not yet implemented or not applicable
    if any([salt.utils.is_true(kwargs.get(x))
            for x in ('removed', 'purge_desired')]):
        return {}

    versions_as_list = salt.utils.is_true(versions_as_list)
    contextkey_pkg = _contextkey(jail, chroot)
    contextkey_origins = _contextkey(jail, chroot, prefix='pkg.origin')

    if contextkey_pkg in __context__:
        ret = copy.deepcopy(__context__[contextkey_pkg])
        if not versions_as_list:
            __salt__['pkg_resource.stringify'](ret)
        if salt.utils.is_true(with_origin):
            origins = __context__.get(contextkey_origins, {})
            return dict([
                (x, {'origin': origins.get(x, ''), 'version': y})
                for x, y in six.iteritems(ret)
            ])
        return ret

    ret = {}
    origins = {}
    out = __salt__['cmd.run_stdout'](
        _pkg(jail, chroot) + ['info', '-ao'],
        output_loglevel='trace',
        python_shell=False)
    for line in salt.utils.itertools.split(out, '\n'):
        if not line:
            continue
        try:
            pkg, origin = line.split()
            pkgname, pkgver = pkg.rsplit('-', 1)
        except __HOLE__:
            continue
        __salt__['pkg_resource.add_pkg'](ret, pkgname, pkgver)
        origins[pkgname] = origin

    __salt__['pkg_resource.sort_pkglist'](ret)
    __context__[contextkey_pkg] = copy.deepcopy(ret)
    __context__[contextkey_origins] = origins
    if not versions_as_list:
        __salt__['pkg_resource.stringify'](ret)
    if salt.utils.is_true(with_origin):
        return dict([
            (x, {'origin': origins.get(x, ''), 'version': y})
            for x, y in six.iteritems(ret)
        ])
    return ret
ValueError
dataset/ETHPy150Open saltstack/salt/salt/modules/pkgng.py/list_pkgs
399
def _get_available_choices(self, queryset, value):
    """
    get possible choices for selection
    """
    item = queryset.filter(pk=value).first()
    if item:
        try:
            pk = getattr(item, self.chained_model_field + "_id")
            filter = {self.chained_model_field: pk}
        except __HOLE__:
            try:  # maybe m2m?
                pks = getattr(item, self.chained_model_field).all().values_list('pk', flat=True)
                filter = {self.chained_model_field + "__in": pks}
            except AttributeError:
                try:  # maybe a set?
                    pks = getattr(item, self.chained_model_field + "_set").all().values_list('pk', flat=True)
                    filter = {self.chained_model_field + "__in": pks}
                except:  # give up
                    filter = {}
        filtered = list(get_model(self.to_app_name, self.to_model_name).objects.filter(**filter).distinct())
        sort_results(filtered)
    else:  # invalid value for queryset
        filtered = []
    return filtered
AttributeError
dataset/ETHPy150Open digi604/django-smart-selects/smart_selects/widgets.py/ChainedSelect._get_available_choices