Dataset schema (4 columns):

    Unnamed: 0   int64    row index, values 0 to 10k
    function     string   Python source containing one __HOLE__ marker, lengths 79 to 138k
    label        string   20 classes: the exception type that fills __HOLE__
    info         string   source path of the function, lengths 42 to 261
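Each record below follows this schema: an index line, the masked function, the
label (the exception class behind __HOLE__), and the source path. As a minimal
sketch of how such a dump can be consumed, assuming a CSV export with the
columns above (the file name "exceptions.csv" and the pandas-based loading are
illustrative assumptions, not part of this dump):

    import pandas as pd

    # Hypothetical file name; any export with the four columns above works.
    df = pd.read_csv("exceptions.csv")

    # Every function field holds exactly one __HOLE__ marker; the label
    # names the exception class that belongs there.
    sample = df.iloc[0]
    assert "__HOLE__" in sample["function"]
    restored = sample["function"].replace("__HOLE__", sample["label"])
    print(sample["label"], sample["info"])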
500
def what(self, page, args):
    level = int(args.get('level', 1))
    if page.level + 1 == level:
        return page.get_absolute_url()
    elif page.level + 1 < level:
        return '#'
    try:
        return page.get_ancestors()[level - 1].get_absolute_url()
    except __HOLE__:
        return '#'
IndexError
dataset/ETHPy150Open django-leonardo/django-leonardo/leonardo/module/nav/templatetags/webcms_nav_tags.py/ParentLinkNode.what
501
@register.filter
def is_parent_of(page1, page2):
    """
    Determines whether a given page is the parent of another page

    Example::

        {% if page|is_parent_of:feincms_page %} ... {% endif %}
    """
    try:
        return page1.tree_id == page2.tree_id and page1.lft < page2.lft and page1.rght > page2.rght
    except __HOLE__:
        return False

# ------------------------------------------------------------------------
AttributeError
dataset/ETHPy150Open django-leonardo/django-leonardo/leonardo/module/nav/templatetags/webcms_nav_tags.py/is_parent_of
502
@register.filter
def is_equal_or_parent_of(page1, page2):
    try:
        return page1.tree_id == page2.tree_id and page1.lft <= page2.lft and page1.rght >= page2.rght
    except __HOLE__:
        return False

# ------------------------------------------------------------------------
AttributeError
dataset/ETHPy150Open django-leonardo/django-leonardo/leonardo/module/nav/templatetags/webcms_nav_tags.py/is_equal_or_parent_of
503
@register.filter
def is_sibling_of(page1, page2):
    """
    Determines whether a given page is a sibling of another page

    ::

        {% if page|is_sibling_of:feincms_page %} ... {% endif %}
    """
    try:
        return page1.parent_id == page2.parent_id
    except __HOLE__:
        return False

# ------------------------------------------------------------------------
AttributeError
dataset/ETHPy150Open django-leonardo/django-leonardo/leonardo/module/nav/templatetags/webcms_nav_tags.py/is_sibling_of
504
def find_django_migrations_module(module_name):
    """ Tries to locate <module_name>.migrations_django (without actually
    importing it). Appends either ".migrations_django" or ".migrations" to
    module_name. For details why:
    https://docs.djangoproject.com/en/1.7/topics/migrations/#libraries-third-party-apps
    """
    import imp
    try:
        module_info = imp.find_module(module_name)
        module = imp.load_module(module_name, *module_info)
        imp.find_module('migrations_django', module.__path__)
        return module_name + '.migrations_django'
    except __HOLE__:
        return module_name + '.migrations'  # conforms to Django 1.7 defaults
ImportError
dataset/ETHPy150Open jrief/djangocms-cascade/examples/bs3demo/utils.py/find_django_migrations_module
505
def targettype(x):
    try:
        _target = int(x)

        def infinite_iterator():
            return (_target for _ in iter(int, 1))
        return infinite_iterator
    except __HOLE__:
        return util.load_obj_from_path(x, prefix='relay.plugins')
ValueError
dataset/ETHPy150Open sailthru/relay/relay/argparse_shared.py/targettype
506
def clean(self, value):
    value = super(TagField, self).clean(value)
    try:
        return parse_tags(value)
    except __HOLE__:
        raise forms.ValidationError(
            _("Please provide a comma-separated list of tags."))
ValueError
dataset/ETHPy150Open alex/django-taggit/taggit/forms.py/TagField.clean
507
def plot_epochs(epochs, picks=None, scalings=None, n_epochs=20,
                n_channels=20, title=None, show=True, block=False):
    """ Visualize epochs

    Bad epochs can be marked with a left click on top of the epoch. Bad
    channels can be selected by clicking the channel name on the left side of
    the main axes. Calling this function drops all the selected bad epochs as
    well as bad epochs marked beforehand with rejection parameters.

    Parameters
    ----------
    epochs : instance of Epochs
        The epochs object
    picks : array-like of int | None
        Channels to be included. If None only good data channels are used.
        Defaults to None
    scalings : dict | None
        Scale factors for the traces. If None, defaults to::

            dict(mag=1e-12, grad=4e-11, eeg=20e-6, eog=150e-6, ecg=5e-4,
                 emg=1e-3, ref_meg=1e-12, misc=1e-3, stim=1, resp=1,
                 chpi=1e-4)

    n_epochs : int
        The number of epochs per view. Defaults to 20.
    n_channels : int
        The number of channels per view. Defaults to 20.
    title : str | None
        The title of the window. If None, epochs name will be displayed.
        Defaults to None.
    show : bool
        Show figure if True. Defaults to True
    block : bool
        Whether to halt program execution until the figure is closed.
        Useful for rejecting bad trials on the fly by clicking on an epoch.
        Defaults to False.

    Returns
    -------
    fig : Instance of matplotlib.figure.Figure
        The figure.

    Notes
    -----
    The arrow keys (up/down/left/right) can be used to navigate between
    channels and epochs and the scaling can be adjusted with - and + (or =)
    keys, but this depends on the backend matplotlib is configured to use
    (e.g., mpl.use(``TkAgg``) should work). Full screen mode can be toggled
    with f11 key. The amount of epochs and channels per view can be adjusted
    with home/end and page down/page up keys. Butterfly plot can be toggled
    with ``b`` key. Right mouse click adds a vertical line to the plot.
    """
    epochs.drop_bad()
    scalings = _handle_default('scalings_plot_raw', scalings)

    projs = epochs.info['projs']

    params = {'epochs': epochs,
              'info': copy.deepcopy(epochs.info),
              'bad_color': (0.8, 0.8, 0.8),
              't_start': 0,
              'histogram': None}
    params['label_click_fun'] = partial(_pick_bad_channels, params=params)
    _prepare_mne_browse_epochs(params, projs, n_channels, n_epochs, scalings,
                               title, picks)
    _prepare_projectors(params)
    _layout_figure(params)

    callback_close = partial(_close_event, params=params)
    params['fig'].canvas.mpl_connect('close_event', callback_close)
    try:
        plt_show(show, block=block)
    except __HOLE__:  # not all versions have this
        plt_show(show)

    return params['fig']
TypeError
dataset/ETHPy150Open mne-tools/mne-python/mne/viz/epochs.py/plot_epochs
508
def rest_command(func):
    def _rest_command(*args, **kwargs):
        try:
            msg = func(*args, **kwargs)
            return Response(content_type='application/json',
                            body=json.dumps(msg))

        except SyntaxError as e:
            status = 400
            details = e.msg
        except (ValueError, __HOLE__) as e:
            status = 400
            details = e.message

        except NotFoundError as msg:
            status = 404
            details = str(msg)

        msg = {REST_RESULT: REST_NG,
               REST_DETAILS: details}
        return Response(status=status, body=json.dumps(msg))

    return _rest_command
NameError
dataset/ETHPy150Open osrg/ryu/ryu/app/rest_router.py/rest_command
509
def set_data(self, vlan_id, param, waiters):
    vlan_routers = self._get_vlan_router(vlan_id)
    if not vlan_routers:
        vlan_routers = [self._add_vlan_router(vlan_id)]

    msgs = []
    for vlan_router in vlan_routers:
        try:
            msg = vlan_router.set_data(param)
            msgs.append(msg)
            if msg[REST_RESULT] == REST_NG:
                # Data setting is failure.
                self._del_vlan_router(vlan_router.vlan_id, waiters)
        except __HOLE__ as err_msg:
            # Data setting is failure.
            self._del_vlan_router(vlan_router.vlan_id, waiters)
            raise err_msg

    return {REST_SWITCHID: self.dpid_str,
            REST_COMMAND_RESULT: msgs}
ValueError
dataset/ETHPy150Open osrg/ryu/ryu/app/rest_router.py/Router.set_data
510
def _delete_address_data(self, address_id, waiters):
    if address_id != REST_ALL:
        try:
            address_id = int(address_id)
        except __HOLE__ as e:
            err_msg = 'Invalid [%s] value. %s'
            raise ValueError(err_msg % (REST_ADDRESSID, e.message))

    skip_ids = self._chk_addr_relation_route(address_id)

    # Get all flow.
    delete_list = []
    msgs = self.ofctl.get_all_flow(waiters)
    max_id = UINT16_MAX
    for msg in msgs:
        for stats in msg.body:
            vlan_id = VlanRouter._cookie_to_id(REST_VLANID, stats.cookie)
            if vlan_id != self.vlan_id:
                continue
            addr_id = VlanRouter._cookie_to_id(REST_ADDRESSID, stats.cookie)
            if addr_id in skip_ids:
                continue
            elif address_id == REST_ALL:
                if addr_id <= COOKIE_DEFAULT_ID or max_id < addr_id:
                    continue
            elif address_id != addr_id:
                continue
            delete_list.append(stats)

    delete_ids = []
    for flow_stats in delete_list:
        # Delete flow
        self.ofctl.delete_flow(flow_stats)

        address_id = VlanRouter._cookie_to_id(REST_ADDRESSID,
                                              flow_stats.cookie)

        del_address = self.address_data.get_data(addr_id=address_id)
        if del_address is not None:
            # Clean up suspend packet threads.
            self.packet_buffer.delete(del_addr=del_address)

            # Delete data.
            self.address_data.delete(address_id)
            if address_id not in delete_ids:
                delete_ids.append(address_id)

    msg = {}
    if delete_ids:
        delete_ids = ','.join(str(addr_id) for addr_id in delete_ids)
        details = 'Delete address [address_id=%s]' % delete_ids
        msg = {REST_RESULT: REST_OK, REST_DETAILS: details}

    if skip_ids:
        skip_ids = ','.join(str(addr_id) for addr_id in skip_ids)
        details = 'Skip delete (related route exist) [address_id=%s]'\
            % skip_ids
        if msg:
            msg[REST_DETAILS] += ', %s' % details
        else:
            msg = {REST_RESULT: REST_NG, REST_DETAILS: details}

    return msg
ValueError
dataset/ETHPy150Open osrg/ryu/ryu/app/rest_router.py/VlanRouter._delete_address_data
511
def _delete_routing_data(self, route_id, waiters):
    if route_id != REST_ALL:
        try:
            route_id = int(route_id)
        except __HOLE__ as e:
            err_msg = 'Invalid [%s] value. %s'
            raise ValueError(err_msg % (REST_ROUTEID, e.message))

    # Get all flow.
    msgs = self.ofctl.get_all_flow(waiters)

    delete_list = []
    for msg in msgs:
        for stats in msg.body:
            vlan_id = VlanRouter._cookie_to_id(REST_VLANID, stats.cookie)
            if vlan_id != self.vlan_id:
                continue
            rt_id = VlanRouter._cookie_to_id(REST_ROUTEID, stats.cookie)
            if route_id == REST_ALL:
                if rt_id == COOKIE_DEFAULT_ID:
                    continue
            elif route_id != rt_id:
                continue
            delete_list.append(stats)

    # Delete flow.
    delete_ids = []
    for flow_stats in delete_list:
        self.ofctl.delete_flow(flow_stats)

        route_id = VlanRouter._cookie_to_id(REST_ROUTEID,
                                            flow_stats.cookie)
        self.routing_tbl.delete(route_id)
        if route_id not in delete_ids:
            delete_ids.append(route_id)

        # case: Default route deleted. -> set flow (drop)
        route_type = get_priority_type(flow_stats.priority,
                                       vid=self.vlan_id)
        if route_type == PRIORITY_DEFAULT_ROUTING:
            self._set_defaultroute_drop()

    msg = {}
    if delete_ids:
        delete_ids = ','.join(str(route_id) for route_id in delete_ids)
        details = 'Delete route [route_id=%s]' % delete_ids
        msg = {REST_RESULT: REST_OK, REST_DETAILS: details}

    return msg
ValueError
dataset/ETHPy150Open osrg/ryu/ryu/app/rest_router.py/VlanRouter._delete_routing_data
512
def _get_send_port_ip(self, header_list):
    try:
        src_mac = header_list[ETHERNET].src
        if IPV4 in header_list:
            src_ip = header_list[IPV4].src
        else:
            src_ip = header_list[ARP].src_ip
    except __HOLE__:
        self.logger.debug('Receive unsupported packet.', extra=self.sw_id)
        return None

    address = self.address_data.get_data(ip=src_ip)
    if address is not None:
        return address.default_gw
    else:
        route = self.routing_tbl.get_data(gw_mac=src_mac)
        if route is not None:
            address = self.address_data.get_data(ip=route.gateway_ip)
            if address is not None:
                return address.default_gw

    self.logger.debug('Receive packet from unknown IP[%s].',
                      ip_addr_ntoa(src_ip), extra=self.sw_id)
    return None
KeyError
dataset/ETHPy150Open osrg/ryu/ryu/app/rest_router.py/VlanRouter._get_send_port_ip
513
def mask_ntob(mask, err_msg=None):
    try:
        return (UINT32_MAX << (32 - mask)) & UINT32_MAX
    except __HOLE__:
        msg = 'illegal netmask'
        if err_msg is not None:
            msg = '%s %s' % (err_msg, msg)
        raise ValueError(msg)
ValueError
dataset/ETHPy150Open osrg/ryu/ryu/app/rest_router.py/mask_ntob
514
def nw_addr_aton(nw_addr, err_msg=None):
    ip_mask = nw_addr.split('/')
    default_route = ip_addr_aton(ip_mask[0], err_msg=err_msg)
    netmask = 32
    if len(ip_mask) == 2:
        try:
            netmask = int(ip_mask[1])
        except __HOLE__ as e:
            if err_msg is not None:
                e.message = '%s %s' % (err_msg, e.message)
            raise ValueError(e.message)
    if netmask < 0:
        msg = 'illegal netmask'
        if err_msg is not None:
            msg = '%s %s' % (err_msg, msg)
        raise ValueError(msg)
    nw_addr = ipv4_apply_mask(default_route, netmask, err_msg)
    return nw_addr, netmask, default_route
ValueError
dataset/ETHPy150Open osrg/ryu/ryu/app/rest_router.py/nw_addr_aton
515
def has_role(user, roles):
    if user and user.is_superuser:
        return True

    if not isinstance(roles, list):
        roles = [roles]

    normalized_roles = []
    for role in roles:
        if not inspect.isclass(role):
            role = RolesManager.retrieve_role(role)

        normalized_roles.append(role)

    try:
        user_role = get_user_role(user)
    except __HOLE__:
        return False

    if not user_role:
        return False

    return user_role in normalized_roles
ObjectDoesNotExist
dataset/ETHPy150Open vintasoftware/django-role-permissions/rolepermissions/verifications.py/has_role
516
def _contents(self):
    """Internal version of contents() with no locking."""
    try:
        return self.stream().getvalue()
    except __HOLE__:
        return ''
AttributeError
dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/api/logservice/logservice.py/LogsBuffer._contents
517
@datastore_rpc._positional(0)
def fetch(start_time=None, end_time=None, offset=None,
          minimum_log_level=None, include_incomplete=False,
          include_app_logs=False, version_ids=None, **kwargs):
    """Returns an iterator yielding an application's request and application logs.

    Logs will be returned by the iterator in reverse chronological order by
    request end time, or by last flush time for requests still in progress (if
    requested). The items yielded are
    google.appengine.api.logservice.log_service_pb.RequestLog protocol buffer
    objects, the contents of which are accessible via method calls.

    All parameters are optional.

    Args:
      start_time: The earliest request completion or last-update time that
        results should be fetched for, in seconds since the Unix epoch.
      end_time: The latest request completion or last-update time that
        results should be fetched for, in seconds since the Unix epoch.
      offset: A byte string representing an offset into the log stream,
        extracted from a previously emitted RequestLog. This iterator will
        begin immediately after the record from which the offset came.
      minimum_log_level: An application log level which serves as a filter on
        the requests returned--requests with no application log at or above
        the specified level will be omitted. Works even if include_app_logs
        is not True. In ascending order, the available log levels are:
        logservice.LOG_LEVEL_DEBUG, logservice.LOG_LEVEL_INFO,
        logservice.LOG_LEVEL_WARNING, logservice.LOG_LEVEL_ERROR,
        and logservice.LOG_LEVEL_CRITICAL.
      include_incomplete: Whether or not to include requests that have started
        but not yet finished, as a boolean. Defaults to False.
      include_app_logs: Whether or not to include application level logs in
        the results, as a boolean. Defaults to False.
      version_ids: A list of version ids whose logs should be queried against.
        Defaults to the application's current version id only.

    Returns:
      An iterable object containing the logs that the user has queried for.

    Raises:
      InvalidArgumentError: Raised if any of the input parameters are not of
        the correct type.
    """
    args_diff = set(kwargs) - _FETCH_KWARGS
    if args_diff:
        raise InvalidArgumentError('Invalid arguments: %s' % ', '.join(args_diff))

    request = log_service_pb.LogReadRequest()

    request.set_app_id(os.environ['APPLICATION_ID'])

    if start_time is not None:
        if not isinstance(start_time, (float, int, long)):
            raise InvalidArgumentError('start_time must be a float or integer')
        request.set_start_time(long(start_time * 1000000))

    if end_time is not None:
        if not isinstance(end_time, (float, int, long)):
            raise InvalidArgumentError('end_time must be a float or integer')
        request.set_end_time(long(end_time * 1000000))

    if offset is not None:
        try:
            request.mutable_offset().ParseFromString(offset)
        except (__HOLE__, ProtocolBuffer.ProtocolBufferDecodeError):
            raise InvalidArgumentError('offset must be a string or read-only buffer')

    if minimum_log_level is not None:
        if not isinstance(minimum_log_level, int):
            raise InvalidArgumentError('minimum_log_level must be an int')

        if not minimum_log_level in range(LOG_LEVEL_CRITICAL+1):
            raise InvalidArgumentError("""minimum_log_level must be between 0 and
                                       4 inclusive""")
        request.set_minimum_log_level(minimum_log_level)

    if not isinstance(include_incomplete, bool):
        raise InvalidArgumentError('include_incomplete must be a boolean')
    request.set_include_incomplete(include_incomplete)

    if not isinstance(include_app_logs, bool):
        raise InvalidArgumentError('include_app_logs must be a boolean')
    request.set_include_app_logs(include_app_logs)

    if version_ids is None:
        version_id = os.environ['CURRENT_VERSION_ID']
        version_ids = [version_id.split('.')[0]]
    else:
        if not isinstance(version_ids, list):
            raise InvalidArgumentError('version_ids must be a list')
        for version_id in version_ids:
            if not _MAJOR_VERSION_ID_RE.match(version_id):
                raise InvalidArgumentError(
                    'version_ids must only contain valid major version identifiers')

    request.version_id_list()[:] = version_ids

    prototype_request = kwargs.get('prototype_request')
    if prototype_request:
        if not isinstance(prototype_request, log_service_pb.LogReadRequest):
            raise InvalidArgumentError('prototype_request must be a LogReadRequest')
        request.MergeFrom(prototype_request)

    timeout = kwargs.get('timeout')
    if timeout is not None:
        if not isinstance(timeout, (float, int, long)):
            raise InvalidArgumentError('timeout must be a float or integer')

    batch_size = kwargs.get('batch_size')
    if batch_size is not None:
        if not isinstance(batch_size, (int, long)):
            raise InvalidArgumentError('batch_size must be an integer')

        if batch_size < 1:
            raise InvalidArgumentError('batch_size must be greater than zero')

        if batch_size > MAX_ITEMS_PER_FETCH:
            raise InvalidArgumentError('batch_size specified is too large')
        request.set_count(batch_size)

    return _LogQueryResult(request, timeout=timeout)
TypeError
dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/api/logservice/logservice.py/fetch
518
def test_proper_test_request_context():
    app = flask.Flask(__name__)
    app.config.update(
        SERVER_NAME='localhost.localdomain:5000'
    )

    @app.route('/')
    def index():
        return None

    @app.route('/', subdomain='foo')
    def sub():
        return None

    with app.test_request_context('/'):
        assert flask.url_for('index', _external=True) == \
            'http://localhost.localdomain:5000/'

    with app.test_request_context('/'):
        assert flask.url_for('sub', _external=True) == \
            'http://foo.localhost.localdomain:5000/'

    try:
        with app.test_request_context('/', environ_overrides={'HTTP_HOST': 'localhost'}):
            pass
    except __HOLE__ as e:
        assert str(e) == (
            "the server name provided "
            "('localhost.localdomain:5000') does not match the "
            "server name from the WSGI environment ('localhost')"
        )

    app.config.update(SERVER_NAME='localhost')
    with app.test_request_context('/', environ_overrides={'SERVER_NAME': 'localhost'}):
        pass

    app.config.update(SERVER_NAME='localhost:80')
    with app.test_request_context('/', environ_overrides={'SERVER_NAME': 'localhost:80'}):
        pass
ValueError
dataset/ETHPy150Open pallets/flask/tests/test_reqctx.py/test_proper_test_request_context
519
def get_version():
    "Returns the version as a human-format string."
    version = '%d.%d.%d' % (version_info[:3])
    # add revision info if not final version
    if version_info[3] != 'f':
        import os
        version = '%d.%d.%d%s%d' % version_info
        dir = os.path.abspath(os.path.dirname(__file__))
        hg_dir = os.path.normpath(os.path.join(dir, '../'))
        if os.path.isdir(os.path.join(hg_dir, '.hg')):
            hg_rev = 'dev0'  # unknown version
            try:
                from mercurial import ui, hg, error
            except __HOLE__:
                pass
            else:
                try:
                    repo = hg.repository(ui.ui(), hg_dir)
                    c = repo['tip']
                    hg_rev = 'dev%s' % (c.rev())
                except error.RepoError:
                    pass
            version = '%s.%s' % (version, hg_rev)
    return version
ImportError
dataset/ETHPy150Open slav0nic/djangobb/djangobb_forum/__init__.py/get_version
520
def expect(self, method=None, uri=None, params=None, headers=None):
    if method is not None:
        self.assertEqual(method, self.executor.request.method)
    if uri is not None:
        self.assertEqual(self.executor.request.uri,
                         'http://domain.uservoice.com/api/v1' + uri)
    if params is not None:
        try:
            params = tuple(params.items())
        except __HOLE__:
            pass
        self.assertEqual(self.executor.request.params, params)
    if headers is not None:
        self.assertEqual(self.executor.request.headers, headers)
AttributeError
dataset/ETHPy150Open ducksboard/libsaas/test/test_uservoice.py/UserVoiceTestCase.expect
521
def get_model_mock(self):
    if not self.model or not self.model[0]:
        # no mock needed
        return {}
    try:
        return self.model[1]
    except __HOLE__:
        raise MockNotFound("no mock for %s" % self.name)
IndexError
dataset/ETHPy150Open priestc/giotto/giotto/programs/__init__.py/Program.get_model_mock
522
def get_suggestion(self, front_path):
    """
    Returns suggestions for a path. Used in tab completion from the
    command line.
    """
    if '/' in front_path:
        # transverse the manifest, return the new manifest, then
        # get those suggestions with the remaining word
        splitted = front_path.split('/')
        new_manifest = self.manifest
        pre_path = ''
        for item in splitted:
            try:
                new_manifest = new_manifest[item]
            except __HOLE__:
                partial_word = item
                break
            else:
                pre_path += item + '/'

        if isinstance(new_manifest, Program):
            return []
        matches = new_manifest._get_suggestions(partial_word)
        return [pre_path + match for match in matches]
    else:
        return self._get_suggestions(front_path or None)
KeyError
dataset/ETHPy150Open priestc/giotto/giotto/programs/__init__.py/Manifest.get_suggestion
523
def completedir(dir, url, params = {}, flag = Event(),
                vc = lambda x: None, fc = lambda x: None):
    files = listdir(dir)
    files.sort()
    ext = '.torrent'
    if params.has_key('target'):
        target = params['target']
    else:
        target = ''

    togen = []
    for f in files:
        if f[-len(ext):] != ext and (f + ext) not in files:
            togen.append(join(dir, f))

    total = 0
    for i in togen:
        total += calcsize(i)

    subtotal = [0]

    def callback(x, subtotal = subtotal, total = total, vc = vc):
        subtotal[0] += x
        vc(float(subtotal[0]) / total)

    for i in togen:
        fc(i)
        try:
            t = split(i)[-1]
            if t not in ignore and t[0] != '.':
                if target != '':
                    params['target'] = join(target, t + ext)
                make_meta_file(i, url, params, flag,
                               progress = callback, progress_percent = 0)
        except __HOLE__:
            print_exc()
ValueError
dataset/ETHPy150Open Cclleemm/FriendlyTorrent/src/tornado/BitTornado/BT1/makemetafile.py/completedir
524
def tag(self, alt='', use_size=None, **attrs):
    """
    Return a standard XHTML ``<img ... />`` tag for this field.

    :param alt: The ``alt=""`` text for the tag. Defaults to ``''``.

    :param use_size: Whether to get the size of the thumbnail image for use
        in the tag attributes. If ``None`` (default), the size will only
        be used it if won't result in a remote file retrieval.

    All other keyword parameters are added as (properly escaped) extra
    attributes to the `img` tag.
    """
    if use_size is None:
        if getattr(self, '_dimensions_cache', None):
            use_size = True
        else:
            try:
                self.storage.path(self.name)
                use_size = True
            except __HOLE__:
                use_size = False
    attrs['alt'] = alt
    attrs['src'] = self.url
    if use_size:
        attrs.update(dict(width=self.width, height=self.height))
    attrs = ' '.join(['%s="%s"' % (key, escape(value))
                      for key, value in sorted(attrs.items())])
    return mark_safe('<img %s />' % attrs)
NotImplementedError
dataset/ETHPy150Open SmileyChris/easy-thumbnails/easy_thumbnails/files.py/ThumbnailFile.tag
525
def generate_thumbnail(self, thumbnail_options, high_resolution=False,
                       silent_template_exception=False):
    """
    Return an unsaved ``ThumbnailFile`` containing a thumbnail image.

    The thumbnail image is generated using the ``thumbnail_options``
    dictionary.
    """
    thumbnail_options = self.get_options(thumbnail_options)
    orig_size = thumbnail_options['size']  # remember original size
    # Size sanity check.
    min_dim, max_dim = 0, 0
    for dim in orig_size:
        try:
            dim = int(dim)
        except (TypeError, __HOLE__):
            continue
        min_dim, max_dim = min(min_dim, dim), max(max_dim, dim)
    if max_dim == 0 or min_dim < 0:
        raise exceptions.EasyThumbnailsError(
            "The source image is an invalid size (%sx%s)" % orig_size)
    if high_resolution:
        thumbnail_options['size'] = (orig_size[0] * 2, orig_size[1] * 2)
    image = engine.generate_source_image(
        self, thumbnail_options, self.source_generators,
        fail_silently=silent_template_exception)
    if image is None:
        raise exceptions.InvalidImageFormatError(
            "The source file does not appear to be an image")

    thumbnail_image = engine.process_image(image, thumbnail_options,
                                           self.thumbnail_processors)
    if high_resolution:
        thumbnail_options['size'] = orig_size  # restore original size
    filename = self.get_thumbnail_name(
        thumbnail_options,
        transparent=utils.is_transparent(thumbnail_image),
        high_resolution=high_resolution)
    quality = thumbnail_options['quality']
    subsampling = thumbnail_options['subsampling']

    img = engine.save_image(
        thumbnail_image, filename=filename, quality=quality,
        subsampling=subsampling)
    data = img.read()

    thumbnail = ThumbnailFile(
        filename, file=ContentFile(data), storage=self.thumbnail_storage,
        thumbnail_options=thumbnail_options)
    thumbnail.image = thumbnail_image
    thumbnail._committed = False

    return thumbnail
ValueError
dataset/ETHPy150Open SmileyChris/easy-thumbnails/easy_thumbnails/files.py/Thumbnailer.generate_thumbnail
526
def _command(self, node, method, params, wait=False):
    url = self._get_command_url(node)
    body = self._get_command_body(method, params)
    request_params = {
        'wait': str(wait).lower()
    }
    LOG.debug('Executing agent command %(method)s for node %(node)s',
              {'node': node.uuid, 'method': method})

    try:
        response = self.session.post(url, params=request_params, data=body)
    except requests.RequestException as e:
        msg = (_('Error invoking agent command %(method)s for node '
                 '%(node)s. Error: %(error)s') %
               {'method': method, 'node': node.uuid, 'error': e})
        LOG.error(msg)
        raise exception.IronicException(msg)

    # TODO(russellhaering): real error handling
    try:
        result = response.json()
    except __HOLE__:
        msg = _(
            'Unable to decode response as JSON.\n'
            'Request URL: %(url)s\nRequest body: "%(body)s"\n'
            'Response status code: %(code)s\n'
            'Response: "%(response)s"'
        ) % ({'response': response.text, 'body': body, 'url': url,
              'code': response.status_code})
        LOG.error(msg)
        raise exception.IronicException(msg)

    LOG.debug('Agent command %(method)s for node %(node)s returned '
              'result %(res)s, error %(error)s, HTTP status code %(code)d',
              {'node': node.uuid, 'method': method,
               'res': result.get('command_result'),
               'error': result.get('command_error'),
               'code': response.status_code})
    return result
ValueError
dataset/ETHPy150Open openstack/ironic/ironic/drivers/modules/agent_client.py/AgentClient._command
527
def save_image(image, destination=None, filename=None, **options):
    """
    Save a PIL image.
    """
    if destination is None:
        destination = BytesIO()
    filename = filename or ''
    # Ensure plugins are fully loaded so that Image.EXTENSION is populated.
    Image.init()
    format = Image.EXTENSION.get(os.path.splitext(filename)[1].lower(), 'JPEG')
    if format in ('JPEG', 'WEBP'):
        options.setdefault('quality', 85)
    saved = False
    if format == 'JPEG':
        if settings.THUMBNAIL_PROGRESSIVE and (
                max(image.size) >= settings.THUMBNAIL_PROGRESSIVE):
            options['progressive'] = True
        try:
            image.save(destination, format=format, optimize=1, **options)
            saved = True
        except __HOLE__:
            # Try again, without optimization (PIL can't optimize an image
            # larger than ImageFile.MAXBLOCK, which is 64k by default). This
            # shouldn't be triggered very often these days, as recent versions
            # of pillow avoid the MAXBLOCK limitation.
            pass
    if not saved:
        image.save(destination, format=format, **options)

    if hasattr(destination, 'seek'):
        destination.seek(0)
    return destination
IOError
dataset/ETHPy150Open SmileyChris/easy-thumbnails/easy_thumbnails/engine.py/save_image
528
def test_updates_add(self):
    up1 = OrderedUpdates()
    up2 = OrderedUpdates()

    a = theano.shared('a')
    b = theano.shared('b')

    assert not up1 + up2

    up1[a] = 5

    # test that addition works
    assert up1
    assert up1 + up2
    assert not up2

    assert len(up1 + up2) == 1
    assert (up1 + up2)[a] == 5

    up2[b] = 7
    assert up1
    assert up1 + up2
    assert up2

    assert len(up1 + up2) == 2
    assert (up1 + up2)[a] == 5
    assert (up1 + up2)[b] == 7

    assert a in (up1 + up2)
    assert b in (up1 + up2)

    # this works even though there is a collision
    # because values all match
    assert len(up1 + up1 + up1) == 1

    up2[a] = 8  # a gets different value in up1 and up2
    try:
        up1 + up2
        assert 0
    except __HOLE__:
        pass

    # reassigning to a key works fine right?
    up2[a] = 10
KeyError
dataset/ETHPy150Open rizar/attention-lvcsr/libs/Theano/theano/tests/test_updates.py/test_ifelse.test_updates_add
529
def __init__(self, *args, **kwargs):
    kwargs['expose_request'] = kwargs.get('expose_request', True)

    try:
        tz = conf.settings.AMF_TIME_OFFSET
    except __HOLE__:
        tz = None

    try:
        debug = conf.settings.DEBUG
    except AttributeError:
        debug = False

    kwargs['timezone_offset'] = kwargs.get('timezone_offset', tz)
    kwargs['debug'] = kwargs.get('debug', debug)

    gateway.BaseGateway.__init__(self, *args, **kwargs)
AttributeError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/PyAMF-0.6.1/pyamf/remoting/gateway/django.py/DjangoGateway.__init__
530
def __call__(self, http_request):
    """
    Processes and dispatches the request.
    """
    if http_request.method != 'POST':
        return http.HttpResponseNotAllowed(['POST'])

    stream = None
    timezone_offset = self._get_timezone_offset()

    # Decode the request
    try:
        request = remoting.decode(http_request.raw_post_data,
                                  strict=self.strict, logger=self.logger,
                                  timezone_offset=timezone_offset)
    except (pyamf.DecodeError, IOError):
        if self.logger:
            self.logger.exception('Error decoding AMF request')

        response = ("400 Bad Request\n\nThe request body was unable to "
                    "be successfully decoded.")

        if self.debug:
            response += "\n\nTraceback:\n\n%s" % gateway.format_exception()

        # support for Django 0.96
        http_response = http.HttpResponse(mimetype='text/plain',
                                          content=response)
        http_response.status_code = 400

        return http_response
    except (KeyboardInterrupt, SystemExit):
        raise
    except:
        if self.logger:
            self.logger.exception('Unexpected error decoding AMF request')

        response = ('500 Internal Server Error\n\n'
                    'An unexpected error occurred.')

        if self.debug:
            response += "\n\nTraceback:\n\n%s" % gateway.format_exception()

        return http.HttpResponseServerError(mimetype='text/plain',
                                            content=response)

    if self.logger:
        self.logger.debug("AMF Request: %r" % request)

    # Process the request
    try:
        response = self.getResponse(http_request, request)
    except (KeyboardInterrupt, __HOLE__):
        raise
    except:
        if self.logger:
            self.logger.exception('Error processing AMF request')

        response = ("500 Internal Server Error\n\nThe request was "
                    "unable to be successfully processed.")

        if self.debug:
            response += "\n\nTraceback:\n\n%s" % gateway.format_exception()

        return http.HttpResponseServerError(mimetype='text/plain',
                                            content=response)

    if self.logger:
        self.logger.debug("AMF Response: %r" % response)

    # Encode the response
    try:
        stream = remoting.encode(response, strict=self.strict,
                                 logger=self.logger,
                                 timezone_offset=timezone_offset)
    except:
        if self.logger:
            self.logger.exception('Error encoding AMF request')

        response = ("500 Internal Server Error\n\nThe request was "
                    "unable to be encoded.")

        if self.debug:
            response += "\n\nTraceback:\n\n%s" % gateway.format_exception()

        return http.HttpResponseServerError(mimetype='text/plain',
                                            content=response)

    buf = stream.getvalue()
    http_response = http.HttpResponse(mimetype=remoting.CONTENT_TYPE)
    http_response['Server'] = gateway.SERVER_NAME
    http_response['Content-Length'] = str(len(buf))
    http_response.write(buf)

    return http_response
SystemExit
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/PyAMF-0.6.1/pyamf/remoting/gateway/django.py/DjangoGateway.__call__
531
def __init__(self, project=None, credentials=None,
             read_only=False, admin=False,
             user_agent=DEFAULT_USER_AGENT,
             timeout_seconds=DEFAULT_TIMEOUT_SECONDS):
    _ClientProjectMixin.__init__(self, project=project)
    if credentials is None:
        credentials = get_credentials()

    if read_only and admin:
        raise ValueError('A read-only client cannot also perform'
                         'administrative actions.')

    scopes = []
    if read_only:
        scopes.append(READ_ONLY_SCOPE)
    else:
        scopes.append(DATA_SCOPE)

    if admin:
        scopes.append(ADMIN_SCOPE)

    self._admin = bool(admin)
    try:
        credentials = credentials.create_scoped(scopes)
    except __HOLE__:
        pass
    self._credentials = credentials
    self.user_agent = user_agent
    self.timeout_seconds = timeout_seconds

    # These will be set in start().
    self._data_stub_internal = None
    self._cluster_stub_internal = None
    self._operations_stub_internal = None
    self._table_stub_internal = None
AttributeError
dataset/ETHPy150Open GoogleCloudPlatform/gcloud-python/gcloud/bigtable/client.py/Client.__init__
532
def connect_rpc(ctx, param, value):
    if not value:
        return
    try:
        from six.moves import xmlrpc_client
    except __HOLE__:
        import xmlrpclib as xmlrpc_client
    return xmlrpc_client.ServerProxy(value, allow_none=True)
ImportError
dataset/ETHPy150Open binux/pyspider/pyspider/run.py/connect_rpc
533
@cli.command()
@click.option('--phantomjs-path', default='phantomjs', help='phantomjs path')
@click.option('--port', default=25555, help='phantomjs port')
@click.option('--auto-restart', default=False, help='auto restart phantomjs if crashed')
@click.argument('args', nargs=-1)
@click.pass_context
def phantomjs(ctx, phantomjs_path, port, auto_restart, args):
    """
    Run phantomjs fetcher if phantomjs is installed.
    """
    args = args or ctx.default_map and ctx.default_map.get('args', [])

    import subprocess
    g = ctx.obj
    _quit = []
    phantomjs_fetcher = os.path.join(
        os.path.dirname(pyspider.__file__), 'fetcher/phantomjs_fetcher.js')
    cmd = [phantomjs_path,
           # this may cause memory leak: https://github.com/ariya/phantomjs/issues/12903
           #'--load-images=false',
           '--ssl-protocol=any',
           '--disk-cache=true'] + list(args or []) + [phantomjs_fetcher, str(port)]

    try:
        _phantomjs = subprocess.Popen(cmd)
    except __HOLE__:
        logging.warning('phantomjs not found, continue running without it.')
        return None

    def quit(*args, **kwargs):
        _quit.append(1)
        _phantomjs.kill()
        _phantomjs.wait()
        logging.info('phantomjs existed.')

    if not g.get('phantomjs_proxy'):
        g['phantomjs_proxy'] = '127.0.0.1:%s' % port

    phantomjs = utils.ObjectDict(port=port, quit=quit)
    g.instances.append(phantomjs)
    if g.get('testing_mode'):
        return phantomjs

    while True:
        _phantomjs.wait()
        if _quit or not auto_restart:
            break
        _phantomjs = subprocess.Popen(cmd)
OSError
dataset/ETHPy150Open binux/pyspider/pyspider/run.py/phantomjs
534
def main():
    p = OptionParser()
    options, args = p.parse_args()
    if len(args) != 1:
        p.error("no valid directory given")

    inp = args[0]
    outp = inp + ".npz"

    files = []
    for dirpath, dirnames, filenames in os.walk(inp):
        for fn in filenames:
            if fn.endswith('.txt'):
                files.append(
                    (dirpath[len(inp)+1:] + '/' + fn[:-4],
                     os.path.join(dirpath, fn)))

    data = {}
    for key, fn in files:
        key = key.replace('/', '-').strip('-')
        try:
            data[key] = np.loadtxt(fn)
        except __HOLE__:
            print("Failed to load", fn)

    savez_compress(outp, **data)
ValueError
dataset/ETHPy150Open scipy/scipy/scipy/special/utils/makenpz.py/main
535
def load_translations(locale):
    """Load the translation for a locale.  If a locale does not exist
    the return value a fake translation object.
    """
    from werkzeug.utils import import_string
    from kay.i18n.translations import KayTranslations
    domain = "messages"
    ret = KayTranslations.load(utils.get_kay_locale_path(), locale, domain)

    def _merge(path):
        t = KayTranslations.load(path, locale, domain)
        if t is not None:
            if ret is None:
                return t
            elif isinstance(ret, KayTranslations):
                ret.merge(t)
        return ret

    try:
        installed_apps = local.app.app_settings.INSTALLED_APPS
    except AttributeError:
        installed_apps = settings.INSTALLED_APPS

    for appname in installed_apps:
        app = import_string(appname)
        apppath = os.path.join(os.path.dirname(app.__file__), 'i18n')

        if os.path.isdir(apppath):
            ret = _merge(apppath)

    # Add I18N_DIR
    try:
        target = os.path.join(kay.PROJECT_DIR,
                              local.app.app_settings.I18N_DIR)
        if os.path.isdir(target):
            ret = _merge(target)
    except __HOLE__:
        pass

    return ret
AttributeError
dataset/ETHPy150Open IanLewis/kay/kay/i18n/__init__.py/load_translations
536
def parse_datetime(string, rebase=True):
    """Parses a string into a datetime object.  Per default a conversion
    from the blog timezone to UTC is performed but returned as naive
    datetime object (that is tzinfo being None).  If rebasing is disabled
    the string is expected in UTC.

    The return value is **always** a naive datetime object in UTC.  This
    function should be considered of a lenient counterpart of
    `format_system_datetime`.
    """
    from datetime import datetime
    from time import strptime
    from kay.utils import to_utc

    # shortcut: string as None or "now" or the current locale's
    # equivalent returns the current timestamp.
    if string is None or string.lower() in ('now', _('now')):
        return datetime.utcnow().replace(microsecond=0)

    def convert(format):
        """Helper that parses the string and convers the timezone."""
        rv = datetime(*strptime(string, format)[:7])
        if rebase:
            rv = to_utc(rv)
        return rv.replace(microsecond=0)

    # first of all try the following format because this is the format
    # Texpress will output by default for any date time string in the
    # administration panel.
    try:
        return convert(u'%Y-%m-%d %H:%M')
    except __HOLE__:
        pass

    # no go with time only, and current day
    for fmt in TIME_FORMATS:
        try:
            val = convert(fmt)
        except ValueError:
            continue
        return to_utc(datetime.utcnow().replace(hour=val.hour,
                      minute=val.minute, second=val.second, microsecond=0))

    # no try various types of date + time strings
    def combined():
        for t_fmt in TIME_FORMATS:
            for d_fmt in DATE_FORMATS:
                yield t_fmt + ' ' + d_fmt
                yield d_fmt + ' ' + t_fmt

    for fmt in combined():
        try:
            return convert(fmt)
        except ValueError:
            pass

    raise ValueError('invalid date format')
ValueError
dataset/ETHPy150Open IanLewis/kay/kay/i18n/__init__.py/parse_datetime
537
def test_formset_iteration(self):
    # Regression tests for #16455 -- formset instances are iterable
    ChoiceFormset = formset_factory(Choice, extra=3)
    formset = ChoiceFormset()

    # confirm iterated formset yields formset.forms
    forms = list(formset)
    self.assertEqual(forms, formset.forms)
    self.assertEqual(len(formset), len(forms))

    # confirm indexing of formset
    self.assertEqual(formset[0], forms[0])
    try:
        formset[3]
        self.fail('Requesting an invalid formset index should raise an exception')
    except __HOLE__:
        pass

    # Formets can override the default iteration order
    class BaseReverseFormSet(BaseFormSet):
        def __iter__(self):
            return reversed(self.forms)

        def __getitem__(self, idx):
            return super(BaseReverseFormSet, self).__getitem__(len(self) - idx - 1)

    ReverseChoiceFormset = formset_factory(Choice, BaseReverseFormSet, extra=3)
    reverse_formset = ReverseChoiceFormset()

    # confirm that __iter__ modifies rendering order
    # compare forms from "reverse" formset with forms from original formset
    self.assertEqual(str(reverse_formset[0]), str(forms[-1]))
    self.assertEqual(str(reverse_formset[1]), str(forms[-2]))
    self.assertEqual(len(reverse_formset), len(forms))
IndexError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.4/tests/regressiontests/forms/tests/formsets.py/FormsFormsetTestCase.test_formset_iteration
538
def mkdir_p(path):
    """Emulates `mkdir -p` behavior."""
    try:
        makedirs(path)
    except __HOLE__ as exc:  # Python >2.5
        if exc.errno == errno.EEXIST:
            pass
        else:
            raise
OSError
dataset/ETHPy150Open kennethreitz/clint/clint/utils.py/mkdir_p
539
def runscript(mainpyfile, args=None, pre_run="", steal_output=False):
    dbg = _get_debugger(steal_output=steal_output)

    # Note on saving/restoring sys.argv: it's a good idea when sys.argv was
    # modified by the script being debugged. It's a bad idea when it was
    # changed by the user from the command line. The best approach would be to
    # have a "restart" command which would allow explicit specification of
    # command line arguments.
    import sys
    if args is not None:
        prev_sys_argv = sys.argv[:]
        sys.argv = [mainpyfile] + args

    # replace pudb's dir with script's dir in front of module search path.
    from os.path import dirname
    prev_sys_path = sys.path[:]
    sys.path[0] = dirname(mainpyfile)

    while True:
        if pre_run:
            from subprocess import call
            retcode = call(pre_run, close_fds=True, shell=True)
            if retcode:
                print("*** WARNING: pre-run process exited with code %d." % retcode)
                raw_input("[Hit Enter]")

        status_msg = ""

        try:
            dbg._runscript(mainpyfile)
        except __HOLE__:
            se = sys.exc_info()[1]
            status_msg = "The debuggee exited normally with " \
                "status code %s.\n\n" % se.code
        except:
            dbg.post_mortem = True
            dbg.interaction(None, sys.exc_info())

        while True:
            import urwid
            pre_run_edit = urwid.Edit("", pre_run)

            if not CONFIG["prompt_on_quit"]:
                return

            result = dbg.ui.call_with_ui(dbg.ui.dialog,
                urwid.ListBox(urwid.SimpleListWalker([urwid.Text(
                    "Your PuDB session has ended.\n\n%s"
                    "Would you like to quit PuDB or restart your program?\n"
                    "You may hit 'q' to quit."
                    % status_msg),
                    urwid.Text("\n\nIf you decide to restart, this command "
                    "will be run prior to actually restarting:"),
                    urwid.AttrMap(pre_run_edit, "value")
                    ])),
                [
                    ("Restart", "restart"),
                    ("Examine", "examine"),
                    ("Quit", "quit"),
                    ],
                focus_buttons=True,
                bind_enter_esc=False,
                title="Finished",
                extra_bindings=[
                    ("q", "quit"),
                    ("esc", "examine"),
                    ])

            if result == "quit":
                return

            if result == "examine":
                dbg.post_mortem = True
                dbg.interaction(None, sys.exc_info(), show_exc_dialog=False)

            if result == "restart":
                break

        pre_run = pre_run_edit.get_edit_text()

        dbg.restart()

    if args is not None:
        sys.argv = prev_sys_argv

    sys.path = prev_sys_path
SystemExit
dataset/ETHPy150Open inducer/pudb/pudb/__init__.py/runscript
540
def set_interrupt_handler(interrupt_signal=DEFAULT_SIGNAL):
    """
    Set up an interrupt handler, to activate PuDB when Python receives the
    signal `interrupt_signal`. By default it is SIGINT (i.e., Ctrl-c).

    To use a different signal, pass it as the argument to this function, like
    `set_interrupt_handler(signal.SIGALRM)`. You can then break your code
    with `kill -ALRM pid`, where `pid` is the process ID of the Python
    process. Note that PuDB will still use SIGINT once it is running to
    allow breaking running code. If that is an issue, you can change the
    default signal by hooking `pudb.DEFAULT_SIGNAL`, like

    >>> import pudb
    >>> import signal
    >>> pudb.DEFAULT_SIGNAL = signal.SIGALRM

    Note, this may not work if you use threads or subprocesses.
    """
    import signal
    old_handler = signal.getsignal(interrupt_signal)

    if old_handler is not signal.default_int_handler \
            and old_handler != signal.SIG_DFL and old_handler != _interrupt_handler:
        # Since we don't currently have support for a non-default signal handlers,
        # let's avoid undefined-behavior territory and just show a warning.
        from warnings import warn
        if old_handler is None:
            # This is the documented meaning of getsignal()->None.
            old_handler = 'not installed from python'
        return warn("A non-default handler for signal %d is already installed (%s). "
                    "Skipping pudb interrupt support."
                    % (interrupt_signal, old_handler))

    try:
        signal.signal(interrupt_signal, _interrupt_handler)
    except __HOLE__:
        from pudb.lowlevel import format_exception
        import sys
        from warnings import warn
        warn("setting interrupt handler on signal %d failed: %s"
             % (interrupt_signal, "".join(format_exception(sys.exc_info()))))
ValueError
dataset/ETHPy150Open inducer/pudb/pudb/__init__.py/set_interrupt_handler
541
def pm():
    import sys
    try:
        e_type = sys.last_type
        e_value = sys.last_value
        tb = sys.last_traceback
    except __HOLE__:
        ## No exception on record. Do nothing.
        return
    post_mortem(tb, e_type, e_value)
AttributeError
dataset/ETHPy150Open inducer/pudb/pudb/__init__.py/pm
542
def longzip(a, b):
    """Like `izip` but yields `None` for missing items."""
    aiter = iter(a)
    biter = iter(b)
    try:
        for item1 in aiter:
            yield item1, biter.next()
    except __HOLE__:
        for item1 in aiter:
            yield item1, None
    else:
        for item2 in biter:
            yield None, item2
StopIteration
dataset/ETHPy150Open mitsuhiko/solace/solace/utils/formatting.py/longzip
543
def get_batch_file(self):
    try:
        return self._cache['batch_file']
    except __HOLE__:
        batch_file = self.find_batch_file()
        self._cache['batch_file'] = batch_file
        return batch_file
KeyError
dataset/ETHPy150Open kayhayen/Nuitka/nuitka/build/inline_copy/lib/scons-2.3.2/SCons/Tool/MSCommon/vs.py/VisualStudio.get_batch_file
544
def get_executable(self):
    try:
        debug('get_executable using cache:%s'%self._cache['executable'])
        return self._cache['executable']
    except __HOLE__:
        executable = self.find_executable()
        self._cache['executable'] = executable
        debug('get_executable not in cache:%s'%executable)
        return executable
KeyError
dataset/ETHPy150Open kayhayen/Nuitka/nuitka/build/inline_copy/lib/scons-2.3.2/SCons/Tool/MSCommon/vs.py/VisualStudio.get_executable
545
def get_vs_dir(self):
    try:
        return self._cache['vs_dir']
    except __HOLE__:
        vs_dir = self.find_vs_dir()
        self._cache['vs_dir'] = vs_dir
        return vs_dir
KeyError
dataset/ETHPy150Open kayhayen/Nuitka/nuitka/build/inline_copy/lib/scons-2.3.2/SCons/Tool/MSCommon/vs.py/VisualStudio.get_vs_dir
546
def get_supported_arch(self):
    try:
        return self._cache['supported_arch']
    except __HOLE__:
        # RDEVE: for the time being use hardcoded lists
        # supported_arch = self.find_supported_arch()
        self._cache['supported_arch'] = self.supported_arch
        return self.supported_arch
KeyError
dataset/ETHPy150Open kayhayen/Nuitka/nuitka/build/inline_copy/lib/scons-2.3.2/SCons/Tool/MSCommon/vs.py/VisualStudio.get_supported_arch
547
def db_type(self, connection):
    """
    Returns the database column data type for this field, for the provided
    connection.
    """
    # The default implementation of this method looks at the
    # backend-specific DATA_TYPES dictionary, looking up the field by its
    # "internal type".
    #
    # A Field class can implement the get_internal_type() method to specify
    # which *preexisting* Django Field class it's most similar to -- i.e.,
    # an XMLField is represented by a TEXT column type, which is the same
    # as the TextField Django field type, which means XMLField's
    # get_internal_type() returns 'TextField'.
    #
    # But the limitation of the get_internal_type() / data_types approach
    # is that it cannot handle database column types that aren't already
    # mapped to one of the built-in Django field types. In this case, you
    # can implement db_type() instead of get_internal_type() to specify
    # exactly which wacky database column type you want to use.
    data = DictWrapper(self.__dict__, connection.ops.quote_name, "qn_")
    try:
        return connection.creation.data_types[self.get_internal_type()] % data
    except __HOLE__:
        return None
KeyError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.2/django/db/models/fields/__init__.py/Field.db_type
548
def get_prep_lookup(self, lookup_type, value):
    "Perform preliminary non-db specific lookup checks and conversions"
    if hasattr(value, 'prepare'):
        return value.prepare()
    if hasattr(value, '_prepare'):
        return value._prepare()

    if lookup_type in (
            'regex', 'iregex', 'month', 'day', 'week_day', 'search',
            'contains', 'icontains', 'iexact', 'startswith', 'istartswith',
            'endswith', 'iendswith', 'isnull'
            ):
        return value
    elif lookup_type in ('exact', 'gt', 'gte', 'lt', 'lte'):
        return self.get_prep_value(value)
    elif lookup_type in ('range', 'in'):
        return [self.get_prep_value(v) for v in value]
    elif lookup_type == 'year':
        try:
            return int(value)
        except __HOLE__:
            raise ValueError("The __year lookup type requires an integer argument")

    raise TypeError("Field has invalid lookup: %s" % lookup_type)
ValueError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.2/django/db/models/fields/__init__.py/Field.get_prep_lookup
549
def to_python(self, value):
    if value is None:
        return value
    try:
        return int(value)
    except (TypeError, __HOLE__):
        raise exceptions.ValidationError(self.error_messages['invalid'])
ValueError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.2/django/db/models/fields/__init__.py/AutoField.to_python
550
def to_python(self, value):
    if value is None:
        return value
    if isinstance(value, datetime.datetime):
        return value.date()
    if isinstance(value, datetime.date):
        return value

    if not ansi_date_re.search(value):
        raise exceptions.ValidationError(self.error_messages['invalid'])
    # Now that we have the date string in YYYY-MM-DD format, check to make
    # sure it's a valid date.
    # We could use time.strptime here and catch errors, but datetime.date
    # produces much friendlier error messages.
    year, month, day = map(int, value.split('-'))
    try:
        return datetime.date(year, month, day)
    except __HOLE__, e:
        msg = self.error_messages['invalid_date'] % _(str(e))
        raise exceptions.ValidationError(msg)
ValueError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.2/django/db/models/fields/__init__.py/DateField.to_python
551
def to_python(self, value):
    if value is None:
        return value
    if isinstance(value, datetime.datetime):
        return value
    if isinstance(value, datetime.date):
        return datetime.datetime(value.year, value.month, value.day)

    # Attempt to parse a datetime:
    value = smart_str(value)
    # split usecs, because they are not recognized by strptime.
    if '.' in value:
        try:
            value, usecs = value.split('.')
            usecs = int(usecs)
        except ValueError:
            raise exceptions.ValidationError(self.error_messages['invalid'])
    else:
        usecs = 0
    kwargs = {'microsecond': usecs}
    try:
        # Seconds are optional, so try converting seconds first.
        return datetime.datetime(*time.strptime(value, '%Y-%m-%d %H:%M:%S')[:6],
                                 **kwargs)
    except ValueError:
        try:
            # Try without seconds.
            return datetime.datetime(*time.strptime(value, '%Y-%m-%d %H:%M')[:5],
                                     **kwargs)
        except ValueError:
            # Try without hour/minutes/seconds.
            try:
                return datetime.datetime(*time.strptime(value, '%Y-%m-%d')[:3],
                                         **kwargs)
            except __HOLE__:
                raise exceptions.ValidationError(self.error_messages['invalid'])
ValueError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.2/django/db/models/fields/__init__.py/DateTimeField.to_python
552
def to_python(self, value):
    if value is None:
        return value
    try:
        return float(value)
    except (__HOLE__, ValueError):
        raise exceptions.ValidationError(self.error_messages['invalid'])
TypeError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.2/django/db/models/fields/__init__.py/FloatField.to_python
553
def to_python(self, value):
    if value is None:
        return value
    try:
        return int(value)
    except (TypeError, __HOLE__):
        raise exceptions.ValidationError(self.error_messages['invalid'])
ValueError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.2/django/db/models/fields/__init__.py/IntegerField.to_python
554
def to_python(self, value):
    if value is None:
        return None
    if isinstance(value, datetime.time):
        return value
    if isinstance(value, datetime.datetime):
        # Not usually a good idea to pass in a datetime here (it loses
        # information), but this can be a side-effect of interacting with a
        # database backend (e.g. Oracle), so we'll be accommodating.
        return value.time()

    # Attempt to parse a datetime:
    value = smart_str(value)
    # split usecs, because they are not recognized by strptime.
    if '.' in value:
        try:
            value, usecs = value.split('.')
            usecs = int(usecs)
        except ValueError:
            raise exceptions.ValidationError(self.error_messages['invalid'])
    else:
        usecs = 0
    kwargs = {'microsecond': usecs}

    try:
        # Seconds are optional, so try converting seconds first.
        return datetime.time(*time.strptime(value, '%H:%M:%S')[3:6],
                             **kwargs)
    except __HOLE__:
        try:
            # Try without seconds.
            return datetime.time(*time.strptime(value, '%H:%M')[3:5],
                                 **kwargs)
        except ValueError:
            raise exceptions.ValidationError(self.error_messages['invalid'])
ValueError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.2/django/db/models/fields/__init__.py/TimeField.to_python
555
def checkElements(val, modelDocument, parent):
    isSchema = modelDocument.type == ModelDocument.Type.SCHEMA
    if isinstance(parent, ModelObject):
        parentXlinkType = parent.get("{http://www.w3.org/1999/xlink}type")
        isInstance = parent.namespaceURI == XbrlConst.xbrli and parent.localName == "xbrl"
        parentIsLinkbase = parent.namespaceURI == XbrlConst.link and parent.localName == "linkbase"
        parentIsSchema = parent.namespaceURI == XbrlConst.xsd and parent.localName == "schema"
        if isInstance or parentIsLinkbase:  # only for non-inline instance
            val.roleRefURIs = {}  # uses ixdsRoleRefURIs when inline instance (across all target documents)
            val.arcroleRefURIs = {}
            def linkbaseTopElts():
                for refPass in (True, False):  # do roleType and arcroleType before extended links and any other children
                    for child in parent.iterchildren():
                        if refPass == (isinstance(child,ModelObject) and child.localName in ("roleRef","arcroleRef")
                                       and child.namespaceURI == XbrlConst.link):
                            yield child
            childrenIter = linkbaseTopElts()
        else:
            childrenIter = parent.iterchildren()
    else:  # parent is document node, not an element
        parentXlinkType = None
        isInstance = False
        parentIsLinkbase = False
        childrenIter = (parent.getroot(),)
    if isSchema:
        val.inSchemaTop = True

    parentIsAppinfo = False
    if modelDocument.type == ModelDocument.Type.INLINEXBRL:
        if isinstance(parent,ModelObject):  # element
            if (parent.localName == "meta" and parent.namespaceURI == XbrlConst.xhtml and
                (parent.get("http-equiv") or "").lower() == "content-type"):
                val.metaContentTypeEncoding = HtmlUtil.attrValue(parent.get("content"), "charset")
        elif isinstance(parent,etree._ElementTree):  # documentNode
            val.documentTypeEncoding = modelDocument.documentEncoding  # parent.docinfo.encoding
            val.metaContentTypeEncoding = ""

    instanceOrder = 0
    if modelDocument.type == ModelDocument.Type.SCHEMA:
        ncnameTests = (("id","xbrl:xmlElementId"),
                       ("name","xbrl.5.1.1:conceptName"))
    else:
        ncnameTests = (("id","xbrl:xmlElementId"),)
    for elt in childrenIter:
        if isinstance(elt,ModelObject):
            for name, errCode in ncnameTests:
                if elt.get(name) is not None:
                    attrValue = elt.get(name)
                    ''' done in XmlValidate now
                    if not val.NCnamePattern.match(attrValue):
                        val.modelXbrl.error(errCode,
                            _("Element %(element)s attribute %(attribute)s '%(value)s' is not an NCname"),
                            modelObject=elt, element=elt.prefixedName, attribute=name, value=attrValue)
                    '''
                    if name == "id" and attrValue in val.elementIDs:
                        # 2.1 spec @id validation refers to http://www.w3.org/TR/REC-xml#NT-TokenizedType
                        # TODO: this check should not test inline elements, those should be in ModelDocument inlineIxdsDiscover using ixdsEltById
                        val.modelXbrl.error("xml.3.3.1:idMustBeUnique",
                            _("Element %(element)s id %(value)s is duplicated"),
                            modelObject=elt, element=elt.prefixedName, attribute=name, value=attrValue)
                    val.elementIDs.add(attrValue)

            # checks for elements in schemas only
            if isSchema:
                if elt.namespaceURI == XbrlConst.xsd:
                    localName = elt.localName
                    if localName == "schema":
                        XmlValidate.validate(val.modelXbrl, elt)
                        targetNamespace = elt.get("targetNamespace")
                        if targetNamespace is not None:
                            if targetNamespace == "":
                                val.modelXbrl.error("xbrl.5.1:emptyTargetNamespace",
                                    "Schema element has an empty targetNamespace",
                                    modelObject=elt)
                            if val.validateEFM and len(targetNamespace) > 85:
                                l = len(targetNamespace.encode("utf-8"))
                                if l > 255:
                                    val.modelXbrl.error("EFM.6.07.30",
                                        _("Schema targetNamespace length (%(length)s) is over 255 bytes long in utf-8 %(targetNamespace)s"),
                                        modelObject=elt, length=l, targetNamespace=targetNamespace, value=targetNamespace)
                        if val.validateSBRNL:
                            if elt.get("targetNamespace") is None:
                                val.modelXbrl.error("SBR.NL.2.2.0.08",
                                    _('Schema element must have a targetNamespace attribute'),
                                    modelObject=elt)
                            if (elt.get("attributeFormDefault") != "unqualified" or
                                elt.get("elementFormDefault") != "qualified"):
                                val.modelXbrl.error("SBR.NL.2.2.0.09",
                                    _('Schema element attributeFormDefault must be "unqualified" and elementFormDefault must be "qualified"'),
                                    modelObject=elt)
                            for attrName in ("blockDefault", "finalDefault", "version"):
                                if elt.get(attrName) is not None:
                                    val.modelXbrl.error("SBR.NL.2.2.0.10",
                                        _('Schema element must not have a %(attribute)s attribute'),
                                        modelObject=elt, attribute=attrName)
                    elif val.validateSBRNL:
                        if localName in ("assert", "openContent", "fallback"):
                            val.modelXbrl.error("SBR.NL.2.2.0.01",
                                _('Schema contains XSD 1.1 content "%(element)s"'),
                                modelObject=elt, element=elt.qname)
                        if localName == "element":
                            for attr, presence, errCode in (("block", False, "2.2.2.09"),
                                                            ("final", False, "2.2.2.10"),
                                                            ("fixed", False, "2.2.2.11"),
                                                            ("form", False, "2.2.2.12"),):
                                if (elt.get(attr) is not None) != presence:
                                    val.modelXbrl.error("SBR.NL.{0}".format(errCode),
                                        _('Schema element %(concept)s %(requirement)s contain attribute %(attribute)s'),
                                        modelObject=elt, concept=elt.get("name"),
                                        requirement=(_("MUST NOT"),_("MUST"))[presence], attribute=attr,
                                        messageCodes=("SBR.NL.2.2.2.09", "SBR.NL.2.2.2.10", "SBR.NL.2.2.2.11", "SBR.NL.2.2.2.12"))
                            eltName = elt.get("name")
                            if eltName is not None:  # skip for concepts which are refs
                                type = qname(elt, elt.get("type"))
                                eltQname = elt.qname
                                if type in xsd1_1datatypes:
                                    val.modelXbrl.error("SBR.NL.2.2.0.01",
                                        _('Schema element %(concept)s contains XSD 1.1 datatype "%(xsdType)s"'),
                                        modelObject=elt, concept=elt.get("name"), xsdType=type)
                                if not parentIsSchema:  # root element
                                    if elt.get("name") is not None and (elt.isItem or elt.isTuple):
                                        val.modelXbrl.error("SBR.NL.2.2.2.01",
                                            _('Schema concept definition is not at the root level: %(concept)s'),
                                            modelObject=elt, concept=elt.get("name"))
                                elif eltQname not in val.typedDomainQnames:
                                    for attr, presence, errCode in (("abstract", True, "2.2.2.08"),
                                                                    ("id", True, "2.2.2.13"),
                                                                    ("nillable", True, "2.2.2.15"),
                                                                    ("substitutionGroup", True, "2.2.2.18"),):
                                        if (elt.get(attr) is not None) != presence:
                                            val.modelXbrl.error("SBR.NL.{0}".format(errCode),
                                                _('Schema root element %(concept)s %(requirement)s contain attribute %(attribute)s'),
                                                modelObject=elt, concept=elt.get("name"),
                                                requirement=(_("MUST NOT"),_("MUST"))[presence], attribute=attr,
                                                messageCodes=("SBR.NL.2.2.2.08", "SBR.NL.2.2.2.13", "SBR.NL.2.2.2.15", "SBR.NL.2.2.2.18"))
                                # semantic checks
                                if elt.isTuple:
                                    val.hasTuple = True
                                elif elt.isLinkPart:
                                    val.hasLinkPart = True
                                elif elt.isItem:
                                    if elt.isDimensionItem:
                                        val.hasDimension = True
                                    #elif elt.substitutesFor()
                                    if elt.isAbstract:
                                        val.hasAbstractItem = True
                                    else:
                                        val.hasNonAbstraceElement = True
                                if elt.isAbstract and elt.isItem:
                                    val.hasAbstractItem = True
                                if elt.typeQname is not None:
                                    val.referencedNamespaces.add(elt.typeQname.namespaceURI)
                                if elt.substitutionGroupQname is not None:
                                    val.referencedNamespaces.add(elt.substitutionGroupQname.namespaceURI)
                                if elt.isTypedDimension and elt.typedDomainElement is not None:
                                    val.referencedNamespaces.add(elt.typedDomainElement.namespaceURI)
                            else:
                                referencedElt = elt.dereference()
                                if referencedElt is not None:
                                    val.referencedNamespaces.add(referencedElt.modelDocument.targetNamespace)
                            if not parentIsSchema:
                                eltDecl = elt.dereference()
                                if (elt.get("minOccurs") is None or elt.get("maxOccurs") is None):
                                    val.modelXbrl.error("SBR.NL.2.2.2.14",
                                        _('Schema %(element)s must have minOccurs and maxOccurs'),
                                        modelObject=elt, element=eltDecl.qname)
                                elif elt.get("maxOccurs") != "1" and eltDecl.isItem:
                                    val.modelXbrl.error("SBR.NL.2.2.2.30",
                                        _("Tuple concept %(concept)s must have maxOccurs='1'"),
                                        modelObject=elt, concept=eltDecl.qname)
                                if eltDecl.isItem and eltDecl.isAbstract:
                                    val.modelXbrl.error("SBR.NL.2.2.2.31",
                                        _("Abstract concept %(concept)s must not be a child of a tuple"),
                                        modelObject=elt, concept=eltDecl.qname)
                        elif localName in ("sequence","choice"):
                            for attrName in ("minOccurs", "maxOccurs"):
                                attrValue = elt.get(attrName)
                                if attrValue is None:
                                    val.modelXbrl.error("SBR.NL.2.2.2.14",
                                        _('Schema %(element)s must have %(attrName)s'),
                                        modelObject=elt, element=elt.elementQname, attrName=attrName)
                                elif attrValue != "1":
                                    val.modelXbrl.error("SBR.NL.2.2.2.33",
                                        _('Schema %(element)s must have %(attrName)s = "1"'),
                                        modelObject=elt, element=elt.elementQname, attrName=attrName)
                        elif localName in {"complexType","simpleType"}:
                            qnameDerivedFrom = elt.qnameDerivedFrom
                            if qnameDerivedFrom is not None:
                                if isinstance(qnameDerivedFrom, list):  # union
                                    for qn in qnameDerivedFrom:
                                        val.referencedNamespaces.add(qn.namespaceURI)
                                else:  # not union type
                                    val.referencedNamespaces.add(qnameDerivedFrom.namespaceURI)
                        elif localName == "attribute":
                            if elt.typeQname is not None:
                                val.referencedNamespaces.add(elt.typeQname.namespaceURI)
                    if localName == "redefine":
                        val.modelXbrl.error("xbrl.5.6.1:Redefine",
                            "Redefine is not allowed",
                            modelObject=elt)
                    if localName in {"attribute", "element", "attributeGroup"}:
                        ref = elt.get("ref")
                        if ref is not None:
                            if qname(elt, ref) not in {"attribute":val.modelXbrl.qnameAttributes,
                                                       "element":val.modelXbrl.qnameConcepts,
                                                       "attributeGroup":val.modelXbrl.qnameAttributeGroups}[localName]:
                                val.modelXbrl.error("xmlSchema:refNotFound",
                                    _("%(element)s ref %(ref)s not found"),
                                    modelObject=elt, element=localName, ref=ref)
                        if val.validateSBRNL and localName == "attribute":
                            val.modelXbrl.error("SBR.NL.2.2.11.06",
                                _('xs:attribute must not be used'), modelObject=elt)
                    if localName == "appinfo":
                        if val.validateSBRNL:
                            if (parent.localName != "annotation" or parent.namespaceURI != XbrlConst.xsd or
                                parent.getparent().localName != "schema" or parent.getparent().namespaceURI != XbrlConst.xsd or
                                XmlUtil.previousSiblingElement(parent) != None):
                                val.modelXbrl.error("SBR.NL.2.2.0.12",
                                    _('Annotation/appinfo record must be be behind schema and before import'),
                                    modelObject=elt)
                            nextSiblingElement = XmlUtil.nextSiblingElement(parent)
                            if nextSiblingElement is not None and nextSiblingElement.localName != "import":
                                val.modelXbrl.error("SBR.NL.2.2.0.14",
                                    _('Annotation/appinfo record must be followed only by import'),
                                    modelObject=elt)
                    if localName == "annotation":
                        val.annotationsCount += 1
                        if val.validateSBRNL and not XmlUtil.hasChild(elt,XbrlConst.xsd,"appinfo"):
                            val.modelXbrl.error("SBR.NL.2.2.0.12",
                                _('Schema file annotation missing appinfo element must be be behind schema and before import'),
                                modelObject=elt)
                    if val.validateEFM and localName in {"element", "complexType", "simpleType"}:
                        name = elt.get("name")
                        if name and len(name) > 64:
                            l = len(name.encode("utf-8"))
                            if l > 200:
                                val.modelXbrl.error("EFM.6.07.29",
                                    _("Schema %(element)s has a name length (%(length)s) over 200 bytes long in utf-8, %(name)s."),
                                    modelObject=elt, element=localName, name=name, length=l)
                    if val.validateSBRNL and localName in {"all", "documentation", "any", "anyAttribute", "attributeGroup",
                                                           # comment out per R.H. 2011-11-16
                                                           "complexContent", "complexType", "extension", "field", "group",
                                                           "key", "keyref", "list", "notation", "redefine", "selector", "unique"}:
                        val.modelXbrl.error("SBR.NL.2.2.11.{0:02}".format({"all":1, "documentation":2, "any":3,
                                                                           "anyAttribute":4, "attributeGroup":7,
                                                                           "complexContent":10, "complexType":11,
                                                                           "extension":12, "field":13, "group":14,
                                                                           "key":15, "keyref":16, "list":17,
                                                                           "notation":18, "redefine":20,
                                                                           "selector":22, "unique":23}[localName]),
                            _('Schema file element must not be used "%(element)s"'),
                            modelObject=elt, element=elt.qname,
                            messageCodes=("SBR.NL.2.2.11.1", "SBR.NL.2.2.11.2", "SBR.NL.2.2.11.3", "SBR.NL.2.2.11.4",
                                          "SBR.NL.2.2.11.7", "SBR.NL.2.2.11.10", "SBR.NL.2.2.11.11", "SBR.NL.2.2.11.12",
                                          "SBR.NL.2.2.11.13", "SBR.NL.2.2.11.14", "SBR.NL.2.2.11.15", "SBR.NL.2.2.11.16",
                                          "SBR.NL.2.2.11.17", "SBR.NL.2.2.11.18", "SBR.NL.2.2.11.20", "SBR.NL.2.2.11.22",
                                          "SBR.NL.2.2.11.23"))
                    if val.inSchemaTop:
                        if localName in schemaBottom:
                            val.inSchemaTop = False
                        elif localName in schemaTop:
                            val.modelXbrl.error("xmlschema.3.4.2:contentModel",
                                _("Element %(element)s is mis-located in schema file"),
                                modelObject=elt, element=elt.prefixedName)

                # check schema roleTypes
                if elt.localName in ("roleType","arcroleType") and elt.namespaceURI == XbrlConst.link:
                    uriAttr, xbrlSection, roleTypes, localRoleTypes = {
                           "roleType":("roleURI","5.1.3",val.modelXbrl.roleTypes, val.schemaRoleTypes),
                           "arcroleType":("arcroleURI","5.1.4",val.modelXbrl.arcroleTypes, val.schemaArcroleTypes)
                           }[elt.localName]
                    if not parent.localName == "appinfo" and parent.namespaceURI == XbrlConst.xsd:
                        val.modelXbrl.error("xbrl.{0}:{1}Appinfo".format(xbrlSection,elt.localName),
                            _("%(element)s not child of xsd:appinfo"),
                            modelObject=elt, element=elt.qname,
                            messageCodes=("xbrl.5.1.3:roleTypeAppinfo", "xbrl.5.1.4:arcroleTypeAppinfo"))
                    else:  # parent is appinfo, element IS in the right location
                        XmlValidate.validate(val.modelXbrl, elt)  # validate [arc]roleType
                        roleURI = elt.get(uriAttr)
                        if roleURI is None or not UrlUtil.isValid(roleURI):
                            val.modelXbrl.error("xbrl.{0}:{1}Missing".format(xbrlSection,uriAttr),
                                _("%(element)s missing or invalid %(attribute)s"),
                                modelObject=elt, element=elt.qname, attribute=uriAttr,
                                messageCodes=("xbrl.5.1.3:roleTypeMissing", "xbrl.5.1.4:arcroleTypeMissing"))
                        if roleURI in localRoleTypes:
                            val.modelXbrl.error("xbrl.{0}:{1}Duplicate".format(xbrlSection,elt.localName),
                                _("Duplicate %(element)s %(attribute)s %(roleURI)s"),
                                modelObject=elt, element=elt.qname, attribute=uriAttr, roleURI=roleURI,
                                messageCodes=("xbrl.5.1.3:roleTypeDuplicate", "xbrl.5.1.4:arcroleTypeDuplicate"))
                        else:
                            localRoleTypes[roleURI] = elt
                        for otherRoleType in roleTypes[roleURI]:
                            if elt != otherRoleType and not XbrlUtil.sEqual(val.modelXbrl, elt, otherRoleType):
                                val.modelXbrl.error("xbrl.{0}:{1}s-inequality".format(xbrlSection,elt.localName),
                                    _("%(element)s %(roleURI)s not s-equal in %(otherSchema)s"),
                                    modelObject=elt, element=elt.qname, roleURI=roleURI,
                                    otherSchema=otherRoleType.modelDocument.basename,
                                    messageCodes=("xbrl.5.1.3:roleTypes-inequality", "xbrl.5.1.4:arcroleTypes-inequality"))
                        if elt.localName == "arcroleType":
                            cycles = elt.get("cyclesAllowed")
                            if cycles not in ("any", "undirected", "none"):
                                val.modelXbrl.error("xbrl.{0}:{1}CyclesAllowed".format(xbrlSection,elt.localName),
                                    _("%(element)s %(roleURI)s invalid cyclesAllowed %(value)s"),
                                    modelObject=elt, element=elt.qname, roleURI=roleURI, value=cycles,
                                    messageCodes=("xbrl.5.1.3:roleTypeCyclesAllowed", "xbrl.5.1.4:arcroleTypeCyclesAllowed"))
                            if val.validateSBRNL:
val.modelXbrl.error("SBR.NL.2.2.4.01", _('ArcroleType is not allowed %(roleURI)s'), modelObject=elt, roleURI=roleURI) else: # roleType if val.validateSBRNL: roleTypeModelObject = modelDocument.idObjects.get(elt.get("id")) if roleTypeModelObject is not None and not roleTypeModelObject.genLabel(lang="nl"): val.modelXbrl.error("SBR.NL.2.3.8.05", _('RoleType %(roleURI)s must have a label in lang "nl"'), modelObject=elt, roleURI=roleURI) if val.validateEFM and len(roleURI) > 85: l = len(roleURI.encode("utf-8")) if l > 255: val.modelXbrl.error("EFM.6.07.30", _("Schema %(element)s %(attribute)s length (%(length)s) is over 255 bytes long in utf-8 %(roleURI)s"), modelObject=elt, element=elt.qname, attribute=uriAttr, length=l, roleURI=roleURI, value=roleURI) # check for used on duplications usedOns = set() for usedOn in elt.iterdescendants(tag="{http://www.xbrl.org/2003/linkbase}usedOn"): if isinstance(usedOn,ModelObject): qName = qname(usedOn, XmlUtil.text(usedOn)) if qName not in usedOns: usedOns.add(qName) else: val.modelXbrl.error("xbrl.{0}:{1}s-inequality".format(xbrlSection,elt.localName), _("%(element)s %(roleURI)s usedOn %(value)s on has s-equal duplicate"), modelObject=elt, element=elt.qname, roleURI=roleURI, value=qName, messageCodes=("xbrl.5.1.3:roleTypes-inequality", "xbrl.5.1.4:arcroleTypes-inequality")) if val.validateSBRNL: val.valUsedPrefixes.add(qName.prefix) if qName == XbrlConst.qnLinkCalculationLink: val.modelXbrl.error("SBR.NL.2.2.3.01", _("%(element)s usedOn must not be link:calculationLink"), modelObject=elt, element=parent.qname, value=qName) if elt.localName == "roleType" and qName in XbrlConst.standardExtLinkQnames: if not any((key[1] == roleURI and key[2] == qName) for key in val.modelXbrl.baseSets.keys()): val.modelXbrl.error("SBR.NL.2.2.3.02", _("%(element)s usedOn %(usedOn)s not addressed for role %(role)s"), modelObject=elt, element=parent.qname, usedOn=qName, role=roleURI) elif elt.localName == "linkbase" and elt.namespaceURI == XbrlConst.link: XmlValidate.validate(val.modelXbrl, elt) # check linkbases inside schema files if val.validateSBRNL and not elt.prefix: val.modelXbrl.error("SBR.NL.2.2.0.06", 'Schema element is not prefixed: "%(element)s"', modelObject=elt, element=elt.qname) elif modelDocument.type == ModelDocument.Type.LINKBASE: if elt.localName == "linkbase": XmlValidate.validate(val.modelXbrl, elt) if val.validateSBRNL and not elt.prefix: val.modelXbrl.error("SBR.NL.2.2.0.06", _('Linkbase element is not prefixed: "%(element)s"'), modelObject=elt, element=elt.qname) # check of roleRefs when parent is linkbase or instance element xlinkType = elt.get("{http://www.w3.org/1999/xlink}type") xlinkRole = elt.get("{http://www.w3.org/1999/xlink}role") if elt.namespaceURI == XbrlConst.link: if elt.localName == "linkbase": if elt.parentQname is not None and elt.parentQname not in (XbrlConst.qnXsdAppinfo, XbrlConst.qnNsmap): val.modelXbrl.error("xbrl.5.2:linkbaseRootElement", "Linkbase must be a root element or child of appinfo, and may not be nested in %(parent)s", parent=elt.parentQname, modelObject=elt) elif elt.localName in ("roleRef","arcroleRef"): uriAttr, xbrlSection, roleTypeDefs, refs = { "roleRef":("roleURI","3.5.2.4",val.modelXbrl.roleTypes,val.roleRefURIs), "arcroleRef":("arcroleURI","3.5.2.5",val.modelXbrl.arcroleTypes,val.arcroleRefURIs) }[elt.localName] if parentIsAppinfo: pass #ignore roleTypes in appinfo (test case 160 v05) elif not (parentIsLinkbase or isInstance or elt.parentQname in (XbrlConst.qnIXbrlResources, XbrlConst.qnIXbrl11Resources)): 
val.modelXbrl.info("info:{1}Location".format(xbrlSection,elt.localName), _("Link:%(elementName)s not child of link:linkbase or xbrli:instance"), modelObject=elt, elementName=elt.localName, messageCodes=("info:roleRefLocation", "info:arcroleRefLocation")) else: # parent is linkbase or instance, element IS in the right location # check for duplicate roleRefs when parent is linkbase or instance element refUri = elt.get(uriAttr) hrefAttr = elt.get("{http://www.w3.org/1999/xlink}href") hrefUri, hrefId = UrlUtil.splitDecodeFragment(hrefAttr) if refUri == "": val.modelXbrl.error("xbrl.{}.5:{}Missing".format(xbrlSection,elt.localName), _("%(element)s %(refURI)s missing"), modelObject=elt, element=elt.qname, refURI=refUri, messageCodes=("xbrl.3.5.2.4.5:roleRefMissing", "xbrl.3.5.2.5.5:arcroleRefMissing")) elif refUri in refs: val.modelXbrl.error("xbrl.{}.5:{}Duplicate".format(xbrlSection,elt.localName), _("%(element)s is duplicated for %(refURI)s"), modelObject=elt, element=elt.qname, refURI=refUri, messageCodes=("xbrl.3.5.2.4.5:roleRefDuplicate", "xbrl.3.5.2.5.5:arcroleRefDuplicate")) elif refUri not in roleTypeDefs: val.modelXbrl.error("xbrl.{}.5:{}NotDefined".format(xbrlSection,elt.localName), _("%(element)s %(refURI)s is not defined"), modelObject=elt, element=elt.qname, refURI=refUri, messageCodes=("xbrl.3.5.2.4.5:roleRefNotDefined", "xbrl.3.5.2.5.5:arcroleRefNotDefined")) else: refs[refUri] = hrefUri roleTypeElt = elt.resolveUri(uri=hrefAttr) if roleTypeElt not in roleTypeDefs[refUri]: val.modelXbrl.error("xbrl.{}.5:{}Mismatch".format(xbrlSection,elt.localName), _("%(element)s %(refURI)s defined with different URI"), modelObject=(elt,roleTypeElt), element=elt.qname, refURI=refUri, messageCodes=("xbrl.3.5.2.4.5:roleRefMismatch", "xbrl.3.5.2.5.5:arcroleRefMismatch")) if val.validateEFMorGFMorSBRNL: if elt.localName == "arcroleRef": if hrefUri not in val.disclosureSystem.standardTaxonomiesDict: val.modelXbrl.error(("EFM.6.09.06", "GFM.1.04.06"), _("Arcrole %(refURI)s arcroleRef %(xlinkHref)s must be a standard taxonomy"), modelObject=elt, refURI=refUri, xlinkHref=hrefUri) if val.validateSBRNL: for attrName, errCode in (("{http://www.w3.org/1999/xlink}arcrole","SBR.NL.2.3.2.05"),("{http://www.w3.org/1999/xlink}role","SBR.NL.2.3.2.06")): if elt.get(attrName): val.modelXbrl.error(errCode, _("Arcrole %(refURI)s arcroleRef %(xlinkHref)s must not have an %(attribute)s attribute"), modelObject=elt, refURI=refUri, xlinkHref=hrefUri, attribute=attrName, messageCodes=("SBR.NL.2.3.2.05", "SBR.NL.2.3.2.06")) elif elt.localName == "roleRef": if val.validateSBRNL: for attrName, errCode in (("{http://www.w3.org/1999/xlink}arcrole","SBR.NL.2.3.10.09"),("{http://www.w3.org/1999/xlink}role","SBR.NL.2.3.10.10")): if elt.get(attrName): val.modelXbrl.error(errCode, _("Role %(refURI)s roleRef %(xlinkHref)s must not have an %(attribute)s attribute"), modelObject=elt, refURI=refUri, xlinkHref=hrefUri, attribute=attrName, messageCodes=("SBR.NL.2.3.10.09", "SBR.NL.2.3.10.10")) if val.validateSBRNL: if not xlinkType: val.modelXbrl.error("SBR.NL.2.3.0.01", _("Xlink 1.1 simple type is not allowed (xlink:type is missing)"), modelObject=elt) # checks for elements in linkbases if elt.namespaceURI == XbrlConst.link: if elt.localName in ("schemaRef", "linkbaseRef", "roleRef", "arcroleRef"): if xlinkType != "simple": val.modelXbrl.error("xbrl.3.5.1.1:simpleLinkType", _("Element %(element)s missing xlink:type=\"simple\""), modelObject=elt, element=elt.qname) href = elt.get("{http://www.w3.org/1999/xlink}href") if not href or 
"xpointer(" in href: val.modelXbrl.error("xbrl.3.5.1.2:simpleLinkHref", _("Element %(element)s missing or invalid href"), modelObject=elt, element=elt.qname) for name in ("{http://www.w3.org/1999/xlink}role", "{http://www.w3.org/1999/xlink}arcrole"): if elt.get(name) == "": val.modelXbrl.error("xbrl.3.5.1.2:simpleLink" + name, _("Element %(element)s has empty %(attribute)s"), modelObject=elt, attribute=name, messageCodes=("xbrl.3.5.1.2:simpleLink{http://www.w3.org/1999/xlink}role", "xbrl.3.5.1.2:simpleLink{http://www.w3.org/1999/xlink}arcrole")) if elt.localName == "linkbaseRef" and \ elt.get("{http://www.w3.org/1999/xlink}arcrole") != XbrlConst.xlinkLinkbase: val.modelXbrl.error("xbrl.4.3.3:linkbaseRefArcrole", _("LinkbaseRef missing arcrole"), modelObject=elt) elif elt.localName == "loc": if xlinkType != "locator": val.modelXbrl.error("xbrl.3.5.3.7.1:linkLocType", _("Element %(element)s missing xlink:type=\"locator\""), modelObject=elt, element=elt.qname) for name, errName in (("{http://www.w3.org/1999/xlink}href","xbrl.3.5.3.7.2:linkLocHref"), ("{http://www.w3.org/1999/xlink}label","xbrl.3.5.3.7.3:linkLocLabel")): if elt.get(name) is None: val.modelXbrl.error(errName, _("Element %(element)s missing: %(attribute)s"), modelObject=elt, element=elt.qname, attribute=name, messageCodes=("xbrl.3.5.3.7.2:linkLocHref","xbrl.3.5.3.7.3:linkLocLabel")) elif xlinkType == "resource": if elt.localName == "footnote" and elt.get("{http://www.w3.org/XML/1998/namespace}lang") is None: val.modelXbrl.error("xbrl.4.11.1.2.1:footnoteLang", _("Footnote %(xlinkLabel)s element missing xml:lang attribute"), modelObject=elt, xlinkLabel=elt.get("{http://www.w3.org/1999/xlink}label")) elif elt.localName == "footnote" and elt.get("{http://www.w3.org/XML/1998/namespace}lang") is None: val.modelXbrl.error("xbrl.5.2.2.2.1:labelLang", _("Label %(xlinkLabel)s element missing xml:lang attribute"), modelObject=elt, xlinkLabel=elt.get("{http://www.w3.org/1999/xlink}label")) if val.validateSBRNL: if elt.localName in ("label", "reference"): if not XbrlConst.isStandardRole(xlinkRole): val.modelXbrl.error("SBR.NL.2.3.10.13", _("Extended link %(element)s must have a standard xlink:role attribute (%(xlinkRole)s)"), modelObject=elt, element=elt.elementQname, xlinkRole=xlinkRole) if elt.localName == "reference": # look for custom reference parts for linkPart in elt.iterchildren(): if linkPart.namespaceURI not in val.disclosureSystem.baseTaxonomyNamespaces: val.modelXbrl.error("SBR.NL.2.2.5.01", _("Link part %(element)s is not authorized"), modelObject=linkPart, element=linkPart.elementQname) # TBD: add lang attributes content validation if xlinkRole is not None: checkLinkRole(val, elt, elt.qname, xlinkRole, xlinkType, val.roleRefURIs) elif xlinkType == "extended" and val.validateSBRNL: # no @role on extended link val.modelXbrl.error("SBR.NL.2.3.10.13", _("Extended link %(element)s must have an xlink:role attribute"), modelObject=elt, element=elt.elementQname) if elt.get("{http://www.w3.org/1999/xlink}arcrole") is not None: checkArcrole(val, elt, elt.qname, elt.get("{http://www.w3.org/1999/xlink}arcrole"), val.arcroleRefURIs) #check resources if parentXlinkType == "extended": if elt.localName not in ("documentation", "title") and \ xlinkType not in ("arc", "locator", "resource"): val.modelXbrl.error("xbrl.3.5.3.8.1:resourceType", _("Element %(element)s appears to be a resource missing xlink:type=\"resource\""), modelObject=elt, element=elt.qname) elif (xlinkType == "locator" and elt.namespaceURI != XbrlConst.link and 
parent.namespaceURI == XbrlConst.link and parent.localName in link_loc_spec_sections): val.modelXbrl.error("xbrl.{0}:customLocator".format(link_loc_spec_sections[parent.localName]), _("Element %(element)s is a custom locator in a standard %(link)s"), modelObject=(elt,parent), element=elt.qname, link=parent.qname, messageCodes=("xbrl.5.2.2.1:customLocator", "xbrl.5.2.3.1:customLocator", "xbrl.5.2.5.1:customLocator", "xbrl.5.2.6.1:customLocator", "xbrl.5.2.4.1:customLocator", "xbrl.4.11.1.1:customLocator")) if xlinkType == "resource": if not elt.get("{http://www.w3.org/1999/xlink}label"): val.modelXbrl.error("xbrl.3.5.3.8.2:resourceLabel", _("Element %(element)s missing xlink:label"), modelObject=elt, element=elt.qname) elif xlinkType == "arc": for name, errName in (("{http://www.w3.org/1999/xlink}from", "xbrl.3.5.3.9.2:arcFrom"), ("{http://www.w3.org/1999/xlink}to", "xbrl.3.5.3.9.2:arcTo")): if not elt.get(name): val.modelXbrl.error(errName, _("Element %(element)s missing xlink:%(attribute)s"), modelObject=elt, element=elt.qname, attribute=name, messageCodes=("xbrl.3.5.3.9.2:arcFrom", "xbrl.3.5.3.9.2:arcTo")) if val.modelXbrl.hasXDT and elt.get("{http://xbrl.org/2005/xbrldt}targetRole") is not None: targetRole = elt.get("{http://xbrl.org/2005/xbrldt}targetRole") if not XbrlConst.isStandardRole(targetRole) and \ elt.qname == XbrlConst.qnLinkDefinitionArc and \ targetRole not in val.roleRefURIs: val.modelXbrl.error("xbrldte:TargetRoleNotResolvedError", _("TargetRole %(targetRole)s is missing a roleRef"), modelObject=elt, element=elt.qname, targetRole=targetRole) val.containsRelationship = True xmlLang = elt.get("{http://www.w3.org/XML/1998/namespace}lang") if val.validateXmlLang and xmlLang is not None: if not val.disclosureSystem.xmlLangPattern.match(xmlLang): val.modelXbrl.error("SBR.NL.2.3.8.01" if (val.validateSBRNL and xmlLang.startswith('nl')) else "SBR.NL.2.3.8.02" if (val.validateSBRNL and xmlLang.startswith('en')) else "arelle:langError", _("Element %(element)s %(xlinkLabel)s has unauthorized xml:lang='%(lang)s'"), modelObject=elt, element=elt.qname, xlinkLabel=elt.get("{http://www.w3.org/1999/xlink}label"), lang=elt.get("{http://www.w3.org/XML/1998/namespace}lang"), messageCodes=("SBR.NL.2.3.8.01", "SBR.NL.2.3.8.02", "arelle:langError")) if isInstance: if elt.namespaceURI == XbrlConst.xbrli: expectedSequence = instanceSequence.get(elt.localName,9) else: expectedSequence = 9 #itdms last if instanceOrder > expectedSequence: val.modelXbrl.error("xbrl.4.7:instanceElementOrder", _("Element %(element)s is out of order"), modelObject=elt, element=elt.qname) else: instanceOrder = expectedSequence if modelDocument.type == ModelDocument.Type.UnknownXML: if elt.localName == "xbrl" and elt.namespaceURI == XbrlConst.xbrli: if elt.getparent() is not None: val.modelXbrl.error("xbrl.4:xbrlRootElement", "Xbrl must be a root element, and may not be nested in %(parent)s", parent=elt.parentQname, modelObject=elt) elif elt.localName == "schema" and elt.namespaceURI == XbrlConst.xsd: if elt.getparent() is not None: val.modelXbrl.error("xbrl.5.1:schemaRootElement", "Schema must be a root element, and may not be nested in %(parent)s", parent=elt.parentQname, modelObject=elt) if modelDocument.type == ModelDocument.Type.INLINEXBRL and elt.namespaceURI in XbrlConst.ixbrlAll: if elt.localName == "footnote": if val.validateGFM: if elt.get("{http://www.w3.org/1999/xlink}arcrole") != XbrlConst.factFootnote: # must be in a nonDisplay div if not any(inlineDisplayNonePattern.search(e.get("style") or "") for ns in 
(XbrlConst.xhtml, None) # may be un-namespaced html for e in XmlUtil.ancestors(elt, ns, "div")): val.modelXbrl.error(("EFM.N/A", "GFM:1.10.16"), _("Inline XBRL footnote %(footnoteID)s must be in non-displayable div due to arcrole %(arcrole)s"), modelObject=elt, footnoteID=elt.get("footnoteID"), arcrole=elt.get("{http://www.w3.org/1999/xlink}arcrole")) if not elt.get("{http://www.w3.org/XML/1998/namespace}lang"): val.modelXbrl.error(("EFM.N/A", "GFM:1.10.13"), _("Inline XBRL footnote %(footnoteID)s is missing an xml:lang attribute"), modelObject=elt, footnoteID=id) if elt.namespaceURI == XbrlConst.ixbrl: val.ixdsFootnotes[elt.footnoteID] = elt else: checkIxContinuationChain(elt) if not elt.xmlLang: val.modelXbrl.error(ixMsgCode("footnoteLang", elt, sect="validation"), _("Inline XBRL footnotes require an in-scope xml:lang"), modelObject=elt) elif elt.localName == "fraction": ixDescendants = XmlUtil.descendants(elt, elt.namespaceURI, '*') wrongDescendants = [d for d in ixDescendants if d.localName not in ('numerator','denominator','fraction')] if wrongDescendants: val.modelXbrl.error(ixMsgCode("fractionDescendants", elt, sect="validation"), _("Inline XBRL fraction may only contain ix:numerator, ix:denominator, or ix:fraction, but contained %(wrongDescendants)s"), modelObject=[elt] + wrongDescendants, wrongDescendants=", ".join(str(d.elementQname) for d in wrongDescendants)) ixDescendants = XmlUtil.descendants(elt, elt.namespaceURI, ('numerator','denominator')) if not elt.isNil: if set(d.localName for d in ixDescendants) != {'numerator','denominator'}: val.modelXbrl.error(ixMsgCode("fractionTerms", elt, sect="validation"), _("Inline XBRL fraction must have one ix:numerator and one ix:denominator when not nil"), modelObject=[elt] + ixDescendants) else: if ixDescendants: # nil and has fraction term elements val.modelXbrl.error(ixMsgCode("fractionNilTerms", elt, sect="validation"), _("Inline XBRL fraction must not have ix:numerator or ix:denominator when nil"), modelObject=[elt] + ixDescendants) e2 = XmlUtil.ancestor(elt, elt.namespaceURI, "fraction") if e2 is not None: val.modelXbrl.error(ixMsgCode("nestedFractionIsNil", elt, sect="validation"), _("Inline XBRL nil ix:fraction may not have an ancestor ix:fraction"), modelObject=(elt,e2)) elif elt.localName in ("denominator", "numerator"): wrongDescendants = [d for d in XmlUtil.descendants(elt, '*', '*')] if wrongDescendants: val.modelXbrl.error(ixMsgCode("fractionTermDescendants", elt, sect="validation"), _("Inline XBRL fraction term ix:%(name)s may only contain text nodes, but contained %(wrongDescendants)s"), modelObject=[elt] + wrongDescendants, name=elt.localName, wrongDescendants=", ".join(str(d.elementQname) for d in wrongDescendants)) if elt.get("format") is None and '-' in XmlUtil.innerText(elt): val.modelXbrl.error(ixMsgCode("fractionTermNegative", elt, sect="validation"), _("Inline XBRL ix:numerator or ix:denominator without format attribute must be non-negative"), modelObject=elt) elif elt.localName == "header": if not any(inlineDisplayNonePattern.search(e.get("style") or "") for ns in (XbrlConst.xhtml, None) # may be un-namespaced html for e in XmlUtil.ancestors(elt, ns, "div")): val.modelXbrl.warning(ixMsgCode("headerDisplayNone", elt, sect="validation"), _("Warning, Inline XBRL ix:header is recommended to be nested in a <div> with style display:none"), modelObject=elt) val.ixdsHeaderCount += 1 elif elt.localName == "nonFraction": if elt.isNil: e2 = XmlUtil.ancestor(elt, elt.namespaceURI, "nonFraction") if e2 is not None: 
val.modelXbrl.error(ixMsgCode("nestedNonFractionIsNil", elt, sect="validation"), _("Inline XBRL nil ix:nonFraction may not have an ancestor ix:nonFraction"), modelObject=(elt,e2)) else: c = XmlUtil.children(elt, '*', '*') if c and (len(c) != 1 or c[0].namespaceURI != elt.namespaceURI or c[0].localName != "nonFraction"): val.modelXbrl.error(ixMsgCode("nonFractionChildren", elt, sect="validation"), _("Inline XBRL nil ix:nonFraction may only have one child ix:nonFraction"), modelObject=[elt] + c) for e in c: if (e.namespaceURI == elt.namespaceURI and e.localName == "nonFraction" and (e.format != elt.format or e.scaleInt != elt.scaleInt or e.unitID != elt.unitID)): val.modelXbrl.error(ixMsgCode("nestedNonFractionProperties", e, sect="validation"), _("Inline XBRL nested ix:nonFraction must have matching format, scale, and unitRef properties"), modelObject=(elt, e)) if elt.get("format") is None and '-' in XmlUtil.innerText(elt): val.modelXbrl.error(ixMsgCode("nonFractionNegative", elt, sect="validation"), _("Inline XBRL ix:nonFraction without format attribute must be non-negative"), modelObject=elt) elif elt.localName == "nonNumeric": checkIxContinuationChain(elt) elif elt.localName == "references": val.ixdsReferences[elt.get("target")].append(elt) elif elt.localName == "relationship": val.ixdsRelationships.append(elt) elif elt.localName == "tuple": if not elt.tupleID: if not elt.isNil: if not XmlUtil.descendants(elt, elt.namespaceURI, ("fraction", "nonFraction", "nonNumeric", "tuple")): val.modelXbrl.error(ixMsgCode("tupleID", elt, sect="validation"), _("Inline XBRL non-nil tuples without ix:fraction, ix:nonFraction, ix:nonNumeric or ix:tuple descendants require a tupleID"), modelObject=elt) else: val.ixdsTuples[elt.tupleID] = elt if val.validateEFMorGFMorSBRNL: if xlinkType == "extended": if not xlinkRole or xlinkRole == "": val.modelXbrl.error(("EFM.6.09.04", "GFM.1.04.04"), "%(element)s is missing an xlink:role", modelObject=elt, element=elt.qname) eltNsName = (elt.namespaceURI,elt.localName) if not val.extendedElementName: val.extendedElementName = elt.qname elif val.extendedElementName != elt.qname: val.modelXbrl.error(("EFM.6.09.07", "GFM:1.04.07", "SBR.NL.2.3.0.11"), _("Extended element %(element)s must be the same as %(element2)s"), modelObject=elt, element=elt.qname, element2=val.extendedElementName) if xlinkType == "locator": if val.validateSBRNL and elt.qname != XbrlConst.qnLinkLoc: val.modelXbrl.error("SBR.NL.2.3.0.11", _("Loc element %(element)s may not be contained in a linkbase with %(element2)s"), modelObject=elt, element=elt.qname, element2=val.extendedElementName) if xlinkType == "resource": if not xlinkRole: val.modelXbrl.error(("EFM.6.09.04", "GFM.1.04.04"), _("%(element)s is missing an xlink:role"), modelObject=elt, element=elt.qname) elif not XbrlConst.isStandardRole(xlinkRole): modelsRole = val.modelXbrl.roleTypes.get(xlinkRole) if (modelsRole is None or len(modelsRole) == 0 or modelsRole[0].modelDocument.targetNamespace not in val.disclosureSystem.standardTaxonomiesDict): val.modelXbrl.error(("EFM.6.09.05", "GFM.1.04.05", "SBR.NL.2.3.10.14"), _("Resource %(xlinkLabel)s role %(role)s is not a standard taxonomy role"), modelObject=elt, xlinkLabel=elt.get("{http://www.w3.org/1999/xlink}label"), role=xlinkRole, element=elt.qname, roleDefinition=val.modelXbrl.roleTypeDefinition(xlinkRole)) if val.validateSBRNL: if elt.localName == "reference": for child in elt.iterdescendants(): if isinstance(child,ModelObject) and child.namespaceURI.startswith("http://www.xbrl.org") and 
child.namespaceURI != "http://www.xbrl.org/2006/ref": val.modelXbrl.error("SBR.NL.2.3.3.01", _("Reference %(xlinkLabel)s has unauthorized part element %(element)s"), modelObject=elt, xlinkLabel=elt.get("{http://www.w3.org/1999/xlink}label"), element=qname(child)) id = elt.get("id") if not id: val.modelXbrl.error("SBR.NL.2.3.3.02", _("Reference %(xlinkLabel)s is missing an id attribute"), modelObject=elt, xlinkLabel=elt.get("{http://www.w3.org/1999/xlink}label")) elif id in val.DTSreferenceResourceIDs: val.modelXbrl.error("SBR.NL.2.3.3.03", _("Reference %(xlinkLabel)s has duplicated id %(id)s also in linkbase %(otherLinkbase)s"), modelObject=elt, xlinkLabel=elt.get("{http://www.w3.org/1999/xlink}label"), id=id, otherLinkbase=val.DTSreferenceResourceIDs[id]) else: val.DTSreferenceResourceIDs[id] = modelDocument.basename if elt.qname not in { XbrlConst.qnLinkLabelLink: (XbrlConst.qnLinkLabel,), XbrlConst.qnLinkReferenceLink: (XbrlConst.qnLinkReference,), XbrlConst.qnLinkPresentationLink: tuple(), XbrlConst.qnLinkCalculationLink: tuple(), XbrlConst.qnLinkDefinitionLink: tuple(), XbrlConst.qnLinkFootnoteLink: (XbrlConst.qnLinkFootnote,), # XbrlConst.qnGenLink: (XbrlConst.qnGenLabel, XbrlConst.qnGenReference, val.qnSbrLinkroleorder), }.get(val.extendedElementName,(elt.qname,)): # allow non-2.1 to be ok regardless per RH 2013-03-13 val.modelXbrl.error("SBR.NL.2.3.0.11", _("Resource element %(element)s may not be contained in a linkbase with %(element2)s"), modelObject=elt, element=elt.qname, element2=val.extendedElementName) if xlinkType == "arc": if elt.get("priority") is not None: priority = elt.get("priority") try: if int(priority) >= 10: val.modelXbrl.error(("EFM.6.09.09", "GFM.1.04.08"), _("Arc from %(xlinkFrom)s to %(xlinkTo)s priority %(priority)s must be less than 10"), modelObject=elt, arcElement=elt.qname, xlinkFrom=elt.get("{http://www.w3.org/1999/xlink}from"), xlinkTo=elt.get("{http://www.w3.org/1999/xlink}to"), priority=priority) except (ValueError) : val.modelXbrl.error(("EFM.6.09.09", "GFM.1.04.08"), _("Arc from %(xlinkFrom)s to %(xlinkTo)s priority %(priority)s is not an integer"), modelObject=elt, arcElement=elt.qname, xlinkFrom=elt.get("{http://www.w3.org/1999/xlink}from"), xlinkTo=elt.get("{http://www.w3.org/1999/xlink}to"), priority=priority) if elt.namespaceURI == XbrlConst.link: if elt.localName == "presentationArc" and not elt.get("order"): val.modelXbrl.error(("EFM.6.12.01", "GFM.1.06.01", "SBR.NL.2.3.4.04"), _("PresentationArc from %(xlinkFrom)s to %(xlinkTo)s must have an order"), modelObject=elt, xlinkFrom=elt.get("{http://www.w3.org/1999/xlink}from"), xlinkTo=elt.get("{http://www.w3.org/1999/xlink}to"), conceptFrom=arcFromConceptQname(elt), conceptTo=arcToConceptQname(elt)) elif elt.localName == "calculationArc": if not elt.get("order"): val.modelXbrl.error(("EFM.6.14.01", "GFM.1.07.01"), _("CalculationArc from %(xlinkFrom)s to %(xlinkTo)s must have an order"), modelObject=elt, xlinkFrom=elt.get("{http://www.w3.org/1999/xlink}from"), xlinkTo=elt.get("{http://www.w3.org/1999/xlink}to"), conceptFrom=arcFromConceptQname(elt), conceptTo=arcToConceptQname(elt)) try: weightAttr = elt.get("weight") weight = float(weightAttr) if not weight in (1, -1): val.modelXbrl.error(("EFM.6.14.02", "GFM.1.07.02"), _("CalculationArc from %(xlinkFrom)s to %(xlinkTo)s weight %(weight)s must be 1 or -1"), modelObject=elt, xlinkFrom=elt.get("{http://www.w3.org/1999/xlink}from"), xlinkTo=elt.get("{http://www.w3.org/1999/xlink}to"), conceptFrom=arcFromConceptQname(elt), 
conceptTo=arcToConceptQname(elt), weight=weightAttr) except __HOLE__: val.modelXbrl.error(("EFM.6.14.02", "GFM.1.07.02"), _("CalculationArc from %(xlinkFrom)s to %(xlinkTo)s must have an weight (value error in \"%(weight)s\")"), modelObject=elt, xlinkFrom=elt.get("{http://www.w3.org/1999/xlink}from"), xlinkTo=elt.get("{http://www.w3.org/1999/xlink}to"), conceptFrom=arcFromConceptQname(elt), conceptTo=arcToConceptQname(elt), weight=weightAttr) elif elt.localName == "definitionArc": if not elt.get("order"): val.modelXbrl.error(("EFM.6.16.01", "GFM.1.08.01"), _("DefinitionArc from %(xlinkFrom)s to %(xlinkTo)s must have an order"), modelObject=elt, xlinkFrom=elt.get("{http://www.w3.org/1999/xlink}from"), xlinkTo=elt.get("{http://www.w3.org/1999/xlink}to"), conceptFrom=arcFromConceptQname(elt), conceptTo=arcToConceptQname(elt)) if val.validateSBRNL and arcrole in (XbrlConst.essenceAlias, XbrlConst.similarTuples, XbrlConst.requiresElement): val.modelXbrl.error({XbrlConst.essenceAlias: "SBR.NL.2.3.2.02", XbrlConst.similarTuples: "SBR.NL.2.3.2.03", XbrlConst.requiresElement: "SBR.NL.2.3.2.04"}[arcrole], _("DefinitionArc from %(xlinkFrom)s to %(xlinkTo)s has unauthorized arcrole %(arcrole)s"), modelObject=elt, xlinkFrom=elt.get("{http://www.w3.org/1999/xlink}from"), xlinkTo=elt.get("{http://www.w3.org/1999/xlink}to"), arcrole=arcrole, messageCodes=("SBR.NL.2.3.2.02", "SBR.NL.2.3.2.03", "SBR.NL.2.3.2.04")), elif elt.localName == "referenceArc" and val.validateSBRNL: if elt.get("order"): val.modelXbrl.error("SBR.NL.2.3.3.05", _("ReferenceArc from %(xlinkFrom)s to %(xlinkTo)s has an order"), modelObject=elt, xlinkFrom=elt.get("{http://www.w3.org/1999/xlink}from"), xlinkTo=elt.get("{http://www.w3.org/1999/xlink}to")) if val.validateSBRNL and elt.get("use") == "prohibited" and elt.getparent().tag in ( "{http://www.xbrl.org/2003/linkbase}presentationLink", "{http://www.xbrl.org/2003/linkbase}labelLink", "{http://xbrl.org/2008/generic}link", "{http://www.xbrl.org/2003/linkbase}referenceLink"): val.modelXbrl.error("SBR.NL.2.3.0.10", _("%(arc)s must not contain use='prohibited'"), modelObject=elt, arc=elt.getparent().qname) if val.validateSBRNL and elt.qname not in { XbrlConst.qnLinkLabelLink: (XbrlConst.qnLinkLabelArc,), XbrlConst.qnLinkReferenceLink: (XbrlConst.qnLinkReferenceArc,), XbrlConst.qnLinkPresentationLink: (XbrlConst.qnLinkPresentationArc,), XbrlConst.qnLinkCalculationLink: (XbrlConst.qnLinkCalculationArc,), XbrlConst.qnLinkDefinitionLink: (XbrlConst.qnLinkDefinitionArc,), XbrlConst.qnLinkFootnoteLink: (XbrlConst.qnLinkFootnoteArc,), # XbrlConst.qnGenLink: (XbrlConst.qnGenArc,), }.get(val.extendedElementName, (elt.qname,)): # allow non-2.1 to be ok regardless per RH 2013-03-13 val.modelXbrl.error("SBR.NL.2.3.0.11", _("Arc element %(element)s may not be contained in a linkbase with %(element2)s"), modelObject=elt, element=elt.qname, element2=val.extendedElementName) if val.validateSBRNL and elt.qname == XbrlConst.qnLinkLabelArc and elt.get("order"): val.modelXbrl.error("SBR.NL.2.3.8.08", _("labelArc may not be contain order (%(order)s)"), modelObject=elt, order=elt.get("order")) if val.validateSBRNL: # check attributes for prefixes and xmlns val.valUsedPrefixes.add(elt.prefix) if elt.namespaceURI not in val.disclosureSystem.baseTaxonomyNamespaces: val.modelXbrl.error("SBR.NL.2.2.0.20", _("%(fileType)s element %(element)s must not have custom namespace %(namespace)s"), modelObject=elt, element=elt.qname, fileType="schema" if isSchema else "linkbase" , namespace=elt.namespaceURI) for attrTag, 
attrValue in elt.items(): prefix, ns, localName = XmlUtil.clarkNotationToPrefixNsLocalname(elt, attrTag, isAttribute=True) if prefix: # don't count unqualified prefixes for using default namespace val.valUsedPrefixes.add(prefix) if ns and ns not in val.disclosureSystem.baseTaxonomyNamespaces: val.modelXbrl.error("SBR.NL.2.2.0.20", _("%(fileType)s element %(element)s must not have %(prefix)s:%(localName)s"), modelObject=elt, element=elt.qname, fileType="schema" if isSchema else "linkbase" , prefix=prefix, localName=localName) if isSchema and localName in ("base", "ref", "substitutionGroup", "type"): valuePrefix, sep, valueName = attrValue.partition(":") if sep: val.valUsedPrefixes.add(valuePrefix) # check for xmlns on a non-root element parentElt = elt.getparent() if parentElt is not None: for prefix, ns in elt.nsmap.items(): if prefix not in parentElt.nsmap or parentElt.nsmap[prefix] != ns: val.modelXbrl.error(("SBR.NL.2.2.0.19" if isSchema else "SBR.NL.2.3.1.01"), _("%(fileType)s element %(element)s must not have xmlns:%(prefix)s"), modelObject=elt, element=elt.qname, fileType="schema" if isSchema else "linkbase" , prefix=prefix, messageCodes=("SBR.NL.2.2.0.19", "SBR.NL.2.3.1.01")) if elt.localName == "roleType" and not elt.get("id"): val.modelXbrl.error("SBR.NL.2.3.10.11", _("RoleType %(roleURI)s missing id attribute"), modelObject=elt, roleURI=elt.get("roleURI")) elif elt.localName == "loc" and elt.get("{http://www.w3.org/1999/xlink}role"): val.modelXbrl.error("SBR.NL.2.3.10.08", _("Loc %(xlinkLabel)s has unauthorized role attribute"), modelObject=elt, xlinkLabel=elt.get("{http://www.w3.org/1999/xlink}label")) elif elt.localName == "documentation": val.modelXbrl.error("SBR.NL.2.3.10.12" if elt.namespaceURI == XbrlConst.link else "SBR.NL.2.2.11.02", _("Documentation element must not be used: %(value)s"), modelObject=elt, value=XmlUtil.text(elt), messageCodes=("SBR.NL.2.3.10.12", "SBR.NL.2.2.11.02")) if elt.localName == "linkbase": schemaLocation = elt.get("{http://www.w3.org/2001/XMLSchema-instance}schemaLocation") if schemaLocation: schemaLocations = schemaLocation.split() for sl in (XbrlConst.link, XbrlConst.xlink): if sl in schemaLocations: val.modelXbrl.error("SBR.NL.2.3.0.07", _("Linkbase element must not have schemaLocation entry for %(schemaLocation)s"), modelObject=elt, schemaLocation=sl) for attrName, errCode in (("id", "SBR.NL.2.3.10.04"), ("{http://www.w3.org/2001/XMLSchema-instance}nil", "SBR.NL.2.3.10.05"), ("{http://www.w3.org/2001/XMLSchema-instance}noNamespaceSchemaLocation", "SBR.NL.2.3.10.06"), ("{http://www.w3.org/2001/XMLSchema-instance}type", "SBR.NL.2.3.10.07")): if elt.get(attrName) is not None: val.modelXbrl.error(errCode, _("Linkbase element %(element)s must not have attribute %(attribute)s"), modelObject=elt, element=elt.qname, attribute=attrName, messageCodes=("SBR.NL.2.3.10.04", "SBR.NL.2.3.10.05", "SBR.NL.2.3.10.06", "SBR.NL.2.3.10.07")) for attrName, errCode in (("{http://www.w3.org/1999/xlink}actuate", "SBR.NL.2.3.10.01"), ("{http://www.w3.org/1999/xlink}show", "SBR.NL.2.3.10.02"), ("{http://www.w3.org/1999/xlink}title", "SBR.NL.2.3.10.03")): if elt.get(attrName) is not None: val.modelXbrl.error(errCode, _("Linkbase element %(element)s must not have attribute xlink:%(attribute)s"), modelObject=elt, element=elt.qname, attribute=attrName, messageCodes=("SBR.NL.2.3.10.01", "SBR.NL.2.3.10.02", "SBR.NL.2.3.10.03")) checkElements(val, modelDocument, elt) elif isinstance(elt,ModelComment): # comment node if val.validateSBRNL: if elt.itersiblings(preceding=True): 
val.modelXbrl.error("SBR.NL.2.2.0.05" if isSchema else "SBR.NL.2.3.0.05", _('%(fileType)s must have only one comment node before schema element: "%(value)s"'), modelObject=elt, fileType=modelDocument.gettype().title(), value=elt.text, messageCodes=("SBR.NL.2.2.0.05", "SBR.NL.2.3.0.05"))
ValueError
dataset/ETHPy150Open Arelle/Arelle/arelle/ValidateXbrlDTS.py/checkElements
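The masked handler in the record above wraps weight = float(weightAttr) in the calculationArc check, and the error message itself says "value error", consistent with the ValueError label: float() raises it for a non-numeric weight string. A minimal, self-contained sketch of the same guard follows; the function name and return convention are hypothetical, not Arelle's API.

def check_weight(weight_attr):
    # Mirrors the calculationArc weight validation: the attribute must
    # parse as a float and the value must be exactly 1 or -1.
    try:
        weight = float(weight_attr)
    except ValueError:
        # float("abc") raises ValueError -- the masked exception.
        return "weight %r is not a number" % (weight_attr,)
    if weight not in (1, -1):
        return "weight %s must be 1 or -1" % (weight_attr,)
    return None

assert check_weight("1.0") is None
assert "not a number" in check_weight("abc")
assert "must be 1 or -1" in check_weight("2")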
556
def _prepare_setuptools(self):
    if hasattr(TestDist, '_setuptools_ready'):
        return
    try:
        import setuptools
    except __HOLE__:
        py.test.skip("setuptools not found")
    if os.path.exists(os.path.join(self.rootdir, 'setup.py')):
        self.run(['setup.py', 'egg_info'], cwd=self.rootdir)
    TestDist._setuptools_ready = True
ImportError
dataset/ETHPy150Open johncsnyder/SwiftKitten/cffi/testing/cffi1/test_zdist.py/TestDist._prepare_setuptools
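Record 556 is the optional-dependency pattern: the import happens inside the test fixture so a missing package (ImportError, per the label) becomes a skipped test rather than a failure. A standalone sketch using unittest in place of py.test; the skip call is the only swapped-in piece.

import unittest

class TestDist(unittest.TestCase):
    def test_egg_info(self):
        try:
            import setuptools  # optional test dependency
        except ImportError:
            self.skipTest("setuptools not found")  # skip, don't fail
        self.assertTrue(hasattr(setuptools, "setup"))

if __name__ == '__main__':
    unittest.main()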
557
def join_component_view(component, view):
    """Pack a componentID and optional view into single tuple

    Returns an object compatible with data.__getitem__ and
    related methods. Handles edge cases of when view is None,
    a scalar, a tuple, etc.

    :param component: ComponentID
    :param view: view into data, or None
    """
    if view is None:
        return component
    result = [component]
    try:
        result.extend(view)
    except __HOLE__:  # view is a scalar
        result = [component, view]
    return tuple(result)
TypeError
dataset/ETHPy150Open glue-viz/glue/glue/core/util.py/join_component_view
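Record 557 leans on list.extend raising TypeError when handed a non-iterable; that failure is how the function detects "view is a scalar" without type-sniffing. A runnable sketch of the EAFP dispatch:

def pack(component, view):
    # Try to treat `view` as an iterable; fall back to scalar
    # handling when extend() raises TypeError.
    result = [component]
    try:
        result.extend(view)
    except TypeError:  # view is a scalar, e.g. an int
        result = [component, view]
    return tuple(result)

assert pack("cid", (1, 2)) == ("cid", 1, 2)
assert pack("cid", 5) == ("cid", 5)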
558
def tick_linker(all_categories, pos, *args):
    try:
        pos = np.round(pos)
        return all_categories[int(pos)]
    except __HOLE__:
        return ''
IndexError
dataset/ETHPy150Open glue-viz/glue/glue/core/util.py/tick_linker
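Record 558 is a matplotlib tick formatter: the axis may request positions past the last category, so the IndexError from the list lookup is converted into an empty tick label. A self-contained version without the plotting context (numpy assumed, as in the original):

import numpy as np

def tick_label(categories, pos):
    try:
        return categories[int(np.round(pos))]
    except IndexError:  # tick position beyond the last category
        return ''

cats = ['a', 'b', 'c']
assert tick_label(cats, 1.2) == 'b'
assert tick_label(cats, 7.0) == ''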
559
def f_get(self, *args):
    """Returns annotations

    If len(args)>1, then returns a list of annotations.

    `f_get(X)` with *X* integer will return the annotation with name
    `annotation_X`.

    If the annotation contains only a single entry you can call `f_get()`
    without arguments. If you call `f_get()` and the annotation contains
    more than one element a ValueError is thrown.
    """
    if len(args) == 0:
        if len(self._dict) == 1:
            return self._dict[compat.listkeys(self._dict)[0]]
        elif len(self._dict) > 1:
            raise ValueError('Your annotation contains more than one entry: '
                             '`%s` Please use >>f_get<< with one of these.' %
                             (str(compat.listkeys(self._dict))))
        else:
            raise AttributeError('Your annotation is empty, cannot access data.')

    result_list = []
    for name in args:
        name = self._translate_key(name)
        try:
            result_list.append(self._dict[name])
        except __HOLE__:
            raise AttributeError('Your annotation does not contain %s.' % name)

    if len(args) == 1:
        return result_list[0]
    else:
        return tuple(result_list)
KeyError
dataset/ETHPy150Open SmokinCaterpillar/pypet/pypet/annotations.py/Annotations.f_get
560
def f_remove(self, key):
    """Removes `key` from annotations"""
    key = self._translate_key(key)
    try:
        del self._dict[key]
    except __HOLE__:
        raise AttributeError('Your annotations do not contain %s' % key)
KeyError
dataset/ETHPy150Open SmokinCaterpillar/pypet/pypet/annotations.py/Annotations.f_remove
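Records 559 and 560 share one convention: entries live in a plain dict, but the class's documented errors are AttributeError, so the internal KeyError is caught and re-raised under the public contract. A minimal sketch of that translation:

class Annotations(object):
    def __init__(self):
        self._dict = {}

    def f_remove(self, key):
        try:
            del self._dict[key]
        except KeyError:
            # Translate the dict's KeyError into the documented
            # AttributeError of the annotations API.
            raise AttributeError('Your annotations do not contain %s' % key)

ann = Annotations()
try:
    ann.f_remove('missing')
except AttributeError as exc:
    print(exc)  # Your annotations do not contain missing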
561
def flavor():
    import maya.cmds
    try:
        return maya.cmds.about(product=1).split()[1]
    except __HOLE__:
        raise RuntimeError, "This method cannot be used until maya is fully initialized"
AttributeError
dataset/ETHPy150Open CountZer0/PipelineConstructionSet/python/maya/site-packages/pymel-1.0.3/pymel/versions.py/flavor
562
def isEval():
    import maya.cmds
    try:
        return maya.cmds.about(evalVersion=1)
    except __HOLE__:
        raise RuntimeError, "This method cannot be used until maya is fully initialized"
AttributeError
dataset/ETHPy150Open CountZer0/PipelineConstructionSet/python/maya/site-packages/pymel-1.0.3/pymel/versions.py/isEval
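Records 561 and 562 guard the same startup race: before Maya finishes initializing, maya.cmds does not yet expose about(), so the call raises AttributeError and both helpers rewrite it into an actionable RuntimeError. A standalone sketch with a stand-in module object; Maya itself is not assumed:

import types

# Stand-in for maya.cmds before initialization: no `about` yet.
cmds = types.ModuleType('cmds')

def flavor():
    try:
        return cmds.about(product=1).split()[1]
    except AttributeError:
        raise RuntimeError(
            "This method cannot be used until maya is fully initialized")

try:
    flavor()
except RuntimeError as exc:
    print(exc)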
563
def get_fixed_option_value(option, s):
    if option.type == 'float':
        return fix_float_string(s)
    if option.type == 'int':
        return fix_int_string(s)
    if option.type == 'long':
        return fix_int_string(s)
    if option.type == 'choice':
        choices = map(str, option.choices)
        try:
            return option.choices[choices.index(str(s))]
        except __HOLE__:
            pass
    return s
ValueError
dataset/ETHPy150Open pearu/pylibtiff/libtiff/optparse_gui.py/get_fixed_option_value
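In record 563 the ValueError comes from list.index: when the stringified input matches none of the stringified choices, index() raises and the function falls through to returning the input unchanged. A compact sketch:

def fix_choice(choices, s):
    # Match str(s) against str() of each allowed choice;
    # list.index raises ValueError when there is no match.
    strings = [str(c) for c in choices]
    try:
        return choices[strings.index(str(s))]
    except ValueError:
        return s  # leave unrecognized values untouched

assert fix_choice([1, 2, 3], "2") == 2
assert fix_choice([1, 2, 3], "9") == "9"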
564
def check_file(option, opt, value):
    try:
        value = str(value)
    except __HOLE__:
        raise OptionValueError(
            _("option %s: invalid %s value: %r") % (opt, what, value))
    #if value and not os.path.isfile(value):
    #    print 'Warning: path %r is not a file' % (value)
    return value
ValueError
dataset/ETHPy150Open pearu/pylibtiff/libtiff/optparse_gui.py/check_file
565
def check_directory(option, opt, value):
    try:
        value = str(value)
    except __HOLE__:
        raise OptionValueError(
            _("option %s: invalid %s value: %r") % (opt, what, value))
    #if value and not os.path.isdir(value):
    #    print 'Warning: path %r is not a directory' % (value)
    return value
ValueError
dataset/ETHPy150Open pearu/pylibtiff/libtiff/optparse_gui.py/check_directory
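Records 564 and 565 are optparse checker callbacks: a checker must either return the coerced value or raise OptionValueError, so the coercion failure (ValueError, per the label) is rewrapped with a user-facing message. The sketch below wires a hypothetical "port" type into a real OptionParser to show the same error-translation shape; the file/directory semantics of the originals are not reproduced.

import optparse

def check_port(option, opt, value):
    # optparse contract: return the coerced value or raise
    # OptionValueError with a readable message.
    try:
        return int(value)
    except ValueError:
        raise optparse.OptionValueError(
            "option %s: invalid port value: %r" % (opt, value))

class MyOption(optparse.Option):
    TYPES = optparse.Option.TYPES + ("port",)
    TYPE_CHECKER = dict(optparse.Option.TYPE_CHECKER, port=check_port)

parser = optparse.OptionParser(option_class=MyOption)
parser.add_option("--port", type="port")
opts, _ = parser.parse_args(["--port", "8080"])
assert opts.port == 8080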
566
def _set_dest_value(self, dest, value, old_cwd=None):
    if value is None:
        return
    option = ([option for option in self._get_all_options()
               if option.dest == dest] + [None])[0]
    if option is None:
        print 'Could not find option with dest=%r for setting %r (dest not specified for option).' % (dest, value)
    else:
        if option.type in ['file', 'directory']:
            if old_cwd is not None:
                value = os.path.join(old_cwd, value)
            else:
                value = os.path.abspath(value)
            cwd = os.getcwd()
            if value.startswith(cwd):
                value = value[len(cwd)+1:]
        if value == 'none':
            value = None
        else:
            try:
                option.check_value(option.dest, str(value))
            except optparse.OptionValueError, msg:
                print '_set_dest_value: ignoring %s' % msg
                return
        option.default = value
        if value is None:
            try:
                del self.defaults[dest]
            except __HOLE__:
                pass
        else:
            self.defaults[dest] = value
KeyError
dataset/ETHPy150Open pearu/pylibtiff/libtiff/optparse_gui.py/OptionParser._set_dest_value
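The masked handler in record 566 is the delete-if-present idiom: clearing a stored default must not fail when none was ever set, so the KeyError is swallowed. A two-line sketch, plus the dict.pop equivalent as a design note:

defaults = {'outfile': 'a.txt'}

def clear_default(dest):
    try:
        del defaults[dest]
    except KeyError:
        pass  # no default recorded for this dest -- nothing to do

clear_default('outfile')
clear_default('outfile')  # second call is a harmless no-op

# The same effect without try/except:
defaults.pop('outfile', None)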
567
def main():
    """Wrap it all up together"""
    delay = DELAY
    if len(sys.argv) > 1:
        arg = sys.argv[1]
        if arg.isdigit():
            delay = int(arg)
        else:
            sys.stderr.write("Ignoring non-integer argument. Using default: %ss\n" % delay)

    sock = socket.socket()
    try:
        sock.connect((CARBON_SERVER, CARBON_PICKLE_PORT))
    except socket.error:
        raise SystemExit("Couldn't connect to %(server)s on port %(port)d, is carbon-cache.py running?"
                         % {'server': CARBON_SERVER, 'port': CARBON_PICKLE_PORT})

    try:
        run(sock, delay)
    except __HOLE__:
        sys.stderr.write("\nExiting on CTRL-c\n")
        sys.exit(0)
KeyboardInterrupt
dataset/ETHPy150Open graphite-project/carbon/examples/example-pickle-client.py/main
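Record 567 ends with the standard Ctrl-C pattern for long-running clients: KeyboardInterrupt is caught around the main loop so the process prints a farewell and exits 0 instead of dumping a traceback. A minimal runnable version; the sleep loop stands in for run(sock, delay):

import sys
import time

def run(delay):
    while True:  # stand-in for the metric-sending loop
        time.sleep(delay)

def main():
    try:
        run(delay=1)
    except KeyboardInterrupt:
        sys.stderr.write("\nExiting on CTRL-c\n")
        sys.exit(0)

if __name__ == '__main__':
    main()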
568
def killProcessGroupHoldingPorts(portStart, portStop, retries=5):
    toKillOrNone = pidAndProcessNameHoldingPorts(portStart, portStop)
    if toKillOrNone is None:
        return False
    pidToKill, processName = toKillOrNone
    try:
        pidGroupToKill = os.getpgid(pidToKill)
        logging.warn("Killing process group pgid=%s containing %s at pid=%s",
                     pidGroupToKill, processName, pidToKill)
        os.killpg(pidGroupToKill, 9)
    except __HOLE__:
        logging.warn("Failed to kill process holding port: %s.\n%s",
                     toKillOrNone, traceback.format_exc())
    toKillOrNone2 = pidAndProcessNameHoldingPorts(portStart, portStop)
    if toKillOrNone2 is not None:
        logging.error("Failed to free the port range %s-%s. It was held as %s, now as %s",
                      portStart, portStop, toKillOrNone, toKillOrNone2)
        if retries < 1:
            raise UserWarning("Failed to clear the port")
        logging.info("Trying to kill process group holding port range %s-%s again",
                     portStart, portStop)
        return killProcessGroupHoldingPorts(portStart, portStop, retries-1)
    return True
OSError
dataset/ETHPy150Open ufora/ufora/ufora/util/KillProcessHoldingPort.py/killProcessGroupHoldingPorts
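In record 568 the OSError comes from the os.getpgid/os.killpg pair: the target can exit between lookup and kill, or belong to another user, so the failure is logged and a port re-check decides whether to retry. The sketch below exercises only the failure branch, on POSIX, by using a PID that is effectively guaranteed not to exist:

import os

def try_kill_group(pid):
    # os.getpgid / os.killpg raise OSError when the process is gone
    # (ESRCH) or not ours to signal (EPERM).
    try:
        os.killpg(os.getpgid(pid), 9)
        return True
    except OSError as exc:
        print("could not kill process group of %s: %s" % (pid, exc))
        return False

print(try_kill_group(99999999))  # prints the error path, then False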
569
def get_setting(name, default):
    """
    TODO encrypt using secret
    """
    try:
        setting = Setting.query.get(name)
        return setting.value_decrypted
    except __HOLE__:
        return default
AttributeError
dataset/ETHPy150Open nkuttler/flaskwallet/settingsapp/helpers.py/get_setting
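Record 569 relies on SQLAlchemy's Query.get returning None for a missing key: the attribute access on None raises AttributeError, which doubles as the "setting not found" signal. A database-free sketch of the same shape (the query object is faked):

class Row(object):
    def __init__(self, value):
        self.value_decrypted = value

class FakeQuery(object):
    """Stand-in for Setting.query: get() returns the row or None."""
    _rows = {'currency': Row('BTC')}

    @classmethod
    def get(cls, name):
        return cls._rows.get(name)  # None when missing, like Query.get

def get_setting(name, default):
    try:
        return FakeQuery.get(name).value_decrypted
    except AttributeError:  # get() returned None -> no such setting
        return default

assert get_setting('currency', 'USD') == 'BTC'
assert get_setting('missing', 'USD') == 'USD'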
570
def seek(self, page_number):
    # Starting with #0
    self.file_object.seek(0)

    try:
        self.image = Image.open(self.file_object)
    except __HOLE__:
        # Cannot identify image file
        self.image = self.convert(page_number=page_number)
    else:
        self.image.seek(page_number)
        self.image.load()
IOError
dataset/ETHPy150Open mayan-edms/mayan-edms/mayan/apps/converter/classes.py/ConverterBase.seek
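Record 570 uses Pillow's contract that Image.open raises IOError when it cannot identify the bytes as an image; the converter treats that as "not a native image, convert it first". A sketch that drives the failure branch with an in-memory non-image (Pillow assumed installed; its newer UnidentifiedImageError still subclasses IOError/OSError, so the except clause holds):

import io
from PIL import Image

def open_or_convert(file_object, convert):
    try:
        return Image.open(file_object)
    except IOError:
        # Pillow could not identify the bytes as an image:
        # fall back to an external converter.
        return convert(file_object)

fake_pdf = io.BytesIO(b'%PDF-1.4 not really an image')
result = open_or_convert(fake_pdf, convert=lambda f: 'converted')
assert result == 'converted'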
571
def items_for_result(view, result):
    """
    Generates the actual list of data.
    """
    modeladmin = view.model_admin
    for field_name in view.list_display:
        empty_value_display = modeladmin.get_empty_value_display(field_name)
        row_classes = ['field-%s' % field_name]
        try:
            f, attr, value = lookup_field(field_name, result, modeladmin)
        except __HOLE__:
            result_repr = empty_value_display
        else:
            empty_value_display = getattr(
                attr, 'empty_value_display', empty_value_display)
            if f is None or f.auto_created:
                allow_tags = getattr(attr, 'allow_tags', False)
                boolean = getattr(attr, 'boolean', False)
                if boolean or not value:
                    allow_tags = True
                if django.VERSION >= (1, 9):
                    result_repr = display_for_value(
                        value, empty_value_display, boolean)
                else:
                    result_repr = display_for_value(value, boolean)
                # Strip HTML tags in the resulting text, except if the
                # function has an "allow_tags" attribute set to True.
                if allow_tags:
                    result_repr = mark_safe(result_repr)
                if isinstance(value, (datetime.date, datetime.time)):
                    row_classes.append('nowrap')
            else:
                if isinstance(f, models.ManyToOneRel):
                    field_val = getattr(result, f.name)
                    if field_val is None:
                        result_repr = empty_value_display
                    else:
                        result_repr = field_val
                else:
                    if django.VERSION >= (1, 9):
                        result_repr = display_for_field(
                            value, f, empty_value_display)
                    else:
                        result_repr = display_for_field(value, f)
                if isinstance(f, (
                        models.DateField, models.TimeField, models.ForeignKey)):
                    row_classes.append('nowrap')
        if force_text(result_repr) == '':
            result_repr = mark_safe('&nbsp;')
        row_classes.extend(
            modeladmin.get_extra_class_names_for_field_col(field_name, result))
        row_attrs_dict = modeladmin.get_extra_attrs_for_field_col(
            field_name, result)
        row_attrs_dict['class'] = ' '.join(row_classes)
        row_attrs = ''.join(
            ' %s="%s"' % (key, val) for key, val in row_attrs_dict.items())
        row_attrs_safe = mark_safe(row_attrs)
        yield format_html('<td{}>{}</td>', row_attrs_safe, result_repr)
ObjectDoesNotExist
dataset/ETHPy150Open torchbox/wagtail/wagtail/contrib/modeladmin/templatetags/modeladmin_tags.py/items_for_result
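Record 571's masked handler mirrors Django changelist rendering: lookup_field raises ObjectDoesNotExist when a row's related object is gone, and the cell falls back to the empty-value display rather than breaking the whole table. A framework-free sketch with a stand-in exception class; Django is not imported:

class ObjectDoesNotExist(Exception):
    """Stand-in for django.core.exceptions.ObjectDoesNotExist."""

def lookup_field(field_name, row):
    try:
        return row[field_name]
    except KeyError:
        raise ObjectDoesNotExist(field_name)

def render_cell(field_name, row, empty_value_display='-'):
    try:
        value = lookup_field(field_name, row)
    except ObjectDoesNotExist:
        value = empty_value_display  # broken relation -> placeholder
    return '<td>%s</td>' % value

assert render_cell('title', {'title': 'Home'}) == '<td>Home</td>'
assert render_cell('author', {'title': 'Home'}) == '<td>-</td>'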
572
def Rop(f, wrt, eval_points): """ Computes the R operation on `f` wrt to `wrt` evaluated at points given in `eval_points`. Mathematically this stands for the jacobian of `f` wrt to `wrt` right muliplied by the eval points. :type f: Variable or list of Variables `f` stands for the output of the computational graph to which you want to apply the R operator :type wrt: Variable or list of `Variables`s variables for which you compute the R operator of the expression described by `f` :type eval_points: Variable or list of Variables evalutation points for each of the variables in `wrt` :rtype: Variable or list/tuple of Variables depending on type of f :return: symbolic expression such that R_op[i] = sum_j ( d f[i] / d wrt[j]) eval_point[j] where the indices in that expression are magic multidimensional indices that specify both the position within a list and all coordinates of the tensor element in the last. If `wrt` is a list/tuple, then return a list/tuple with the results. """ from theano.tensor import as_tensor_variable using_list = isinstance(f, list) using_tuple = isinstance(f, tuple) if not isinstance(wrt, (list, tuple)): wrt = [wrt] if not isinstance(eval_points, (list, tuple)): eval_points = [eval_points] if not isinstance(f, (list, tuple)): f = [f] assert len(wrt) == len(eval_points) # Check that each element of wrt corresponds to an element # of eval_points with the same dimensionality. for pack in enumerate(zip(wrt, eval_points)): i = pack[0] wrt_elem, eval_point = pack[1] if not isinstance(wrt_elem, gof.Variable): wrt_elem = as_tensor_variable(wrt_elem) if not isinstance(eval_point, gof.Variable): eval_point = as_tensor_variable(eval_point) try: if wrt_elem.type.ndim != eval_point.type.ndim: raise ValueError('Element ' + str(i) + ' of wrt/eval_point have mismatched ' + 'dimensionality: ' + str(wrt_elem.type.ndim) + ' versus ' + str(eval_point.type.ndim)) except __HOLE__: # wrt_elem and eval_point don't always have ndim like random type # Tensor, Sparse and CudaNdArray have the ndim attribute pass seen_nodes = OrderedDict() def _traverse(node): """ TODO: writeme """ if node is None: return op = node.op inputs = node.inputs # Compute the evaluation points corresponding to each of the # inputs of the node local_eval_points = [] for inp in inputs: if inp in wrt: local_eval_points.append(eval_points[wrt.index(inp)]) elif inp.owner is None: try: local_eval_points.append(inp.zeros_like()) except: # None should be used for non-differentiable # arguments, like for example random states local_eval_points.append(None) elif inp.owner in seen_nodes: local_eval_points.append( seen_nodes[inp.owner][inp.owner.outputs.index(inp)]) else: # We actually need to compute the R_op for this node _traverse(inp.owner) local_eval_points.append( seen_nodes[inp.owner][inp.owner.outputs.index(inp)]) same_type_eval_points = [] for x, y in zip(inputs, local_eval_points): if y is not None: if not isinstance(x, gof.Variable): x = as_tensor_variable(x) if not isinstance(y, gof.Variable): y = as_tensor_variable(y) try: y = x.type.filter_variable(y) except TypeError: # This is a hack # Originally both grad and Rop were written # with the assumption that a variable and the # gradient wrt that variable would have the same # dtype. This was a bad assumption because the # gradient wrt an integer can take on non-integer # values. 
# grad is now fixed, but Rop is not, so when grad # does the right thing and violates this assumption # we have to make it be wrong for Rop to keep working # Rop should eventually be upgraded to handle integers # correctly, the same as grad y = theano.tensor.cast(y, x.type.dtype) y = x.type.filter_variable(y) assert x.type == y.type same_type_eval_points.append(y) else: same_type_eval_points.append(y) seen_nodes[node] = op.R_op(node.inputs, same_type_eval_points) # end _traverse # Populate the dictionary for out in f: _traverse(out.owner) rval = [] for out in f: if out in wrt: rval.append(eval_points[wrt.index(out)]) elif seen_nodes[out.owner][out.owner.outputs.index(out)] is None: raise ValueError(('The function is not differentiable with ' 'respect to the provided inputs !')) else: rval.append(seen_nodes[out.owner][out.owner.outputs.index(out)]) return format_as(using_list, using_tuple, rval)
AttributeError
dataset/ETHPy150Open rizar/attention-lvcsr/libs/Theano/theano/gradient.py/Rop
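In record 572 the ndim comparison is wrapped because not every Theano variable type carries .ndim (random-state types do not), so the AttributeError is used to skip the check for those types while a genuine mismatch still raises ValueError. A Theano-free sketch of the validate-only-when-the-attribute-exists idiom:

def check_same_ndim(wrt_elem, eval_point, i):
    try:
        if wrt_elem.ndim != eval_point.ndim:
            raise ValueError('Element %d of wrt/eval_point have mismatched '
                             'dimensionality: %d versus %d'
                             % (i, wrt_elem.ndim, eval_point.ndim))
    except AttributeError:
        # Types without .ndim are exempt from the check; the
        # ValueError above is not an AttributeError, so it escapes.
        pass

class Tensor(object):
    def __init__(self, ndim):
        self.ndim = ndim

class RandomState(object):
    pass  # deliberately has no .ndim

check_same_ndim(Tensor(2), Tensor(2), 0)      # passes silently
check_same_ndim(RandomState(), Tensor(2), 1)  # skipped via AttributeError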
573
def _populate_var_to_app_to_idx(outputs, wrt, consider_constant): """ Helper function for grad function. outputs: a list of variables we want to take gradients of wrt: a list of variables we want to take the gradient with respect to. consider_constant: a list of variables not to backpropagate through. returns: var_to_app_to_idx: A dictionary mapping a variable to a second dictionary. The second dictionary maps apply nodes acting on this variable to the variable's index in the apply node's input list. This dictionary will only contain variables that meet two criteria: 1) The elements of at least one output are a function of the elements of the variable 2) The elements of the variable are a function of the elements of at least one member of wrt. This set is exactly the set of variables that connect the variables in wrt to the cost being differentiated. (A variable in consider_constant is not a function of anything) """ # Validate and format consider_constant if consider_constant is None: consider_constant = [] else: # error checking on consider_constant: verify that it is a collection # of theano variables # this is important, if someone accidentally passes a nested data # structure with theano variables at the leaves, only the root will # be properly considered constant try: iter(consider_constant) except __HOLE__: raise TypeError('consider_constant must be an iterable collection,' ' got ' + str(type(consider_constant))) for elem in consider_constant: if not isinstance(elem, gof.Variable): raise TypeError('Elements of consider_constant must be ' 'variables, but got ' + str(type(elem))) # var_to_app_to_idx[var][node] = [i,j] means node has # var as input at positions i and j var_to_app_to_idx = OrderedDict() # Set of variables that have been added to their true parents # ('true' here means that the elements of the variable are a function # of the elements of the parent, according to the op's # connection_pattern) # Note: we need to revisit the apply nodes repeatedly, because # different outputs of the apply node are connected to # different subsets of the inputs. accounted_for = set([]) def account_for(var): # Don't visit the same variable twice if var in accounted_for: return accounted_for.add(var) # Constants are not a function of anything if var in consider_constant: return # Recursively add the variables that this variable is # a function of. if var.owner is not None: app = var.owner connection_pattern = _node_to_pattern(app) var_idx = app.outputs.index(var) for i, ipt in enumerate(app.inputs): # don't process ipt if it is not a true # parent of var if not connection_pattern[i][var_idx]: continue if ipt not in var_to_app_to_idx: # This object here *must* be an OrderedDict, because # we iterate over its keys when adding up the terms of the # gradient on ipt. If it is a regular dict, the grad method # will return something that is analytically correct, but # whose order of doing additions depends on the memory # location of the apply nodes. var_to_app_to_idx[ipt] = OrderedDict() app_to_idx = var_to_app_to_idx[ipt] if app not in app_to_idx: app_to_idx[app] = [] idx = app_to_idx[app] if i not in idx: idx.append(i) account_for(ipt) # add all variables that are true ancestors of the cost for output in outputs: account_for(output) # determine which variables have elements of wrt as a true # ancestor. 
Do this with an upward pass starting from wrt, # following only true connections visited = set([]) def visit(var): if var in visited: return if var not in var_to_app_to_idx: return visited.add(var) nodes = var_to_app_to_idx[var] for node in nodes: connection_pattern = _node_to_pattern(node) for idx in nodes[node]: for ii, output in enumerate(node.outputs): if connection_pattern[idx][ii]: visit(output) for elem in wrt: visit(elem) # Remove variables that don't have wrt as a true ancestor orig_vars = list(var_to_app_to_idx.keys()) for var in orig_vars: if var not in visited: del var_to_app_to_idx[var] return var_to_app_to_idx
TypeError
dataset/ETHPy150Open rizar/attention-lvcsr/libs/Theano/theano/gradient.py/_populate_var_to_app_to_idx
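As a rough illustration of the mapping this helper builds, here is a sketch using plain objects instead of real Theano variables and apply nodes (all names below are hypothetical stand-ins, not Theano API):

    from collections import OrderedDict

    # Hypothetical graph: y = f(x, x) -- the variable x feeds the apply
    # node f_apply at input positions 0 and 1.
    f_apply = object()  # stand-in for a Theano Apply node
    x = object()        # stand-in for a Theano Variable

    var_to_app_to_idx = OrderedDict()
    var_to_app_to_idx[x] = OrderedDict()
    var_to_app_to_idx[x][f_apply] = [0, 1]  # x is input 0 and input 1 of f_apply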
574
def _populate_grad_dict(var_to_app_to_idx, grad_dict, wrt, cost_name=None):
    """
    Helper function for grad function.

    var_to_app_to_idx: a dictionary mapping a variable to a second
        dictionary. the second dictionary maps apply nodes acting on this
        variable to the variable's index in the apply node's input list

    grad_dict: A dictionary mapping variables to their gradients.
        Should be populated by grad function, which should:
            -Set the gradient with respect to the cost to 1
            -Load all gradients from known_grads, possibly overriding the cost
            -Set the gradient for disconnected inputs to a variable with
             type DisconnectedType()

    wrt: the minimal set of variables that must be included in grad_dict

    cost_name: The name of the cost being differentiated, optional.
        used to name the grad with respect to x as (d<cost_name>/dx)

    returns: a list of gradients corresponding to wrt
    """
    # build a dict mapping node to the terms node contributes to each of
    # its inputs' gradients
    term_dict = OrderedDict()

    def access_term_cache(node):
        """ Populates term_dict[node] and returns it """

        if node not in term_dict:

            inputs = node.inputs

            output_grads = [access_grad_cache(var) for var in node.outputs]

            # list of bools indicating if each output is connected to the cost
            outputs_connected = [not isinstance(g.type, DisconnectedType)
                                 for g in output_grads]

            connection_pattern = _node_to_pattern(node)

            # list of bools indicating if each input is connected to the cost
            inputs_connected = [
                (True in
                 [input_to_output and output_to_cost
                  for input_to_output, output_to_cost in
                  zip(input_to_outputs, outputs_connected)])
                for input_to_outputs in connection_pattern]

            # List of bools indicating if each output is an integer dtype
            output_is_int = [hasattr(output.type, 'dtype') and
                             output.type.dtype in
                             theano.tensor.discrete_dtypes
                             for output in node.outputs]

            # List of bools indicating if each output is NullType
            ograd_is_nan = [isinstance(output.type, NullType)
                            for output in output_grads]

            # List of bools indicating if each input only has NullType outputs
            only_connected_to_nan = [
                (True not in
                 [in_to_out and out_to_cost and not out_nan
                  for in_to_out, out_to_cost, out_nan in
                  zip(in_to_outs, outputs_connected, ograd_is_nan)])
                for in_to_outs in connection_pattern]

            if True not in inputs_connected:
                # All outputs of this op are disconnected so we can skip
                # calling the op's grad method and report that the inputs
                # are disconnected
                # (The op's grad method could do this too, but this saves the
                # implementer the trouble of worrying about this case)
                input_grads = [disconnected_type() for ipt in inputs]
            elif False not in only_connected_to_nan:
                # All inputs are only connected to nan gradients, so we don't
                # need to bother calling the grad method. We know the gradient
                # with respect to all connected inputs is nan.
                input_grads = []
                for connected in inputs_connected:
                    if connected:
                        input_grads.append(null_type())
                    else:
                        input_grads.append(disconnected_type())
            else:
                # At least one input of this op is connected to the cost and
                # not all output gradients are undefined, so we must call
                # the op's grad method

                # Each Op's grad function requires inputs and output_grads.
                # If the Op destroys any input, but the grad expression uses
                # it, then chances are the resulting graph will have a
                # dependency cycle. We avoid this cycle by passing (symbolic)
                # copies of each destroyed input.
                try:
                    dinputs = [node.inputs[x[0]] for x in
                               itervalues(node.op.destroy_map)]
                except __HOLE__:
                    dinputs = []

                def try_to_copy_if_needed(var):
                    if var in dinputs and hasattr(var, 'copy'):
                        return var.copy()
                    return var

                inputs = [try_to_copy_if_needed(ipt) for ipt in inputs]

                # Build a list of output gradients with the same dtype as
                # the corresponding output variable.
                # If an output is of a float dtype, we want to cast the
                # output gradient into the same dtype, to avoid having a
                # gradient graph with double precision (taking more memory,
                # and more computation).
                # If an output is of an integer dtype, then we just leave it
                # alone.
                # DO NOT force integer variables to have zero grad. This
                # causes bugs where we fail to detect disconnected or
                # undefined gradients.
                # DO NOT force integer variables to have integer dtype.
                # This is a violation of the op contract.
                new_output_grads = []
                for o, og in zip(node.outputs, output_grads):
                    o_dt = getattr(o.type, 'dtype', None)
                    og_dt = getattr(og.type, 'dtype', None)
                    if (o_dt not in theano.tensor.discrete_dtypes and
                            og_dt and o_dt != og_dt):
                        new_output_grads.append(og.astype(o_dt))
                    else:
                        new_output_grads.append(og)

                # Make sure that, if new_output_grads[i] has a floating point
                # dtype, it is the same dtype as outputs[i]
                for o, ng in zip(node.outputs, new_output_grads):
                    o_dt = getattr(o.type, 'dtype', None)
                    ng_dt = getattr(ng.type, 'dtype', None)
                    if (ng_dt is not None and
                            o_dt not in theano.tensor.discrete_dtypes):
                        assert ng_dt == o_dt

                # Someone who had obviously not read the Op contract tried
                # to modify this part of the function.
                # If you ever think it is a good idea to make an integer
                # valued gradient, please
                # 1) Read the Op contract again
                # 2) Talk to Ian Goodfellow
                # (Both of these sources will tell you not to do it)
                for ng in new_output_grads:
                    assert (getattr(ng.type, 'dtype', None)
                            not in theano.tensor.discrete_dtypes)

                # If config.compute_test_value is turned on, check that the
                # gradients on the outputs of this node have the right shape.
                # We also check the gradient on the inputs later--both checks
                # are needed, because some gradients are only ever specified
                # by the user, not computed by Op.grad, and some gradients are
                # only computed and returned, but never passed as another
                # node's output grads.
                for idx, packed in enumerate(izip(node.outputs,
                                                  new_output_grads)):
                    orig_output, new_output_grad = packed
                    if not hasattr(orig_output, 'shape'):
                        continue
                    if isinstance(new_output_grad.type, DisconnectedType):
                        continue
                    for orig_output_v, new_output_grad_v in get_debug_values(
                            *packed):
                        o_shape = orig_output_v.shape
                        g_shape = new_output_grad_v.shape
                        if o_shape != g_shape:
                            raise ValueError(
                                "Got a gradient of shape " + str(o_shape) +
                                " on an output of shape " + str(g_shape))

                input_grads = node.op.grad(inputs, new_output_grads)

                if input_grads is None:
                    raise TypeError("%s.grad returned NoneType, "
                                    "expected iterable." % str(node.op))

                if len(input_grads) != len(inputs):
                    raise ValueError(("%s returned the wrong number of" +
                                      " gradient terms.") % str(node.op))

                # We can not enforce this, as AdvancedSubtensor1 has an
                # option to return the sparse grad for optimization reason.
                # for ig, i in zip(input_grads, inputs):
                #     if (not isinstance(ig.type, (DisconnectedType,
                #                                  NullType)) and
                #             type(ig.type) != type(i.type)):
                #         raise ValueError(
                #             "%s returned the wrong type for gradient terms."
                #             " Sparse inputs must have sparse grads and dense"
                #             " inputs must have dense grad. Got %s, expected"
                #             " %s" % (str(node.op), ig.type, i.type))

            # must convert to list in case the op returns a tuple
            # we won't be able to post-process out the Nones if it does that
            input_grads = list(input_grads)

            # Need to propagate the NullType gradients; if an input grad is
            # not disconnected and the corresponding input is connected
            # to at least one output whose gradient is NullType then the
            # input grad should be NullType.
            for inp_idx in range(len(input_grads)):
                for out_idx in range(len(ograd_is_nan)):
                    if (ograd_is_nan[out_idx] and
                            connection_pattern[inp_idx][out_idx] and
                            not isinstance(input_grads[inp_idx].type,
                                           DisconnectedType)):
                        input_grads[inp_idx] = output_grads[out_idx]

            # Do type checking on the result

            # List of bools indicating if each input only has integer outputs
            only_connected_to_int = [
                (True not in
                 [in_to_out and out_to_cost and not out_int
                  for in_to_out, out_to_cost, out_int in
                  zip(in_to_outs, outputs_connected, output_is_int)])
                for in_to_outs in connection_pattern]

            for i, term in enumerate(input_grads):

                # Disallow Nones
                if term is None:
                    # We don't know what None means. in the past it has been
                    # used to mean undefined, zero, or disconnected.
                    # We therefore don't allow it because its usage has become
                    # so muddied.
                    raise TypeError(
                        ('%s.grad returned None for' +
                         ' a gradient term, '
                         'this is prohibited. Instead of None,'
                         'return zeros_like(input), disconnected_type(),'
                         ' or a NullType variable such as those made with '
                         'the grad_undefined or grad_unimplemented helper '
                         'functions.') % node.op)

                # Check that the gradient term for this input
                # has the right shape
                if hasattr(term, 'shape'):
                    orig_ipt = inputs[i]
                    for orig_ipt_v, term_v in get_debug_values(orig_ipt,
                                                               term):
                        i_shape = orig_ipt_v.shape
                        t_shape = term_v.shape
                        if i_shape != t_shape:
                            raise ValueError(
                                "%s.grad returned object of "
                                "shape %s as gradient term on input %d "
                                "of shape %s" % (node.op, t_shape, i,
                                                 i_shape))

                if not isinstance(term.type, (NullType, DisconnectedType)):
                    if term.type.dtype not in theano.tensor.float_dtypes:
                        raise TypeError(str(node.op) + '.grad illegally '
                                        ' returned an integer-valued variable.'
                                        ' (Input index %d, dtype %s)' % (
                                            i, term.type.dtype))

                    if only_connected_to_nan[i]:
                        assert isinstance(term.type, NullType)

                    if only_connected_to_int[i]:
                        # This term has only integer outputs and we know
                        # it's not undefined or disconnected
                        # The only other valid thing it can be is 0

                        is_zero = _is_zero(term)
                        assert is_zero in ['yes', 'no', 'maybe']

                        if is_zero == 'maybe':
                            msg = "%s.grad returned %s of type %s for input"
                            msg += " %d. This input's only connections to "
                            msg += "the cost through this op are via "
                            msg += "integer-valued outputs so it should be "
                            msg += "NullType, DisconnectedType, or some form "
                            msg += "of zeros. It is not NullType or "
                            msg += "DisconnectedType and theano can't "
                            msg += "simplify it to a constant, so it's not "
                            msg += "verifiably zeros."
                            msg = msg % (str(node.op), str(term),
                                         str(type(term)), i)

                        if is_zero == 'no':
                            msg = "%s.grad returned %s of type %s for input"
                            msg += " %d. Since this input is only connected "
                            msg += "to integer-valued outputs, it should "
                            msg += "evaluate to zeros, but it evaluates to"
                            msg += "%s."
                            msg = msg % (node.op, term, type(term), i,
                                         theano.get_scalar_constant_value(
                                             term))
                            raise ValueError(msg)

            # Check that op.connection_pattern matches the connectivity
            # logic driving the op.grad method
            for i, packed in enumerate(zip(inputs, input_grads,
                                           inputs_connected)):
                ipt, ig, connected = packed
                actually_connected = \
                    not isinstance(ig.type, DisconnectedType)

                if actually_connected and not connected:
                    msg = "%s.grad returned %s of type %s for input %d."
                    msg += " Expected DisconnectedType instance based on "
                    msg += " the output of the op's connection_pattern "
                    msg += "method."
                    msg = msg % (str(node.op), str(ig), str(ig.type), i)
                    raise TypeError(msg)

                if connected and not actually_connected:
                    msg = "%s.grad returned DisconnectedType for input"
                    msg += " %d."
                    msg = msg % (str(node.op), i)
                    if hasattr(node.op, 'connection_pattern'):
                        msg += ' Its connection_pattern method does not'
                        msg += ' allow this.'
                        raise TypeError(msg)
                    else:
                        msg += ' You may want to implement a '
                        msg += 'connection_pattern method for it.'
                        warnings.warn(msg)

            # cache the result
            term_dict[node] = input_grads

        return term_dict[node]

    # populate grad_dict[var] and return it
    def access_grad_cache(var):
        if var not in grad_dict:
            # If var is not in grad_dict already, we must compute it
            if var in var_to_app_to_idx:
                null_terms = []
                terms = []
                node_to_idx = var_to_app_to_idx[var]
                for node in node_to_idx:
                    for idx in node_to_idx[node]:

                        term = access_term_cache(node)[idx]

                        if not isinstance(term, gof.Variable):
                            raise TypeError(
                                "%s.grad returned %s, expected"
                                " Variable instance." % (str(node.op),
                                                         type(term)))

                        if isinstance(term.type, NullType):
                            null_terms.append(term)
                            continue

                        # Don't try to sum up DisconnectedType placeholders
                        if isinstance(term.type, DisconnectedType):
                            continue

                        if hasattr(var, 'ndim') and term.ndim != var.ndim:
                            raise ValueError(
                                ("%s.grad returned a term with"
                                 " %d dimensions, but %d are required.") % (
                                    str(node.op), term.ndim, var.ndim))

                        terms.append(term)

                # Add up the terms to get the total gradient on this variable
                if len(null_terms) > 0:
                    # At least one term is a NullType : the total gradient
                    # will also be a NullType
                    grad_dict[var] = null_terms[0]
                elif len(terms) > 0:
                    # the next line is like sum(terms) but doesn't add an
                    # extraneous TensorConstant(0)
                    grad_dict[var] = reduce(lambda x, y: x + y, terms)
                else:
                    grad_dict[var] = disconnected_type()

                if cost_name is not None and var.name is not None:
                    grad_dict[var].name = '(d%s/d%s)' % (cost_name, var.name)
            else:
                # this variable isn't connected to the cost in the
                # computational graph
                grad_dict[var] = disconnected_type()
        # end if cache miss
        return grad_dict[var]

    rval = [access_grad_cache(elem) for elem in wrt]

    return rval
AttributeError
dataset/ETHPy150Open rizar/attention-lvcsr/libs/Theano/theano/gradient.py/_populate_grad_dict
575
def rawstack(depth=1):
    data = []
    while depth is not None:
        try:
            f = sys._getframe(depth)
        except __HOLE__:
            depth = None
        else:
            data.append((
                '/'.join(f.f_code.co_filename.split('/')[-2:]),
                f.f_code.co_name,
                f.f_lineno))
            depth += 1
    return data
ValueError
dataset/ETHPy150Open slideinc/notifier/notifier/pyinfo.py/rawstack
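A minimal usage sketch of the frame-walking pattern above, stdlib only (the helper and function names below are illustrative, not from the original module):

    import sys

    def caller_name(depth=2):
        # Return the name of the function `depth` frames up the stack,
        # or None if the stack is shorter than that.
        try:
            return sys._getframe(depth).f_code.co_name
        except ValueError:  # raised when depth exceeds the stack height
            return None

    def inner():
        return caller_name()

    def outer():
        return inner()

    print(outer())  # prints 'outer'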
576
@classmethod
def _fetch_task_executor(cls, options):
    kwargs = {}
    executor_cls = cls._default_executor_cls
    # Match the desired executor to a class that will work with it...
    desired_executor = options.get('executor')
    if isinstance(desired_executor, six.string_types):
        matched_executor_cls = None
        for m in cls._executor_str_matchers:
            if m.matches(desired_executor):
                matched_executor_cls = m.executor_cls
                break
        if matched_executor_cls is None:
            expected = set()
            for m in cls._executor_str_matchers:
                expected.update(m.strings)
            raise ValueError("Unknown executor string '%s' expected"
                             " one of %s (or mixed case equivalent)"
                             % (desired_executor, list(expected)))
        else:
            executor_cls = matched_executor_cls
    elif desired_executor is not None:
        matched_executor_cls = None
        for m in cls._executor_cls_matchers:
            if m.matches(desired_executor):
                matched_executor_cls = m.executor_cls
                break
        if matched_executor_cls is None:
            expected = set()
            for m in cls._executor_cls_matchers:
                expected.update(m.types)
            raise TypeError("Unknown executor '%s' (%s) expected an"
                            " instance of %s" % (desired_executor,
                                                 type(desired_executor),
                                                 list(expected)))
        else:
            executor_cls = matched_executor_cls
            kwargs['executor'] = desired_executor
    try:
        for (k, value_converter) in executor_cls.constructor_options:
            try:
                kwargs[k] = value_converter(options[k])
            except KeyError:
                pass
    except __HOLE__:
        pass
    return executor_cls(**kwargs)
AttributeError
dataset/ETHPy150Open openstack/taskflow/taskflow/engines/action_engine/engine.py/ParallelActionEngine._fetch_task_executor
577
def initialize():
    global global_db, local_db, search_dbs, compress_by_default, db_access, \
        git_bin, debug

    if configuration.check('git_bin'):
        git_bin = configuration.git_bin
    if git_bin.startswith("@executable_path/"):
        non_expand_path = git_bin
        git_bin = get_executable_path(git_bin[len("@executable_path/"):])
        if git_bin is not None:
            configuration.git_bin = non_expand_path
    if git_bin is None:
        git_bin = 'git'
        configuration.git_bin = git_bin
    if configuration.check('compress_by_default'):
        compress_by_default = configuration.compress_by_default
    if configuration.check('debug'):
        debug = configuration.debug
    if configuration.check('global_db'):
        global_db = configuration.global_db
    if configuration.check('local_db'):
        local_db = configuration.local_db
        if not os.path.exists(local_db):
            raise RuntimeError('local_db "%s" does not exist' % local_db)
    else:
        local_db = os.path.join(current_dot_vistrails(), 'persistent_files')
        if not os.path.exists(local_db):
            try:
                os.mkdir(local_db)
            except __HOLE__:
                raise RuntimeError('local_db "%s" does not exist' % local_db)

    local_repo = repo.get_repo(local_db)
    repo.set_current_repo(local_repo)

    debug_print('creating DatabaseAccess')
    db_path = os.path.join(local_db, '.files.db')
    db_access = DatabaseAccessSingleton(db_path)
    debug_print('done', db_access)

    search_dbs = [local_db, ]
    if configuration.check('search_dbs'):
        try:
            check_paths = literal_eval(configuration.search_dbs)
        except Exception:
            print "*** persistence error: cannot parse search_dbs ***"
        else:
            for path in check_paths:
                if os.path.exists(path):
                    search_dbs.append(path)
                else:
                    print '*** persistence warning: cannot find path "%s"' % path
OSError
dataset/ETHPy150Open VisTrails/VisTrails/vistrails/packages/persistence/init.py/initialize
578
def console_to_str(s):
    """ From pypa/pip project, pip.backwardcompat. License MIT. """
    try:
        return s.decode(console_encoding)
    except __HOLE__:
        return s.decode('utf_8')
UnicodeDecodeError
dataset/ETHPy150Open tony/tmuxp/tmuxp/_compat.py/console_to_str
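The decode-with-fallback idiom from this record, runnable standalone (encodings hard-coded here for illustration; the record reads the console encoding at import time):

    def to_text(raw_bytes, primary='ascii', fallback='utf-8'):
        # Try the preferred encoding first; fall back to UTF-8 on failure.
        try:
            return raw_bytes.decode(primary)
        except UnicodeDecodeError:
            return raw_bytes.decode(fallback)

    print(to_text(b'caf\xc3\xa9'))  # not valid ASCII, so the UTF-8 fallback is used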
579
def cleanup(filename):
    ''' tries to remove the given filename. Ignores non-existent files '''
    try:
        os.remove(filename)
    except __HOLE__:
        pass
OSError
dataset/ETHPy150Open tanghaibao/jcvi/apps/tesseract.py/cleanup
580
@pytest.fixture(scope='session')
def bokeh_server(request, log_file):
    bokeh_port = pytest.config.option.bokeh_port

    cmd = ["bin/bokeh", "serve"]
    argv = ["--port=%s" % bokeh_port]
    bokeh_server_url = 'http://localhost:%s' % bokeh_port

    try:
        proc = subprocess.Popen(cmd + argv, stdout=log_file, stderr=log_file)
    except __HOLE__:
        write("Failed to run: %s" % " ".join(cmd + argv))
        sys.exit(1)
    else:
        # Add in the clean-up code
        def stop_bokeh_server():
            write("Shutting down bokeh-server ...")
            proc.kill()
        request.addfinalizer(stop_bokeh_server)

        def wait_until(func, timeout=5.0, interval=0.01):
            start = time.time()
            while True:
                if func():
                    return True
                if time.time() - start > timeout:
                    return False
                time.sleep(interval)

        def wait_for_bokeh_server():
            def helper():
                if proc.returncode is not None:
                    return True
                try:
                    return requests.get(bokeh_server_url)
                except ConnectionError:
                    return False
            return wait_until(helper)

        if not wait_for_bokeh_server():
            write("Timeout when running: %s" % " ".join(cmd + argv))
            sys.exit(1)

        if proc.returncode is not None:
            write("bokeh server exited with code " + str(proc.returncode))
            sys.exit(1)

        return bokeh_server_url
OSError
dataset/ETHPy150Open bokeh/bokeh/tests/plugins/bokeh_server.py/bokeh_server
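The polling helper defined inside that fixture is a generally useful pattern on its own; a standalone sketch (timeout values are arbitrary):

    import time

    def wait_until(func, timeout=5.0, interval=0.01):
        # Poll func until it returns truthy or timeout seconds elapse.
        start = time.time()
        while True:
            if func():
                return True
            if time.time() - start > timeout:
                return False
            time.sleep(interval)

    deadline = time.time() + 0.05
    print(wait_until(lambda: time.time() > deadline))  # True after ~50 ms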
581
def __getattr__(self, key):
    try:
        return self.__dict__[key]
    except __HOLE__:
        if key in self.__class__.attributes:
            return None
        else:
            raise AttributeError(key)
KeyError
dataset/ETHPy150Open bcwaldon/python-gerrit/gerrit/model.py/BaseModel.__getattr__
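The record above shows the classic __getattr__ fallback; a self-contained sketch of the same pattern (the Model class and its fields below are hypothetical; note __getattr__ is only invoked when normal lookup has already failed):

    class Model(object):
        attributes = ('id', 'name')  # declared-but-optional fields

        def __getattr__(self, key):
            # Declared attributes default to None; anything else is an error.
            if key in self.__class__.attributes:
                return None
            raise AttributeError(key)

    m = Model()
    m.name = 'gerrit'
    print(m.name)  # 'gerrit'
    print(m.id)    # None (declared but never set)
    # m.other would raise AttributeError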
582
def coerce(cls, value):
    """Attempt to convert the given value to a wx.Colour

    Accepted Values:
        wx.Colour(Ptr)
        '#FFFFFF' style strings
        'black' string colour names
        3-tuple or 3-list of 0-255 integers
        None -- (gives black)
    """
    if cls.check(value):
        return value
    elif isinstance(value, (str, unicode)):
        if value and value[0] == '#':
            rest = value[1:]
            if rest:
                value = int(rest, 16)
                return wx.Colour(
                    value >> 16 & 255,
                    value >> 8 & 255,
                    value & 255)
            else:
                return wx.Colour(0, 0, 0)
        else:
            try:
                obj = wx.Colour(value)
            except (ValueError, __HOLE__):
                global COLOUR_DB_INITIALISED
                if not COLOUR_DB_INITIALISED:
                    COLOUR_DB_INITIALISED = 1
                    colourdb.updateColourDB()
                obj = wx.NamedColour(value)
            if not obj.Ok():
                raise ValueError(
                    """Unrecognised string value %r for Colour value""" % (value))
    elif isinstance(value, (tuple, list)):
        if len(value) == 3:
            obj = wx.Colour(*value)
        else:
            raise ValueError(
                """Unable to create wx.Colour from %r, incorrect length""" % (value))
    elif value is None:
        return wx.Colour(0, 0, 0)
    else:
        raise TypeError(
            """Unable to convert value %r (type %s) to wx.Colour object""" % (
                value, type(value)))
    return obj
TypeError
dataset/ETHPy150Open correl/Transmission-XBMC/resources/lib/basictypes/wx/colour.py/wxColour_DT.coerce
583
def template_not_found_message(template):
    message = ["Template not found: {0}".format(template),
               "Use the `--template` option to specify a template."]
    try:
        from pkg_resources import resource_listdir
        message.extend(["The following templates are built-in:"] +
                       resource_listdir('dayone_export', 'templates'))
    except __HOLE__:
        pass
    return '\n'.join(message)
ImportError
dataset/ETHPy150Open nathangrigg/dayone_export/dayone_export/cli.py/template_not_found_message
584
def run(args=None):
    args = parse_args(args)
    locale.setlocale(locale.LC_ALL, args.locale)

    # determine output format
    if args.format is None:
        args.format = os.path.splitext(args.output)[1][1:] if args.output \
            else 'html'
    if args.format.lower() in ['md', 'markdown', 'mdown', 'mkdn']:
        args.format = 'md'

    # Check journal files exist
    args.journal = os.path.expanduser(args.journal)
    if not os.path.exists(args.journal):
        return "File not found: " + args.journal
    if not os.path.exists(os.path.join(args.journal, 'entries')):
        return "Not a valid Day One package: " + args.journal

    # tags
    tags = args.tags
    if tags is not None:
        if tags != 'any':
            tags = [tag.strip() for tag in tags.split(',')]

    # excluded tags
    excluded_tags = args.exclude
    if excluded_tags is not None:
        excluded_tags = [tag.strip() for tag in excluded_tags.split(',')]

    # parse before and after date
    dates = [args.before, args.after]
    for i, date in enumerate(dates):
        if date:
            try:
                dates[i] = dateutil.parser.parse(date)
            except (__HOLE__, OverflowError):
                return "Unable to parse date '{0}'".format(date)
    before, after = dates

    generator = dayone_export(
        args.journal,
        template=args.template,
        reverse=args.reverse,
        tags=tags,
        exclude=excluded_tags,
        before=before,
        after=after,
        format=args.format,
        template_dir=args.template_dir,
        autobold=args.autobold,
        nl2br=args.nl2br,
        filename_template=args.output)

    try:
        # Output is a generator returning each file's name and contents
        # one at a time
        for filename, output in generator:
            if args.output:
                with codecs.open(filename, 'w', encoding='utf-8') as f:
                    f.write(output)
            else:
                compat.print_bytes(output.encode('utf-8'))
                compat.print_bytes("\n".encode('utf-8'))
    except jinja2.TemplateNotFound as err:
        return template_not_found_message(err)
    except PlistError as err:
        return str(err)
ValueError
dataset/ETHPy150Open nathangrigg/dayone_export/dayone_export/cli.py/run
585
def save(self):
    """Create new case and all called-for versions."""
    assert self.is_valid()

    version_kwargs = self.cleaned_data.copy()
    product = version_kwargs.pop("product")
    idprefix = version_kwargs.pop("idprefix")
    priority = version_kwargs.pop("priority")

    # ensure priority is an int, if not, store "None"
    try:
        int(priority)
    except __HOLE__:
        priority = None

    self.save_new_tags(product)

    case = model.Case.objects.create(
        product=product,
        user=self.user,
        idprefix=idprefix,
        priority=priority,
    )

    version_kwargs["case"] = case
    version_kwargs["user"] = self.user

    del version_kwargs["add_tags"]
    del version_kwargs["add_attachment"]

    suite = version_kwargs.pop("suite", None)
    if suite:
        order = model.SuiteCase.objects.filter(
            suite=suite,
        ).aggregate(Max("order"))["order__max"] or 0
        model.SuiteCase.objects.create(
            case=case,
            suite=suite,
            user=self.user,
            order=order + 1,
        )

    productversions = [version_kwargs.pop("productversion")]
    if version_kwargs.pop("and_later_versions"):
        productversions.extend(product.versions.filter(
            order__gt=productversions[0].order))

    for productversion in productversions:
        this_version_kwargs = version_kwargs.copy()
        this_version_kwargs["productversion"] = productversion
        caseversion = model.CaseVersion.objects.create(
            **this_version_kwargs)
        steps_formset = StepFormSet(
            data=self.data, instance=caseversion)
        steps_formset.save(user=self.user)
        self.save_tags(caseversion)
        self.save_attachments(caseversion)

    return case
ValueError
dataset/ETHPy150Open mozilla/moztrap/moztrap/view/manage/cases/forms.py/AddCaseForm.save
586
def save(self):
    """Create and return the new case(s) and version(s)."""
    assert self.is_valid()

    product = self.cleaned_data["product"]
    idprefix = self.cleaned_data["idprefix"]
    priority = self.cleaned_data["priority"]

    # ensure priority is an int, if not, store "None"
    try:
        int(priority)
    except __HOLE__:
        priority = None

    self.save_new_tags(product)

    productversions = [self.cleaned_data["productversion"]]
    if self.cleaned_data.get("and_later_versions"):
        productversions.extend(product.versions.filter(
            order__gt=productversions[0].order))

    suite = self.cleaned_data.get("suite")

    cases = []
    order = 0
    if suite:
        order = model.SuiteCase.objects.filter(
            suite=suite,
        ).aggregate(Max("order"))["order__max"] or 0

    for case_data in self.cleaned_data["cases"]:
        case = model.Case.objects.create(
            product=product,
            user=self.user,
            idprefix=idprefix,
            priority=priority,
        )

        version_kwargs = case_data.copy()
        steps_data = version_kwargs.pop("steps")

        version_kwargs["case"] = case
        version_kwargs["status"] = self.cleaned_data["status"]
        version_kwargs["user"] = self.user

        if suite:
            order += 1
            model.SuiteCase.objects.create(
                case=case,
                suite=suite,
                user=self.user,
                order=order,
            )

        for productversion in productversions:
            this_version_kwargs = version_kwargs.copy()
            this_version_kwargs["productversion"] = productversion

            caseversion = model.CaseVersion.objects.create(
                **this_version_kwargs)
            for i, step_kwargs in enumerate(steps_data, 1):
                model.CaseStep.objects.create(
                    user=self.user,
                    caseversion=caseversion,
                    number=i,
                    **step_kwargs)
            self.save_tags(caseversion)

        cases.append(case)

    return cases
ValueError
dataset/ETHPy150Open mozilla/moztrap/moztrap/view/manage/cases/forms.py/AddBulkCaseForm.save
587
def save(self, user=None):
    """Save the edited caseversion."""
    user = user or self.user
    assert self.is_valid()

    version_kwargs = self.cleaned_data.copy()
    del version_kwargs["add_tags"]
    del version_kwargs["add_attachment"]
    idprefix = version_kwargs.pop("idprefix")
    priority = version_kwargs.pop("priority")

    for k, v in version_kwargs.items():
        setattr(self.instance, k, v)

    if self.instance.case.idprefix != idprefix:
        self.instance.case.idprefix = idprefix
        self.instance.case.save(force_update=True)

    if self.instance.case.priority != priority:
        try:
            int(priority)
        except __HOLE__:
            priority = None
        self.instance.case.priority = priority
        self.instance.case.save(force_update=True)

    self.instance.save(force_update=True)

    self.save_new_tags(self.instance.case.product)
    self.save_tags(self.instance)
    self.save_attachments(self.instance)

    self.steps_formset.save(user=user)

    return self.instance
ValueError
dataset/ETHPy150Open mozilla/moztrap/moztrap/view/manage/cases/forms.py/EditCaseVersionForm.save
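The priority coercion repeated across these three MozTrap form records reduces to one small helper; a sketch (the forms inline it instead, and they catch only ValueError, whereas this version also guards against int(None) raising TypeError):

    def coerce_priority(value):
        # Store None unless the value looks like an integer.
        try:
            int(value)
        except (ValueError, TypeError):
            return None
        return value

    print(coerce_priority('3'))     # '3'
    print(coerce_priority('high'))  # None
    print(coerce_priority(None))    # None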
588
def _construct_form(self, i, **kwargs):
    """Set empty_permitted and instance for all forms."""
    kwargs["empty_permitted"] = False
    if self.is_bound:
        pk_key = "{0}-id".format(self.add_prefix(i))
        try:
            pk = int(self.data.get(pk_key))
        except (__HOLE__, TypeError):
            pk = None
        if pk:
            kwargs["instance"] = self._existing_object(pk)
        if kwargs.get("instance") is None:
            self.data[pk_key] = ""
    return super(BaseStepFormSet, self)._construct_form(i, **kwargs)
ValueError
dataset/ETHPy150Open mozilla/moztrap/moztrap/view/manage/cases/forms.py/BaseStepFormSet._construct_form
589
def parse_datetime(dateTime):
    try:
        ret = datetime.datetime.strptime(dateTime, "%Y-%m-%d %H:%M:%S")
    except __HOLE__:
        ret = datetime.datetime.strptime(dateTime, "%Y-%m-%d %H:%M:%S.%f")
    return dt.as_utc(ret)
ValueError
dataset/ETHPy150Open gbeced/pyalgotrade/pyalgotrade/bitstamp/httpclient.py/parse_datetime
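The same two-format fallback as the record above, runnable standalone (naive datetimes here; the record additionally converts to UTC via its dt helper):

    import datetime

    def parse_ts(s):
        # Timestamps sometimes carry fractional seconds; try the plain
        # format first and fall back to the microsecond variant.
        try:
            return datetime.datetime.strptime(s, "%Y-%m-%d %H:%M:%S")
        except ValueError:
            return datetime.datetime.strptime(s, "%Y-%m-%d %H:%M:%S.%f")

    print(parse_ts("2015-04-01 12:30:00"))
    print(parse_ts("2015-04-01 12:30:00.250000"))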
590
def tearDown(self):
    try:
        del rest.api.APIS['v1']
    except __HOLE__:
        pass
KeyError
dataset/ETHPy150Open funkybob/django-nap/tests/test_api.py/ApiTest.tearDown
591
def configure_make_args(make_args, jobs_args, use_internal_make_jobserver):
    """Initialize the internal GNU Make jobserver or configure it as a
    pass-through

    :param make_args: arguments to be passed to GNU Make
    :type make_args: list
    :param use_internal_make_jobserver: if true, use the internal jobserver
    :type use_internal_make_jobserver: bool
    :rtype: tuple (final make_args, using makeflags, using cliflags,
        using jobserver)
    """
    # Configure default jobs options: use all CPUs in each package
    try:
        # NOTE: this will yield greater than 100% CPU utilization
        n_cpus = cpu_count()
        jobs_flags = {
            'jobs': n_cpus,
            'load-average': n_cpus + 1}
    except __HOLE__:
        # If the number of cores cannot be determined, limit to one job
        jobs_flags = {
            'jobs': 1,
            'load-average': 1}

    # Get MAKEFLAGS from environment
    makeflags_jobs_flags = extract_jobs_flags(os.environ.get('MAKEFLAGS', ''))
    using_makeflags_jobs_flags = len(makeflags_jobs_flags) > 0
    if using_makeflags_jobs_flags:
        makeflags_jobs_flags_dict = extract_jobs_flags_values(
            ' '.join(makeflags_jobs_flags))
        jobs_flags.update(makeflags_jobs_flags_dict)

    # Extract make jobs flags (these override MAKEFLAGS)
    cli_jobs_flags = jobs_args
    using_cli_flags = len(cli_jobs_flags) > 0
    if cli_jobs_flags:
        jobs_flags.update(extract_jobs_flags_values(' '.join(cli_jobs_flags)))
        # Remove jobs flags from cli args if they're present
        make_args = re.sub(
            ' '.join(cli_jobs_flags), '', ' '.join(make_args)).split()

    # Instantiate the jobserver
    job_server.initialize(
        max_jobs=jobs_flags.get('jobs', None),
        max_load=jobs_flags.get('load-average', None),
        gnu_make_enabled=use_internal_make_jobserver)

    # If the jobserver is supported
    if job_server.gnu_make_enabled():
        jobs_args = []
    else:
        jobs_args = cli_jobs_flags

    return (make_args + jobs_args, using_makeflags_jobs_flags,
            using_cli_flags, job_server.gnu_make_enabled())
NotImplementedError
dataset/ETHPy150Open catkin/catkin_tools/catkin_tools/argument_parsing.py/configure_make_args
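The cpu_count fallback at the top of this record is easy to isolate; multiprocessing.cpu_count is documented to raise NotImplementedError on platforms where the count is unknown:

    from multiprocessing import cpu_count

    try:
        n_cpus = cpu_count()
        jobs_flags = {'jobs': n_cpus, 'load-average': n_cpus + 1}
    except NotImplementedError:
        # Core count unavailable on this platform: run a single job.
        jobs_flags = {'jobs': 1, 'load-average': 1}

    print(jobs_flags)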
592
def get_cached_token(self):
    """Read and return a cached token, or None if not found.

    The token is read from the cached token file.
    """
    # Only read the token once
    if self.username in self.memory:
        return self.memory[self.username]

    try:
        f = open(self.get_cached_token_filename(), "r")
        token = f.read()
        f.close()
        return token.strip()
    except __HOLE__:
        return None
IOError
dataset/ETHPy150Open onitu/onitu/drivers/flickr/onitu_flickr/flickrapi/tokencache.py/TokenCache.get_cached_token
593
def acquire(self, timeout=60):
    '''Locks the token cache for this key and username.

    If the token cache is already locked, waits until it is released.
    Throws an exception when the lock cannot be acquired after
    ``timeout`` seconds.
    '''
    # Check whether there is a PID file already with our PID in
    # it.
    lockpid = self.get_lock_pid()
    if lockpid == os.getpid():
        LOG.debug('The lock is ours, continuing')
        return

    # Figure out the lock filename
    lock = self.get_lock_name()
    LOG.debug('Acquiring lock %s' % lock)

    # Try to obtain the lock
    start_time = time.time()
    while True:
        try:
            os.makedirs(lock)
            break
        except __HOLE__:
            # If the path doesn't exist, the error isn't that it
            # can't be created because someone else has got the
            # lock. Just bail out then.
            if not os.path.exists(lock):
                LOG.error('Unable to acquire lock %s, aborting' % lock)
                raise

            if time.time() - start_time >= timeout:
                # Timeout has passed, bail out
                raise LockingError('Unable to acquire lock ' +
                                   '%s, aborting' % lock)

            # Wait for a bit, then try again
            LOG.debug('Unable to acquire lock, waiting')
            time.sleep(0.1)

    # Write the PID file
    LOG.debug('Lock acquired, writing our PID')
    pidfile = open(self.pidfile_name, 'w')
    try:
        pidfile.write('%s' % os.getpid())
    finally:
        pidfile.close()
OSError
dataset/ETHPy150Open onitu/onitu/drivers/flickr/onitu_flickr/flickrapi/tokencache.py/LockingTokenCache.acquire
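A stripped-down sketch of the mkdir-based lock used above (function names and lock path are hypothetical; the mutual exclusion comes from the fact that only one process can successfully create the directory):

    import os
    import time

    def acquire_dir_lock(lock_dir, timeout=5.0):
        # os.makedirs fails with OSError if the directory already exists,
        # so exactly one process wins; everyone else retries until timeout.
        start = time.time()
        while True:
            try:
                os.makedirs(lock_dir)
                return
            except OSError:
                if time.time() - start >= timeout:
                    raise RuntimeError('could not acquire %s' % lock_dir)
                time.sleep(0.1)

    def release_dir_lock(lock_dir):
        os.rmdir(lock_dir)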
594
def _python_colouriser(text):
    out = StringIO()
    try:
        htmlizer.filter(StringIO(text), out)
    except __HOLE__:
        out = StringIO("""Starting after Nevow 0.4.1 Twisted 2.0 is a required dependency. Please install it""")
    return out.getvalue()
AttributeError
dataset/ETHPy150Open twisted/nevow/examples/pastebin/pastebin/web/pages.py/_python_colouriser
595
def locateChild(self, context, segments):
    try:
        return Pasting(self.pastebin, int(segments[0])), segments[1:]
    except __HOLE__:
        pass
    return BasePage.locateChild(self, context, segments)
ValueError
dataset/ETHPy150Open twisted/nevow/examples/pastebin/pastebin/web/pages.py/RootPage.locateChild
596
def getTopicNumber(irc, msg, args, state):
    def error(s):
        state.errorInvalid(_('topic number'), s)
    try:
        n = int(args[0])
        if not n:
            raise ValueError
    except ValueError:
        error(args[0])
    if n > 0:
        n -= 1
    topic = irc.state.getTopic(state.channel)
    separator = state.cb.registryValue('separator', state.channel)
    topics = splitTopic(topic, separator)
    if not topics:
        state.error(format(_('There are no topics in %s.'), state.channel),
                    Raise=True)
    try:
        topics[n]
    except __HOLE__:
        error(args[0])
    del args[0]
    while n < 0:
        n += len(topics)
    state.args.append(n)
IndexError
dataset/ETHPy150Open ProgVal/Limnoria/plugins/Topic/plugin.py/getTopicNumber
597
def __init__(self, irc):
    self.__parent = super(Topic, self)
    self.__parent.__init__(irc)
    self.undos = ircutils.IrcDict()
    self.redos = ircutils.IrcDict()
    self.lastTopics = ircutils.IrcDict()
    self.watchingFor332 = ircutils.IrcSet()
    try:
        pkl = open(filename, 'rb')
        try:
            self.undos = pickle.load(pkl)
            self.redos = pickle.load(pkl)
            self.lastTopics = pickle.load(pkl)
            self.watchingFor332 = pickle.load(pkl)
        except Exception as e:
            self.log.debug('Unable to load pickled data: %s', e)
        pkl.close()
    except __HOLE__ as e:
        self.log.debug('Unable to open pickle file: %s', e)
    world.flushers.append(self._flush)
IOError
dataset/ETHPy150Open ProgVal/Limnoria/plugins/Topic/plugin.py/Topic.__init__
598
def _flush(self):
    try:
        pklfd, tempfn = tempfile.mkstemp(suffix='topic', dir=datadir)
        pkl = os.fdopen(pklfd, 'wb')
        try:
            pickle.dump(self.undos, pkl)
            pickle.dump(self.redos, pkl)
            pickle.dump(self.lastTopics, pkl)
            pickle.dump(self.watchingFor332, pkl)
        except Exception as e:
            self.log.warning('Unable to store pickled data: %s', e)
        pkl.close()
        shutil.move(tempfn, filename)
    except (__HOLE__, shutil.Error) as e:
        self.log.warning('File error: %s', e)
IOError
dataset/ETHPy150Open ProgVal/Limnoria/plugins/Topic/plugin.py/Topic._flush
599
def _getUndo(self, channel):
    try:
        return self.undos[channel].pop()
    except (KeyError, __HOLE__):
        return None
IndexError
dataset/ETHPy150Open ProgVal/Limnoria/plugins/Topic/plugin.py/Topic._getUndo
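The final record's pop-with-fallback idiom, in isolation (plain dict here instead of the plugin's IrcDict):

    undos = {'#chan': ['old topic 1', 'old topic 2']}

    def get_undo(channel):
        # KeyError: unknown channel; IndexError: undo stack already empty.
        try:
            return undos[channel].pop()
        except (KeyError, IndexError):
            return None

    print(get_undo('#chan'))   # 'old topic 2'
    print(get_undo('#other'))  # None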