Unnamed: 0 (int64, 0–10k) | function (string, length 79–138k) | label (string, 20 classes) | info (string, length 42–261)
---|---|---|---|
3,800 |
def _get_str(self):
    global Save_Strings
    if self.duplicate or self.is_derived():
        return self.get_path()
    srcnode = self.srcnode()
    if srcnode.stat() is None and self.stat() is not None:
        result = self.get_path()
    else:
        result = srcnode.get_path()
    if not Save_Strings:
        # We're not at the point where we're saving the string
        # representations of FS Nodes (because we haven't finished
        # reading the SConscript files and need to have str() return
        # things relative to them). That also means we can't yet
        # cache values returned (or not returned) by stat(), since
        # Python code in the SConscript files might still create
        # or otherwise affect the on-disk file. So get rid of the
        # values that the underlying stat() method saved.
        try: del self._memo['stat']
        except __HOLE__: pass
        if self is not srcnode:
            try: del srcnode._memo['stat']
            except KeyError: pass
    return result
|
KeyError
|
dataset/ETHPy150Open kayhayen/Nuitka/nuitka/build/inline_copy/lib/scons-2.3.2/SCons/Node/FS.py/Base._get_str
|
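The row above shows the `_memo` idiom that recurs through these SCons snippets: probe a per-instance cache dict, treat `KeyError` as a miss, compute, then store. A minimal standalone sketch of the pattern (illustrative names, not SCons API):

class Memoized(object):
    def __init__(self):
        self._memo = {}

    def expensive(self):
        try:
            return self._memo['expensive']   # cache hit
        except KeyError:
            pass                             # cache miss: fall through
        result = sum(range(1000000))         # stand-in for real work
        self._memo['expensive'] = result
        return result

obj = Memoized()
assert obj.expensive() == obj.expensive()    # second call is a dict lookup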
3,801 |
def stat(self):
    try: return self._memo['stat']
    except __HOLE__: pass
    try: result = self.fs.stat(self.abspath)
    except os.error: result = None
    self._memo['stat'] = result
    return result
|
KeyError
|
dataset/ETHPy150Open kayhayen/Nuitka/nuitka/build/inline_copy/lib/scons-2.3.2/SCons/Node/FS.py/Base.stat
|
3,802 |
def get_path(self, dir=None):
    """Return path relative to the current working directory of the
    Node.FS.Base object that owns us."""
    if not dir:
        dir = self.fs.getcwd()
    if self == dir:
        return '.'
    path_elems = self.path_elements
    pathname = ''
    try: i = path_elems.index(dir)
    except __HOLE__:
        for p in path_elems[:-1]:
            pathname += p.dirname
    else:
        for p in path_elems[i+1:-1]:
            pathname += p.dirname
    return pathname + path_elems[-1].name
|
ValueError
|
dataset/ETHPy150Open kayhayen/Nuitka/nuitka/build/inline_copy/lib/scons-2.3.2/SCons/Node/FS.py/Base.get_path
|
3,803 |
def src_builder(self):
    """Fetch the source code builder for this node.
    If there isn't one, we cache the source code builder specified
    for the directory (which in turn will cache the value from its
    parent directory, and so on up to the file system root).
    """
    try:
        scb = self.sbuilder
    except __HOLE__:
        scb = self.dir.src_builder()
        self.sbuilder = scb
    return scb
|
AttributeError
|
dataset/ETHPy150Open kayhayen/Nuitka/nuitka/build/inline_copy/lib/scons-2.3.2/SCons/Node/FS.py/Base.src_builder
|
3,804 |
def get_subst_proxy(self):
    try:
        return self._proxy
    except __HOLE__:
        ret = EntryProxy(self)
        self._proxy = ret
        return ret
|
AttributeError
|
dataset/ETHPy150Open kayhayen/Nuitka/nuitka/build/inline_copy/lib/scons-2.3.2/SCons/Node/FS.py/Base.get_subst_proxy
|
3,805 |
def Rfindalldirs(self, pathlist):
    """
    Return all of the directories for a given path list, including
    corresponding "backing" directories in any repositories.
    The Node lookups are relative to this Node (typically a
    directory), so memoizing result saves cycles from looking
    up the same path for each target in a given directory.
    """
    try:
        memo_dict = self._memo['Rfindalldirs']
    except KeyError:
        memo_dict = {}
        self._memo['Rfindalldirs'] = memo_dict
    else:
        try:
            return memo_dict[pathlist]
        except __HOLE__:
            pass
    create_dir_relative_to_self = self.Dir
    result = []
    for path in pathlist:
        if isinstance(path, SCons.Node.Node):
            result.append(path)
        else:
            dir = create_dir_relative_to_self(path)
            result.extend(dir.get_all_rdirs())
    memo_dict[pathlist] = result
    return result
|
KeyError
|
dataset/ETHPy150Open kayhayen/Nuitka/nuitka/build/inline_copy/lib/scons-2.3.2/SCons/Node/FS.py/Base.Rfindalldirs
|
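One detail worth noting in `Rfindalldirs` above: the entire `pathlist` argument is used as a dictionary key, so callers must pass something hashable (a tuple, not a list). A quick illustration, independent of SCons:

memo = {}
memo[('include', 'src')] = ['...']       # fine: tuples hash
try:
    memo[['include', 'src']]             # lists do not
except TypeError as e:
    print(e)                             # "unhashable type: 'list'"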
3,806 |
def rentry(self):
    try:
        return self._memo['rentry']
    except KeyError:
        pass
    result = self
    if not self.exists():
        norm_name = _my_normcase(self.name)
        for dir in self.dir.get_all_rdirs():
            try:
                node = dir.entries[norm_name]
            except __HOLE__:
                if dir.entry_exists_on_disk(self.name):
                    result = dir.Entry(self.name)
                    break
    self._memo['rentry'] = result
    return result
|
KeyError
|
dataset/ETHPy150Open kayhayen/Nuitka/nuitka/build/inline_copy/lib/scons-2.3.2/SCons/Node/FS.py/Base.rentry
|
3,807 |
def chdir(self, dir, change_os_dir=0):
    """Change the current working directory for lookups.
    If change_os_dir is true, we will also change the "real" cwd
    to match.
    """
    curr = self._cwd
    try:
        if dir is not None:
            self._cwd = dir
            if change_os_dir:
                os.chdir(dir.abspath)
    except __HOLE__:
        self._cwd = curr
        raise
|
OSError
|
dataset/ETHPy150Open kayhayen/Nuitka/nuitka/build/inline_copy/lib/scons-2.3.2/SCons/Node/FS.py/FS.chdir
|
3,808 |
def get_root(self, drive):
    """
    Returns the root directory for the specified drive, creating
    it if necessary.
    """
    drive = _my_normcase(drive)
    try:
        return self.Root[drive]
    except __HOLE__:
        root = RootDir(drive, self)
        self.Root[drive] = root
        if not drive:
            self.Root[self.defaultDrive] = root
        elif drive == self.defaultDrive:
            self.Root[''] = root
        return root
|
KeyError
|
dataset/ETHPy150Open kayhayen/Nuitka/nuitka/build/inline_copy/lib/scons-2.3.2/SCons/Node/FS.py/FS.get_root
|
3,809 |
def _lookup(self, p, directory, fsclass, create=1):
    """
    The generic entry point for Node lookup with user-supplied data.

    This translates arbitrary input into a canonical Node.FS object
    of the specified fsclass. The general approach for strings is
    to turn it into a fully normalized absolute path and then call
    the root directory's lookup_abs() method for the heavy lifting.

    If the path name begins with '#', it is unconditionally
    interpreted relative to the top-level directory of this FS. '#'
    is treated as a synonym for the top-level SConstruct directory,
    much like '~' is treated as a synonym for the user's home
    directory in a UNIX shell. So both '#foo' and '#/foo' refer
    to the 'foo' subdirectory underneath the top-level SConstruct
    directory.

    If the path name is relative, then the path is looked up relative
    to the specified directory, or the current directory (self._cwd,
    typically the SConscript directory) if the specified directory
    is None.
    """
    if isinstance(p, Base):
        # It's already a Node.FS object. Make sure it's the right
        # class and return.
        p.must_be_same(fsclass)
        return p
    # str(p) in case it's something like a proxy object
    p = str(p)

    if not os_sep_is_slash:
        p = p.replace(OS_SEP, '/')

    if p[0:1] == '#':
        # There was an initial '#', so we strip it and override
        # whatever directory they may have specified with the
        # top-level SConstruct directory.
        p = p[1:]
        directory = self.Top

        # There might be a drive letter following the
        # '#'. Although it is not described in the SCons man page,
        # the regression test suite explicitly tests for that
        # syntax. It seems to mean the following thing:
        #
        #   Assuming the SCons top dir is in C:/xxx/yyy,
        #   '#X:/toto' means X:/xxx/yyy/toto.
        #
        # i.e. it assumes that the X: drive has a directory
        # structure similar to the one found on drive C:.
        if do_splitdrive:
            drive, p = _my_splitdrive(p)
            if drive:
                root = self.get_root(drive)
            else:
                root = directory.root
        else:
            root = directory.root

        # We can only strip trailing '/' after splitting the drive,
        # since the drive might be the UNC '//' prefix.
        p = p.strip('/')

        needs_normpath = needs_normpath_match(p)

        # The path is relative to the top-level SCons directory.
        if p in ('', '.'):
            p = directory.labspath
        else:
            p = directory.labspath + '/' + p
    else:
        if do_splitdrive:
            drive, p = _my_splitdrive(p)
            if drive and not p:
                # This causes a naked drive letter to be treated
                # as a synonym for the root directory on that
                # drive.
                p = '/'
        else:
            drive = ''

        # We can only strip trailing '/' since the drive might be the
        # UNC '//' prefix.
        if p != '/':
            p = p.rstrip('/')

        needs_normpath = needs_normpath_match(p)

        if p[0:1] == '/':
            # Absolute path
            root = self.get_root(drive)
        else:
            # This is a lookup relative to the specified directory or
            # to the current directory (the path name is not absolute).
            # Add the string to the appropriate directory lookup path,
            # after which the whole thing gets normalized.
            if directory:
                if not isinstance(directory, Dir):
                    directory = self.Dir(directory)
            else:
                directory = self._cwd

            if p in ('', '.'):
                p = directory.labspath
            else:
                p = directory.labspath + '/' + p

            if drive:
                root = self.get_root(drive)
            else:
                root = directory.root

    if needs_normpath is not None:
        # Normalize a pathname. Will return the same result for
        # equivalent paths.
        #
        # We take advantage of the fact that we have an absolute
        # path here for sure. In addition, we know that the
        # components of lookup path are separated by slashes at
        # this point. Because of this, this code is about 2X
        # faster than calling os.path.normpath() followed by
        # replacing os.sep with '/' again.
        ins = p.split('/')[1:]
        outs = []
        for d in ins:
            if d == '..':
                try:
                    outs.pop()
                except __HOLE__:
                    pass
            elif d not in ('', '.'):
                outs.append(d)
        p = '/' + '/'.join(outs)

    return root._lookup_abs(p, fsclass, create)
|
IndexError
|
dataset/ETHPy150Open kayhayen/Nuitka/nuitka/build/inline_copy/lib/scons-2.3.2/SCons/Node/FS.py/FS._lookup
|
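The normalization loop at the end of `_lookup` is self-contained enough to lift out. A standalone sketch of the same stack-based algorithm (the helper name is made up):

def normalize_abs(p):
    # Fold out '', '.' and '..' components of an absolute,
    # slash-separated path with a simple stack.
    outs = []
    for d in p.split('/')[1:]:
        if d == '..':
            try:
                outs.pop()
            except IndexError:
                pass          # '..' above the root is ignored
        elif d not in ('', '.'):
            outs.append(d)
    return '/' + '/'.join(outs)

assert normalize_abs('/a/b/../c/./d//') == '/a/c/d'
assert normalize_abs('/../x') == '/x'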
3,810 |
def __clearRepositoryCache(self, duplicate=None):
    """Called when we change the repository(ies) for a directory.
    This clears any cached information that is invalidated by changing
    the repository."""
    for node in self.entries.values():
        if node != self.dir:
            if node != self and isinstance(node, Dir):
                node.__clearRepositoryCache(duplicate)
            else:
                node.clear()
                try:
                    del node._srcreps
                except __HOLE__:
                    pass
                if duplicate is not None:
                    node.duplicate = duplicate
|
AttributeError
|
dataset/ETHPy150Open kayhayen/Nuitka/nuitka/build/inline_copy/lib/scons-2.3.2/SCons/Node/FS.py/Dir.__clearRepositoryCache
|
3,811 |
def get_all_rdirs(self):
    try:
        return list(self._memo['get_all_rdirs'])
    except __HOLE__:
        pass
    result = [self]
    fname = '.'
    dir = self
    while dir:
        for rep in dir.getRepositories():
            result.append(rep.Dir(fname))
        if fname == '.':
            fname = dir.name
        else:
            fname = dir.name + OS_SEP + fname
        dir = dir.up()
    self._memo['get_all_rdirs'] = list(result)
    return result
|
KeyError
|
dataset/ETHPy150Open kayhayen/Nuitka/nuitka/build/inline_copy/lib/scons-2.3.2/SCons/Node/FS.py/Dir.get_all_rdirs
|
3,812 |
def rel_path(self, other):
    """Return a path to "other" relative to this directory.
    """
    # This complicated and expensive method, which constructs relative
    # paths between arbitrary Node.FS objects, is no longer used
    # by SCons itself. It was introduced to store dependency paths
    # in .sconsign files relative to the target, but that ended up
    # being significantly inefficient.
    #
    # We're continuing to support the method because some SConstruct
    # files out there started using it when it was available, and
    # we're all about backwards compatibility.
    try:
        memo_dict = self._memo['rel_path']
    except __HOLE__:
        memo_dict = {}
        self._memo['rel_path'] = memo_dict
    else:
        try:
            return memo_dict[other]
        except KeyError:
            pass

    if self is other:
        result = '.'
    elif not other in self.path_elements:
        try:
            other_dir = other.get_dir()
        except AttributeError:
            result = str(other)
        else:
            if other_dir is None:
                result = other.name
            else:
                dir_rel_path = self.rel_path(other_dir)
                if dir_rel_path == '.':
                    result = other.name
                else:
                    result = dir_rel_path + OS_SEP + other.name
    else:
        i = self.path_elements.index(other) + 1
        path_elems = ['..'] * (len(self.path_elements) - i) \
                   + [n.name for n in other.path_elements[i:]]
        result = OS_SEP.join(path_elems)

    memo_dict[other] = result
    return result
|
KeyError
|
dataset/ETHPy150Open kayhayen/Nuitka/nuitka/build/inline_copy/lib/scons-2.3.2/SCons/Node/FS.py/Dir.rel_path
|
3,813 |
def _create(self):
    """Create this directory, silently and without worrying about
    whether the builder is the default or not."""
    listDirs = []
    parent = self
    while parent:
        if parent.exists():
            break
        listDirs.append(parent)
        p = parent.up()
        if p is None:
            # Don't use while: - else: for this condition because
            # if so, then parent is None and has no .path attribute.
            raise SCons.Errors.StopError(parent.path)
        parent = p
    listDirs.reverse()
    for dirnode in listDirs:
        try:
            # Don't call dirnode.build(), call the base Node method
            # directly because we definitely *must* create this
            # directory. The dirnode.build() method will suppress
            # the build if it's the default builder.
            SCons.Node.Node.build(dirnode)
            dirnode.get_executor().nullify()
            # The build() action may or may not have actually
            # created the directory, depending on whether the -n
            # option was used or not. Delete the _exists and
            # _rexists attributes so they can be reevaluated.
            dirnode.clear()
        except __HOLE__:
            pass
|
OSError
|
dataset/ETHPy150Open kayhayen/Nuitka/nuitka/build/inline_copy/lib/scons-2.3.2/SCons/Node/FS.py/Dir._create
|
3,814 |
def rdir(self):
    if not self.exists():
        norm_name = _my_normcase(self.name)
        for dir in self.dir.get_all_rdirs():
            try: node = dir.entries[norm_name]
            except __HOLE__: node = dir.dir_on_disk(self.name)
            if node and node.exists() and \
               (isinstance(dir, Dir) or isinstance(dir, Entry)):
                return node
    return self
|
KeyError
|
dataset/ETHPy150Open kayhayen/Nuitka/nuitka/build/inline_copy/lib/scons-2.3.2/SCons/Node/FS.py/Dir.rdir
|
3,815 |
def entry_exists_on_disk(self, name):
    try:
        d = self.on_disk_entries
    except AttributeError:
        d = {}
        try:
            entries = os.listdir(self.abspath)
        except __HOLE__:
            pass
        else:
            for entry in map(_my_normcase, entries):
                d[entry] = True
        self.on_disk_entries = d
    if sys.platform == 'win32' or sys.platform == 'cygwin':
        name = _my_normcase(name)
        result = d.get(name)
        if result is None:
            # Belt-and-suspenders for Windows: check directly for
            # 8.3 file names that don't show up in os.listdir().
            result = os.path.exists(self.abspath + OS_SEP + name)
            d[name] = result
        return result
    else:
        return name in d
|
OSError
|
dataset/ETHPy150Open kayhayen/Nuitka/nuitka/build/inline_copy/lib/scons-2.3.2/SCons/Node/FS.py/Dir.entry_exists_on_disk
|
3,816 |
def srcdir_list(self):
    try:
        return self._memo['srcdir_list']
    except __HOLE__:
        pass
    result = []
    dirname = '.'
    dir = self
    while dir:
        if dir.srcdir:
            result.append(dir.srcdir.Dir(dirname))
        dirname = dir.name + OS_SEP + dirname
        dir = dir.up()
    self._memo['srcdir_list'] = result
    return result
|
KeyError
|
dataset/ETHPy150Open kayhayen/Nuitka/nuitka/build/inline_copy/lib/scons-2.3.2/SCons/Node/FS.py/Dir.srcdir_list
|
3,817 |
def srcdir_find_file(self, filename):
    try:
        memo_dict = self._memo['srcdir_find_file']
    except KeyError:
        memo_dict = {}
        self._memo['srcdir_find_file'] = memo_dict
    else:
        try:
            return memo_dict[filename]
        except KeyError:
            pass

    def func(node):
        if (isinstance(node, File) or isinstance(node, Entry)) and \
           (node.is_derived() or node.exists()):
            return node
        return None

    norm_name = _my_normcase(filename)

    for rdir in self.get_all_rdirs():
        try: node = rdir.entries[norm_name]
        except KeyError: node = rdir.file_on_disk(filename)
        else: node = func(node)
        if node:
            result = (node, self)
            memo_dict[filename] = result
            return result

    for srcdir in self.srcdir_list():
        for rdir in srcdir.get_all_rdirs():
            try: node = rdir.entries[norm_name]
            except __HOLE__: node = rdir.file_on_disk(filename)
            else: node = func(node)
            if node:
                result = (File(filename, self, self.fs), srcdir)
                memo_dict[filename] = result
                return result

    result = (None, None)
    memo_dict[filename] = result
    return result
|
KeyError
|
dataset/ETHPy150Open kayhayen/Nuitka/nuitka/build/inline_copy/lib/scons-2.3.2/SCons/Node/FS.py/Dir.srcdir_find_file
|
3,818 |
def dir_on_disk(self, name):
    if self.entry_exists_on_disk(name):
        try: return self.Dir(name)
        except __HOLE__: pass
    node = self.srcdir_duplicate(name)
    if isinstance(node, File):
        return None
    return node
|
TypeError
|
dataset/ETHPy150Open kayhayen/Nuitka/nuitka/build/inline_copy/lib/scons-2.3.2/SCons/Node/FS.py/Dir.dir_on_disk
|
3,819 |
def file_on_disk(self, name):
    if self.entry_exists_on_disk(name) or \
       diskcheck_rcs(self, name) or \
       diskcheck_sccs(self, name):
        try: return self.File(name)
        except __HOLE__: pass
    node = self.srcdir_duplicate(name)
    if isinstance(node, Dir):
        return None
    return node
|
TypeError
|
dataset/ETHPy150Open kayhayen/Nuitka/nuitka/build/inline_copy/lib/scons-2.3.2/SCons/Node/FS.py/Dir.file_on_disk
|
3,820 |
def _lookup_abs(self, p, klass, create=1):
    """
    Fast (?) lookup of a *normalized* absolute path.
    This method is intended for use by internal lookups with
    already-normalized path data. For general-purpose lookups,
    use the FS.Entry(), FS.Dir() or FS.File() methods.
    The caller is responsible for making sure we're passed a
    normalized absolute path; we merely let Python's dictionary look
    up and return the One True Node.FS object for the path.
    If a Node for the specified "p" doesn't already exist, and
    "create" is specified, the Node may be created after recursive
    invocation to find or create the parent directory or directories.
    """
    k = _my_normcase(p)
    try:
        result = self._lookupDict[k]
    except __HOLE__:
        if not create:
            msg = "No such file or directory: '%s' in '%s' (and create is False)" % (p, str(self))
            raise SCons.Errors.UserError(msg)
        # There is no Node for this path name, and we're allowed
        # to create it.
        # (note: would like to use p.rsplit('/',1) here but
        # that's not in python 2.3)
        # e.g.: dir_name, file_name = p.rsplit('/',1)
        last_slash = p.rindex('/')
        if (last_slash >= 0):
            dir_name = p[:last_slash]
            file_name = p[last_slash+1:]
        else:
            dir_name = p        # shouldn't happen, just in case
            file_name = ''
        dir_node = self._lookup_abs(dir_name, Dir)
        result = klass(file_name, dir_node, self.fs)

        # Double-check on disk (as configured) that the Node we
        # created matches whatever is out there in the real world.
        result.diskcheck_match()

        self._lookupDict[k] = result
        dir_node.entries[_my_normcase(file_name)] = result
        dir_node.implicit = None
    else:
        # There is already a Node for this path name. Allow it to
        # complain if we were looking for an inappropriate type.
        result.must_be_same(klass)
    return result
|
KeyError
|
dataset/ETHPy150Open kayhayen/Nuitka/nuitka/build/inline_copy/lib/scons-2.3.2/SCons/Node/FS.py/RootDir._lookup_abs
|
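The comment inside `_lookup_abs` notes that `p.rsplit('/', 1)` is the compact spelling of the `rindex()` split it performs, avoided only because Python 2.3 had to be supported. A small demonstration of the equivalence on a made-up path:

p = '/top/src/hello.c'
dir_name, file_name = p.rsplit('/', 1)
assert (dir_name, file_name) == ('/top/src', 'hello.c')

# The rindex() form used in the snippet:
last_slash = p.rindex('/')
assert (p[:last_slash], p[last_slash + 1:]) == (dir_name, file_name)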
3,821 |
def convert_to_sconsign(self):
    """
    Converts this FileBuildInfo object for writing to a .sconsign file
    This replaces each Node in our various dependency lists with its
    usual string representation: relative to the top-level SConstruct
    directory, or an absolute path if it's outside.
    """
    if os_sep_is_slash:
        node_to_str = str
    else:
        def node_to_str(n):
            try:
                s = n.path
            except __HOLE__:
                s = str(n)
            else:
                s = s.replace(OS_SEP, '/')
            return s
    for attr in ['bsources', 'bdepends', 'bimplicit']:
        try:
            val = getattr(self, attr)
        except AttributeError:
            pass
        else:
            setattr(self, attr, list(map(node_to_str, val)))
|
AttributeError
|
dataset/ETHPy150Open kayhayen/Nuitka/nuitka/build/inline_copy/lib/scons-2.3.2/SCons/Node/FS.py/FileBuildInfo.convert_to_sconsign
|
3,822 |
def prepare_dependencies(self):
    """
    Prepares a FileBuildInfo object for explaining what changed
    The bsources, bdepends and bimplicit lists have all been
    stored on disk as paths relative to the top-level SConstruct
    directory. Convert the strings to actual Nodes (for use by the
    --debug=explain code and --implicit-cache).
    """
    attrs = [
        ('bsources', 'bsourcesigs'),
        ('bdepends', 'bdependsigs'),
        ('bimplicit', 'bimplicitsigs'),
    ]
    for (nattr, sattr) in attrs:
        try:
            strings = getattr(self, nattr)
            nodeinfos = getattr(self, sattr)
        except __HOLE__:
            continue
        nodes = []
        for s, ni in zip(strings, nodeinfos):
            if not isinstance(s, SCons.Node.Node):
                s = ni.str_to_node(s)
            nodes.append(s)
        setattr(self, nattr, nodes)
|
AttributeError
|
dataset/ETHPy150Open kayhayen/Nuitka/nuitka/build/inline_copy/lib/scons-2.3.2/SCons/Node/FS.py/FileBuildInfo.prepare_dependencies
|
3,823 |
def get_size(self):
    try:
        return self._memo['get_size']
    except __HOLE__:
        pass
    if self.rexists():
        size = self.rfile().getsize()
    else:
        size = 0
    self._memo['get_size'] = size
    return size
|
KeyError
|
dataset/ETHPy150Open kayhayen/Nuitka/nuitka/build/inline_copy/lib/scons-2.3.2/SCons/Node/FS.py/File.get_size
|
3,824 |
def get_timestamp(self):
    try:
        return self._memo['get_timestamp']
    except __HOLE__:
        pass
    if self.rexists():
        timestamp = self.rfile().getmtime()
    else:
        timestamp = 0
    self._memo['get_timestamp'] = timestamp
    return timestamp
|
KeyError
|
dataset/ETHPy150Open kayhayen/Nuitka/nuitka/build/inline_copy/lib/scons-2.3.2/SCons/Node/FS.py/File.get_timestamp
|
3,825 |
def convert_old_entry(self, old_entry):
    # Convert a .sconsign entry from before the Big Signature
    # Refactoring, doing what we can to convert its information
    # to the new .sconsign entry format.
    #
    # The old format looked essentially like this:
    #
    #   BuildInfo
    #       .ninfo (NodeInfo)
    #           .bsig
    #           .csig
    #           .timestamp
    #           .size
    #       .bsources
    #       .bsourcesigs ("signature" list)
    #       .bdepends
    #       .bdependsigs ("signature" list)
    #       .bimplicit
    #       .bimplicitsigs ("signature" list)
    #       .bact
    #       .bactsig
    #
    # The new format looks like this:
    #
    #   .ninfo (NodeInfo)
    #       .bsig
    #       .csig
    #       .timestamp
    #       .size
    #   .binfo (BuildInfo)
    #       .bsources
    #       .bsourcesigs (NodeInfo list)
    #           .bsig
    #           .csig
    #           .timestamp
    #           .size
    #       .bdepends
    #       .bdependsigs (NodeInfo list)
    #           .bsig
    #           .csig
    #           .timestamp
    #           .size
    #       .bimplicit
    #       .bimplicitsigs (NodeInfo list)
    #           .bsig
    #           .csig
    #           .timestamp
    #           .size
    #       .bact
    #       .bactsig
    #
    # The basic idea of the new structure is that a NodeInfo always
    # holds all available information about the state of a given Node
    # at a certain point in time. The various .b*sigs lists can just
    # be a list of pointers to the .ninfo attributes of the different
    # dependent nodes, without any copying of information until it's
    # time to pickle it for writing out to a .sconsign file.
    #
    # The complicating issue is that the *old* format only stored one
    # "signature" per dependency, based on however the *last* build
    # was configured. We don't know from just looking at it whether
    # it was a build signature, a content signature, or a timestamp
    # "signature". Since we no longer use build signatures, the
    # best we can do is look at the length and if it's thirty two,
    # assume that it was (or might have been) a content signature.
    # If it was actually a build signature, then it will cause a
    # rebuild anyway when it doesn't match the new content signature,
    # but that's probably the best we can do.
    import SCons.SConsign
    new_entry = SCons.SConsign.SConsignEntry()
    new_entry.binfo = self.new_binfo()
    binfo = new_entry.binfo
    for attr in self.convert_copy_attrs:
        try:
            value = getattr(old_entry, attr)
        except __HOLE__:
            continue
        setattr(binfo, attr, value)
        delattr(old_entry, attr)
    for attr in self.convert_sig_attrs:
        try:
            sig_list = getattr(old_entry, attr)
        except AttributeError:
            continue
        value = []
        for sig in sig_list:
            ninfo = self.new_ninfo()
            if len(sig) == 32:
                ninfo.csig = sig
            else:
                ninfo.timestamp = sig
            value.append(ninfo)
        setattr(binfo, attr, value)
        delattr(old_entry, attr)
    return new_entry
|
AttributeError
|
dataset/ETHPy150Open kayhayen/Nuitka/nuitka/build/inline_copy/lib/scons-2.3.2/SCons/Node/FS.py/File.convert_old_entry
|
3,826 |
def get_stored_info(self):
    try:
        return self._memo['get_stored_info']
    except __HOLE__:
        pass
    try:
        sconsign_entry = self.dir.sconsign().get_entry(self.name)
    except (KeyError, EnvironmentError):
        import SCons.SConsign
        sconsign_entry = SCons.SConsign.SConsignEntry()
        sconsign_entry.binfo = self.new_binfo()
        sconsign_entry.ninfo = self.new_ninfo()
    else:
        if isinstance(sconsign_entry, FileBuildInfo):
            # This is a .sconsign file from before the Big Signature
            # Refactoring; convert it as best we can.
            sconsign_entry = self.convert_old_entry(sconsign_entry)
        try:
            delattr(sconsign_entry.ninfo, 'bsig')
        except AttributeError:
            pass
    self._memo['get_stored_info'] = sconsign_entry
    return sconsign_entry
|
KeyError
|
dataset/ETHPy150Open kayhayen/Nuitka/nuitka/build/inline_copy/lib/scons-2.3.2/SCons/Node/FS.py/File.get_stored_info
|
3,827 |
def get_stored_implicit(self):
    binfo = self.get_stored_info().binfo
    binfo.prepare_dependencies()
    try: return binfo.bimplicit
    except __HOLE__: return None
|
AttributeError
|
dataset/ETHPy150Open kayhayen/Nuitka/nuitka/build/inline_copy/lib/scons-2.3.2/SCons/Node/FS.py/File.get_stored_implicit
|
3,828 |
def get_found_includes(self, env, scanner, path):
    """Return the included implicit dependencies in this file.
    Cache results so we only scan the file once per path
    regardless of how many times this information is requested.
    """
    memo_key = (id(env), id(scanner), path)
    try:
        memo_dict = self._memo['get_found_includes']
    except KeyError:
        memo_dict = {}
        self._memo['get_found_includes'] = memo_dict
    else:
        try:
            return memo_dict[memo_key]
        except __HOLE__:
            pass

    if scanner:
        # result = [n.disambiguate() for n in scanner(self, env, path)]
        result = scanner(self, env, path)
        result = [N.disambiguate() for N in result]
    else:
        result = []

    memo_dict[memo_key] = result
    return result
|
KeyError
|
dataset/ETHPy150Open kayhayen/Nuitka/nuitka/build/inline_copy/lib/scons-2.3.2/SCons/Node/FS.py/File.get_found_includes
|
3,829 |
def find_src_builder(self):
    if self.rexists():
        return None
    scb = self.dir.src_builder()
    if scb is _null:
        if diskcheck_sccs(self.dir, self.name):
            scb = get_DefaultSCCSBuilder()
        elif diskcheck_rcs(self.dir, self.name):
            scb = get_DefaultRCSBuilder()
        else:
            scb = None
    if scb is not None:
        try:
            b = self.builder
        except __HOLE__:
            b = None
        if b is None:
            self.builder_set(scb)
    return scb
|
AttributeError
|
dataset/ETHPy150Open kayhayen/Nuitka/nuitka/build/inline_copy/lib/scons-2.3.2/SCons/Node/FS.py/File.find_src_builder
|
3,830 |
def has_src_builder(self):
    """Return whether this Node has a source builder or not.
    If this Node doesn't have an explicit source code builder, this
    is where we figure out, on the fly, if there's a transparent
    source code builder for it.
    Note that if we found a source builder, we also set the
    self.builder attribute, so that all of the methods that actually
    *build* this file don't have to do anything different.
    """
    try:
        scb = self.sbuilder
    except __HOLE__:
        scb = self.sbuilder = self.find_src_builder()
    return scb is not None
|
AttributeError
|
dataset/ETHPy150Open kayhayen/Nuitka/nuitka/build/inline_copy/lib/scons-2.3.2/SCons/Node/FS.py/File.has_src_builder
|
3,831 |
def exists(self):
    try:
        return self._memo['exists']
    except __HOLE__:
        pass
    # Duplicate from source path if we are set up to do this.
    if self.duplicate and not self.is_derived() and not self.linked:
        src = self.srcnode()
        if src is not self:
            # At this point, src is meant to be copied in a variant directory.
            src = src.rfile()
            if src.abspath != self.abspath:
                if src.exists():
                    self.do_duplicate(src)
                    # Can't return 1 here because the duplication might
                    # not actually occur if the -n option is being used.
                else:
                    # The source file does not exist. Make sure no old
                    # copy remains in the variant directory.
                    if print_duplicate:
                        print "dup: no src for %s, unlinking old variant copy"%self
                    if Base.exists(self) or self.islink():
                        self.fs.unlink(self.path)
                    # Return None explicitly because the Base.exists() call
                    # above will have cached its value if the file existed.
                    self._memo['exists'] = None
                    return None
    result = Base.exists(self)
    self._memo['exists'] = result
    return result

#
# SIGNATURE SUBSYSTEM
#
|
KeyError
|
dataset/ETHPy150Open kayhayen/Nuitka/nuitka/build/inline_copy/lib/scons-2.3.2/SCons/Node/FS.py/File.exists
|
3,832 |
def get_max_drift_csig(self):
    """
    Returns the content signature currently stored for this node
    if it's been unmodified longer than the max_drift value, or the
    max_drift value is 0. Returns None otherwise.
    """
    old = self.get_stored_info()
    mtime = self.get_timestamp()
    max_drift = self.fs.max_drift
    if max_drift > 0:
        if (time.time() - mtime) > max_drift:
            try:
                n = old.ninfo
                if n.timestamp and n.csig and n.timestamp == mtime:
                    return n.csig
            except __HOLE__:
                pass
    elif max_drift == 0:
        try:
            return old.ninfo.csig
        except AttributeError:
            pass
    return None
|
AttributeError
|
dataset/ETHPy150Open kayhayen/Nuitka/nuitka/build/inline_copy/lib/scons-2.3.2/SCons/Node/FS.py/File.get_max_drift_csig
|
3,833 |
def get_csig(self):
    """
    Generate a node's content signature, the digested signature
    of its content.
    node - the node
    cache - alternate node to use for the signature cache
    returns - the content signature
    """
    ninfo = self.get_ninfo()
    try:
        return ninfo.csig
    except __HOLE__:
        pass
    csig = self.get_max_drift_csig()
    if csig is None:
        try:
            if self.get_size() < SCons.Node.FS.File.md5_chunksize:
                contents = self.get_contents()
            else:
                csig = self.get_content_hash()
        except IOError:
            # This can happen if there's actually a directory on-disk,
            # which can be the case if they've disabled disk checks,
            # or if an action with a File target actually happens to
            # create a same-named directory by mistake.
            csig = ''
        else:
            if not csig:
                csig = SCons.Util.MD5signature(contents)
    ninfo.csig = csig
    return csig

#
# DECISION SUBSYSTEM
#
|
AttributeError
|
dataset/ETHPy150Open kayhayen/Nuitka/nuitka/build/inline_copy/lib/scons-2.3.2/SCons/Node/FS.py/File.get_csig
|
3,834 |
def changed(self, node=None, allowcache=False):
    """
    Returns if the node is up-to-date with respect to the BuildInfo
    stored last time it was built.
    For File nodes this is basically a wrapper around Node.changed(),
    but we allow the return value to get cached after the reference
    to the Executor got released in release_target_info().
    @see: Node.changed()
    """
    if node is None:
        try:
            return self._memo['changed']
        except __HOLE__:
            pass
    has_changed = SCons.Node.Node.changed(self, node)
    if allowcache:
        self._memo['changed'] = has_changed
    return has_changed
|
KeyError
|
dataset/ETHPy150Open kayhayen/Nuitka/nuitka/build/inline_copy/lib/scons-2.3.2/SCons/Node/FS.py/File.changed
|
3,835 |
def changed_content(self, target, prev_ni):
    cur_csig = self.get_csig()
    try:
        return cur_csig != prev_ni.csig
    except __HOLE__:
        return 1
|
AttributeError
|
dataset/ETHPy150Open kayhayen/Nuitka/nuitka/build/inline_copy/lib/scons-2.3.2/SCons/Node/FS.py/File.changed_content
|
3,836 |
def changed_timestamp_then_content(self, target, prev_ni):
    if not self.changed_timestamp_match(target, prev_ni):
        try:
            self.get_ninfo().csig = prev_ni.csig
        except __HOLE__:
            pass
        return False
    return self.changed_content(target, prev_ni)
|
AttributeError
|
dataset/ETHPy150Open kayhayen/Nuitka/nuitka/build/inline_copy/lib/scons-2.3.2/SCons/Node/FS.py/File.changed_timestamp_then_content
|
3,837 |
def changed_timestamp_newer(self, target, prev_ni):
    try:
        return self.get_timestamp() > target.get_timestamp()
    except __HOLE__:
        return 1
|
AttributeError
|
dataset/ETHPy150Open kayhayen/Nuitka/nuitka/build/inline_copy/lib/scons-2.3.2/SCons/Node/FS.py/File.changed_timestamp_newer
|
3,838 |
def changed_timestamp_match(self, target, prev_ni):
    try:
        return self.get_timestamp() != prev_ni.timestamp
    except __HOLE__:
        return 1
|
AttributeError
|
dataset/ETHPy150Open kayhayen/Nuitka/nuitka/build/inline_copy/lib/scons-2.3.2/SCons/Node/FS.py/File.changed_timestamp_match
|
3,839 |
def rfile(self):
    try:
        return self._memo['rfile']
    except KeyError:
        pass
    result = self
    if not self.exists():
        norm_name = _my_normcase(self.name)
        for dir in self.dir.get_all_rdirs():
            try: node = dir.entries[norm_name]
            except __HOLE__: node = dir.file_on_disk(self.name)
            if node and node.exists() and \
               (isinstance(node, File) or isinstance(node, Entry) \
                or not node.is_derived()):
                result = node
                # Copy over our local attributes to the repository
                # Node so we identify shared object files in the
                # repository and don't assume they're static.
                #
                # This isn't perfect; the attribute would ideally
                # be attached to the object in the repository in
                # case it was built statically in the repository
                # and we changed it to shared locally, but that's
                # rarely the case and would only occur if you
                # intentionally used the same suffix for both
                # shared and static objects anyway. So this
                # should work well in practice.
                result.attributes = self.attributes
                break
    self._memo['rfile'] = result
    return result
|
KeyError
|
dataset/ETHPy150Open kayhayen/Nuitka/nuitka/build/inline_copy/lib/scons-2.3.2/SCons/Node/FS.py/File.rfile
|
3,840 |
def get_cachedir_csig(self):
    """
    Fetch a Node's content signature for purposes of computing
    another Node's cachesig.
    This is a wrapper around the normal get_csig() method that handles
    the somewhat obscure case of using CacheDir with the -n option.
    Any files that don't exist would normally be "built" by fetching
    them from the cache, but the normal get_csig() method will try
    to open up the local file, which doesn't exist because the -n
    option meant we didn't actually pull the file from cachedir.
    But since the file *does* actually exist in the cachedir, we
    can use its contents for the csig.
    """
    try:
        return self.cachedir_csig
    except __HOLE__:
        pass
    cachedir, cachefile = self.get_build_env().get_CacheDir().cachepath(self)
    if not self.exists() and cachefile and os.path.exists(cachefile):
        self.cachedir_csig = SCons.Util.MD5filesignature(cachefile, \
            SCons.Node.FS.File.md5_chunksize * 1024)
    else:
        self.cachedir_csig = self.get_csig()
    return self.cachedir_csig
|
AttributeError
|
dataset/ETHPy150Open kayhayen/Nuitka/nuitka/build/inline_copy/lib/scons-2.3.2/SCons/Node/FS.py/File.get_cachedir_csig
|
3,841 |
def get_contents_sig(self):
    """
    A helper method for get_cachedir_bsig.
    It computes and returns the signature for this
    node's contents.
    """
    try:
        return self.contentsig
    except __HOLE__:
        pass
    executor = self.get_executor()
    result = self.contentsig = SCons.Util.MD5signature(executor.get_contents())
    return result
|
AttributeError
|
dataset/ETHPy150Open kayhayen/Nuitka/nuitka/build/inline_copy/lib/scons-2.3.2/SCons/Node/FS.py/File.get_contents_sig
|
3,842 |
def get_cachedir_bsig(self):
    """
    Return the signature for a cached file, including
    its children.
    It adds the path of the cached file to the cache signature,
    because multiple targets built by the same action will all
    have the same build signature, and we have to differentiate
    them somehow.
    """
    try:
        return self.cachesig
    except __HOLE__:
        pass
    # Collect signatures for all children
    children = self.children()
    sigs = [n.get_cachedir_csig() for n in children]
    # Append this node's signature...
    sigs.append(self.get_contents_sig())
    # ...and its path
    sigs.append(self.path)
    # Merge this all into a single signature
    result = self.cachesig = SCons.Util.MD5collect(sigs)
    return result
|
AttributeError
|
dataset/ETHPy150Open kayhayen/Nuitka/nuitka/build/inline_copy/lib/scons-2.3.2/SCons/Node/FS.py/File.get_cachedir_bsig
|
3,843 |
def filedir_lookup(self, p, fd=None):
    """
    A helper method for find_file() that looks up a directory for
    a file we're trying to find. This only creates the Dir Node if
    it exists on-disk, since if the directory doesn't exist we know
    we won't find any files in it... :-)
    It would be more compact to just use this as a nested function
    with a default keyword argument (see the commented-out version
    below), but that doesn't work unless you have nested scopes,
    so we define it here just so this works under Python 1.5.2.
    """
    if fd is None:
        fd = self.default_filedir
    dir, name = os.path.split(fd)
    drive, d = _my_splitdrive(dir)
    if not name and d[:1] in ('/', OS_SEP):
        #return p.fs.get_root(drive).dir_on_disk(name)
        return p.fs.get_root(drive)
    if dir:
        p = self.filedir_lookup(p, dir)
        if not p:
            return None
    norm_name = _my_normcase(name)
    try:
        node = p.entries[norm_name]
    except __HOLE__:
        return p.dir_on_disk(name)
    if isinstance(node, Dir):
        return node
    if isinstance(node, Entry):
        node.must_be_same(Dir)
        return node
    return None
|
KeyError
|
dataset/ETHPy150Open kayhayen/Nuitka/nuitka/build/inline_copy/lib/scons-2.3.2/SCons/Node/FS.py/FileFinder.filedir_lookup
|
3,844 |
def find_file(self, filename, paths, verbose=None):
    """
    find_file(str, [Dir()]) -> [nodes]
    filename - a filename to find
    paths - a list of directory path *nodes* to search in. Can be
            represented as a list, a tuple, or a callable that is
            called with no arguments and returns the list or tuple.
    returns - the node created from the found file.
    Find a node corresponding to either a derived file or a file
    that exists already.
    Only the first file found is returned, and none is returned
    if no file is found.
    """
    memo_key = self._find_file_key(filename, paths)
    try:
        memo_dict = self._memo['find_file']
    except __HOLE__:
        memo_dict = {}
        self._memo['find_file'] = memo_dict
    else:
        try:
            return memo_dict[memo_key]
        except KeyError:
            pass

    if verbose and not callable(verbose):
        if not SCons.Util.is_String(verbose):
            verbose = "find_file"
        _verbose = u' %s: ' % verbose
        verbose = lambda s: sys.stdout.write(_verbose + s)

    filedir, filename = os.path.split(filename)
    if filedir:
        # More compact code that we can't use until we drop
        # support for Python 1.5.2:
        #
        #def filedir_lookup(p, fd=filedir):
        #    """
        #    A helper function that looks up a directory for a file
        #    we're trying to find. This only creates the Dir Node
        #    if it exists on-disk, since if the directory doesn't
        #    exist we know we won't find any files in it... :-)
        #    """
        #    dir, name = os.path.split(fd)
        #    if dir:
        #        p = filedir_lookup(p, dir)
        #        if not p:
        #            return None
        #    norm_name = _my_normcase(name)
        #    try:
        #        node = p.entries[norm_name]
        #    except KeyError:
        #        return p.dir_on_disk(name)
        #    if isinstance(node, Dir):
        #        return node
        #    if isinstance(node, Entry):
        #        node.must_be_same(Dir)
        #        return node
        #    if isinstance(node, Dir) or isinstance(node, Entry):
        #        return node
        #    return None
        #paths = [_f for _f in map(filedir_lookup, paths) if _f]
        self.default_filedir = filedir
        paths = [_f for _f in map(self.filedir_lookup, paths) if _f]

    result = None
    for dir in paths:
        if verbose:
            verbose("looking for '%s' in '%s' ...\n" % (filename, dir))
        node, d = dir.srcdir_find_file(filename)
        if node:
            if verbose:
                verbose("... FOUND '%s' in '%s'\n" % (filename, d))
            result = node
            break

    memo_dict[memo_key] = result
    return result
|
KeyError
|
dataset/ETHPy150Open kayhayen/Nuitka/nuitka/build/inline_copy/lib/scons-2.3.2/SCons/Node/FS.py/FileFinder.find_file
|
3,845 |
def invalidate_node_memos(targets):
    """
    Invalidate the memoized values of all Nodes (files or directories)
    that are associated with the given entries. Has been added to
    clear the cache of nodes affected by a direct execution of an
    action (e.g. Delete/Copy/Chmod). Existing Node caches become
    inconsistent if the action is run through Execute(). The argument
    `targets` can be a single Node object or filename, or a sequence
    of Nodes/filenames.
    """
    from traceback import extract_stack

    # First check if the cache really needs to be flushed. Only
    # actions run in the SConscript with Execute() seem to be
    # affected. XXX The way to check if Execute() is in the stacktrace
    # is a very dirty hack and should be replaced by a more sensible
    # solution.
    for f in extract_stack():
        if f[2] == 'Execute' and f[0][-14:] == 'Environment.py':
            break
    else:
        # Don't have to invalidate, so return
        return

    if not SCons.Util.is_List(targets):
        targets = [targets]

    for entry in targets:
        # If the target is a Node object, clear the cache. If it is a
        # filename, look up potentially existing Node object first.
        try:
            entry.clear_memoized_values()
        except __HOLE__:
            # Not a Node object, try to look up Node by filename. XXX
            # This creates Node objects even for those filenames which
            # do not correspond to an existing Node object.
            node = get_default_fs().Entry(entry)
            if node:
                node.clear_memoized_values()

# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
AttributeError
|
dataset/ETHPy150Open kayhayen/Nuitka/nuitka/build/inline_copy/lib/scons-2.3.2/SCons/Node/FS.py/invalidate_node_memos
|
3,846 |
def render(self, request):
    actual_render = getattr(self, 'render_%s' % request.method)
    try:
        return actual_render(request)
    except __HOLE__ as e:
        typical_message = 'No JSON object could be decoded'
        if e.message == typical_message:
            request.setResponseCode(BAD_REQUEST, message='CDMI Request body is malformed')
            return ''
        else:
            raise e
|
ValueError
|
dataset/ETHPy150Open livenson/vcdm/src/vcdm/server/cdmi/cdmiresource.py/StorageResource.render
|
3,847 |
def configure_logger(loglevel=2, quiet=False, logdir=None):
    "Creates the logger instance and adds handlers plus formatting."
    logger = logging.getLogger()

    # Set the loglevel.
    if loglevel > 3:
        loglevel = 3  # Cap at 3 to avoid index errors.
    levels = [logging.ERROR, logging.WARN, logging.INFO, logging.DEBUG]
    logger.setLevel(levels[loglevel])

    logformat = "%(asctime)-14s %(levelname)-8s %(name)-8s %(message)s"
    formatter = logging.Formatter(logformat, "%Y-%m-%d %H:%M:%S")

    if not quiet:
        console_handler = logging.StreamHandler(sys.stdout)
        console_handler.setFormatter(formatter)
        logger.addHandler(console_handler)
        logger.debug("Added logging console handler.")
        logger.info("Loglevel is {}.".format(levels[loglevel]))
    if logdir:
        try:
            logfile = os.path.join(logdir, "demimove.log")
            file_handler = logging.FileHandler(logfile)
            file_handler.setFormatter(formatter)
            logger.addHandler(file_handler)
            logger.debug("Added logging file handler: {}.".format(logfile))
        except __HOLE__:
            logger.error("Could not attach file handler.")
|
IOError
|
dataset/ETHPy150Open mikar/demimove/demimove/helpers.py/configure_logger
|
3,848 |
def splitpath(path):
    try:
        match = splitrx.match(path).groups()
        if match[-1] is None:
            match = match[:-1] + ("",)
        return match
    except __HOLE__:
        pass
|
AttributeError
|
dataset/ETHPy150Open mikar/demimove/demimove/helpers.py/splitpath
|
3,849 |
def _get_system_username():
    """
    Obtain name of current system user, which will be default connection user.
    """
    import getpass
    username = None
    try:
        username = getpass.getuser()
    # getpass.getuser supported on both Unix and Windows systems.
    # getpass.getuser may call pwd.getpwuid which in turn may raise KeyError
    # if it cannot find a username for the given UID, e.g. on ep.io
    # and similar "non VPS" style services. Rather than error out, just keep
    # the 'default' username to None. Can check for this value later if needed.
    except __HOLE__:
        pass
    except ImportError:
        if win32:
            import win32api
            import win32security
            import win32profile
            username = win32api.GetUserName()
    return username
|
KeyError
|
dataset/ETHPy150Open fabric/fabric/fabric/state.py/_get_system_username
|
3,850 |
def _update_content(sender, instance, created=None, **kwargs):
    """
    Re-save any content models referencing the just-modified
    ``FileUpload``.
    We don't do anything special to the content model, we just re-save
    it. If signals are in use, we assume that the content model has
    incorporated ``render_uploads`` into some kind of rendering that
    happens automatically at save-time.
    """
    if created:  # a brand new FileUpload won't be referenced
        return
    for ref in FileUploadReference.objects.filter(upload=instance):
        try:
            obj = ref.content_object
            if obj:
                obj.save()
        except __HOLE__:
            pass
|
AttributeError
|
dataset/ETHPy150Open carljm/django-adminfiles/adminfiles/listeners.py/_update_content
|
3,851 |
def isint(int_str):
    try:
        int(int_str)
        return True
    except (TypeError, __HOLE__):
        return False
|
ValueError
|
dataset/ETHPy150Open closeio/flask-mongorest/flask_mongorest/utils.py/isint
|
3,852 |
def worker_input_generator(self):
    '''Call this on the worker processes: yields input.'''
    while True:
        try:
            x = self.input_queue.get(timeout=_IDLE_TIMEOUT)
            if x is None:
                return
            if self.input_index is not None:
                vimap.exception_handling.print_warning(
                    "Didn't produce an output for input!",
                    input_index=self.input_index)
            self.input_index, z = x
            self.debug("Got input #{0}", self.input_index)
            yield z
        except multiprocessing.queues.Empty:
            # print("Waiting")
            pass
        except __HOLE__:
            print(
                "Worker error getting item from input queue",
                file=sys.stderr)
            raise
|
IOError
|
dataset/ETHPy150Open gatoatigrado/vimap/vimap/real_worker_routine.py/WorkerRoutine.worker_input_generator
|
3,853 |
def safe_close_queue(self, name, queue):
    self.debug("Closing queue {0}", name)
    queue.close()
    try:
        self.debug("Joining thread for queue {0}", name)
        try:
            self.debug(
                "Joining queue {name} (size {size}, full: {full})",
                name=name,
                size=queue.qsize(),
                full=queue.full())
        except NotImplementedError:
            pass  # Mac OS X doesn't implement qsize()
        queue.join_thread()
    # threads might have already been closed
    except __HOLE__ as e:
        self.debug("Couldn't join queue {0}; error {1}", name, e)
    else:
        self.debug("Done closing {0}, no exceptions.", name)
|
AssertionError
|
dataset/ETHPy150Open gatoatigrado/vimap/vimap/real_worker_routine.py/WorkerRoutine.safe_close_queue
|
3,854 |
def run(self, input_queue, output_queue):
    '''
    Takes ordered items from input_queue, lets `fcn` iterate over
    those, and puts items yielded by `fcn` onto the output queue,
    with their IDs.
    '''
    self.input_queue, self.output_queue = input_queue, output_queue
    self.input_index = None
    self.debug("starting; PID {0}", os.getpid())
    try:
        fcn_iter = self.fcn(
            self.worker_input_generator(),
            *self.init_args,
            **self.init_kwargs
        )
        try:
            iter(fcn_iter)
        except __HOLE__:
            vimap.exception_handling.print_warning(
                "Your worker function must yield values for inputs it consumes!",
                fcn_return_value=fcn_iter)
            assert False
        for output in fcn_iter:
            self.handle_output(output)
    except Exception:
        ec = vimap.exception_handling.ExceptionContext.current()
        self.debug('{0}', ec.formatted_traceback)
        self.output_queue.put((self.input_index, 'exception', ec))
    self.explicitly_close_queues()
    self.debug("exiting")
|
TypeError
|
dataset/ETHPy150Open gatoatigrado/vimap/vimap/real_worker_routine.py/WorkerRoutine.run
|
3,855 |
def convert(self, value):
    if value is None:
        return None
    try:
        # Try to return the URL if it's a ``File``, falling back to the string
        # itself if it's been overridden or is a default.
        return getattr(value, 'url', value)
    except __HOLE__:
        return None
|
ValueError
|
dataset/ETHPy150Open hzlf/openbroadcast/website/apps/tastypie__/fields.py/FileField.convert
|
3,856 |
def hydrate(self, bundle):
    value = super(DateField, self).hydrate(bundle)
    if value and not hasattr(value, 'year'):
        try:
            # Try to rip a date/datetime out of it.
            value = make_aware(parse(value))
            if hasattr(value, 'hour'):
                value = value.date()
        except __HOLE__:
            pass
    return value
|
ValueError
|
dataset/ETHPy150Open hzlf/openbroadcast/website/apps/tastypie__/fields.py/DateField.hydrate
|
3,857 |
def hydrate(self, bundle):
    value = super(DateTimeField, self).hydrate(bundle)
    if value and not hasattr(value, 'year'):
        try:
            # Try to rip a date/datetime out of it.
            value = make_aware(parse(value))
        except __HOLE__:
            pass
    return value
|
ValueError
|
dataset/ETHPy150Open hzlf/openbroadcast/website/apps/tastypie__/fields.py/DateTimeField.hydrate
|
3,858 |
def resource_from_uri(self, fk_resource, uri, request=None, related_obj=None, related_name=None):
    """
    If a URI is provided, the related resource is attempted to be
    loaded based on the identifiers in the URI.
    """
    try:
        obj = fk_resource.get_via_uri(uri, request=request)
        bundle = fk_resource.build_bundle(obj=obj, request=request)
        return fk_resource.full_dehydrate(bundle)
    except __HOLE__:
        raise ApiFieldError("Could not find the provided object via resource URI '%s'." % uri)
|
ObjectDoesNotExist
|
dataset/ETHPy150Open hzlf/openbroadcast/website/apps/tastypie__/fields.py/RelatedField.resource_from_uri
|
3,859 |
def dehydrate(self, bundle):
    foreign_obj = None

    if isinstance(self.attribute, basestring):
        attrs = self.attribute.split('__')
        foreign_obj = bundle.obj

        for attr in attrs:
            previous_obj = foreign_obj
            try:
                foreign_obj = getattr(foreign_obj, attr, None)
            except __HOLE__:
                foreign_obj = None
    elif callable(self.attribute):
        foreign_obj = self.attribute(bundle)

    if not foreign_obj:
        if not self.null:
            raise ApiFieldError("The model '%r' has an empty attribute '%s' and doesn't allow a null value." % (previous_obj, attr))
        return None

    self.fk_resource = self.get_related_resource(foreign_obj)
    fk_bundle = Bundle(obj=foreign_obj, request=bundle.request)
    return self.dehydrate_related(fk_bundle, self.fk_resource)
|
ObjectDoesNotExist
|
dataset/ETHPy150Open hzlf/openbroadcast/website/apps/tastypie__/fields.py/ToOneField.dehydrate
|
3,860 |
def dehydrate(self, bundle):
    if not bundle.obj or not bundle.obj.pk:
        if not self.null:
            raise ApiFieldError("The model '%r' does not have a primary key and can not be used in a ToMany context." % bundle.obj)
        return []

    the_m2ms = None
    previous_obj = bundle.obj
    attr = self.attribute

    if isinstance(self.attribute, basestring):
        attrs = self.attribute.split('__')
        the_m2ms = bundle.obj

        for attr in attrs:
            previous_obj = the_m2ms
            try:
                the_m2ms = getattr(the_m2ms, attr, None)
            except __HOLE__:
                the_m2ms = None
            if not the_m2ms:
                break
    elif callable(self.attribute):
        the_m2ms = self.attribute(bundle)

    if not the_m2ms:
        if not self.null:
            raise ApiFieldError("The model '%r' has an empty attribute '%s' and doesn't allow a null value." % (previous_obj, attr))
        return []

    self.m2m_resources = []
    m2m_dehydrated = []

    # TODO: Also model-specific and leaky. Relies on there being a
    # ``Manager`` there.
    for m2m in the_m2ms.all():
        m2m_resource = self.get_related_resource(m2m)
        m2m_bundle = Bundle(obj=m2m, request=bundle.request)
        self.m2m_resources.append(m2m_resource)
        m2m_dehydrated.append(self.dehydrate_related(m2m_bundle, m2m_resource))

    return m2m_dehydrated
|
ObjectDoesNotExist
|
dataset/ETHPy150Open hzlf/openbroadcast/website/apps/tastypie__/fields.py/ToManyField.dehydrate
|
3,861 |
def to_time(self, s):
    try:
        dt = parse(s)
    except __HOLE__, e:
        raise ApiFieldError(str(e))
    else:
        return datetime.time(dt.hour, dt.minute, dt.second)
|
ValueError
|
dataset/ETHPy150Open hzlf/openbroadcast/website/apps/tastypie__/fields.py/TimeField.to_time
|
3,862 |
def label_for_value(self, value):
    #key = self.rel.get_related_field().name
    if isinstance(value, DBRef):
        value = value.id
    try:
        obj = self.rel.to.objects().get(**{'pk': value})
        return ' <strong>%s</strong>' % escape(Truncator(obj).words(14, truncate='...'))
    except (__HOLE__, self.rel.to.DoesNotExist):
        return ''
|
ValueError
|
dataset/ETHPy150Open jschrewe/django-mongoadmin/mongoadmin/widgets.py/ReferenceRawIdWidget.label_for_value
|
3,863 |
def __cmp__(self, other):
    if isinstance(other, SessionId):
        return cmp(self.id, other.id)
    else:
        try:
            return cmp(hash(self.id), hash(other))
        except __HOLE__:
            return 1
|
TypeError
|
dataset/ETHPy150Open weblabdeusto/weblabdeusto/server/src/voodoo/sessions/session_id.py/SessionId.__cmp__
|
3,864 |
@analytics_task()
def track_confirmed_account_on_hubspot(webuser):
    vid = _get_user_hubspot_id(webuser)
    if vid:
        # Only track the property if the contact already exists.
        try:
            domain = webuser.domain_memberships[0].domain
        except (__HOLE__, AttributeError):
            domain = ''
        _track_on_hubspot(webuser, {
            'confirmed_account': True,
            'domain': domain
        })
|
IndexError
|
dataset/ETHPy150Open dimagi/commcare-hq/corehq/apps/analytics/tasks.py/track_confirmed_account_on_hubspot
|
3,865 |
def __getitem__(self, key):
    try:
        return self.__getattr__(key)
    except __HOLE__:
        raise KeyError
|
AttributeError
|
dataset/ETHPy150Open reviewboard/rbtools/rbtools/api/resource.py/ResourceDictField.__getitem__
|
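This row and the next delegate mapping access to attribute access, translating `AttributeError` into the `KeyError` a mapping caller expects. A minimal self-contained sketch of that pattern (not the rbtools classes themselves):

class AttrDict(object):
    def __init__(self, **fields):
        self._fields = fields

    def __getattr__(self, name):
        # Only called when normal attribute lookup fails.
        try:
            return self._fields[name]
        except KeyError:
            raise AttributeError(name)

    def __getitem__(self, key):
        # Dictionary syntax falls through to attribute access.
        try:
            return self.__getattr__(key)
        except AttributeError:
            raise KeyError(key)

d = AttrDict(status='ok')
assert d.status == d['status'] == 'ok'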
3,866 |
def __getitem__(self, key):
    try:
        return self.__getattr__(key)
    except __HOLE__:
        raise KeyError
|
AttributeError
|
dataset/ETHPy150Open reviewboard/rbtools/rbtools/api/resource.py/ItemResource.__getitem__
|
3,867 |
def __getitem__(self, key):
    payload = self._item_list[key]

    # TODO: Should try and guess the url based on the parent url,
    # and the id number if the self link doesn't exist.
    try:
        url = payload['links']['self']['href']
    except __HOLE__:
        url = ''

    # We need to import this here because of the mutual imports.
    from rbtools.api.factory import create_resource
    return create_resource(self._transport,
                           payload,
                           url,
                           mime_type=self._item_mime_type,
                           guess_token=False)
|
KeyError
|
dataset/ETHPy150Open reviewboard/rbtools/rbtools/api/resource.py/ListResource.__getitem__
|
3,868 |
@request_method_decorator
def _get_template_request(self, url_template, values={}, **kwargs):
    """Generate an HttpRequest from a uri-template.
    This will replace each '{variable}' in the template with the
    value from kwargs['variable'], or if it does not exist, the
    value from values['variable']. The resulting url is used to
    create an HttpRequest.
    """
    def get_template_value(m):
        try:
            return str(kwargs.pop(m.group('key'), None) or
                       values[m.group('key')])
        except __HOLE__:
            raise ValueError('Template was not provided a value for "%s"' %
                             m.group('key'))

    url = self._TEMPLATE_PARAM_RE.sub(get_template_value, url_template)
    return HttpRequest(url, query_args=kwargs)
|
KeyError
|
dataset/ETHPy150Open reviewboard/rbtools/rbtools/api/resource.py/RootResource._get_template_request
|
3,869 |
def lock(file, flags):
    try:
        fcntl.flock(_getfd(file), flags)
    except __HOLE__, exc_value:
        # IOError: [Errno 11] Resource temporarily unavailable
        if exc_value[0] == 11:
            raise LockException(LockException.LOCK_FAILED, exc_value[1])
        else:
            raise
|
IOError
|
dataset/ETHPy150Open harryliu/edwin/edwinServer/site_packages/ConcurrentLogHandler084/portalocker.py/lock
|
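For context on the errno 11 branch in `lock()`: a second non-blocking `flock()` on an already-locked file fails with `EAGAIN` ("Resource temporarily unavailable"). A small Unix-only illustration, separate from portalocker (the temp-file path is arbitrary):

import errno
import fcntl

with open('/tmp/portalocker_demo.lock', 'w') as first:
    fcntl.flock(first.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)
    # A second, independent descriptor for the same file cannot take
    # the lock without blocking, so LOCK_NB makes it raise instead.
    with open('/tmp/portalocker_demo.lock', 'w') as second:
        try:
            fcntl.flock(second.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)
        except IOError as e:
            assert e.errno == errno.EAGAIN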
3,870 |
def _get_from_cache(self, target_self, *args, **kwargs):
    target_self_cls_name = reflection.get_class_name(target_self,
                                                     fully_qualified=False)
    func_name = "%(module)s.%(class)s.%(func_name)s" % {
        'module': target_self.__module__,
        'class': target_self_cls_name,
        'func_name': self.func.__name__,
    }
    key = (func_name,) + args
    if kwargs:
        key += dict2tuple(kwargs)
    try:
        item = target_self._cache.get(key, self._not_cached)
    except __HOLE__:
        LOG.debug("Method %(func_name)s cannot be cached due to "
                  "unhashable parameters: args: %(args)s, kwargs: "
                  "%(kwargs)s",
                  {'func_name': func_name,
                   'args': args,
                   'kwargs': kwargs})
        return self.func(target_self, *args, **kwargs)

    if item is self._not_cached:
        item = self.func(target_self, *args, **kwargs)
        target_self._cache.set(key, item, None)

    return item
|
TypeError
|
dataset/ETHPy150Open openstack/neutron/neutron/common/utils.py/cache_method_results._get_from_cache
|
3,871 |
def ensure_dir(dir_path):
    """Ensure a directory with 755 permissions mode."""
    try:
        os.makedirs(dir_path, 0o755)
    except __HOLE__ as e:
        # If the directory already existed, don't raise the error.
        if e.errno != errno.EEXIST:
            raise
|
OSError
|
dataset/ETHPy150Open openstack/neutron/neutron/common/utils.py/ensure_dir
|
3,872 |
def cpu_count():
    try:
        return multiprocessing.cpu_count()
    except __HOLE__:
        return 1
|
NotImplementedError
|
dataset/ETHPy150Open openstack/neutron/neutron/common/utils.py/cpu_count
|
3,873 |
def load_class_by_alias_or_classname(namespace, name):
    """Load class using stevedore alias or the class name
    :param namespace: namespace where the alias is defined
    :param name: alias or class name of the class to be loaded
    :returns class if class can be loaded
    :raises ImportError if class cannot be loaded
    """
    if not name:
        LOG.error(_LE("Alias or class name is not set"))
        raise ImportError(_("Class not found."))
    try:
        # Try to resolve class by alias
        mgr = driver.DriverManager(namespace, name)
        class_to_load = mgr.driver
    except RuntimeError:
        e1_info = sys.exc_info()
        # Fallback to class name
        try:
            class_to_load = importutils.import_class(name)
        except (ImportError, __HOLE__):
            LOG.error(_LE("Error loading class by alias"),
                      exc_info=e1_info)
            LOG.error(_LE("Error loading class by class name"),
                      exc_info=True)
            raise ImportError(_("Class not found."))
    return class_to_load
|
ValueError
|
dataset/ETHPy150Open openstack/neutron/neutron/common/utils.py/load_class_by_alias_or_classname
|
3,874 |
def convertIconToPNG(app_name, destination_path, desired_size):
    '''Converts an application icns file to a png file, choosing the
    representation closest to (but >= than if possible) the desired_size.
    Returns True if successful, False otherwise'''
    app_path = os.path.join('/Applications', app_name + '.app')
    if not os.path.exists(app_path):
        return False
    try:
        info = FoundationPlist.readPlist(
            os.path.join(app_path, 'Contents/Info.plist'))
    except FoundationPlist.FoundationPlistException:
        info = {}
    try:
        try:
            icon_filename = info.get('CFBundleIconFile', app_name)
        except __HOLE__:
            icon_filename = app_name
        icon_path = os.path.join(app_path, 'Contents/Resources', icon_filename)
        if not os.path.splitext(icon_path)[1]:
            # no file extension, so add '.icns'
            icon_path += u'.icns'
        if os.path.exists(icon_path):
            image_data = NSData.dataWithContentsOfFile_(icon_path)
            bitmap_reps = NSBitmapImageRep.imageRepsWithData_(image_data)
            chosen_rep = None
            for bitmap_rep in bitmap_reps:
                if not chosen_rep:
                    chosen_rep = bitmap_rep
                elif (bitmap_rep.pixelsHigh() >= desired_size
                      and bitmap_rep.pixelsHigh() < chosen_rep.pixelsHigh()):
                    chosen_rep = bitmap_rep
            if chosen_rep:
                png_data = chosen_rep.representationUsingType_properties_(
                    NSPNGFileType, None)
                png_data.writeToFile_atomically_(destination_path, False)
                return True
    except Exception:
        return False
    return False
|
AttributeError
|
dataset/ETHPy150Open munki/munki/code/apps/Managed Software Center/Managed Software Center/MunkiItems.py/convertIconToPNG
|
3,875 |
def __getitem__(self, name):
    '''Allow access to instance variables and methods via dictionary syntax.
    This allows us to use class instances as a data source
    for our HTML templates (which want a dictionary-like object)'''
    try:
        return super(GenericItem, self).__getitem__(name)
    except KeyError, err:
        try:
            attr = getattr(self, name)
        except __HOLE__:
            raise KeyError(err)
        if callable(attr):
            return attr()
        else:
            return attr
|
AttributeError
|
dataset/ETHPy150Open munki/munki/code/apps/Managed Software Center/Managed Software Center/MunkiItems.py/GenericItem.__getitem__
|
3,876 |
def daemonize(self):
    """UNIX double-fork magic."""
    try:
        pid = os.fork()
        if pid > 0:
            # First parent; exit.
            sys.exit(0)
    except __HOLE__ as e:
        sys.stderr.write('Could not fork! %d (%s)\n' %
                         (e.errno, e.strerror))
        sys.exit(1)

    # Disconnect from parent environment.
    os.chdir('/')
    os.setsid()
    os.umask(0o022)

    # Fork again.
    try:
        pid = os.fork()
        if pid > 0:
            # Second parent; exit.
            sys.exit(0)
    except OSError as e:
        sys.stderr.write('Could not fork (2nd)! %d (%s)\n' %
                         (e.errno, e.strerror))
        sys.exit(1)

    # Redirect file descriptors.
    sys.stdout.flush()
    sys.stderr.flush()
    si = file(self.stdin, 'r')
    so = file(self.stdout, 'a+')
    se = file(self.stderr, 'a+', 0)
    os.dup2(si.fileno(), sys.stdin.fileno())
    os.dup2(so.fileno(), sys.stdout.fileno())
    os.dup2(se.fileno(), sys.stderr.fileno())

    # Write the pidfile.
    atexit.register(self.delpid)
    pid = str(os.getpid())
    with open(self.pidfile, 'w+') as fp:
        fp.write('%s\n' % pid)
|
OSError
|
dataset/ETHPy150Open sivy/pystatsd/pystatsd/daemon.py/Daemon.daemonize
|
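
The redirection step above uses the file() builtin, which exists only on Python 2. A sketch of those three lines for Python 3 (the same stdin/stdout/stderr path attributes are assumed; unbuffered streams must be opened in binary mode there):

si = open(self.stdin, 'r')
so = open(self.stdout, 'a+')
se = open(self.stderr, 'a+b', 0)  # buffering=0 requires binary mode on Python 3
os.dup2(si.fileno(), sys.stdin.fileno())
os.dup2(so.fileno(), sys.stdout.fileno())
os.dup2(se.fileno(), sys.stderr.fileno())
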
3,877 |
def stop(self):
"""Stop the daemon."""
pid = None
if os.path.exists(self.pidfile):
with open(self.pidfile, 'r') as fp:
pid = int(fp.read().strip())
if not pid:
msg = 'pidfile (%s) does not exist. Daemon not running?\n'
sys.stderr.write(msg % self.pidfile)
return
try:
while 1:
os.kill(pid, SIGTERM)
time.sleep(0.1)
except __HOLE__ as e:
e = str(e)
if e.find('No such process') > 0:
if os.path.exists(self.pidfile):
os.remove(self.pidfile)
else:
print(e)
sys.exit(1)
|
OSError
|
dataset/ETHPy150Open sivy/pystatsd/pystatsd/daemon.py/Daemon.stop
|
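
Matching the exception text ('No such process') is fragile across locales and Python versions; comparing errno expresses the same termination loop more robustly. A sketch of the loop body, not the pystatsd source:

import errno
import os
import time
from signal import SIGTERM

try:
    while True:
        os.kill(pid, SIGTERM)
        time.sleep(0.1)
except OSError as e:
    if e.errno == errno.ESRCH:  # the process is already gone
        if os.path.exists(self.pidfile):
            os.remove(self.pidfile)
    else:
        raise
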
3,878 |
def get_result(self, rs):
etags = []
key = name = None
newsource = None
if rs is not None:
try:
while True:
key = rs.next(self.conn.timeout())
getLogger().debug("got key {} with etag: {}".format(
key.name, key.etag))
etags.append(key.etag)
except __HOLE__:
pass
except WorkerFailureError:
getLogger().debug("error getting result.", exc_info=True)
if not etags:
raise UploadEmptyError()
if hasattr(self.upload, 'complete_upload'):
try:
key = self.conn.complete_multipart(self.upload, etags)
except Exception as e:
getLogger().debug("error completing multipart", exc_info=True)
raise CloudProviderUploadError(e)
name = key.key_name
else:
name = key.name
uploaded = len(etags)
total = self.source.chunks
getLogger().debug("Uploaded {} out of {} chunks".format(
uploaded, total))
size = self.source.size
if uploaded < total:
for seq in xrange(uploaded, total):
make_progress({'part': seq, 'tx': 0})
skip = self.source.chunkstart(uploaded)
newsource = ChunkedFile(
self.source.path, skip=skip, chunk=self.source.chunk)
size = size - newsource.size
getLogger().debug("saving progress for {}".format(key))
save_progress(name, size)
return (key.etag, newsource)
|
StopIteration
|
dataset/ETHPy150Open longaccess/longaccess-client/lacli/pool.py/MPUpload.get_result
|
3,879 |
def _remove_finder(importer, finder):
"""Remove an existing finder from pkg_resources."""
existing_finder = _get_finder(importer)
if not existing_finder:
return
if isinstance(existing_finder, ChainedFinder):
try:
existing_finder.finders.remove(finder)
except __HOLE__:
return
if len(existing_finder.finders) == 1:
pkg_resources.register_finder(importer, existing_finder.finders[0])
elif len(existing_finder.finders) == 0:
pkg_resources.register_finder(importer, pkg_resources.find_nothing)
else:
pkg_resources.register_finder(importer, pkg_resources.find_nothing)
|
ValueError
|
dataset/ETHPy150Open pantsbuild/pex/pex/finders.py/_remove_finder
|
3,880 |
def remove(self, event):
"""remove(event)
Removes the event from the event queue. The event is an Event object as
returned by the add or getevents methods."""
left = self.alarm(0)
try:
i = self.eventq.index(event)
except __HOLE__:
pass
else:
if i == 0: # currently scheduled event time
ev = self.eventq.pop(0)
self._adjust(ev.delay-left)
self._runq()
if self.eventq:
self.alarm(self.eventq[0].delay)
else:
del self.eventq[i]
self.alarm(left)
|
ValueError
|
dataset/ETHPy150Open kdart/pycopia/core/pycopia/scheduler.py/Scheduler.remove
|
3,881 |
def get_scheduler():
global scheduler
try:
return scheduler
except __HOLE__:
scheduler = Scheduler()
return scheduler
|
NameError
|
dataset/ETHPy150Open kdart/pycopia/core/pycopia/scheduler.py/get_scheduler
|
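
The NameError trick works only because the module leaves `scheduler` unbound until first use. A more explicit lazy singleton with the same behaviour initialises the global to None; a sketch:

scheduler = None

def get_scheduler():
    global scheduler
    if scheduler is None:
        scheduler = Scheduler()
    return scheduler
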
3,882 |
def process_request(self, req):
path = req.args['path']
rm = RepositoryManager(self.env)
reponame, repos, path = rm.get_repository_by_path(path)
if repos is None or path != '/':
msg = u'No such repository (%s)\n' % path
self.log.warning(msg.rstrip('\n'))
req.send(msg.encode('utf-8'), 'text/plain', 400)
if req.method != 'POST':
msg = u'Endpoint is ready to accept GitHub notifications.\n'
self.log.warning(u'Method not allowed (%s)' % req.method)
req.send(msg.encode('utf-8'), 'text/plain', 405)
event = req.get_header('X-GitHub-Event')
if event == 'ping':
payload = json.loads(req.read())
req.send(payload['zen'].encode('utf-8'), 'text/plain', 200)
elif event != 'push':
msg = u'Only ping and push are supported\n'
self.log.warning(msg.rstrip('\n'))
req.send(msg.encode('utf-8'), 'text/plain', 400)
output = u'Running hook on %s\n' % (reponame or '(default)')
output += u'* Updating clone\n'
try:
git = repos.git.repo # GitRepository
except AttributeError:
git = repos.repos.git.repo # GitCachedRepository
git.remote('update', '--prune')
# Ensure that repos.get_changeset can find the new changesets.
output += u'* Synchronizing with clone\n'
repos.sync()
try:
payload = json.loads(req.read())
revs = [commit['id']
for commit in payload['commits'] if commit['distinct']]
except (ValueError, __HOLE__):
msg = u'Invalid payload\n'
self.log.warning(msg.rstrip('\n'))
req.send(msg.encode('utf-8'), 'text/plain', 400)
branches = self.get_branches(reponame)
added, skipped, unknown = classify_commits(revs, repos, branches)
if added:
output += u'* Adding %s\n' % describe_commits(added)
# This is where Trac gets notified of the commits in the changeset
rm.notify('changeset_added', reponame, added)
if skipped:
output += u'* Skipping %s\n' % describe_commits(skipped)
if unknown:
output += u'* Unknown %s\n' % describe_commits(unknown)
self.log.error(u'Payload contains unknown %s',
describe_commits(unknown))
for line in output.splitlines():
self.log.debug(line)
req.send(output.encode('utf-8'), 'text/plain', 200 if output else 204)
|
KeyError
|
dataset/ETHPy150Open trac-hacks/trac-github/tracext/github.py/GitHubPostCommitHook.process_request
|
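
The except around the payload parse covers both malformed JSON and a well-formed body missing the push-event fields. An abridged, illustrative push payload showing just the keys the handler reads (not a verbatim GitHub document):

payload = {
    "commits": [
        {"id": "1a2b3c", "distinct": True},    # new commit: collected
        {"id": "4d5e6f", "distinct": False},   # seen elsewhere: ignored here
    ],
}
revs = [c["id"] for c in payload["commits"] if c["distinct"]]
assert revs == ["1a2b3c"]
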
3,883 |
def childFactory(self, ctx, name):
"""Since we created anchor tags linking to children of this resource
directly by id, when the anchor is clicked, childFactory will be called
with the appropriate id as the name argument."""
try:
## Pass the id of the database item we want to be rendered on this page
## to the DBItem constructor. This integer will be used as the default data
## for this page.
return DBItem(int(name))
except __HOLE__:
pass
## returning None results in a 404
|
ValueError
|
dataset/ETHPy150Open twisted/nevow/examples/db/db.py/DBBrowser.childFactory
|
3,884 |
def get_value(self, data):
try:
return data[self.name]
except __HOLE__:
raise MissingFieldValue(self.name)
|
KeyError
|
dataset/ETHPy150Open stepank/pyws/src/pyws/functions/args/field.py/Field.get_value
|
3,885 |
def validate(self, value):
try:
return self.type.validate(value, self.none_value)
except __HOLE__:
raise WrongFieldValueType(self.name)
|
ValueError
|
dataset/ETHPy150Open stepank/pyws/src/pyws/functions/args/field.py/Field.validate
|
3,886 |
def header_property(name, doc, transform=None):
"""Creates a header getter/setter.
Args:
name: Header name, e.g., "Content-Type"
doc: Docstring for the property
transform: Transformation function to use when setting the
property. The value will be passed to the function, and
the function should return the transformed value to use
as the value of the header (default ``None``).
"""
normalized_name = name.lower()
def fget(self):
try:
return self._headers[normalized_name]
except __HOLE__:
return None
if transform is None:
def fset(self, value):
self._headers[normalized_name] = value
else:
def fset(self, value):
self._headers[normalized_name] = transform(value)
def fdel(self):
del self._headers[normalized_name]
return property(fget, fset, fdel, doc)
|
KeyError
|
dataset/ETHPy150Open falconry/falcon/falcon/response_helpers.py/header_property
|
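
A property factory like this is used at class-definition time. A minimal sketch with a hypothetical Response class (illustrative only, not the falcon API):

class Response(object):
    def __init__(self):
        self._headers = {}

    content_type = header_property(
        'Content-Type', 'MIME type of the response body')
    content_length = header_property(
        'Content-Length', 'Body size in bytes', transform=str)

resp = Response()
resp.content_length = 1024
assert resp._headers['content-length'] == '1024'  # transform applied on set
assert resp.content_type is None                  # unset header reads as None
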
3,887 |
def is_ascii_encodable(s):
"""Check if argument encodes to ascii without error."""
try:
s.encode('ascii')
except UnicodeEncodeError:
# NOTE(tbug): Py2 and Py3 will raise this if string contained
# chars that could not be ascii encoded
return False
except UnicodeDecodeError:
# NOTE(tbug): py2 will raise this if type is str
# and contains non-ascii chars
return False
except __HOLE__:
# NOTE(tbug): s is probably not a string type
return False
return True
|
AttributeError
|
dataset/ETHPy150Open falconry/falcon/falcon/response_helpers.py/is_ascii_encodable
|
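
Each except clause corresponds to a concrete class of input; a few illustrative calls (the u-prefixed literals behave the same on Python 2 and 3):

assert is_ascii_encodable(u'falcon')         # plain ASCII encodes fine
assert not is_ascii_encodable(u'f\xe4lcon')  # UnicodeEncodeError branch
assert not is_ascii_encodable(42)            # ints have no .encode at all
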
3,888 |
def product_detail(request, object_id):
product = get_object_or_404(Product.objects.filter(is_active=True), pk=object_id)
if request.method == 'POST':
form = OrderItemForm(request.POST)
if form.is_valid():
order = shop.order_from_request(request, create=True)
try:
order.modify_item(product, form.cleaned_data.get('quantity'))
messages.success(request, _('The cart has been updated.'))
except __HOLE__, e:
if e.code == 'order_sealed':
[messages.error(request, msg) for msg in e.messages]
else:
raise
return redirect('plata_shop_cart')
else:
form = OrderItemForm()
return render_to_response('product/product_detail.html', {
'object': product,
'form': form,
}, context_instance=RequestContext(request))
|
ValidationError
|
dataset/ETHPy150Open matthiask/plata/examples/simple/views.py/product_detail
|
3,889 |
def _encode_value(self, value):
if isinstance(value, str):
ret = '"' + value + '"'
elif isinstance(value, float):
ret = str(value) + 'f'
elif sys.version_info[0] >= 3 and isinstance(value, int):
if value > 2147483647:
ret = str(value) + 'l'
else:
ret = str(value)
elif sys.version_info[0] < 3 and isinstance(value, long):
ret = str(value) + 'l'
elif isinstance(value, int):
ret = str(value)
elif isinstance(value, datetime):
ret = str(int(time.mktime(value.timetuple())) * 1000) + 't'
elif isinstance(value, date):
ret = str(int(time.mktime(value.timetuple())) * 1000) + 'a'
elif isinstance(value, Decimal):
ret = str(value) + 'c'
elif isinstance(value, list):
try:
ret = "[" + ','.join(
map(
lambda elem: self._parse_value(type(value[0])(elem))
if not isinstance(value[0], OrientRecordLink)
else elem.get_hash(),
value
)) + ']'
except __HOLE__ as e:
raise Exception("wrong type commistion")
elif isinstance(value, dict):
ret = "{" + ','.join(map(
lambda elem: '"' + elem + '":' + self._parse_value(value[elem]),
value)) + '}'
elif isinstance(value, OrientRecord):
ret = "(" + self.__encode(value) + ")"
elif isinstance(value, OrientRecordLink):
ret = value.get_hash()
elif isinstance(value, OrientBinaryObject):
ret = value.get_hash()
else:
ret = ''
return ret
#
# DECODING STUFF
#
# Consume the first field key, which could be a class name.
# :param content str The input to consume
# :return: list The collected string and any remaining content,
# followed by a boolean indicating whether this is a class name.
|
ValueError
|
dataset/ETHPy150Open mogui/pyorient/pyorient/serializations.py/OrientSerializationCSV._encode_value
|
3,890 |
def _parse_value( self, content ):
"""
Consume a field value.
:param: content str The input to consume
:return: list The collected value and any remaining content.
"""
c = ''
content = content.lstrip( " " )
try:
            c = content[ 0 ]  # may raise IndexError when content is empty
except __HOLE__:
pass
if len( content ) == 0 or c == ',':
return [ None, content ]
elif c == '"':
return self._parse_string( content[1:] )
elif c == '#':
return self._parse_rid( content[1:] )
elif c == '[':
return self._parse_collection( content[1:] )
elif c == '<':
return self._parse_set( content[1:] )
elif c == '{':
return self._parse_map( content[1:] )
elif c == '(':
return self._parse_record( content[1:] )
elif c == '%':
return self._parse_bag( content[1:] )
elif c == '_':
return self._parse_binary( content[1:] )
elif c == '-' or self._is_numeric( c ):
return self._parse_number( content )
elif c == 'n' and content[ 0:4 ] == 'null':
return [ None, content[ 4: ] ]
elif c == 't' and content[ 0:4 ] == 'true':
return [ True, content[ 4: ] ]
elif c == 'f' and content[ 0:5 ] == 'false':
return [ False, content[ 5: ] ]
else:
return [ None, content ]
|
IndexError
|
dataset/ETHPy150Open mogui/pyorient/pyorient/serializations.py/OrientSerializationCSV._parse_value
|
3,891 |
@staticmethod
def _is_numeric( content ):
try:
float( content )
return True
except __HOLE__:
return False
|
ValueError
|
dataset/ETHPy150Open mogui/pyorient/pyorient/serializations.py/OrientSerializationCSV._is_numeric
|
3,892 |
def _parse_number(self, content):
"""
Consume a number.
If the number has a suffix, consume it also and instantiate the
right type, e.g. for dates
:param content str The content to consume
:return: list The collected number and any remaining content.
"""
length = len(content)
collected = ''
is_float = False
i = 0
for i in range(0, length):
c = content[i]
if c == '-' or self._is_numeric(c):
collected += c
elif c == '.':
is_float = True
collected += c
elif c == 'E' and is_float:
collected += c
else:
break
content = content[i:]
c = ''
try:
            c = content[ 0 ]  # may raise IndexError when content is empty
except __HOLE__:
pass
if c == 'a':
collected = date.fromtimestamp(float(collected) / 1000)
content = content[1:]
elif c == 't':
# date
collected = datetime.fromtimestamp(float(collected) / 1000)
content = content[1:]
elif c == 'f' or c == 'd':
# float # double
collected = float(collected)
content = content[1:]
elif c == 'c':
collected = Decimal(collected)
content = content[1:]
elif c == 'b' or c == 's':
collected = int(collected)
content = content[1:]
elif c == 'l':
if sys.version_info[0] < 3:
collected = long(collected) # python 2.x long type
else:
collected = int(collected)
content = content[1:]
elif is_float:
collected = float(collected)
else:
collected = int(collected)
return [collected, content]
|
IndexError
|
dataset/ETHPy150Open mogui/pyorient/pyorient/serializations.py/OrientSerializationCSV._parse_number
|
3,893 |
def test_iterable(value):
"""Check if it's possible to iterate over an object."""
try:
iter(value)
except __HOLE__:
return False
return True
|
TypeError
|
dataset/ETHPy150Open GeekTrainer/Flask/Work/Trivia - Module 5/env/Lib/site-packages/jinja2/tests.py/test_iterable
|
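
This backs Jinja2's `iterable` template test; a few direct calls make the TypeError guard concrete:

assert test_iterable([1, 2, 3])   # lists define __iter__
assert test_iterable('abc')       # strings iterate per character
assert not test_iterable(42)      # iter(42) raises TypeError
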
3,894 |
def inet_aton(text):
"""Convert a text format IPv6 address into network format.
@param text: the textual address
@type text: string
@rtype: string
@raises dns.exception.SyntaxError: the text was not properly formatted
"""
#
# Our aim here is not something fast; we just want something that works.
#
if text == '::':
text = '0::'
#
# Get rid of the icky dot-quad syntax if we have it.
#
m = _v4_ending.match(text)
    if m is not None:
text = "%s:%04x:%04x" % (m.group(1),
int(m.group(2)) * 256 + int(m.group(3)),
int(m.group(4)) * 256 + int(m.group(5)))
#
# Try to turn '::<whatever>' into ':<whatever>'; if no match try to
# turn '<whatever>::' into '<whatever>:'
#
m = _colon_colon_start.match(text)
    if m is not None:
text = text[1:]
else:
m = _colon_colon_end.match(text)
        if m is not None:
text = text[:-1]
#
# Now canonicalize into 8 chunks of 4 hex digits each
#
chunks = text.split(':')
l = len(chunks)
if l > 8:
raise dns.exception.SyntaxError
seen_empty = False
canonical = []
for c in chunks:
if c == '':
if seen_empty:
raise dns.exception.SyntaxError
seen_empty = True
for i in xrange(0, 8 - l + 1):
canonical.append('0000')
else:
lc = len(c)
if lc > 4:
raise dns.exception.SyntaxError
if lc != 4:
c = ('0' * (4 - lc)) + c
canonical.append(c)
if l < 8 and not seen_empty:
raise dns.exception.SyntaxError
text = ''.join(canonical)
#
# Finally we can go to binary.
#
try:
return text.decode('hex_codec')
except __HOLE__:
raise dns.exception.SyntaxError
|
TypeError
|
dataset/ETHPy150Open catap/namebench/nb_third_party/dns/ipv6.py/inet_aton
|
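
The closing text.decode('hex_codec') works only on Python 2 byte strings. Under Python 3 the same final step goes through binascii; a sketch of just that step, assuming text holds the 32 canonical hex digits and dns.exception is imported as in the source:

import binascii

try:
    return binascii.unhexlify(text)
except (binascii.Error, ValueError):
    raise dns.exception.SyntaxError
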
3,895 |
def request(self, operation, url, data=None, headers=None):
"""Performs an HTTP call to the server, supports GET, POST, PUT, and
DELETE.
    Usage example, performing an HTTP GET on http://www.google.com/:
import atom.http
client = atom.http.HttpClient()
http_response = client.request('GET', 'http://www.google.com/')
Args:
operation: str The HTTP operation to be performed. This is usually one
of 'GET', 'POST', 'PUT', or 'DELETE'
data: filestream, list of parts, or other object which can be converted
to a string. Should be set to None when performing a GET or DELETE.
If data is a file-like object which can be read, this method will
read a chunk of 100K bytes at a time and send them.
If the data is a list of parts to be sent, each part will be
evaluated and sent.
url: The full URL to which the request should be sent. Can be a string
or atom.url.Url.
headers: dict of strings. HTTP headers which should be sent
in the request.
"""
all_headers = self.headers.copy()
if headers:
all_headers.update(headers)
# If the list of headers does not include a Content-Length, attempt to
# calculate it based on the data object.
if data and 'Content-Length' not in all_headers:
if isinstance(data, str):
all_headers['Content-Length'] = str(len(data))
else:
raise atom.http_interface.ContentLengthRequired('Unable to calculate '
'the length of the data parameter. Specify a value for '
'Content-Length')
# Set the content type to the default value if none was set.
if 'Content-Type' not in all_headers:
all_headers['Content-Type'] = DEFAULT_CONTENT_TYPE
if self.v2_http_client is not None:
http_request = atom.http_core.HttpRequest(method=operation)
atom.http_core.Uri.parse_uri(str(url)).modify_request(http_request)
http_request.headers = all_headers
if data:
http_request._body_parts.append(data)
return self.v2_http_client.request(http_request=http_request)
if not isinstance(url, atom.url.Url):
if isinstance(url, str):
url = atom.url.parse_url(url)
else:
raise atom.http_interface.UnparsableUrlObject('Unable to parse url '
'parameter because it was not a string or atom.url.Url')
connection = self._prepare_connection(url, all_headers)
if self.debug:
connection.debuglevel = 1
connection.putrequest(operation, self._get_access_url(url),
skip_host=True)
if url.port is not None:
connection.putheader('Host', '%s:%s' % (url.host, url.port))
else:
connection.putheader('Host', url.host)
# Overcome a bug in Python 2.4 and 2.5
# httplib.HTTPConnection.putrequest adding
# HTTP request header 'Host: www.google.com:443' instead of
# 'Host: www.google.com', and thus resulting the error message
# 'Token invalid - AuthSub token has wrong scope' in the HTTP response.
if (url.protocol == 'https' and int(url.port or 443) == 443 and
hasattr(connection, '_buffer') and
isinstance(connection._buffer, list)):
header_line = 'Host: %s:443' % url.host
replacement_header_line = 'Host: %s' % url.host
try:
connection._buffer[connection._buffer.index(header_line)] = (
replacement_header_line)
except __HOLE__: # header_line missing from connection._buffer
pass
# Send the HTTP headers.
for header_name in all_headers:
connection.putheader(header_name, all_headers[header_name])
connection.endheaders()
# If there is data, send it in the request.
if data:
if isinstance(data, list):
for data_part in data:
_send_data_part(data_part, connection)
else:
_send_data_part(data, connection)
# Return the HTTP Response from the server.
return connection.getresponse()
|
ValueError
|
dataset/ETHPy150Open kuri65536/python-for-android/python3-alpha/python-libs/atom/http.py/HttpClient.request
|
3,896 |
def __init__(self, path_spec, process_func=None, dtype=None,
as_grey=False, plugin=None):
try:
import skimage
except __HOLE__:
if plugin is not None:
warn("A plugin was specified but ignored. Plugins can only "
"be specified if scikit-image is available. Instead, "
"ImageSequence will try using matplotlib and scipy "
"in that order.")
self.kwargs = dict()
else:
self.kwargs = dict(plugin=plugin)
self._is_zipfile = False
self._zipfile = None
self._get_files(path_spec)
tmp = self.imread(self._filepaths[0], **self.kwargs)
self._first_frame_shape = tmp.shape
self._validate_process_func(process_func)
self._as_grey(as_grey, process_func)
if dtype is None:
self._dtype = tmp.dtype
else:
self._dtype = dtype
|
ImportError
|
dataset/ETHPy150Open soft-matter/pims/pims/image_sequence.py/ImageSequence.__init__
|
3,897 |
def __repr__(self):
# May be overwritten by subclasses
try:
source = self.pathname
except __HOLE__:
source = '(list of images)'
return """<Frames>
Source: {pathname}
Length: {count} frames
Frame Shape: {frame_shape!r}
Pixel Datatype: {dtype}""".format(frame_shape=self.frame_shape,
count=len(self),
pathname=source,
dtype=self.pixel_type)
|
AttributeError
|
dataset/ETHPy150Open soft-matter/pims/pims/image_sequence.py/ImageSequence.__repr__
|
3,898 |
def filename_to_indices(filename, identifiers='tzc'):
""" Find ocurrences of axis indices (e.g. t001, z06, c2)
in a filename and returns a list of indices.
Parameters
----------
filename : string
filename to be searched for indices
identifiers : string or list of strings, optional
iterable of N strings preceding axis indices, in that order
Returns
    -------
list of int
axis indices. Elements default to 0 when index was not found.
"""
escaped = [re.escape(a) for a in identifiers]
axes = re.findall('(' + '|'.join(escaped) + r')(\d+)',
filename)
if len(axes) > len(identifiers):
        axes = axes[-len(identifiers):]  # keep the trailing match per identifier
order = [a[0] for a in axes]
result = [0] * len(identifiers)
for (i, col) in enumerate(identifiers):
try:
result[i] = int(axes[order.index(col)][1])
except __HOLE__:
result[i] = 0
return result
|
ValueError
|
dataset/ETHPy150Open soft-matter/pims/pims/image_sequence.py/filename_to_indices
|
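
With the default identifiers 'tzc', two illustrative filenames show the axis extraction and the zero defaults:

assert filename_to_indices('stack_t001_z06_c2.tif') == [1, 6, 2]
assert filename_to_indices('stack_t5.tif') == [5, 0, 0]  # z and c default to 0
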
3,899 |
def __repr__(self):
try:
source = self.pathname
except __HOLE__:
source = '(list of images)'
s = "<ImageSequenceND>\nSource: {0}\n".format(source)
s += "Axes: {0}\n".format(self.ndim)
for dim in self._sizes:
s += "Axis '{0}' size: {1}\n".format(dim, self._sizes[dim])
s += """Pixel Datatype: {dtype}""".format(dtype=self.pixel_type)
return s
|
AttributeError
|
dataset/ETHPy150Open soft-matter/pims/pims/image_sequence.py/ImageSequenceND.__repr__
|