function (string, 11–56k chars) | repo_name (string, 5–60 chars) | features (sequence)
---|---|---
def url_unescape(value, encoding='utf-8', plus=True):
"""Decodes the given value from a URL.
The argument may be either a byte or unicode string.
If encoding is None, the result will be a byte string. Otherwise,
the result is a unicode string in the specified encoding.
If ``plus`` is true (the default), plus signs will be interpreted
as spaces (literal plus signs must be represented as "%2B"). This
is appropriate for query strings and form-encoded values but not
for the path component of a URL. Note that this default is the
reverse of Python's urllib module.
.. versionadded:: 3.1
The ``plus`` argument
"""
if encoding is None:
if plus:
# unquote_to_bytes doesn't have a _plus variant
value = to_basestring(value).replace('+', ' ')
return urllib_parse.unquote_to_bytes(value)
else:
unquote = (urllib_parse.unquote_plus if plus
else urllib_parse.unquote)
        return unquote(to_basestring(value), encoding=encoding)

repo: unnikrishnankgs/va | features: [1, 5, 1, 10, 1496432585]
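A quick usage sketch (hedged: it assumes this function is Tornado's tornado.escape.url_unescape, which matches the docstring) showing how the plus flag changes decoding:

# Illustrative only; assumes tornado is installed.
from tornado.escape import url_unescape

print(url_unescape("a+b%2Bc"))                    # 'a b+c': '+' decodes to space, '%2B' to a literal plus
print(url_unescape("a+b%2Bc", plus=False))        # 'a+b+c': '+' passes through (path-style decoding)
print(url_unescape(b"caf%C3%A9", encoding=None))  # b'caf\xc3\xa9': raw bytes, no charset decoding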
def utf8(value):
# type: (typing.Union[bytes,unicode_type,None])->typing.Union[bytes,None]
"""Converts a string argument to a byte string.
If the argument is already a byte string or None, it is returned unchanged.
Otherwise it must be a unicode string and is encoded as utf8.
"""
if isinstance(value, _UTF8_TYPES):
return value
if not isinstance(value, unicode_type):
raise TypeError(
"Expected bytes, unicode, or None; got %r" % type(value)
)
return value.encode("utf-8") | unnikrishnankgs/va | [
1,
5,
1,
10,
1496432585
] |
def to_unicode(value):
"""Converts a string argument to a unicode string.
If the argument is already a unicode string or None, it is returned
unchanged. Otherwise it must be a byte string and is decoded as utf8.
"""
if isinstance(value, _TO_UNICODE_TYPES):
return value
if not isinstance(value, bytes):
raise TypeError(
"Expected bytes, unicode, or None; got %r" % type(value)
)
return value.decode("utf-8") | unnikrishnankgs/va | [
1,
5,
1,
10,
1496432585
] |
def to_basestring(value):
"""Converts a string argument to a subclass of basestring.
In python2, byte and unicode strings are mostly interchangeable,
so functions that deal with a user-supplied argument in combination
with ascii string constants can use either and should return the type
the user supplied. In python3, the two types are not interchangeable,
so this method is needed to convert byte strings to unicode.
"""
if isinstance(value, _BASESTRING_TYPES):
return value
if not isinstance(value, bytes):
raise TypeError(
"Expected bytes, unicode, or None; got %r" % type(value)
)
return value.decode("utf-8") | unnikrishnankgs/va | [
1,
5,
1,
10,
1496432585
] |
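The three helpers above are symmetric, so their behavior can be pinned down with a few assertions (a sketch using the functions as defined above; _UTF8_TYPES and friends come from the surrounding module):

assert utf8(u"caf\u00e9") == b"caf\xc3\xa9"        # unicode in, utf-8 bytes out
assert utf8(b"raw") == b"raw"                      # bytes pass through unchanged
assert to_unicode(b"caf\xc3\xa9") == u"caf\u00e9"  # bytes in, unicode out
assert to_basestring(b"abc") == u"abc"             # py3: bytes promoted to str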
def linkify(text, shorten=False, extra_params="",
require_protocol=False, permitted_protocols=["http", "https"]):
"""Converts plain text into HTML with links.
For example: ``linkify("Hello http://tornadoweb.org!")`` would return
``Hello <a href="http://tornadoweb.org">http://tornadoweb.org</a>!``
Parameters:
* ``shorten``: Long urls will be shortened for display.
* ``extra_params``: Extra text to include in the link tag, or a callable
taking the link as an argument and returning the extra text
e.g. ``linkify(text, extra_params='rel="nofollow" class="external"')``,
or::
def extra_params_cb(url):
if url.startswith("http://example.com"):
return 'class="internal"'
else:
return 'class="external" rel="nofollow"'
linkify(text, extra_params=extra_params_cb)
* ``require_protocol``: Only linkify urls which include a protocol. If
this is False, urls such as www.facebook.com will also be linkified.
* ``permitted_protocols``: List (or set) of protocols which should be
linkified, e.g. ``linkify(text, permitted_protocols=["http", "ftp",
"mailto"])``. It is very unsafe to include protocols such as
``javascript``.
"""
if extra_params and not callable(extra_params):
extra_params = " " + extra_params.strip()
def make_link(m):
url = m.group(1)
proto = m.group(2)
if require_protocol and not proto:
return url # no protocol, no linkify
if proto and proto not in permitted_protocols:
return url # bad protocol, no linkify
href = m.group(1)
if not proto:
href = "http://" + href # no proto specified, use http
if callable(extra_params):
params = " " + extra_params(href).strip()
else:
params = extra_params
# clip long urls. max_len is just an approximation
max_len = 30
if shorten and len(url) > max_len:
before_clip = url
if proto:
proto_len = len(proto) + 1 + len(m.group(3) or "") # +1 for :
else:
proto_len = 0
parts = url[proto_len:].split("/")
if len(parts) > 1:
# Grab the whole host part plus the first bit of the path
# The path is usually not that interesting once shortened
# (no more slug, etc), so it really just provides a little
# extra indication of shortening.
url = url[:proto_len] + parts[0] + "/" + \
parts[1][:8].split('?')[0].split('.')[0]
if len(url) > max_len * 1.5: # still too long
url = url[:max_len]
if url != before_clip:
amp = url.rfind('&')
# avoid splitting html char entities
if amp > max_len - 5:
url = url[:amp]
url += "..."
if len(url) >= len(before_clip):
url = before_clip
else:
# full url is visible on mouse-over (for those who don't
# have a status bar, such as Safari by default)
params += ' title="%s"' % href
return u'<a href="%s"%s>%s</a>' % (href, params, url)
# First HTML-escape so that our strings are all safe.
# The regex is modified to avoid character entities other than &amp; so
# that we won't pick up &quot;, etc.
text = _unicode(xhtml_escape(text))
    return _URL_RE.sub(make_link, text)

repo: unnikrishnankgs/va | features: [1, 5, 1, 10, 1496432585]
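For illustration, a hedged example of shorten and extra_params working together (the exact truncation point depends on the url; the output in the comment is abbreviated):

html = linkify(u"docs at http://tornadoweb.org/en/stable/escape.html",
               shorten=True, extra_params='rel="nofollow"')
# -> u'docs at <a href="http://tornadoweb.org/en/stable/escape.html"
#    rel="nofollow" title="http://tornadoweb.org/en/stable/escape.html">
#    http://tornadoweb.org/en/...</a>'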
def _build_unicode_map():
unicode_map = {}
for name, value in htmlentitydefs.name2codepoint.items():
unicode_map[name] = unichr(value)
    return unicode_map

repo: unnikrishnankgs/va | features: [1, 5, 1, 10, 1496432585]
def _cls(self):
    return ds.TransformedDistribution

repo: npuichigo/ttsflow | features: [16, 6, 16, 1, 1500635633]
def testCachedSamplesWithoutInverse(self):
with self.test_session() as sess:
mu = 3.0
sigma = 0.02
log_normal = self._cls()(
distribution=ds.Normal(loc=mu, scale=sigma),
bijector=bs.Exp(event_ndims=0))
sample = log_normal.sample(1)
sample_val, log_pdf_val = sess.run([sample, log_normal.log_prob(sample)])
self.assertAllClose(
stats.lognorm.logpdf(sample_val, s=sigma, scale=np.exp(mu)),
log_pdf_val,
          atol=1e-2)

repo: npuichigo/ttsflow | features: [16, 6, 16, 1, 1500635633]
def testEntropy(self):
with self.test_session():
shift = np.array([[-1, 0, 1], [-1, -2, -3]], dtype=np.float32)
diag = np.array([[1, 2, 3], [2, 3, 2]], dtype=np.float32)
actual_mvn_entropy = np.concatenate([
[stats.multivariate_normal(shift[i], np.diag(diag[i]**2)).entropy()]
for i in range(len(diag))])
fake_mvn = self._cls()(
ds.MultivariateNormalDiag(
loc=array_ops.zeros_like(shift),
scale_diag=array_ops.ones_like(diag),
validate_args=True),
bs.AffineLinearOperator(
shift,
scale=la.LinearOperatorDiag(diag, is_non_singular=True),
validate_args=True),
validate_args=True)
self.assertAllClose(actual_mvn_entropy,
                          fake_mvn.entropy().eval())

repo: npuichigo/ttsflow | features: [16, 6, 16, 1, 1500635633]
def _cls(self):
    return ds.TransformedDistribution

repo: npuichigo/ttsflow | features: [16, 6, 16, 1, 1500635633]
def _testMVN(self,
base_distribution_class,
base_distribution_kwargs,
batch_shape=(),
event_shape=(),
not_implemented_message=None):
with self.test_session() as sess:
# Overriding shapes must be compatible w/bijector; most bijectors are
# batch_shape agnostic and only care about event_ndims.
# In the case of `Affine`, if we got it wrong then it would fire an
# exception due to incompatible dimensions.
batch_shape_pl = array_ops.placeholder(
dtypes.int32, name="dynamic_batch_shape")
event_shape_pl = array_ops.placeholder(
dtypes.int32, name="dynamic_event_shape")
feed_dict = {batch_shape_pl: np.array(batch_shape, dtype=np.int32),
event_shape_pl: np.array(event_shape, dtype=np.int32)}
fake_mvn_dynamic = self._cls()(
distribution=base_distribution_class(validate_args=True,
**base_distribution_kwargs),
bijector=bs.Affine(shift=self._shift, scale_tril=self._tril),
batch_shape=batch_shape_pl,
event_shape=event_shape_pl,
validate_args=True)
fake_mvn_static = self._cls()(
distribution=base_distribution_class(validate_args=True,
**base_distribution_kwargs),
bijector=bs.Affine(shift=self._shift, scale_tril=self._tril),
batch_shape=batch_shape,
event_shape=event_shape,
validate_args=True)
actual_mean = np.tile(self._shift, [2, 1]) # Affine elided this tile.
actual_cov = np.matmul(self._tril, np.transpose(self._tril, [0, 2, 1]))
def actual_mvn_log_prob(x):
return np.concatenate([
[stats.multivariate_normal(
actual_mean[i], actual_cov[i]).logpdf(x[:, i, :])]
for i in range(len(actual_cov))]).T
actual_mvn_entropy = np.concatenate([
[stats.multivariate_normal(
actual_mean[i], actual_cov[i]).entropy()]
for i in range(len(actual_cov))])
self.assertAllEqual([3], fake_mvn_static.event_shape)
self.assertAllEqual([2], fake_mvn_static.batch_shape)
self.assertAllEqual(tensor_shape.TensorShape(None),
fake_mvn_dynamic.event_shape)
self.assertAllEqual(tensor_shape.TensorShape(None),
fake_mvn_dynamic.batch_shape)
x = fake_mvn_static.sample(5, seed=0).eval()
for unsupported_fn in (fake_mvn_static.log_cdf,
fake_mvn_static.cdf,
fake_mvn_static.survival_function,
fake_mvn_static.log_survival_function):
with self.assertRaisesRegexp(NotImplementedError,
not_implemented_message):
unsupported_fn(x)
num_samples = 5e3
for fake_mvn, feed_dict in ((fake_mvn_static, {}),
(fake_mvn_dynamic, feed_dict)):
# Ensure sample works by checking first, second moments.
y = fake_mvn.sample(int(num_samples), seed=0)
x = y[0:5, ...]
sample_mean = math_ops.reduce_mean(y, 0)
centered_y = array_ops.transpose(y - sample_mean, [1, 2, 0])
sample_cov = math_ops.matmul(
centered_y, centered_y, transpose_b=True) / num_samples
[
sample_mean_,
sample_cov_,
x_,
fake_event_shape_,
fake_batch_shape_,
fake_log_prob_,
fake_prob_,
fake_entropy_,
] = sess.run([
sample_mean,
sample_cov,
x,
fake_mvn.event_shape_tensor(),
fake_mvn.batch_shape_tensor(),
fake_mvn.log_prob(x),
fake_mvn.prob(x),
fake_mvn.entropy(),
], feed_dict=feed_dict)
self.assertAllClose(actual_mean, sample_mean_, atol=0.1, rtol=0.1)
self.assertAllClose(actual_cov, sample_cov_, atol=0., rtol=0.1)
# Ensure all other functions work as intended.
self.assertAllEqual([5, 2, 3], x_.shape)
self.assertAllEqual([3], fake_event_shape_)
self.assertAllEqual([2], fake_batch_shape_)
self.assertAllClose(actual_mvn_log_prob(x_), fake_log_prob_,
atol=0., rtol=1e-6)
self.assertAllClose(np.exp(actual_mvn_log_prob(x_)), fake_prob_,
atol=0., rtol=1e-5)
self.assertAllClose(actual_mvn_entropy, fake_entropy_,
                            atol=0., rtol=1e-6)

repo: npuichigo/ttsflow | features: [16, 6, 16, 1, 1500635633]
def testScalarBatchNonScalarEvent(self):
self._testMVN(
base_distribution_class=ds.MultivariateNormalDiag,
base_distribution_kwargs={"loc": [0., 0., 0.],
"scale_diag": [1., 1, 1]},
batch_shape=[2],
not_implemented_message="not implemented")
with self.test_session():
# Can't override event_shape for scalar batch, non-scalar event.
with self.assertRaisesRegexp(ValueError, "base distribution not scalar"):
self._cls()(
distribution=ds.MultivariateNormalDiag(loc=[0.], scale_diag=[1.]),
bijector=bs.Affine(shift=self._shift, scale_tril=self._tril),
batch_shape=[2],
event_shape=[3],
            validate_args=True)

repo: npuichigo/ttsflow | features: [16, 6, 16, 1, 1500635633]
def testNonScalarBatchNonScalarEvent(self):
with self.test_session():
# Can't override event_shape and/or batch_shape for non_scalar batch,
# non-scalar event.
with self.assertRaisesRegexp(ValueError, "base distribution not scalar"):
self._cls()(
distribution=ds.MultivariateNormalDiag(loc=[[0.]],
scale_diag=[[1.]]),
bijector=bs.Affine(shift=self._shift, scale_tril=self._tril),
batch_shape=[2],
event_shape=[3],
            validate_args=True)

repo: npuichigo/ttsflow | features: [16, 6, 16, 1, 1500635633]
def __init__(self,params,parent):
self.params=params
        self.parent=parent

repo: bblais/plasticity | features: [5, 3, 5, 1, 1430486979]
def run(self):

repo: bblais/plasticity | features: [5, 3, 5, 1, 1430486979]
def subplot(*args):
    import pylab

repo: bblais/plasticity | features: [5, 3, 5, 1, 1430486979]
def __init__(self,parent=None,title='',direction='H',
size=(750,750),lfname=None,params=None):
        self.fig=None

repo: bblais/plasticity | features: [5, 3, 5, 1, 1430486979]
def Body(self):

repo: bblais/plasticity | features: [5, 3, 5, 1, 1430486979]
def Stopping(self):
        return self.stopping

repo: bblais/plasticity | features: [5, 3, 5, 1, 1430486979]
def Yield(self):
        wx.Yield()

repo: bblais/plasticity | features: [5, 3, 5, 1, 1430486979]
def ResetTitle(self):

repo: bblais/plasticity | features: [5, 3, 5, 1, 1430486979]
def Plot(self,sim):

repo: bblais/plasticity | features: [5, 3, 5, 1, 1430486979]
def Run_Pause(self,event):
        if not self.running:

repo: bblais/plasticity | features: [5, 3, 5, 1, 1430486979]
def __load_sim__(self,lfname):

repo: bblais/plasticity | features: [5, 3, 5, 1, 1430486979]
def Reset_Simulation(self,event=None):
if not os.path.exists(self.tmpfile):
            return

repo: bblais/plasticity | features: [5, 3, 5, 1, 1430486979]
def Restart(self,event=None):

repo: bblais/plasticity | features: [5, 3, 5, 1, 1430486979]
def Load_Simulation(self,event=None):
self.canvas.Show(False)
if self.modified:
(root,sfname)=os.path.split(self.params['save_sim_file'])
dlg=MessageDialog(self,
text="Do you want to save the changes you made to %s?" % sfname,
title="Load Simulation", ok=0, yes_no=1,cancel=1)
result=dlg.ShowModal()
            dlg.Destroy()

repo: bblais/plasticity | features: [5, 3, 5, 1, 1430486979]
def Save_Simulation(self,event=None):

repo: bblais/plasticity | features: [5, 3, 5, 1, 1430486979]
def Save_Simulation_As(self,event=None):

repo: bblais/plasticity | features: [5, 3, 5, 1, 1430486979]
def Set_Simulation_Parameters(self,event):
self.canvas.Show(False)
set_simulation_parameters(self.params,self)
        self.canvas.Show(True)

repo: bblais/plasticity | features: [5, 3, 5, 1, 1430486979]
def Set_Input_Parameters(self,event):
self.canvas.Show(False)
set_input_parameters(self.params,self)
        self.canvas.Show(True)

repo: bblais/plasticity | features: [5, 3, 5, 1, 1430486979]
def Set_Output_Parameters(self,event):
self.canvas.Show(False)
set_output_parameters(self.params,self)
        self.canvas.Show(True)

repo: bblais/plasticity | features: [5, 3, 5, 1, 1430486979]
def Set_Weight_Parameters(self,event):
self.canvas.Show(False)
set_weight_parameters(self.params,self)
        self.canvas.Show(True)

repo: bblais/plasticity | features: [5, 3, 5, 1, 1430486979]
def Set_Parameter_Structure(self,event):
        set_parameter_structure(self.params,self)

repo: bblais/plasticity | features: [5, 3, 5, 1, 1430486979]
def CreateMenu(self):

repo: bblais/plasticity | features: [5, 3, 5, 1, 1430486979]
def Display(self,event=None):
self.canvas.Show(False)
dlg = FileDialog(self, "Choose Display Module",default_dir=os.getcwd()+"/",
wildcard='Python Plot Files|plot*.py|All Files|*.*')
result = dlg.ShowModal()
dlg.Destroy()
if result == 'ok':
lfname = dlg.GetPaths()[0]
modulename=os.path.splitext(os.path.split(lfname)[-1])[0]
            self.params['display_module']=modulename

repo: bblais/plasticity | features: [5, 3, 5, 1, 1430486979]
def About(self,event):
win=AboutWindow()
        win.Show()

repo: bblais/plasticity | features: [5, 3, 5, 1, 1430486979]
def Nop(self,event):
self.canvas.Show(False)
dlg = MessageDialog(self, "Error","Function Not Implemented",icon='error')
dlg.ShowModal()
dlg.Destroy()
        self.canvas.Show(True)

repo: bblais/plasticity | features: [5, 3, 5, 1, 1430486979]
def Quit(self,event=None):

repo: bblais/plasticity | features: [5, 3, 5, 1, 1430486979]
def run(lfname=None,params=None,use_splash=True):
if use_splash:
app1=Application(splash.SplashFrame)
app1.Run()
app = Application(MainFrame, title="Plasticity",lfname=lfname,
params=params)
    app.Run()

repo: bblais/plasticity | features: [5, 3, 5, 1, 1430486979]
def test_api_endpoint_existence(todolist_app):
with todolist_app.test_client() as client:
resp = client.get('/tasks')
        assert resp.status_code == 200

repo: inkmonk/flask-sqlalchemy-booster | features: [8, 3, 8, 8, 1430659799]
def __init__(self, upload):
        self.upload = upload

repo: dirkmoors/drf-tus | features: [21, 20, 21, 2, 1488992116]
def handle_save(self):
        pass

repo: dirkmoors/drf-tus | features: [21, 20, 21, 2, 1488992116]
def finish(self):
# Trigger signal
signals.saved.send(sender=self.__class__, instance=self)
# Finish
self.upload.finish()
        self.upload.save()

repo: dirkmoors/drf-tus | features: [21, 20, 21, 2, 1488992116]
def handle_save(self):
# Save temporary field to file field
file_field = getattr(self.upload, self.destination_file_field)
file_field.save(self.upload.filename, File(open(self.upload.temporary_file_path)))
# Finish upload
        self.finish()

repo: dirkmoors/drf-tus | features: [21, 20, 21, 2, 1488992116]
def __getattr__(cls, name):
        return MagicMock()

repo: ageitgey/face_recognition | features: [47526, 12782, 47526, 704, 1488577959]
def combinationSum(self, candidates, target):
candidates.sort()
self.result = []
self.dfs(candidates,target,0,[])
        return self.result

repo: UmassJin/Leetcode | features: [85, 40, 85, 57, 1426803902]
def dfs(self,candidates,target,start,reslist):
length = len(candidates)
if target == 0:
            return self.result.append(reslist)

repo: UmassJin/Leetcode | features: [85, 40, 85, 57, 1426803902]
def combinationSum(self, candidates, target):
self.result = []
self.dfs(candidates,0,target,[])
        return self.result

repo: UmassJin/Leetcode | features: [85, 40, 85, 57, 1426803902]
def dfs(self,can,cursum,target,res):
if cursum > target: return
if cursum == target:
self.result.append(res)
return
for i in xrange(len(can)):
if not res or res[len(res)-1] <= can[i]:
                self.dfs(can,cursum+can[i],target,res+[can[i]])

repo: UmassJin/Leetcode | features: [85, 40, 85, 57, 1426803902]
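A small sketch of driving the second (cursum-based) version, assuming both methods live on a Solution class as is conventional for these Leetcode snippets:

sol = Solution()
print(sol.combinationSum([2, 3, 6, 7], 7))
# Expected: [[2, 2, 3], [7]]. The res[len(res)-1] <= can[i] check keeps each
# combination non-decreasing, so permutations of the same multiset appear once.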
def __init__(self, **kwargs):
"""Initialization method.
Args:
**kwargs: Keyword arguments.
Kwargs:
hash_table (str): The hash table package id.
remote (str): The remote ckan url.
api_key (str): The ckan api key.
ua (str): The user agent.
force (bool): Force (default: True).
quiet (bool): Suppress debug statements (default: False).
Returns:
New instance of :class:`CKAN`
Examples:
>>> CKAN() #doctest: +ELLIPSIS
<ckanutils.CKAN object at 0x...>
"""
default_ua = environ.get(UA_ENV, DEF_USER_AGENT)
def_remote = environ.get(REMOTE_ENV)
def_api_key = environ.get(API_KEY_ENV)
remote = kwargs.get('remote', def_remote)
self.api_key = kwargs.get('api_key', def_api_key)
self.force = kwargs.get('force', True)
self.quiet = kwargs.get('quiet')
self.user_agent = kwargs.get('ua', default_ua)
self.verbose = not self.quiet
self.hash_table = kwargs.get('hash_table', DEF_HASH_PACK)
ckan_kwargs = {'apikey': self.api_key, 'user_agent': self.user_agent}
attr = 'RemoteCKAN' if remote else 'LocalCKAN'
ckan = getattr(ckanapi, attr)(remote, **ckan_kwargs)
self.address = ckan.address
self.package_show = ckan.action.package_show
try:
self.hash_table_pack = self.package_show(id=self.hash_table)
except NotFound:
self.hash_table_pack = None
except ValidationError as err:
if err.error_dict.get('resource_id') == ['Not found: Resource']:
self.hash_table_pack = None
else:
raise err
try:
self.hash_table_id = self.hash_table_pack['resources'][0]['id']
except (IndexError, TypeError):
self.hash_table_id = None
# shortcuts
self.datastore_search = ckan.action.datastore_search
self.datastore_create = ckan.action.datastore_create
self.datastore_delete = ckan.action.datastore_delete
self.datastore_upsert = ckan.action.datastore_upsert
self.datastore_search = ckan.action.datastore_search
self.resource_show = ckan.action.resource_show
self.resource_create = ckan.action.resource_create
self.package_create = ckan.action.package_create
self.package_update = ckan.action.package_update
self.package_privatize = ckan.action.bulk_update_private
self.revision_show = ckan.action.revision_show
self.organization_list = ckan.action.organization_list_for_user
self.organization_show = ckan.action.organization_show
self.license_list = ckan.action.license_list
self.group_list = ckan.action.group_list
        self.user = ckan.action.get_site_user()

repo: reubano/ckanutils | features: [3, 3, 3, 3, 1434441316]
def delete_table(self, resource_id, **kwargs):
"""Deletes a datastore table.
Args:
resource_id (str): The datastore resource id.
**kwargs: Keyword arguments that are passed to datastore_create.
Kwargs:
force (bool): Delete resource even if read-only.
filters (dict): Filters to apply before deleting, e.g.,
{"name": "fred"}. If missing delete whole table and all
dependent views.
Returns:
dict: Original filters sent if table was found, `None` otherwise.
Raises:
ValidationError: If unable to validate user on ckan site.
Examples:
>>> CKAN(quiet=True).delete_table('rid')
Can't delete. Table `rid` was not found in datastore.
"""
kwargs.setdefault('force', self.force)
kwargs['resource_id'] = resource_id
init_msg = "Can't delete. Table `%s`" % resource_id
err_msg = '%s was not found in datastore.' % init_msg
read_msg = '%s is read only.' % init_msg
if self.verbose:
print('Deleting table `%s` from datastore...' % resource_id)
try:
result = self.datastore_delete(**kwargs)
except NotFound:
print(err_msg)
result = None
except ValidationError as err:
if 'read-only' in err.error_dict:
print(read_msg)
print("Set 'force' to True and try again.")
result = None
elif err.error_dict.get('resource_id') == ['Not found: Resource']:
print(err_msg)
result = None
else:
raise err
        return result

repo: reubano/ckanutils | features: [3, 3, 3, 3, 1434441316]
def get_hash(self, resource_id):
"""Gets the hash of a datastore table.
Args:
resource_id (str): The datastore resource id.
Returns:
str: The datastore resource hash.
Raises:
NotFound: If `hash_table_id` isn't set or not in datastore.
NotAuthorized: If unable to authorize ckan user.
Examples:
>>> CKAN(hash_table='hash_jhb34rtj34t').get_hash('rid')
Traceback (most recent call last):
        NotFound: {u'item': u'package', u'message': u'Package \

repo: reubano/ckanutils | features: [3, 3, 3, 3, 1434441316]
def fetch_resource(self, resource_id, user_agent=None, stream=True):
"""Fetches a single resource from filestore.
Args:
resource_id (str): The filestore resource id.
Kwargs:
user_agent (str): The user agent.
stream (bool): Stream content (default: True).
Returns:
obj: requests.Response object.
Raises:
NotFound: If unable to find the resource.
NotAuthorized: If access to fetch resource is denied.
Examples:
>>> CKAN(quiet=True).fetch_resource('rid')
Traceback (most recent call last):
NotFound: Resource `rid` was not found in filestore.
"""
user_agent = user_agent or self.user_agent
err_msg = 'Resource `%s` was not found in filestore.' % resource_id
try:
resource = self.resource_show(id=resource_id)
except NotFound:
raise NotFound(err_msg)
except ValidationError as err:
if err.error_dict.get('resource_id') == ['Not found: Resource']:
raise NotFound(err_msg)
else:
raise err
url = resource.get('perma_link') or resource.get('url')
if self.verbose:
print('Downloading url %s...' % url)
headers = {'User-Agent': user_agent}
r = requests.get(url, stream=stream, headers=headers)
err_msg = 'Access to fetch resource %s was denied.' % resource_id
if any('403' in h.headers.get('x-ckan-error', '') for h in r.history):
raise NotAuthorized(err_msg)
elif r.status_code == 401:
raise NotAuthorized(err_msg)
else:
            return r

repo: reubano/ckanutils | features: [3, 3, 3, 3, 1434441316]
def _update_filestore(self, func, *args, **kwargs):
"""Helps create or update a single resource on filestore.
To create a resource, you must supply either `url`, `filepath`, or
`fileobj`.
Args:
func (func): The resource passed to resource_create.
*args: Postional arguments that are passed to `func`
**kwargs: Keyword arguments that are passed to `func`.
Kwargs:
url (str): New file url (for file link).
fileobj (obj): New file like object (for file upload).
filepath (str): New file path (for file upload).
name (str): The resource name.
description (str): The resource description.
hash (str): The resource hash.
Returns:
obj: requests.Response object if `post` option is specified,
ckan resource object otherwise.
See also:
ckanutils.get_filestore_update_func
Examples:
>>> ckan = CKAN(quiet=True)
>>> url = 'http://example.com/file'
>>> resource = {'package_id': 'pid'}
>>> kwargs = {'name': 'name', 'url': url, 'format': 'csv'}
>>> res = ckan.get_filestore_update_func(resource, **kwargs)
>>> ckan._update_filestore(res[0], *res[1], **res[2])
Package `pid` was not found.
>>> resource['resource_id'] = 'rid'
>>> res = ckan.get_filestore_update_func(resource, **kwargs)
>>> ckan._update_filestore(res[0], *res[1], **res[2])
Resource `rid` was not found in filestore.
"""
data = kwargs.get('data', {})
files = kwargs.get('files', {})
resource_id = kwargs.get('resource_id', data.get('resource_id'))
package_id = kwargs.get('package_id', data.get('package_id'))
f = kwargs.get('upload', files.get('upload'))
err_msg = 'Resource `%s` was not found in filestore.' % resource_id
try:
r = func(*args, **kwargs) or {'id': None}
except NotFound:
pck_msg = 'Package `%s` was not found.' % package_id
print(err_msg if resource_id else pck_msg)
except ValidationError as err:
if err.error_dict.get('resource_id') == ['Not found: Resource']:
print(err_msg)
r = None
else:
raise err
except requests.exceptions.ConnectionError as err:
if 'Broken pipe' in err.message[1]:
print('File size too large. Try uploading a smaller file.')
r = None
else:
raise err
else:
return r
finally:
            f.close() if f else None

repo: reubano/ckanutils | features: [3, 3, 3, 3, 1434441316]
def update_filestore(self, resource_id, **kwargs):
"""Updates a single resource on filestore.
Args:
resource_id (str): The filestore resource id.
**kwargs: Keyword arguments that are passed to resource_create.
Kwargs:
url (str): New file url (for file link).
filepath (str): New file path (for file upload).
fileobj (obj): New file like object (for file upload).
post (bool): Post data using requests instead of ckanapi.
name (str): The resource name.
description (str): The resource description.
hash (str): The resource hash.
Returns:
obj: requests.Response object if `post` option is specified,
ckan resource object otherwise.
Examples:
>>> CKAN(quiet=True).update_filestore('rid')
Resource `rid` was not found in filestore.
"""
err_msg = 'Resource `%s` was not found in filestore.' % resource_id
try:
resource = self.resource_show(id=resource_id)
except NotFound:
print(err_msg)
return None
except ValidationError as err:
if err.error_dict.get('resource_id') == ['Not found: Resource']:
raise NotFound(err_msg)
else:
raise err
else:
resource['package_id'] = self.get_package_id(resource_id)
if self.verbose:
print('Updating resource %s...' % resource_id)
f, args, data = self.get_filestore_update_func(resource, **kwargs)
            return self._update_filestore(f, *args, **data)

repo: reubano/ckanutils | features: [3, 3, 3, 3, 1434441316]
def find_ids(self, packages, **kwargs):
default = {'rid': '', 'pname': ''}
kwargs.update({'method': self.query, 'default': default})
        return pr.find(packages, **kwargs)

repo: reubano/ckanutils | features: [3, 3, 3, 3, 1434441316]
def create_hash_table(self, verbose=False):
kwargs = {
'resource_id': self.hash_table_id,
'fields': [
{'id': 'datastore_id', 'type': 'text'},
{'id': 'hash', 'type': 'text'}],
'primary_key': 'datastore_id'
}
if verbose:
print('Creating hash table...')
        self.create_table(**kwargs)

repo: reubano/ckanutils | features: [3, 3, 3, 3, 1434441316]
def get_update_date(self, item):
timestamps = {
'revision_timestamp': 'revision',
'last_modified': 'resource',
'metadata_modified': 'package'
}
for key, value in timestamps.items():
if key in item:
timestamp = item[key]
item_type = value
break
else:
keys = timestamps.keys()
msg = 'None of the following keys found in item: %s' % keys
raise TypeError(msg)
if not timestamp and item_type == 'resource':
# print('Resource timestamp is empty. Querying revision.')
timestamp = self.revision_show(id=item['revision_id'])['timestamp']
        return dt.strptime(timestamp, '%Y-%m-%dT%H:%M:%S.%f')

repo: reubano/ckanutils | features: [3, 3, 3, 3, 1434441316]
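The parsing step at the end assumes CKAN's ISO-style timestamps with microseconds; a quick check of the format string:

from datetime import datetime as dt

ts = '2015-06-16T08:15:27.123456'
print(dt.strptime(ts, '%Y-%m-%dT%H:%M:%S.%f'))  # 2015-06-16 08:15:27.123456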
def register(self, tag):
        return functools.partial(self._register, tag)

repo: afg984/pyardrone | features: [27, 4, 27, 4, 1439552654]
def build_dataset(reader, phi_list, class_func, vectorizer=None, verbose=False):
"""Core general function for building experimental
    hand-generated feature datasets.

repo: ptoman/icgauge | features: [9, 2, 9, 1, 1460743801]
def experiment_features(
train_reader=data_readers.toy,
assess_reader=None,
train_size=0.7,
phi_list=[fe.manual_content_flags],
class_func=lt.identity_class_func,
train_func=training.fit_logistic_at_with_crossvalidation,
score_func=scipy.stats.stats.pearsonr,
verbose=True):
"""Generic experimental framework for hand-crafted features.
Either assesses with a random train/test split of `train_reader`
    or with `assess_reader` if it is given.

repo: ptoman/icgauge | features: [9, 2, 9, 1, 1460743801]
def get_score_example_pairs(y, y_hat, examples):
""" Return a list of dicts: {truth score, predicted score, example} """
paired_results = sorted(zip(y, y_hat), key=lambda x: x[0]-x[1])
performance = []
for i, (truth, prediction) in enumerate(paired_results):
performance.append({"truth": truth, "prediction": prediction, "example": examples[i]})
    return performance

repo: ptoman/icgauge | features: [9, 2, 9, 1, 1460743801]
def experiment_features_iterated(
train_reader=data_readers.toy,
assess_reader=None,
train_size=0.7,
phi_list=[fe.manual_content_flags],
class_func=lt.identity_class_func,
train_func=training.fit_logistic_at_with_crossvalidation,
score_func=utils.safe_weighted_f1,
verbose=True,
iterations=1):
"""
Generic iterated experimental framework for hand-crafted features.
"""
correlation_overall = []
cronbach_overall = []
conf_matrix_overall = None
assess_performance = []
while len(correlation_overall) < iterations:
print "\nStarting iteration: %d/%d" % (len(correlation_overall)+1, iterations)
try:
correlation_local, cronbach_local, conf_matrix_local, perf_local = experiment_features(
train_reader=train_reader,
assess_reader=assess_reader,
train_size=train_size,
phi_list=phi_list,
class_func=class_func,
train_func=train_func,
score_func=score_func,
                verbose=verbose)

repo: ptoman/icgauge | features: [9, 2, 9, 1, 1460743801]
def try_pull_image(self, name, tag="latest"):
'''
Pull an image
'''
self.log("(Try) Pulling image %s:%s" % (name, tag)) | nrser/qb | [
1,
1,
1,
8,
1448301308
] |
def log(self, msg, pretty_print=False):
        qb_log(msg)

repo: nrser/qb | features: [1, 1, 1, 8, 1448301308]
def __init__(self, client, results):
super(ImageManager, self).__init__()
self.client = client
self.results = results
parameters = self.client.module.params
self.check_mode = self.client.check_mode
self.archive_path = parameters.get('archive_path')
self.container_limits = parameters.get('container_limits')
self.dockerfile = parameters.get('dockerfile')
self.force = parameters.get('force')
self.load_path = parameters.get('load_path')
self.name = parameters.get('name')
self.nocache = parameters.get('nocache')
self.path = parameters.get('path')
self.pull = parameters.get('pull')
self.repository = parameters.get('repository')
self.rm = parameters.get('rm')
self.state = parameters.get('state')
self.tag = parameters.get('tag')
self.http_timeout = parameters.get('http_timeout')
self.push = parameters.get('push')
        self.buildargs = parameters.get('buildargs')

repo: nrser/qb | features: [1, 1, 1, 8, 1448301308]
def fail(self, msg):
        self.client.fail(msg)

repo: nrser/qb | features: [1, 1, 1, 8, 1448301308]
def absent(self):
'''
Handles state = 'absent', which removes an image.
:return None
'''
image = self.client.find_image(self.name, self.tag)
if image:
name = self.name
if self.tag:
name = "%s:%s" % (self.name, self.tag)
if not self.check_mode:
try:
self.client.remove_image(name, force=self.force)
except Exception as exc:
self.fail("Error removing image %s - %s" % (name, str(exc)))
self.results['changed'] = True
self.results['actions'].append("Removed image %s" % (name))
            self.results['image']['state'] = 'Deleted'

repo: nrser/qb | features: [1, 1, 1, 8, 1448301308]
def push_image(self, name, tag=None):
'''
If the name of the image contains a repository path, then push the image.
:param name Name of the image to push.
:param tag Use a specific tag.
:return: None
'''
repository = name
if not tag:
repository, tag = parse_repository_tag(name)
registry, repo_name = resolve_repository_name(repository)
self.log("push %s to %s/%s:%s" % (self.name, registry, repo_name, tag))
if registry:
self.results['actions'].append("Pushed image %s to %s/%s:%s" % (self.name, registry, repo_name, tag))
self.results['changed'] = True
if not self.check_mode:
status = None
try:
for line in self.client.push(repository, tag=tag, stream=True, decode=True):
self.log(line, pretty_print=True)
if line.get('errorDetail'):
raise Exception(line['errorDetail']['message'])
status = line.get('status')
except Exception as exc:
if re.search('unauthorized', str(exc)):
if re.search('authentication required', str(exc)):
self.fail("Error pushing image %s/%s:%s - %s. Try logging into %s first." %
(registry, repo_name, tag, str(exc), registry))
else:
self.fail("Error pushing image %s/%s:%s - %s. Does the repository exist?" %
(registry, repo_name, tag, str(exc)))
self.fail("Error pushing image %s: %s" % (repository, str(exc)))
self.results['image'] = self.client.find_image(name=repository, tag=tag)
if not self.results['image']:
self.results['image'] = dict()
        self.results['image']['push_status'] = status

repo: nrser/qb | features: [1, 1, 1, 8, 1448301308]
def build_image(self):
'''
Build an image
:return: image dict
'''
params = dict(
path=self.path,
tag=self.name,
rm=self.rm,
nocache=self.nocache,
stream=True,
timeout=self.http_timeout,
pull=self.pull,
forcerm=self.rm,
dockerfile=self.dockerfile,
decode=True
)
build_output = []
if self.tag:
params['tag'] = "%s:%s" % (self.name, self.tag)
if self.container_limits:
params['container_limits'] = self.container_limits
if self.buildargs:
for key, value in self.buildargs.items():
self.buildargs[key] = to_native(value)
params['buildargs'] = self.buildargs
for line in self.client.build(**params):
# line = json.loads(line)
self.log(line, pretty_print=True)
if "stream" in line:
build_output.append(line["stream"])
if line.get('error'):
if line.get('errorDetail'):
errorDetail = line.get('errorDetail')
self.fail(
"Error building %s - code: %s, message: %s, logs: %s" % (
self.name,
errorDetail.get('code'),
errorDetail.get('message'),
build_output))
else:
self.fail("Error building %s - message: %s, logs: %s" % (
self.name, line.get('error'), build_output))
        return self.client.find_image(name=self.name, tag=self.tag)

repo: nrser/qb | features: [1, 1, 1, 8, 1448301308]
def log(self, msg, pretty_print=False):
        return qb_log(msg)

repo: nrser/qb | features: [1, 1, 1, 8, 1448301308]
def warn( self, warning ):
        self.results['warnings'].append( str(warning) )

repo: nrser/qb | features: [1, 1, 1, 8, 1448301308]
def qb_debug(name, message, **payload):
if not qb.ipc.stdio.client.log.connected:
        return False

repo: nrser/qb | features: [1, 1, 1, 8, 1448301308]
def main():
argument_spec = dict(
archive_path=dict(type='path'),
container_limits=dict(type='dict'),
dockerfile=dict(type='str'),
force=dict(type='bool', default=False),
http_timeout=dict(type='int'),
load_path=dict(type='path'),
name=dict(type='str', required=True),
nocache=dict(type='bool', default=False),
path=dict(type='path', aliases=['build_path']),
pull=dict(type='bool', default=True),
push=dict(type='bool', default=False),
repository=dict(type='str'),
rm=dict(type='bool', default=True),
state=dict(type='str', choices=['absent', 'present', 'build'], default='present'),
tag=dict(type='str', default='latest'),
use_tls=dict(type='str', default='no', choices=['no', 'encrypt', 'verify']),
        buildargs=dict(type='dict', default=None),

repo: nrser/qb | features: [1, 1, 1, 8, 1448301308]
def setUp(self):
entry_Li = ComputedEntry("Li", -1.90753119)
with open(os.path.join(PymatgenTest.TEST_FILES_DIR, "LiTiO2_batt.json")) as f:
entries_LTO = json.load(f, cls=MontyDecoder)
self.ie_LTO = InsertionElectrode.from_entries(entries_LTO, entry_Li)
with open(os.path.join(PymatgenTest.TEST_FILES_DIR, "FeF3_batt.json")) as fid:
entries = json.load(fid, cls=MontyDecoder)
        self.ce_FF = ConversionElectrode.from_composition_and_entries(Composition("FeF3"), entries)

repo: materialsproject/pymatgen | features: [1063, 732, 1063, 235, 1319343039]
def testPlotly(self):
plotter = VoltageProfilePlotter(xaxis="frac_x")
plotter.add_electrode(self.ie_LTO, "LTO insertion")
plotter.add_electrode(self.ce_FF, "FeF3 conversion")
fig = plotter.get_plotly_figure()
self.assertEqual(fig.layout.xaxis.title.text, "Atomic Fraction of Li")
plotter = VoltageProfilePlotter(xaxis="x_form")
plotter.add_electrode(self.ce_FF, "FeF3 conversion")
fig = plotter.get_plotly_figure()
self.assertEqual(fig.layout.xaxis.title.text, "x in Li<sub>x</sub>FeF3")
plotter.add_electrode(self.ie_LTO, "LTO insertion")
fig = plotter.get_plotly_figure()
self.assertEqual(fig.layout.xaxis.title.text, "x Workion Ion per Host F.U.") | materialsproject/pymatgen | [
1063,
732,
1063,
235,
1319343039
] |
def __init__(self, songs_data=None):
if songs_data is None:
self.songs_data = []
else:
            self.songs_data = songs_data

repo: Guitar-Machine-Learning-Group/guitar-transcriber | features: [1, 2, 1, 1, 1476390274]
def songs(self):
for s in self.songs_data:
            yield s

repo: Guitar-Machine-Learning-Group/guitar-transcriber | features: [1, 2, 1, 1, 1476390274]
def num_features(self):
if len(self.songs_data):
            return self.songs_data[0].X.shape[1]

repo: Guitar-Machine-Learning-Group/guitar-transcriber | features: [1, 2, 1, 1, 1476390274]
def size(self):
        return len(self.songs_data)

repo: Guitar-Machine-Learning-Group/guitar-transcriber | features: [1, 2, 1, 1, 1476390274]
def __init__(self, audio_path, label_path):
if not os.path.isfile(audio_path):
raise IOError("Audio file at %s does not exist" % audio_path)
if label_path and not os.path.isfile(label_path):
raise IOError("MIDI file at %s does not exist" % label_path)
self.audio_path = audio_path
        self.label_path = label_path

repo: Guitar-Machine-Learning-Group/guitar-transcriber | features: [1, 2, 1, 1, 1476390274]
def x(self):
        return self.__x

repo: Guitar-Machine-Learning-Group/guitar-transcriber | features: [1, 2, 1, 1, 1476390274]
def x(self, x):
        self.__x = x

repo: Guitar-Machine-Learning-Group/guitar-transcriber | features: [1, 2, 1, 1, 1476390274]
def X(self):
        return self.__X

repo: Guitar-Machine-Learning-Group/guitar-transcriber | features: [1, 2, 1, 1, 1476390274]
def X(self, X):
if hasattr(self, 'Y') and self.Y.shape[0] != X.shape[0]:
raise ValueError("Number of feature frames must equal number of label frames")
        self.__X = X

repo: Guitar-Machine-Learning-Group/guitar-transcriber | features: [1, 2, 1, 1, 1476390274]
def Y(self):
        return self.__Y

repo: Guitar-Machine-Learning-Group/guitar-transcriber | features: [1, 2, 1, 1, 1476390274]
def Y(self, Y):
if hasattr(self, 'X') and self.X.shape[0] != Y.shape[0]:
raise ValueError("Number of label frames must equal number of feature frames")
        self.__Y = Y

repo: Guitar-Machine-Learning-Group/guitar-transcriber | features: [1, 2, 1, 1, 1476390274]
def num_pitches(self):
if hasattr(self, 'Y'):
return np.shape(self.Y)[1]
        return 0

repo: Guitar-Machine-Learning-Group/guitar-transcriber | features: [1, 2, 1, 1, 1476390274]
def num_features(self):
if hasattr(self, 'X'):
            return self.X.shape[1]

repo: Guitar-Machine-Learning-Group/guitar-transcriber | features: [1, 2, 1, 1, 1476390274]
def num_frames(self):
if hasattr(self, 'X'):
            return self.X.shape[0]

repo: Guitar-Machine-Learning-Group/guitar-transcriber | features: [1, 2, 1, 1, 1476390274]
def setHome(home):
global __home
    __home = home

repo: ManiacalLabs/PixelWeb | features: [18, 6, 18, 10, 1445800144]
def initConfig():
try:
if not os.path.exists(__home):
print "Creating {}".format(__home)
os.makedirs(__home)
except:
print "Failed to initialize PixelWeb config!" | ManiacalLabs/PixelWeb | [
18,
6,
18,
10,
1445800144
] |
def writeConfig(file, data, key = None, path=None):
if not path:
path = __home
base = data
if key:
base = readConfig(file, path=path)
base[key] = data
with open(path + "/" + file + ".json", "w") as fp:
        json.dump(base, fp, indent=4, sort_keys=True)

repo: ManiacalLabs/PixelWeb | features: [18, 6, 18, 10, 1445800144]
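When a key is given, writeConfig merges into the existing file rather than overwriting it; a sketch with hypothetical values (it assumes readConfig and an initialized __home, as in the surrounding module):

writeConfig("config", {"port": 8080}, key="server")
# <__home>/config.json now holds {"server": {"port": 8080}} plus any existing keys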
def readServerConfig():
data = readConfig("config", path=__home)
base = paramsToDict(BASE_SERVER_CONFIG.params)
if len(data.keys()) == 0:
data = paramsToDict(BASE_SERVER_CONFIG.params)
elif len(data.keys()) != len(base.keys()):
data.upgrade(base)
    return d(data)

repo: ManiacalLabs/PixelWeb | features: [18, 6, 18, 10, 1445800144]
def test_segment_pools():
### Test Segment ID Pool Operations
# Get all configured Segment Pools
get_segment_resp = client_session.read('vdnSegmentPools')
client_session.view_response(get_segment_resp)
# Add a Segment Pool
segments_create_body = client_session.extract_resource_body_example('vdnSegmentPools', 'create')
client_session.view_body_dict(segments_create_body)
segments_create_body['segmentRange']['begin'] = '11002'
segments_create_body['segmentRange']['end'] = '11003'
segments_create_body['segmentRange']['name'] = 'legacy'
create_response = client_session.create('vdnSegmentPools', request_body_dict=segments_create_body)
client_session.view_response(create_response)
time.sleep(5)
# Update the new Segment Pool:
update_segment_body = client_session.extract_resource_body_example('vdnSegmentPool', 'update')
update_segment_body['segmentRange']['name'] = 'PythonTest'
update_segment_body['segmentRange']['end'] = '11005'
client_session.update('vdnSegmentPool', uri_parameters={'segmentPoolId': create_response['objectId']},
request_body_dict=update_segment_body)
time.sleep(5)
# Display a specific Segment pool (the new one)
specific_segement_resp = client_session.read('vdnSegmentPool', uri_parameters={'segmentPoolId':
create_response['objectId']})
client_session.view_response(specific_segement_resp)
time.sleep(5)
# Delete new Segment Pool
    client_session.delete('vdnSegmentPool', uri_parameters={'segmentPoolId': create_response['objectId']})

repo: vmware/nsxramlclient | features: [41, 33, 41, 9, 1440806810]
def has_ext_modules(self):
    return True

repo: ryfeus/lambda-packs | features: [1086, 234, 1086, 13, 1476901359]
def finalize_options(self):
ret = InstallCommandBase.finalize_options(self)
self.install_headers = os.path.join(self.install_purelib,
'tensorflow', 'include')
    return ret

repo: ryfeus/lambda-packs | features: [1086, 234, 1086, 13, 1476901359]
def initialize_options(self):
self.install_dir = None
self.force = 0
    self.outfiles = []

repo: ryfeus/lambda-packs | features: [1086, 234, 1086, 13, 1476901359]
def mkdir_and_copy_file(self, header):
install_dir = os.path.join(self.install_dir, os.path.dirname(header))
# Get rid of some extra intervening directories so we can have fewer
# directories for -I
install_dir = re.sub('/google/protobuf_archive/src', '', install_dir)
# Copy eigen code into tensorflow/include.
# A symlink would do, but the wheel file that gets created ignores
# symlink within the directory hierarchy.
# NOTE(keveman): Figure out how to customize bdist_wheel package so
# we can do the symlink.
if 'external/eigen_archive/' in install_dir:
extra_dir = install_dir.replace('external/eigen_archive', '')
if not os.path.exists(extra_dir):
self.mkpath(extra_dir)
self.copy_file(header, extra_dir)
if not os.path.exists(install_dir):
self.mkpath(install_dir)
    return self.copy_file(header, install_dir)

repo: ryfeus/lambda-packs | features: [1086, 234, 1086, 13, 1476901359]