# ---- source: TheAlgorithms/Python ----

def __init__(self) -> None:
# map from node name to the node object
    self.map: dict[T, DisjointSetTreeNode[T]] = {}

def find_set(self, data: T) -> DisjointSetTreeNode[T]:
    # find the set that data belongs to (with path compression)
elem_ref = self.map[data]
if elem_ref != elem_ref.parent:
elem_ref.parent = self.find_set(elem_ref.parent.data)
    return elem_ref.parent

def union(self, data1: T, data2: T) -> None:
# merge 2 disjoint sets
    self.link(self.find_set(data1), self.find_set(data2))

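# A minimal, self-contained sketch of the same path-compression idea using a
# plain dict instead of the DisjointSetTreeNode class above (names here are
# hypothetical stand-ins, not part of the original module):
_parent = {}

def _find(x):
    _parent.setdefault(x, x)
    if _parent[x] != x:
        _parent[x] = _find(_parent[x])  # path compression: repoint at the root
    return _parent[x]

def _union(a, b):
    _parent[_find(a)] = _find(b)

_union(1, 2)
_union(2, 3)
assert _find(1) == _find(3)  # 1, 2 and 3 now share one representative
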
def __init__(self) -> None:
# connections: map from the node to the neighbouring nodes (with weights)
    self.connections: dict[T, dict[T, int]] = {}

def add_edge(self, node1: T, node2: T, weight: int) -> None:
# add an edge with the given weight
self.add_node(node1)
self.add_node(node2)
self.connections[node1][node2] = weight
    self.connections[node2][node1] = weight

# ---- source: Azure/azure-sdk-for-python ----
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
    self._config = config

def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_query_results_for_management_group.metadata['url'] # type: ignore
path_format_arguments = {
'managementGroupsNamespace': self._serialize.url("management_groups_namespace", management_groups_namespace, 'str'),
'managementGroupName': self._serialize.url("management_group_name", management_group_name, 'str'),
'policyTrackedResourcesResource': self._serialize.url("policy_tracked_resources_resource", policy_tracked_resources_resource, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if _top is not None:
query_parameters['$top'] = self._serialize.query("top", _top, 'int', minimum=0)
if _filter is not None:
query_parameters['$filter'] = self._serialize.query("filter", _filter, 'str')
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.post(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
    return request

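# Hedged sketch (not SDK code) of the paging loop a prepare_request like the
# one above plugs into: the first page is a POST to the templated URL, every
# later page is a GET against the service-provided next_link. send and
# extract_page are hypothetical callables standing in for the pipeline
# transport and the response deserializer.
def _iterate_pages(prepare_request, send, extract_page):
    next_link = None
    while True:
        response = send(prepare_request(next_link))
        items, next_link = extract_page(response)
        for item in items:
            yield item
        if not next_link:
            break
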
def list_query_results_for_subscription(
self,
query_options: Optional["_models.QueryOptions"] = None,
    **kwargs: Any

def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_query_results_for_subscription.metadata['url'] # type: ignore
path_format_arguments = {
'policyTrackedResourcesResource': self._serialize.url("policy_tracked_resources_resource", policy_tracked_resources_resource, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if _top is not None:
query_parameters['$top'] = self._serialize.query("top", _top, 'int', minimum=0)
if _filter is not None:
query_parameters['$filter'] = self._serialize.query("filter", _filter, 'str')
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.post(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
    return request

def list_query_results_for_resource_group(
self,
resource_group_name: str,
query_options: Optional["_models.QueryOptions"] = None,
    **kwargs: Any

def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_query_results_for_resource_group.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'policyTrackedResourcesResource': self._serialize.url("policy_tracked_resources_resource", policy_tracked_resources_resource, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if _top is not None:
query_parameters['$top'] = self._serialize.query("top", _top, 'int', minimum=0)
if _filter is not None:
query_parameters['$filter'] = self._serialize.query("filter", _filter, 'str')
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.post(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
    return request

def list_query_results_for_resource(
self,
resource_id: str,
query_options: Optional["_models.QueryOptions"] = None,
    **kwargs: Any

def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_query_results_for_resource.metadata['url'] # type: ignore
path_format_arguments = {
'resourceId': self._serialize.url("resource_id", resource_id, 'str', skip_quote=True),
'policyTrackedResourcesResource': self._serialize.url("policy_tracked_resources_resource", policy_tracked_resources_resource, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if _top is not None:
query_parameters['$top'] = self._serialize.query("top", _top, 'int', minimum=0)
if _filter is not None:
query_parameters['$filter'] = self._serialize.query("filter", _filter, 'str')
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.post(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
    return request

# ---- source: sdpython/pyquickhelper ----
def convert_pandoc_rst(source, from_format, to_format, extra_args=None):
"""
    Overrides `convert_pandoc
<https://github.com/jupyter/nbconvert/blob/master/nbconvert/filters/pandoc.py>`_.
@param source string to convert
@param from_format from format
@param to_format to format
@param extra_args extra arguments
@return results
"""
    return convert_pandoc(source, from_format, to_format, extra_args=extra_args)

def __init__(self, *args, **kwargs):
"""
    Overrides the extra loaders to get the right template.
"""
filename = os.path.join(os.path.dirname(__file__), 'rst_modified.tpl')
with open(filename, 'r', encoding='utf-8') as f:
content = f.read()
filename = os.path.join(os.path.dirname(__file__), 'rst.tpl')
with open(filename, 'r', encoding='utf-8') as f:
content2 = f.read()
dl = DictLoader({'rst_modified.tpl': content, 'rst.tpl': content2})
kwargs['extra_loaders'] = [dl]
    RSTExporter.__init__(self, *args, **kwargs)

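# Hedged jinja2-only sketch of the mechanism extra_loaders builds on: a
# DictLoader serves in-memory templates by name, so 'rst_modified.tpl' can
# shadow a template that would otherwise be looked up on disk. The template
# string here is a made-up example.
from jinja2 import DictLoader, Environment

_env = Environment(loader=DictLoader({'rst_modified.tpl': 'Hello {{ name }}'}))
print(_env.get_template('rst_modified.tpl').render(name='rst'))  # Hello rst
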
def _template_file_default(self):
    return "rst_modified.tpl"

def _file_extension_default(self):
    return '.rst'

def _template_name_default(self):
    return 'rst'

# ---- source: iandees/all-the-places ----
def parse(self, response):
shops = response.xpath('//div[@id="js_subnav"]//li[@class="level-1"]/a/@href')
for shop in shops:
yield scrapy.Request(
response.urljoin(shop.extract()),
callback=self.parse_shop
        )

# ---- source: xeroc/uptick ----
def htlc():
    pass

def create(ctx, to, amount, symbol, type, hash, expiration, length, account):
""" Create an HTLC contract from a hash and lock-time
"""
ctx.blockchain.blocking = True
tx = ctx.blockchain.htlc_create(
Amount(amount, symbol),
to,
hash_type=type,
hash_hex=hash,
expiration=expiration,
account=account,
preimage_length=length
)
tx.pop("trx", None)
print_tx(tx)
results = tx.get("operation_results", {})
if results:
htlc_id = results[0][1]
        print("Your htlc_id is: {}".format(htlc_id))

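# Hedged sketch of producing the hash_hex value that create() expects,
# assuming the common sha256 hash type (create_from_secret below does the
# equivalent internally from a preimage). The preimage is a made-up example.
import hashlib

preimage = b"my-secret"
print(hashlib.sha256(preimage).hexdigest())  # lock value for the HTLC
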
def create_from_secret(ctx, to, amount, symbol, type, secret, expiration,
length, account):
"""Create an HTLC contract from a secret preimage
If you are the party choosing the preimage, this version of
htlc_create will compute the hash for you from the supplied
preimage, and create the HTLC with the resulting hash.
"""
if length != 0 and length != len(secret):
raise ValueError("Length must be zero or agree with actual preimage length")
ctx.blockchain.blocking = True
tx = ctx.blockchain.htlc_create(
Amount(amount, symbol),
to,
preimage=secret,
preimage_length=length,
hash_type=type,
expiration=expiration,
account=account,
)
tx.pop("trx", None)
print_tx(tx)
results = tx.get("operation_results", {})
if results:
htlc_id = results[0][1]
        print("Your htlc_id is: {}".format(htlc_id))

# ---- source: miguelgrinberg/slam ----
def main(config_file):
    return {'config_file': config_file}

def on_error(e): # pragma: no cover
"""Error handler
RuntimeError or ValueError exceptions raised by commands will be handled
by this function.
"""
    exname = {'RuntimeError': 'Runtime error', 'ValueError': 'Value error'}
sys.stderr.write('{}: {}\n'.format(exname[e.__class__.__name__], str(e)))
sys.stderr.write('See file slam_error.log for additional details.\n')
    sys.exit(1)

def on_unexpected_error(e): # pragma: no cover
"""Catch-all error handler
Unexpected errors will be handled by this function.
"""
sys.stderr.write('Unexpected error: {} ({})\n'.format(
str(e), e.__class__.__name__))
sys.stderr.write('See file slam_error.log for additional details.\n')
    sys.exit(1)

def init(name, description, bucket, timeout, memory, stages, requirements,
function, runtime, config_file, **kwargs):
"""Generate a configuration file."""
if os.path.exists(config_file):
raise RuntimeError('Please delete the old version {} if you want to '
'reconfigure your project.'.format(config_file))
module, app = function.split(':')
if not name:
name = module.replace('_', '-')
if not re.match('^[a-zA-Z][-a-zA-Z0-9]*$', name):
raise ValueError('The name {} is invalid, only letters, numbers and '
'dashes are allowed.'.format(name))
if not bucket:
random_suffix = ''.join(
random.choice(string.ascii_lowercase + string.digits)
for n in range(8))
bucket = '{}-{}'.format(name.lower(), random_suffix)
stages = [s.strip() for s in stages.split(',')]
if runtime is None:
if sys.version_info[0] == 2: # pragma: no cover
runtime = 'python2.7'
else:
runtime = 'python3.6'
# generate slam.yaml
template_file = os.path.join(os.path.dirname(__file__),
'templates/slam.yaml')
with open(template_file) as f:
template = f.read()
template = render_template(template, name=name, description=description,
module=module, app=app, bucket=bucket,
timeout=timeout, memory=memory,
requirements=requirements, stages=stages,
devstage=stages[0], runtime=runtime)
with open(config_file, 'wt') as f:
f.write(template)
# plugins
config = _load_config(config_file)
for name, plugin in plugins.items():
# write plugin documentation as a comment in config file
with open(config_file, 'at') as f:
f.write('\n\n# ' + (plugin.__doc__ or name).replace(
'\n', '\n# ') + '\n')
if hasattr(plugin, 'init'):
arguments = {k: v for k, v in kwargs.items()
if k in getattr(plugin.init, '_argnames', [])}
plugin_config = plugin.init.func(config=config, **arguments)
if plugin_config:
with open(config_file, 'at') as f:
yaml.dump({name: plugin_config}, f,
default_flow_style=False)
print('The configuration file for your project has been generated. '
          'Remember to add {} to source control.'.format(config_file))

def _run_lambda_function(event, context, app, config): # pragma: no cover
"""Run the function. This is the default when no plugins (such as wsgi)
define an alternative run function."""
args = event.get('args', [])
kwargs = event.get('kwargs', {})
# first attempt to invoke the function passing the lambda event and context
try:
ret = app(*args, event=event, context=context, **kwargs)
except TypeError:
# try again without passing the event and context
ret = app(*args, **kwargs)
    return ret

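# Hedged example of the event shape _run_lambda_function expects: positional
# arguments under "args", keyword arguments under "kwargs". _demo_app is a
# hypothetical handler whose **extra absorbs the event and context kwargs.
def _demo_app(x, y, scale=1, **extra):
    return (x + y) * scale

_event = {'args': [2, 3], 'kwargs': {'scale': 10}}
print(_run_lambda_function(_event, None, _demo_app, config={}))  # 50
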
def _build(config, rebuild_deps=False):
package = datetime.utcnow().strftime("lambda_package.%Y%m%d_%H%M%S.zip")
ignore = ['\\.slam\\/venv\\/.*$', '\\.pyc$']
if os.environ.get('VIRTUAL_ENV'):
# make sure the currently active virtualenv is not included in the pkg
venv = os.path.relpath(os.environ['VIRTUAL_ENV'], os.getcwd())
if not venv.startswith('.'):
ignore.append(venv.replace('/', '\\/') + '\\/.*$')
# create .slam directory if it doesn't exist yet
if not os.path.exists('.slam'):
os.mkdir('.slam')
_generate_lambda_handler(config)
# create or update virtualenv
if rebuild_deps:
if os.path.exists('.slam/venv'):
shutil.rmtree('.slam/venv')
if not os.path.exists('.slam/venv'):
_run_command('virtualenv .slam/venv')
_run_command('.slam/venv/bin/pip install -r ' + config['requirements'])
# build lambda package
build_package('.', config['requirements'], virtualenv='.slam/venv',
extra_files=['.slam/handler.py'], ignore=ignore,
zipfile_name=package)
# cleanup lambda uploader's temp directory
if os.path.exists('.lambda_uploader_temp'):
shutil.rmtree('.lambda_uploader_temp')
    return package

def _ensure_bucket_exists(s3, bucket, region): # pragma: no cover
try:
s3.head_bucket(Bucket=bucket)
except botocore.exceptions.ClientError:
if region != 'us-east-1':
s3.create_bucket(Bucket=bucket, CreateBucketConfiguration={
'LocationConstraint': region})
else:
            s3.create_bucket(Bucket=bucket)

def _print_status(config):
cfn = boto3.client('cloudformation')
lmb = boto3.client('lambda')
try:
stack = cfn.describe_stacks(StackName=config['name'])['Stacks'][0]
except botocore.exceptions.ClientError:
print('{} has not been deployed yet.'.format(config['name']))
else:
print('{} is deployed!'.format(config['name']))
print(' Function name: {}'.format(
_get_from_stack(stack, 'Output', 'FunctionArn').split(':')[-1]))
print(' S3 bucket: {}'.format(config['aws']['s3_bucket']))
print(' Stages:')
stages = list(config['stage_environments'].keys())
stages.sort()
plugin_status = {}
for name, plugin in plugins.items():
if name in config and hasattr(plugin, 'status'):
statuses = plugin.status(config, stack)
if statuses:
for s, status in statuses.items():
plugin_status.setdefault(s, []).append(status)
for s in stages:
fd = None
try:
fd = lmb.get_function(FunctionName=_get_from_stack(
stack, 'Output', 'FunctionArn'), Qualifier=s)
except botocore.exceptions.ClientError: # pragma: no cover
continue
v = ':{}'.format(fd['Configuration']['Version'])
if s in plugin_status and len(plugin_status[s]) > 0:
print(' {}{}: {}'.format(s, v,
' '.join(plugin_status[s])))
else:
            print('  {}{}'.format(s, v))

def build(rebuild_deps, config_file):
"""Build lambda package."""
config = _load_config(config_file)
print("Building lambda package...")
package = _build(config, rebuild_deps=rebuild_deps)
    print("{} has been built successfully.".format(package))

def deploy(stage, lambda_package, no_lambda, rebuild_deps, config_file):
"""Deploy the project to the development stage."""
config = _load_config(config_file)
if stage is None:
stage = config['devstage']
s3 = boto3.client('s3')
cfn = boto3.client('cloudformation')
region = _get_aws_region()
# obtain previous deployment if it exists
previous_deployment = None
try:
previous_deployment = cfn.describe_stacks(
StackName=config['name'])['Stacks'][0]
except botocore.exceptions.ClientError:
pass
# build lambda package if required
built_package = False
new_package = True
if lambda_package is None and not no_lambda:
print("Building lambda package...")
lambda_package = _build(config, rebuild_deps=rebuild_deps)
built_package = True
elif lambda_package is None:
# preserve package from previous deployment
new_package = False
lambda_package = _get_from_stack(previous_deployment, 'Parameter',
'LambdaS3Key')
# create S3 bucket if it doesn't exist yet
bucket = config['aws']['s3_bucket']
_ensure_bucket_exists(s3, bucket, region)
# upload lambda package to S3
if new_package:
s3.upload_file(lambda_package, bucket, lambda_package)
if built_package:
# we created the package, so now that is on S3 we can delete it
os.remove(lambda_package)
# prepare cloudformation template
template_body = get_cfn_template(config)
parameters = [
{'ParameterKey': 'LambdaS3Bucket', 'ParameterValue': bucket},
{'ParameterKey': 'LambdaS3Key', 'ParameterValue': lambda_package},
]
stages = list(config['stage_environments'].keys())
stages.sort()
for s in stages:
param = s.title() + 'Version'
if s != stage:
v = _get_from_stack(previous_deployment, 'Parameter', param) \
if previous_deployment else '$LATEST'
v = v or '$LATEST'
else:
v = '$LATEST'
parameters.append({'ParameterKey': param, 'ParameterValue': v})
# run the cloudformation template
if previous_deployment is None:
print('Deploying {}:{}...'.format(config['name'], stage))
cfn.create_stack(StackName=config['name'], TemplateBody=template_body,
Parameters=parameters,
Capabilities=['CAPABILITY_IAM'])
waiter = cfn.get_waiter('stack_create_complete')
else:
print('Updating {}:{}...'.format(config['name'], stage))
cfn.update_stack(StackName=config['name'], TemplateBody=template_body,
Parameters=parameters,
Capabilities=['CAPABILITY_IAM'])
waiter = cfn.get_waiter('stack_update_complete')
# wait for cloudformation to do its thing
try:
waiter.wait(StackName=config['name'])
except botocore.exceptions.ClientError:
# the update failed, so we remove the lambda package from S3
if built_package:
s3.delete_object(Bucket=bucket, Key=lambda_package)
raise
else:
if previous_deployment and new_package:
# the update succeeded, so it is safe to delete the lambda package
# used by the previous deployment
old_pkg = _get_from_stack(previous_deployment, 'Parameter',
'LambdaS3Key')
s3.delete_object(Bucket=bucket, Key=old_pkg)
# we are done, show status info and exit
    _print_status(config)

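# Hedged sketch of the per-stage version parameters deploy assembles: every
# stage keeps its previously pinned version except the one being deployed,
# which tracks $LATEST. A plain dict stands in for the CloudFormation stack
# that _get_from_stack reads in the real code.
def _stage_parameters(stages, target_stage, previous):
    params = {}
    for s in stages:
        key = s.title() + 'Version'
        params[key] = '$LATEST' if s == target_stage else previous.get(key, '$LATEST')
    return params

print(_stage_parameters(['dev', 'prod'], 'dev', {'ProdVersion': '7'}))
# {'DevVersion': '$LATEST', 'ProdVersion': '7'}
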
def publish(version, stage, config_file):
"""Publish a version of the project to a stage."""
config = _load_config(config_file)
cfn = boto3.client('cloudformation')
if version is None:
version = config['devstage']
elif version not in config['stage_environments'].keys() and \
not version.isdigit():
raise ValueError('Invalid version. Use a stage name or a numeric '
'version number.')
if version == stage:
raise ValueError('Cannot deploy a stage into itself.')
# obtain previous deployment
try:
previous_deployment = cfn.describe_stacks(
StackName=config['name'])['Stacks'][0]
except botocore.exceptions.ClientError:
raise RuntimeError('This project has not been deployed yet.')
# preserve package from previous deployment
bucket = _get_from_stack(previous_deployment, 'Parameter',
'LambdaS3Bucket')
lambda_package = _get_from_stack(previous_deployment, 'Parameter',
'LambdaS3Key')
# prepare cloudformation template
template_body = get_cfn_template(config)
parameters = [
{'ParameterKey': 'LambdaS3Bucket', 'ParameterValue': bucket},
{'ParameterKey': 'LambdaS3Key', 'ParameterValue': lambda_package},
]
stages = list(config['stage_environments'].keys())
stages.sort()
for s in stages:
param = s.title() + 'Version'
if s != stage:
v = _get_from_stack(previous_deployment, 'Parameter', param) \
if previous_deployment else '$LATEST'
v = v or '$LATEST'
else:
if version.isdigit():
# explicit version number
v = version
else:
# publish version from a stage
v = _get_from_stack(previous_deployment, 'Parameter',
version.title() + 'Version')
if v == '$LATEST':
# publish a new version from $LATEST
lmb = boto3.client('lambda')
v = lmb.publish_version(FunctionName=_get_from_stack(
previous_deployment, 'Output', 'FunctionArn'))[
'Version']
parameters.append({'ParameterKey': param, 'ParameterValue': v})
# run the cloudformation template
print('Publishing {}:{} to {}...'.format(config['name'], version, stage))
cfn.update_stack(StackName=config['name'], TemplateBody=template_body,
Parameters=parameters,
Capabilities=['CAPABILITY_IAM'])
waiter = cfn.get_waiter('stack_update_complete')
# wait for cloudformation to do its thing
    waiter.wait(StackName=config['name'])
# we are done, show status info and exit
    _print_status(config)

def invoke(stage, nowait, dry_run, config_file, args):
"""Invoke the lambda function."""
config = _load_config(config_file)
if stage is None:
stage = config['devstage']
cfn = boto3.client('cloudformation')
lmb = boto3.client('lambda')
try:
stack = cfn.describe_stacks(StackName=config['name'])['Stacks'][0]
except botocore.exceptions.ClientError:
raise RuntimeError('This project has not been deployed yet.')
function = _get_from_stack(stack, 'Output', 'FunctionArn')
if dry_run:
invocation_type = 'DryRun'
elif nowait:
invocation_type = 'Event'
else:
invocation_type = 'RequestResponse'
# parse input arguments
data = {}
for arg in args:
s = arg.split('=', 1)
if len(s) != 2:
raise ValueError('Invalid argument ' + arg)
if s[0][-1] == ':':
# JSON argument
data[s[0][:-1]] = json.loads(s[1])
else:
# string argument
data[s[0]] = s[1]
rv = lmb.invoke(FunctionName=function, InvocationType=invocation_type,
Qualifier=stage,
Payload=json.dumps({'kwargs': data}, sort_keys=True))
    if rv['StatusCode'] not in (200, 202):
raise RuntimeError('Unexpected error. Status code = {}.'.format(
rv['StatusCode']))
if invocation_type == 'RequestResponse':
payload = json.loads(rv['Payload'].read().decode('utf-8'))
if 'FunctionError' in rv:
if 'stackTrace' in payload:
print('Traceback (most recent call last):')
for frame in payload['stackTrace']:
print(' File "{}", line {}, in {}'.format(
frame[0], frame[1], frame[2]))
print(' ' + frame[3])
print('{}: {}'.format(payload['errorType'],
payload['errorMessage']))
else:
raise RuntimeError('Unknown error')
else:
            print(str(payload))

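# Hedged sketch of the argument grammar invoke parses: name=value is passed
# through as a string, while a trailing ':' on the name (name:=value) marks
# the value as JSON.
import json

def _parse_invoke_args(args):
    data = {}
    for arg in args:
        key, _, value = arg.partition('=')
        if key.endswith(':'):
            data[key[:-1]] = json.loads(value)  # JSON argument
        else:
            data[key] = value  # string argument
    return data

print(_parse_invoke_args(['name=world', 'count:=3']))
# {'name': 'world', 'count': 3}
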
def delete(no_logs, config_file):
"""Delete the project."""
config = _load_config(config_file)
s3 = boto3.client('s3')
cfn = boto3.client('cloudformation')
logs = boto3.client('logs')
try:
stack = cfn.describe_stacks(StackName=config['name'])['Stacks'][0]
except botocore.exceptions.ClientError:
raise RuntimeError('This project has not been deployed yet.')
bucket = _get_from_stack(stack, 'Parameter', 'LambdaS3Bucket')
lambda_package = _get_from_stack(stack, 'Parameter', 'LambdaS3Key')
function = _get_from_stack(stack, 'Output', 'FunctionArn').split(':')[-1]
api_id = _get_from_stack(stack, 'Output', 'ApiId')
if api_id:
log_groups = ['API-Gateway-Execution-Logs_' + api_id + '/' + stage
for stage in config['stage_environments'].keys()]
else:
log_groups = []
log_groups.append('/aws/lambda/' + function)
print('Deleting {}...'.format(config['name']))
cfn.delete_stack(StackName=config['name'])
waiter = cfn.get_waiter('stack_delete_complete')
waiter.wait(StackName=config['name'])
if not no_logs:
print('Deleting logs...')
for log_group in log_groups:
try:
logs.delete_log_group(logGroupName=log_group)
except botocore.exceptions.ClientError:
print(' Log group {} could not be deleted.'.format(log_group))
print('Deleting files...')
try:
s3.delete_object(Bucket=bucket, Key=lambda_package)
s3.delete_bucket(Bucket=bucket)
except botocore.exceptions.ClientError:
        print('  S3 bucket {} could not be deleted.'.format(bucket))

def status(config_file):
"""Show deployment status for the project."""
config = _load_config(config_file)
    _print_status(config)

def logs(stage, period, tail, config_file):
"""Dump logs to the console."""
config = _load_config(config_file)
if stage is None:
stage = config['devstage']
cfn = boto3.client('cloudformation')
try:
stack = cfn.describe_stacks(StackName=config['name'])['Stacks'][0]
except botocore.exceptions.ClientError:
print('{} has not been deployed yet.'.format(config['name']))
return
function = _get_from_stack(stack, 'Output', 'FunctionArn').split(':')[-1]
version = _get_from_stack(stack, 'Parameter', stage.title() + 'Version')
api_id = _get_from_stack(stack, 'Output', 'ApiId')
try:
start = float(period[:-1])
except ValueError:
raise ValueError('Invalid period ' + period)
if period[-1] == 's':
start = time.time() - start
elif period[-1] == 'm':
start = time.time() - start * 60
elif period[-1] == 'h':
start = time.time() - start * 60 * 60
elif period[-1] == 'd':
start = time.time() - start * 60 * 60 * 24
elif period[-1] == 'w':
start = time.time() - start * 60 * 60 * 24 * 7
else:
raise ValueError('Invalid period ' + period)
start = int(start * 1000)
logs = boto3.client('logs')
lambda_log_group = '/aws/lambda/' + function
log_groups = [lambda_log_group]
if api_id:
log_groups.append('API-Gateway-Execution-Logs_' + api_id + '/' + stage)
log_version = '[' + version + ']'
log_start = {g: start for g in log_groups}
while True:
kwargs = {}
events = []
for log_group in log_groups:
while True:
try:
filtered_logs = logs.filter_log_events(
logGroupName=log_group,
startTime=log_start[log_group],
interleaved=True, **kwargs)
except botocore.exceptions.ClientError:
# the log group does not exist yet
filtered_logs = {'events': []}
if log_group == lambda_log_group:
events += [ev for ev in filtered_logs['events']
if log_version in ev['logStreamName']]
else:
events += filtered_logs['events']
if len(filtered_logs['events']):
log_start[log_group] = \
filtered_logs['events'][-1]['timestamp'] + 1
if 'nextToken' not in filtered_logs:
break
kwargs['nextToken'] = filtered_logs['nextToken']
events.sort(key=lambda ev: ev['timestamp'])
for ev in events:
tm = datetime.fromtimestamp(ev['timestamp'] / 1000)
print(tm.strftime('%b %d %X ') + ev['message'].strip())
if not tail:
break
        time.sleep(5)

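# Hedged sketch of the period grammar parsed above: a float followed by one
# of s/m/h/d/w, converted to a millisecond start timestamp.
_UNIT_SECONDS = {'s': 1, 'm': 60, 'h': 3600, 'd': 86400, 'w': 604800}

def _period_to_start_ms(period, now):
    value, unit = float(period[:-1]), period[-1]
    if unit not in _UNIT_SECONDS:
        raise ValueError('Invalid period ' + period)
    return int((now - value * _UNIT_SECONDS[unit]) * 1000)

print(_period_to_start_ms('15m', 1700000000.0))  # 1699999100000
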
def template(config_file):
"""Print the default Cloudformation deployment template."""
config = _load_config(config_file)
    print(get_cfn_template(config, pretty=True))

# ---- source: ravenac95/virtstrap ----
def get_logging_level(level):
logging_level = None
if isinstance(level, (str, unicode)):
level = level.upper()
try:
            logging_level = getattr(logging, level)
except AttributeError:
raise AttributeError('Tried to grab logging level "%s"'
' but it does not exist' % level)
elif isinstance(level, int):
# Do nothing
logging_level = level
else:
        raise TypeError('Invalid logging level. Must be a string or an int: %s'
                        % str(level))
    return logging_level

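# Hedged usage sketch (this module targets Python 2, where `unicode` is
# defined): level names in any case and raw ints both resolve to the stdlib
# logging constants.
import logging

assert get_logging_level('debug') == logging.DEBUG
assert get_logging_level(logging.WARNING) == logging.WARNING
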
def __init__(self):
self._handlers = []
    self._log_lines = []  # storage before any handlers appear

def debug(self, message, **kwargs):
    self.log('debug', message, **kwargs)

def info(self, message, **kwargs):
    self.log('info', message, **kwargs)

def critical(self, message, **kwargs):
    self.log('critical', message, **kwargs)

def debug_exception(self, message, **kwargs):
    """Logs the exception, but at the debug level."""
exception_str = self._get_exception_str()
    self.log('debug', '%s\n%s' % (message, exception_str))

def log(self, level, message, new_line=True):
if new_line:
message = "%s\n" % message
handlers = self._handlers
if not handlers:
self._log_lines.append((level, message))
else:
for handler in handlers:
            handler.log(level, message)

def __init__(self, level='debug'):
    self._level = get_logging_level(level)

def log(self, level, message):
current_level = get_logging_level(level)
if current_level >= self._level:
        self.emit(level, message)

def close(self):
    pass

def emit(self, level, message):
    sys.stdout.write(message)

def emit(self, level, output):
color = self.level_colors.get(level, "black")
colored_function = getattr(colored, color, lambda text: text)
colored_output = colored_function(output)
    puts(colored_output)

def __init__(self, filename):
    self._file = open(filename, 'a')

def close(self):
self._file.close()
    self._file = None

def __init__(self, outputter):
self._outputter = outputter
    logging.Handler.__init__(self)

def write(self, output, level):
    print(output)

def write(self, output, level):
color = self.level_colors.get(level, "black")
colored_function = getattr(colored, color, lambda text: text)
colored_output = colored_function(output)
    puts(colored_output)

# ---- source: sveetch/Sveetoy ----
def __init__(self, tree, view, with_root=False):
self.tree = json.loads(tree.to_json(with_data=True))
self.view = view
self.with_root = with_root # For public sitemap
# Public sitemap
self.sitemap = self.get_public_sitemap(self.tree)
    # Store a flat list of every resource to build as pages
    self.ressources = self.recursive_ressources([self.tree])

def recursive_ressources(self, children, pages=None):
    """
    Return a flat list of resources from the given children.
    """
    if pages is None:
        pages = []
for branch in children:
for leaf_name, leaf_content in branch.items():
datas = leaf_content['data']
pages.append(self.view(
title=leaf_name,
template_name=datas['link'],
destination=datas['link'],
sitemap=self.sitemap,
))
if datas['is_dir']:
                pages = self.recursive_ressources(leaf_content['children'], pages)
    return pages

# ---- source: makelove/OpenCV-Python-Tutorial ----
def draw_axis(img, charuco_corners, charuco_ids, board):
vecs = np.load("./calib.npz") # I already calibrated the camera
mtx, dist, _, _ = [vecs[i] for i in ('mtx', 'dist', 'rvecs', 'tvecs')]
ret, rvec, tvec = cv2.aruco.estimatePoseCharucoBoard(
charuco_corners, charuco_ids, board, mtx, dist)
    if ret is True:
        cv2.aruco.drawAxis(img, mtx, dist, rvec, tvec, 0.1)

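# Hedged sketch of producing the calib.npz file draw_axis loads, assuming
# mtx/dist/rvecs/tvecs come out of cv2.calibrateCamera; placeholder values
# are used here so the snippet runs standalone.
import numpy as np

_mtx = np.eye(3)          # placeholder camera matrix
_dist = np.zeros(5)       # placeholder distortion coefficients
_rvecs, _tvecs = [], []   # per-view extrinsics from calibration
np.savez("./calib.npz", mtx=_mtx, dist=_dist, rvecs=_rvecs, tvecs=_tvecs)
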
def make_grayscale(img):
ret = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
    return ret

# ---- source: HaroldMills/Vesper ----
def __init__(self, *args, **kwargs):
    ...

def annotate_clips(self, clips):
    ...

def _get_clip_lists(self, clips):
    ...

def _annotate_clips(self, clips, classifier):
    ...

def _get_new_classification(self, old_classification, auto_classification):
    ...

def _set_clip_score(self, clip, score):
    ...

def _show_classification_errors(self, triples):
    ...

def _show_classification_errors_aux(self, category, errors, num_clips):
    ...

def __init__(self, clip_type):
    ...

def _load_model(self):
path = classifier_utils.get_keras_model_file_path(self.clip_type)
logging.info(f'Loading classifier model from "{path}"...')
    return tf.keras.models.load_model(path)

def _load_settings(self):
path = classifier_utils.get_settings_file_path(self.clip_type)
logging.info(f'Loading classifier settings from "{path}"...')
text = path.read_text()
d = yaml_utils.load(text)
    return Settings.create_from_dict(d)

def classify_clips(self, clips):
    ...

def _slice_clip_waveforms(self, clips):
    ...

def _get_clip_samples(self, clip):
    ...

def _classify_clip(self, index, score, clips):
    ...

# ---- source: ActiveState/code ----
def __init__(self):
# Fix some key bindings
self.bind("<Control-Key-a>", self.select_all)
# We will need Ctrl-/ for the "stroke", but it cannot be unbound, so
# let's prevent it from being passed to the standard handler
self.bind("<Control-Key-/>", lambda event: "break")
# Diacritical bindings
for a, k in self.accents:
        # Little-known feature of Tk: it allows binding an event to
        # multiple keystrokes
self.bind("<Control-Key-%s><Key>" % k,
                  lambda event, a=a: self.insert_accented(event.char, a))

def __init__(self, master=None, **kwargs):
    Entry.__init__(self, master, **kwargs)
    Diacritical.__init__(self)

def __init__(self, master=None, **kwargs):
    ScrolledText.__init__(self, master, **kwargs)
    Diacritical.__init__(self)

def test():
frame = Frame()
frame.pack(fill=BOTH, expand=YES)
if os.name == "nt":
# Set default font for all widgets; use Windows typical default
frame.option_add("*font", "Tahoma 8")
# The editors
entry = DiacriticalEntry(frame)
entry.pack(fill=BOTH, expand=YES)
text = DiacriticalText(frame, width=76, height=25, wrap=WORD)
if os.name == "nt":
# But this looks better than the default set above
text.config(font="Arial 10")
text.pack(fill=BOTH, expand=YES)
text.focus()
frame.master.title("Diacritical Editor")
    frame.mainloop()

# ---- source: hirofumi0810/tensorflow_end2end_speech_recognition ----
def __init__(self,
num_units,
num_layers,
lstm_impl,
use_peephole,
parameter_init,
clip_activation,
num_proj,
concat=False,
name='pblstm_encoder'):
assert num_proj != 0
    assert num_units % 2 == 0, 'num_units should be an even number.'
self.num_units = num_units
self.num_proj = None
self.num_layers = num_layers
self.lstm_impl = lstm_impl
self.use_peephole = use_peephole
self.parameter_init = parameter_init
self.clip_activation = clip_activation
    self.name = name

# ---- source: frastlin/PyAudioGame ----
def __init__(self, voice='Alex', rate='300'):
self.voice = voice
self.rate = rate
    super(AppleSay, self).__init__()

def speak(self, text, interrupt=0):
if interrupt:
self.silence()
    os.system('say -v %s -r %s "%s" &' % (self.voice, self.rate, text))

# ---- source: cmd-ntrf/jupyter-lmod ----
def _jupyter_server_extension_paths():
    return [{"module": "jupyterlmod"}]

def _jupyter_nbextension_paths():
return [
dict(
section="tree", src="static", dest="jupyterlmod", require="jupyterlmod/main"
)
    ]

# ---- source: Azure/azure-sdk-for-python ----
def __init__(self, **kwargs):
    self.__dict__.update(kwargs)

def __eq__(self, other):
    return self.__dict__ == other.__dict__

def client():
    # The poller itself doesn't use it, so it doesn't need to be functional
    return AsyncPipelineClient("https://baseurl")

def async_pipeline_client_builder():
    """Build a client that uses the "send" callback as the final transport layer.

    "send" receives "request" and kwargs, like any transport layer.
    """
def create_client(send_cb):
class TestHttpTransport(AsyncHttpTransport):
async def open(self): pass
async def close(self): pass
async def __aexit__(self, *args, **kwargs): pass
async def send(self, request, **kwargs):
return await send_cb(request, **kwargs)
return AsyncPipelineClient(
'http://example.org/',
pipeline=AsyncPipeline(
transport=TestHttpTransport()
)
)
    return create_client

def deserialization_cb():
def cb(pipeline_response):
return json.loads(pipeline_response.http_response.text())
    return cb

def polling_response():
polling = AsyncLROBasePolling()
headers = {}
response = Response()
response.headers = headers
response.status_code = 200
polling._pipeline_response = PipelineResponse(
None,
AsyncioRequestsTransportResponse(
None,
response,
),
PipelineContext(None)
)
polling._initial_response = polling._pipeline_response
    return polling, headers

def mock_send(http_request, http_response, method, status, headers=None, body=RESPONSE_BODY):
if headers is None:
headers = {}
response = Response()
response._content_consumed = True
response._content = json.dumps(body).encode('ascii') if body is not None else None
response.request = Request()
response.request.method = method
response.request.url = RESOURCE_URL
response.request.headers = {
'x-ms-client-request-id': '67f4dd4e-6262-45e1-8bed-5c45cf23b6d9'
}
response.status_code = status
response.headers = headers
response.headers.update({"content-type": "application/json; charset=utf8"})
response.reason = "OK"
if is_rest(http_request):
request = http_request(
response.request.method,
response.request.url,
headers=response.request.headers,
content=body,
)
else:
request = CLIENT._request(
response.request.method,
response.request.url,
None, # params
response.request.headers,
body,
None, # form_content
None # stream_content
)
response = create_transport_response(http_response, request, response)
if is_rest(http_response):
response.body()
return PipelineResponse(
request,
response,
None # context
    )

def mock_update(http_request, http_response, url, headers=None):
response = Response()
response._content_consumed = True
response.request = mock.create_autospec(Request)
response.request.method = 'GET'
response.headers = headers or {}
response.headers.update({"content-type": "application/json; charset=utf8"})
response.reason = "OK"
if url == ASYNC_URL:
response.request.url = url
response.status_code = POLLING_STATUS
response._content = ASYNC_BODY.encode('ascii')
response.randomFieldFromPollAsyncOpHeader = None
elif url == LOCATION_URL:
response.request.url = url
response.status_code = POLLING_STATUS
response._content = LOCATION_BODY.encode('ascii')
response.randomFieldFromPollLocationHeader = None
elif url == ERROR:
raise BadEndpointError("boom")
elif url == RESOURCE_URL:
response.request.url = url
response.status_code = POLLING_STATUS
response._content = RESOURCE_BODY.encode('ascii')
else:
raise Exception('URL does not match')
request = http_request(
response.request.method,
response.request.url,
)
response = create_transport_response(http_response, request, response)
if is_rest(http_response):
response.body()
return PipelineResponse(
request,
response,
None # context
    )

def mock_outputs(pipeline_response):
response = pipeline_response.http_response
try:
body = json.loads(response.text())
except ValueError:
raise DecodeError("Impossible to deserialize")
body = {TestBasePolling.convert.sub(r'\1_\2', k).lower(): v
for k, v in body.items()}
properties = body.setdefault('properties', {})
if 'name' in body:
properties['name'] = body['name']
if properties:
properties = {TestBasePolling.convert.sub(r'\1_\2', k).lower(): v
for k, v in properties.items()}
del body['properties']
body.update(properties)
        resource = SimpleResource(**body)
    else:
        raise DecodeError("Impossible to deserialize")
    return resource

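# Hedged sketch of the camelCase -> snake_case regex that
# TestBasePolling.convert presumably holds (an assumption; the pattern itself
# is not shown in this extract):
import re

_convert = re.compile(r'([a-z0-9])([A-Z])')
print(_convert.sub(r'\1_\2', 'provisioningState').lower())  # provisioning_state
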
def mock_deserialization_no_body(pipeline_response):
"""Use this mock when you don't expect a return (last body irrelevant)
"""
    return None

def no_update_allowed(url, headers=None):
    raise ValueError("Should not try to update")

# ---- source: myfavouritekk/TPN ----
def parse_args():
"""
Parse input arguments
"""
parser = argparse.ArgumentParser(description='Test a Fast R-CNN network')
parser.add_argument('vid_file')
parser.add_argument('box_file')
    parser.add_argument('annot_file', nargs='?', default=None,
                        help='Ground truth annotation file. [None]')
parser.add_argument('save_file', help='Save zip file')
parser.add_argument('--job', dest='job_id', help='Job slot, GPU ID + 1. [1]',
default=1, type=int)
parser.add_argument('--length', type=int, default=20,
help='Propagation length. [20]')
parser.add_argument('--window', type=int, default=5,
help='Prediction window. [5]')
parser.add_argument('--sample_rate', type=int, default=1,
help='Temporal subsampling rate. [1]')
parser.add_argument('--offset', type=int, default=0,
help='Offset of sampling. [0]')
parser.add_argument('--overlap', type=float, default=0.5,
help='GT overlap threshold for tracking. [0.5]')
parser.add_argument('--wait', dest='wait',
help='wait until net file exists',
default=True, type=bool)
parser.set_defaults(vis=False, zip=False, keep_feat=False)
if len(sys.argv) == 1:
parser.print_help()
sys.exit(1)
args = parser.parse_args()
    return args

# ---- source: znick/anytask ----
def set_user(self, user):
for field in self.filters:
self.filters[field].field.label = u'<strong>{0}</strong>'.format(self.filters[field].field.label)
groups = user.group_set.all()
courses = Course.objects.filter(groups__in=groups)
course_choices = set()
year_choices = set()
teacher_set = set()
status_set = set()
for course in courses:
course_choices.add((course.id, course.name))
year_choices.add((course.year.id, unicode(course.year)))
for teacher in course.get_teachers():
teacher_set.add(teacher)
for status in course.issue_status_system.statuses.all():
status_set.add(status)
    self.filters['is_active'].field.choices = ((u'', _(u'any')),
                                               (1, _(u'active')),
                                               (0, _(u'archive')))
self.filters['years'].field.choices = tuple(year_choices)
self.filters['courses'].field.choices = tuple(course_choices)
teacher_choices = [(teacher.id, teacher.get_full_name()) for teacher in teacher_set]
self.filters['responsible'].field.choices = tuple(teacher_choices)
lang = user.profile.language
status_choices = [(status.id, status.get_name(lang)) for status in status_set]
for status_id in sorted(IssueStatus.HIDDEN_STATUSES.values(), reverse=True):
status_field = IssueStatus.objects.get(pk=status_id)
status_choices.insert(0, (status_field.id, status_field.get_name(lang)))
    self.filters['status_field'].field.choices = tuple(status_choices)

# ---- source: MetaSUB/ModuleUltra ----
def __init__(self, **kwargs):
self.repo_name = kwargs['repo_name']
self.repo_path = kwargs['repo_path']
    self.pipelines = kwargs['pipelines']

def get_pipeline_list(self):
"""Return a list of (pipe_name, version)."""
    return [(pipe['name'], pipe['version']) for pipe in self.pipelines]

def get_pipeline_endpts(self, pipe_name):
"""Return a list of endpts or None."""
    return None

def __init__(self, repos, total_jobs=10, run_local=True, pipeline_configs={}):
self.repos = repos
self.total_jobs = int(total_jobs)
self.run_local = run_local
    self.pipeline_configs = pipeline_configs