Dataset columns:
  query      string    (length 9 to 9.05k characters)
  document   string    (length 10 to 222k characters)
  negatives  sequence  (19 to 20 items)
  metadata   dict
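Each record below pairs a natural-language query with its matching code (document) and a list of hard negatives; the per-record metadata declares a triplet objective over (query, document, negatives). A minimal sketch of how one record with this schema might be unpacked into training triplets is shown here; the dataset path "user/code-search-triplets" is a hypothetical placeholder, not the actual repository id.

from datasets import load_dataset

# Hypothetical repository id; substitute the real one when loading.
ds = load_dataset("user/code-search-triplets", split="train")

def iter_triplets(example):
    # Yield one (anchor, positive, negative) triplet per hard negative in the record.
    for neg in example["negatives"]:
        yield example["query"], example["document"], neg

triplets = list(iter_triplets(ds[0]))
print(len(triplets))  # 19 or 20, matching the negatives sequence length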
Sets zero mode parameters
def setZeroModeParameters(self, zmp):
    if not len(zmp) == len(self.bins):
        raise IndexError("Mismatch in number of t' bins")
    for i,pp in enumerate(zmp):
        self.bins[i].setZeroModeParameters(pp)
[ "def set_model_parameters_to_zero(self):\n\n p = self.get_model_parameters()\n if p is not None:\n for key in p:\n val = p[key]\n if torch.is_tensor(val):\n val.zero_()\n elif type(val) == torch.nn.parameter.Parameter or type(val)==torch.Tensor:\n val.data.zero_()", "def setZero(self):\n self.image.setZero()", "def set_zero(self):\n self._zero = self._motor.pos[self._index] if not self._reverse else self.reverse_pos(self._motor.pos[self._index])\n self._vel_sp = 0\n self._pos_sp = None\n self._on_sp = True\n self._log.info(\"Zero angle: {:.2f}\".format(self._zero))", "def zero(self):\n self.set(0.0)", "def clear_zero(self):\n self._zero = None\n self._vel_sp = 0\n self._pos_sp = None\n self._on_sp = False\n self._log.info(\"Zero angle cleared\")", "def restoreZeroCalibration(self):\n return self._setAttr(\"calibrationParam\", \"0\")", "def setZeroConfig(self, sleep_time=-1):\r\n # there is a glitch with the Dobot where q1 must be set to 1 before\r\n # it can be set to 0, Don't question it\r\n self._dobot.setJointPositions(-89,0,0,0,0)\r\n sleep(1)\r\n self._dobot.setJointPositions(-90,0,0,0,0)\r\n if sleep_time != -1:\r\n sleep(sleep_time)", "def set_mode(self, mode = \"CHP\"):\n return self.echo(\":INIT:\" + mode)", "def set_default_values(self):\n self.vmin.set(0)\n self.vmax.set(0)\n self.dq_show.set('184')\n self.segment.set('A')\n self.N_degraded.set(0)\n self.extract.set('None')\n self.draw.set('Modal Gain')\n self.extract_offset.set(0)\n self.cmap.set('gist_yarg')\n self.grid_limits.set(1)", "def set_zero(\n self,\n var_name,\n scope=fluid.global_scope(),\n place=fluid.CPUPlace(),\n param_type=\"int64\",\n ):\n param = scope.var(var_name).get_tensor()\n param_array = np.zeros(param._get_dims()).astype(param_type)\n param.set(param_array, place)", "def set_zero_point(self):\n self.current_position = 0.0\n self.goal_position = 0.0", "def set_mode(self, val):\n # self.property_set(register_name, val)\n self.property_set(\"mode\", Sample(0, value=val, unit=\"dF\"))\n \n try:\n self.serial_send(\"A=1,Z=1,M=\" + str(self.modes[val.title()]) + \"\\x0D\")\n except:\n print \"error setting thermostat\"", "def __zero_axis(self, axis):\r\n if axis in (\"x\", \"y\", \"z\"):\r\n channelno = self.axis_chan_mapping[axis]\r\n print(\"\\t- zeroing channel %d (%s axis) -->\" % (channelno, axis), end=\"\")\r\n channel = self.__get_chan(axis)\r\n channel.SetZero()\r\n print(\" done\")\r\n else:\r\n print(\"\\t- axis invalid)\")", "def set_mode(self, mode: OutputMode | None) -> None:\n if mode is None:\n lib.wlr_output_set_mode(self._ptr, ffi.NULL)\n else:\n lib.wlr_output_set_mode(self._ptr, mode._ptr)", "def set_zero(self):\n for y in range(self.length):\n for x in range(self.length):\n self.grid[x, y] = 0", "def reset(self):\n\n # Save all the settings, which were not given by above-lying logic\n tmp_interleave = self._dev.get_interleave()\n tmp_samp_rate = self._dev.get_samp_rate()\n tmp_anlg_level = self._dev.get_analog_level()\n tmp_digital_level = self._dev.get_digital_level()\n\n # Full hardware reset\n self._dev.reset()\n\n # Restore these settings\n self._dev.set_mode(mode_string='S')\n self._dev.set_interleave(state=tmp_interleave)\n self._dev.set_samp_rate(samp_rate=tmp_samp_rate)\n self._dev.set_analog_level(level_dict=tmp_anlg_level)\n self._dev.set_digital_level(level_dict=tmp_digital_level)\n\n return 0", "def zero(self):\n v = np.zeros(self.get_dimension())\n self.set_vector(v)", "def set_zero_it_cfg(self, auth_server):\n self._set_zero_it_cfg(auth_server)", "def setMode( self, aMode 
):\n if ( aMode != 0 ) and ( aMode != 1 ):\n raise VibroP_GraphCorrupted( \"ERROR: a wrong mode was set\" )\n else:\n self.__Mode = aMode" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Initialize every role in roles and add it to the database
def insert_roles():
    roles = {
        'Author': [Permission.WRITE_ARTICLES],
        'Admin': [Permission.WRITE_ARTICLES, Permission.MODIFY_ACCOUNTS,
                  Permission.SEND_INVITATIONS],
    }
    default_role = 'Author'
    for r in roles:
        role = Role.query.filter_by(name=r).first()
        if role is None:
            role = Role(name=r)
        role.reset_permissions()
        for perm in roles[r]:
            role.add_permission(perm)
        role.default = (role.name == default_role)
        db.session.add(role)
    db.session.commit()
[ "def setup_roles(self):\n\t\tif self.data.restricted_roles:\n\t\t\tuser = frappe.get_doc(\"User\", frappe.session.user)\n\t\t\tfor role_name in self.data.restricted_roles:\n\t\t\t\tuser.append(\"roles\", {\"role\": role_name})\n\t\t\t\tif not frappe.db.get_value(\"Role\", role_name):\n\t\t\t\t\tfrappe.get_doc(dict(doctype=\"Role\", role_name=role_name)).insert()\n\t\t\t\t\tcontinue\n\n\t\t\t\trole = frappe.get_doc(\"Role\", role_name)\n\t\t\t\trole.disabled = 0\n\t\t\t\trole.save()\n\t\t\tuser.save()", "def init_role(role): # -> None:\n ...", "def dbParse(self):\n self.roles = []\n for role in Role.query.all():\n self.roles.append(role)\n roleNameList = []\n for role in self.roles:\n roleNameList.append(role.name)\n if len(set(roleNameList)) != len(roleNameList):\n raise AssertionError('Roles with duplicate names exist in Role store.')\n self.roleNames = set(roleNameList)\n\n if self.rootRole not in self.roleNames:\n print(f'WARNING: given root Role <{self.rootRole}> not found in Role store. '\n f'It can be generated by running .init_permissions() or .create_root().', file=sys.stderr)\n\n self.dbPerms = Permission.query.all()\n\n self.users = []\n for user in User.query.all():\n self.users.append(user)\n dbPermNameList = []\n for perm in self.dbPerms:\n dbPermNameList.append(perm.name)\n if len(set(dbPermNameList)) != len(dbPermNameList):\n raise AssertionError('Permissions with duplicate values exist in Permission store.')\n self.dbPermNames = set(dbPermNameList)", "def add_roles(self, roles):\n for role in roles:\n self.add_role(role)", "def setRoles(self, roles):\n pass", "async def roles(self, ctx):\n pass", "def apply_roles(self):\n minion_sets = []\n role_sets = []\n for instance in self.instances:\n minion = instance.get('minion')\n roles = set(minion.roles or [])\n for role in instance.get('roles', []):\n roles.add(role)\n roles = list(roles)\n minion_sets.append([minion])\n role_sets.append(roles)\n self.client.set_roles(minion_sets, role_sets, timeout=30)", "def update_roles(db, entity, roles):\n for rolename in roles:\n grant_role(db, entity=entity, rolename=rolename)", "def reset_roles(self, new_roles):\n self.roles = new_roles", "def create_root(self):\n\n if self.rootRole not in self.roleNames:\n print(f'Specified root Role <{self.rootRole}> not found in Role store; generating new role.')\n rootRole = Role(name=self.rootRole, description=f\"{self.rootRole} role. Has all permissions. 
\"\n f\" Should be considered as root.\")\n else:\n rootRole = Role.query.filter_by(name=self.rootRole).first()\n\n if self.missingPerms:\n self.populate_from_app()\n\n perms = Permission.query.all()\n rolePerms = rootRole.allPermissionsRoles()[0]\n\n for perm in perms:\n if perm.name in rolePerms:\n continue\n rootRole.addPermission(perm)", "def seed(user_datastore, db):\n roleAdmin = user_datastore.create_role(\n name='admin',\n description='Manage other users on the system')\n roleStandard = user_datastore.create_role(\n name='standard',\n description='Manage the system')\n userAdmin = user_datastore.create_user(\n username='admin',\n first_name='admin',\n surname='admin',\n email='admin@aimlackies.com',\n password=hash_password('password'),\n confirmed_at=func.now()\n )\n userAdmin.roles.append(roleAdmin)\n db.session.commit()", "def initialize_tables(self):\n self.initialize_users_table()\n self.initialize_sessions_table()\n self.initialize_queries_table()\n self.initialize_parsetrees_table()", "def add_roles(self, role_list):\n # TODO: refactor the assertions into the add_role() case, possibly key off an expected result\n # needs more DRYness\n for role in role_list:\n add_role(self, role)", "def test__Guild__update_roles():\n guild_id = 202306230110\n \n old_roles = [\n Role.precreate(202306230111),\n Role.precreate(202306230112),\n ]\n \n new_roles = [\n Role.precreate(202306230113),\n Role.precreate(202306230114),\n ]\n \n guild = Guild.precreate(guild_id, roles = old_roles)\n \n data = [role.to_data(include_internals = True) for role in new_roles]\n \n guild._update_roles(data)\n \n vampytest.assert_eq(guild.roles, {role.id: role for role in new_roles})", "def test_roles_create(self):\n pass", "def test_roles_update(self):\n pass", "def _populate_mods_table(self):\n\n self._dbman.populate_mods_table(self.modcollection)\n\n # mods table now only contains mod directory, managed status\n # with self._dbman.conn as con:\n # con.executemany(\n # \"INSERT INTO mods VALUES (?, ?)\",\n # ((m.directory, m.managed)\n # for m in self.modcollection)\n # )", "async def _courses_roles(self, ctx):\n pass", "async def _courses_create(self, ctx, *, roles: str):\n if roles is None:\n return await ctx.send(error(\"Role cannot be blank\"))\n\n roles = roles.split(\" \")\n\n # regisiter the course with the database\n for role in roles:\n await self._courses_register(ctx, role.lower(), sort=False)\n\n # sort courses after all courses have been added\n await ctx.invoke(self._courses_sort)\n\n await ctx.channel.send(\"Done.\")\n pass" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Validates that the VPC connector can be safely removed. Does nothing if 'clear_vpc_connector' is not present in args with value True.
def ValidateClearVpcConnector(service, args):
  if (service is None or
      not flags.FlagIsExplicitlySet(args, 'clear_vpc_connector') or
      not args.clear_vpc_connector):
    return
  if flags.FlagIsExplicitlySet(args, 'vpc_egress'):
    egress = args.vpc_egress
  elif container_resource.EGRESS_SETTINGS_ANNOTATION in service.template_annotations:
    egress = service.template_annotations[
        container_resource.EGRESS_SETTINGS_ANNOTATION]
  else:
    # --vpc-egress flag not specified and egress settings not set on service.
    return
  if (egress != container_resource.EGRESS_SETTINGS_ALL and
      egress != container_resource.EGRESS_SETTINGS_ALL_TRAFFIC):
    return
  if console_io.CanPrompt():
    console_io.PromptContinue(
        message='Removing the VPC connector from this service will clear the '
        'VPC egress setting and route outbound traffic to the public internet.',
        default=False,
        cancel_on_no=True)
  else:
    raise exceptions.ConfigurationError(
        'Cannot remove VPC connector with VPC egress set to "{}". Set'
        ' `--vpc-egress=private-ranges-only` or run this command '
        'interactively and provide confirmation to continue.'.format(egress))
[ "def AddClearVpcNetworkFlags(parser, resource_kind='service'):\n parser.add_argument(\n '--clear-network',\n action='store_true',\n help=(\n 'Disconnect this Cloud Run {kind} from the VPC network it is'\n ' connected to.'.format(kind=resource_kind)\n ),\n )", "def remove_in_connector(self, connector_name: str):\n\n if connector_name in self.in_connectors:\n connectors = self.in_connectors\n del connectors[connector_name]\n self.in_connectors = connectors\n return True", "def remove_out_connector(self, connector_name: str):\n\n if connector_name in self.out_connectors:\n connectors = self.out_connectors\n del connectors[connector_name]\n self.out_connectors = connectors\n return True", "def remove_virtual_connector(self):\n self.virtual_connector = None", "def remove_connector(self, reservation_id: ReservationId, port1: str, port2: str):\n logger.info(f\"Removing connector between {port1} and {port2}\")\n self._api.DisconnectRoutesInReservation(reservation_id, [port1, port2])\n self._api.RemoveConnectorsFromReservation(reservation_id, [port1, port2])", "def disconnect_connector(self): \n if self.itemA is not None:\n if self in self.itemA.connectorList:\n self.itemA.connectorList.remove(self)\n if self.itemB is not None:\n if self in self.itemB.connectorList:\n self.itemB.connectorList.remove(self)", "def delete_vpn_connection(DryRun=None, VpnConnectionId=None):\n pass", "def delete_vpc(DryRun=None, VpcId=None):\n pass", "def remove(self, name, *, type_hint=None):\n # prefix for all config variables of this credential\n prefix = _get_cred_cfg_var(name, '')\n\n to_remove = [\n k[len(prefix):] for k in self._cfg.keys()\n if k.startswith(prefix)\n ]\n removed = False\n if to_remove:\n self._unset_credprops_anyscope(name, to_remove)\n removed = True\n\n # delete the secret from the keystore, if there is any\n def del_field(name, field):\n global removed\n try:\n self._keyring.delete(name, field)\n removed = True\n except Exception as e:\n if self._keyring.get(name, field) is None:\n # whatever it was, the target is reached\n CapturedException(e)\n else:\n # we could not delete the field\n raise # pragma: nocover\n\n del_field(name, 'secret')\n if type_hint:\n # remove legacy records too\n for field in self._cred_types.get(\n type_hint, {}).get('fields', []):\n del_field(name, field)\n return removed", "def check_can_live_migrate_destination_cleanup(self, *args, **kwargs):\n pass", "def _disconnect(self, array, volume, connector, remove_remote_hosts=False,\n is_multiattach=False):\n vol_name = self._get_vol_name(volume)\n if connector is None:\n # If no connector was provided it is a force-detach, remove all\n # host connections for the volume\n LOG.warning(\"Removing ALL host connections for volume %s\",\n vol_name)\n connections = array.list_volume_private_connections(\n vol_name, remote=True)\n\n for connection in connections:\n self._disconnect_host(array, connection['host'], vol_name)\n return False\n else:\n # Normal case with a specific initiator to detach it from\n hosts = self._get_host(array, connector,\n remote=remove_remote_hosts)\n if hosts:\n any_in_use = False\n host_in_use = False\n for host in hosts:\n host_name = host[\"name\"]\n if not is_multiattach:\n host_in_use = self._disconnect_host(array,\n host_name,\n vol_name)\n else:\n LOG.warning(\"Unable to disconnect host from volume. 
\"\n \"Volume is multi-attached.\")\n any_in_use = any_in_use or host_in_use\n return any_in_use\n else:\n LOG.error(\"Unable to disconnect host from volume, could not \"\n \"determine Purity host on array %s\",\n array.backend_id)\n return False", "def test_only_deletes_vm_rules(self, fake_call_api, fake_generate_token):\n fake_generate_token.return_value = b'aa.bb.cc'\n data = {'content': {'ports': {'1234': {'name': 'beer'}, '2345': {'name': 'foo'}}}}\n fake_call_api.return_value = data\n\n vm._delete_portmap_rules(user='max', vm_name='beer')\n\n all_args = fake_call_api.call_args_list\n _, delete_kwarg = all_args[1]\n deleted_port = delete_kwarg['payload']['conn_port']\n expected_port = 1234\n\n self.assertEqual(len(all_args), 2) # One to lookup all rules, another to delete\n self.assertEqual(deleted_port, expected_port)", "def test_remove_creds(self):\n FLAGS.unparse_flags()\n self.assertFalse(auth._remove_creds())\n flags.FLAGS(sys.argv[:1] + ['--remove_creds'])\n FLAGS.mark_as_parsed()\n self.assertTrue(auth._remove_creds())", "def deletevpc(vpc_choices):\n progressbar(\"Deleting VPC\")\n vpcname=vpc_choices['vpc'][0]\n try:\n ec2.delete_vpc(VpcId=str(vpcname))\n print(\"\\n \\n vpc \" +vpcname +\" has been deleted \\n \\n\")\n except botocore.exceptions.ClientError as e:\n coloredtext(\"There was an error while deleting vpc: \\n\\n\\n\")\n print(e)", "def _disconnect_input(self, step_arg_name):\n self._connected_inputs[step_arg_name] = False", "def msg_disconnect_relay_peer(self, _msg: DisconnectRelayPeerMessage) -> None:\n if self.peer_ip in set([outbound_peer.ip for outbound_peer in self.node.opts.outbound_peers if outbound_peer]):\n self.log_info(\"Received disconnect request. Not dropping because relay peer is static.\")\n else:\n self.log_info(\"Received disconnect request. Dropping.\")\n self.mark_for_close(should_retry=False)", "def AddClearVpcNetworkTagsFlags(parser, resource_kind='service'):\n parser.add_argument(\n '--clear-network-tags',\n action='store_true',\n help=(\n 'Clears all existing Compute Engine tags from the Cloud Run {kind}. 
'\n .format(kind=resource_kind)\n ),\n )", "def removeConnection(self, f: 'SoField') -> \"void\":\n return _coin.SoEngineOutput_removeConnection(self, f)", "def delete_vpc_endpoint_resources():\n print('Deleting VPC endpoints')\n ec2 = boto3.client('ec2')\n endpoint_ids = []\n for endpoint in ec2.describe_vpc_endpoints()['VpcEndpoints']:\n print('Deleting VPC Endpoint - {}'.format(endpoint['ServiceName']))\n endpoint_ids.append(endpoint['VpcEndpointId'])\n\n if endpoint_ids:\n ec2.delete_vpc_endpoints(\n VpcEndpointIds=endpoint_ids\n )\n\n print('Waiting for VPC endpoints to get deleted')\n while ec2.describe_vpc_endpoints()['VpcEndpoints']:\n time.sleep(5)\n\n print('VPC endpoints deleted')\n\n # VPC endpoints connections\n print('Deleting VPC endpoint connections')\n service_ids = []\n for connection in ec2.describe_vpc_endpoint_connections()['VpcEndpointConnections']:\n service_id = connection['ServiceId']\n state = connection['VpcEndpointState']\n\n if state in ['PendingAcceptance', 'Pending', 'Available', 'Rejected', 'Failed', 'Expired']:\n print('Deleting VPC Endpoint Service - {}'.format(service_id))\n service_ids.append(service_id)\n\n ec2.reject_vpc_endpoint_connections(\n ServiceId=service_id,\n VpcEndpointIds=[\n connection['VpcEndpointId'],\n ]\n )\n\n if service_ids:\n ec2.delete_vpc_endpoint_service_configurations(\n ServiceIds=service_ids\n )\n\n print('Waiting for VPC endpoint services to be destroyed')\n while ec2.describe_vpc_endpoint_connections()['VpcEndpointConnections']:\n time.sleep(5)\n\n print('VPC endpoint connections deleted')" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Function accepts a filename, a data dictionary and lists of prefractions and strains, and writes a file with prefraction rows and strain columns with the value in the corresponding row/column.
def write_heatmap(filename, data, prefractions, strains):
    with open(filename, 'w') as f:
        f.write("Genes\t{}\n".format("\t".join(strains)))
        for pref in prefractions:
            f.write("{}\t{}\n".format(pref, "\t".join([str(data[pref][strn]) for strn in strains])))
    return
[ "def write_data(num, data):\n file_num = \"%05d\" % num\n filename = data_file_statistics + file_num + \".dat\"\n fh = open(filename, mode='w')\n i = 0\n while i < 5:\n j = 0\n while j < 34:\n fh.write(\"%13.5f\" % 0.0)\n j += 1\n i += 1\n fh.write('\\n')\n i = 0\n data_row = 0\n while i < 20:\n j = 0\n while j < 5:\n fh.write(\"%13.5f\" % 0.0)\n j += 1\n data_i = 0\n while data_i < 27:\n fh.write(\"%13.5f\" % float(data[data_row * 27 + data_i]))\n data_i += 1\n data_row += 1\n j = 0\n while j < 2:\n fh.write(\"%13.5f\" % 0.0)\n j += 1\n fh.write('\\n')\n i += 1\n i = 0\n while i < 5:\n j = 0\n while j < 34:\n fh.write(\"%13.5f\" % 0.0)\n j += 1\n i += 1\n fh.write('\\n')\n fh.close()", "def Write2File(fileNum, data, time, chNum):\n f = open(\"Data%s.txt\" % fileNum, 'w+')\n for row in range(len(data) / chNum):\n for col in range(chNum):\n # f.write(\"%i %f \" % (data[row*chNum + col], time[row*chNum + col]))s\n f.write(\"%s \" % (data[row * chNum + col]))\n f.write(\"\\n\")\n f.close()", "def write_data(natoms, lx,ly, tpe, molid, x, y, bid, btpe, b1, b2, aid, atpe, a1, a2, a3,ofname):\n ofile = open(ofname, 'w')\n ### write down header information\n ofile.write('LAMMPS data file filaments in 2D\\n\\n')\n ofile.write(str(natoms) + ' atoms\\n')\n ofile.write('1 atom types\\n')\n ofile.write(str(max(bid)) + ' bonds\\n')\n ofile.write('1 bond types\\n')\n ofile.write(str(max(aid)) + ' angles\\n')\n ofile.write('1 angle types\\n\\n')\n ofile.write('0.0 ' + str(lx) + ' xlo xhi\\n')\n ofile.write('0.0 ' + str(ly) + ' ylo yhi\\n')\n ofile.write('-2.5 2.5 zlo zhi\\n\\n')\n ofile.write('Masses\\n\\n')\n ofile.write('1 1\\n\\n')\n ### Atoms section\n ofile.write('Atoms\\n\\n')\n for i in range(natoms):\n ofile.write(str(i+1) + ' ' + str(molid[i]) + ' ' + str(tpe[i]) + ' ' + str(x[i]) + ' ' + str(y[i]) + ' 0.0\\n')\n ofile.write('\\n')\n ### Bonds section\n ofile.write('Bonds\\n\\n')\n for i in range(len(bid)):\n ofile.write(str(bid[i]) + ' ' + str(btpe[i]) + ' ' + str(b1[i]) + ' ' + str(b2[i]) + '\\n')\n ofile.write('\\n')\n ### Angles section\n ofile.write('Angles\\n\\n')\n for i in range(len(aid)):\n ofile.write(str(aid[i]) + ' ' + str(atpe[i]) + ' ' + str(a1[i]) + ' ' + str(a2[i]) + ' ' + str(a3[i]) + '\\n')\n ofile.write('\\n')\n ofile.close()\n return", "def update_pointings_file(self, infile, ra, dec, prob_fov, skymap):\n \n with open(infile, 'a') as pointing:\n pointing.write(str(ra) + ' ' + str(dec)+ ' ' + str(prob_fov) + ' ' + skymap +'\\n')", "def put_data(file,variables,header='',format='',append='no'): \n if type(variables)<>type((2,)):\n raise 'Need a tuple of variables'\n if format=='' : format='%.6g '*len(variables)\n if append=='yes': f=open(file,'a')\n else: f=open(file,'w')\n if header<>\"\":\n if header[0] <>'#' : header='#'+header\n if header[-1]<>'\\n': header=header+'\\n'\n f.write(header)\n for i in range(len(variables[0])):\n cosas=[]\n for j in range(len(variables)):\n cosas.append(variables[j][i])\n line=format % tuple(cosas) \n f.write(\"\\t\"+line+'\\n')\n f.close()", "def write_to_file(data:list):\r\n if not isinstance(data, list):\r\n raise TypeError\r\n\r\n with open('score_table.txt', 'w+') as table_file:\r\n table_file.write(str(data))", "def write_to_file(filename, output):\n path = \"../data/\" + \"scored_\" + filename + \".tsv\"\n fObj = open(path, \"w+\")\n fObj.write(output)\n fObj.close()", "def write_to_file(file, sweetberry, inas):\n\n with open(file, 'w') as pyfile:\n\n pyfile.write('inas = [\\n')\n\n for rec in inas:\n if rec['sweetberry'] != 
sweetberry:\n continue\n\n # EX : ('sweetberry', 0x40, 'SB_FW_CAM_2P8', 5.0, 1.000, 3, False),\n channel, i2c_addr = Spower.CHMAP[rec['channel']]\n record = (\" ('sweetberry', 0x%02x, '%s', 5.0, %f, %d, 'True')\"\n \",\\n\" % (i2c_addr, rec['name'], rec['rs'], channel))\n pyfile.write(record)\n\n pyfile.write(']\\n')", "def circuits_and_jugglers_to_file(num_circuits, num_jugglers, num_preferred_circuits,\n file_name=const_t.FILE_NAME):\n\n circuits, jugglers = gen_rand_circuits_and_jugglers(\n num_circuits, num_jugglers, num_preferred_circuits\n )\n\n with open(file_name, \"w\") as file:\n for _, circuit in circuits.items():\n print(circuit.input_file_format, file=file)\n\n print(file=file)\n\n for juggler in jugglers:\n print(juggler.input_file_format, file=file)\n\n return circuits, jugglers", "def put_data(fname, variables, header='', fmt='', append='no'):\n if isinstance(variables, tuple):\n raise Exception('Need a tuple of variables')\n if not fmt:\n fmt = '%.8e ' * len(variables)\n if append == 'yes':\n f = open(fname, 'a')\n else:\n f = open(fname, 'w')\n if header:\n if not header.startswith('#'):\n header = '#' + header\n if not header.endswith('\\n'):\n header += '\\n'\n f.write(header)\n for i in range(len(variables[0])):\n cosas = []\n for j in range(len(variables)):\n cosas.append(variables[j][i])\n line = fmt % tuple(cosas)\n f.write(line + '\\n')\n f.close()", "def to_file(filename, dicts):\n\n with open(filename, \"w\") as f:\n for order, dictionary in dicts:\n f.write(\"%s \" % order)", "def write_challenges_to_file(challenges, file, show_collision):\n challenge_table = []\n counter = 1\n with open(file, 'w', newline='') as f:\n fieldnames = ['Number','Points','Description','Hash','Alg','Collision','Method' ]\n writer = csv.writer(f)\n writer.writerow(fieldnames)\n for challenge in challenges:\n collision = ''\n if show_collision:\n collision = challenge.password\n writer.writerow([counter, challenge.points, challenge.desc, challenge.digest, challenge.alg, collision, ''])\n counter = counter + 1", "def write_data_to_file(file_name, list_of_rows):\r\n shelf = shelve.open(file_name, 'n')\r\n for row in list_of_rows:\r\n shelf['Width'] = row['Width']\r\n shelf['Height'] = row['Height']\r\n shelf.close()\r\n return list_of_rows", "def write(self, file_name) :\n\n # Add the data\n Col = pyfits.Column(name='DATA', format=self.data_format, \n array=self.data)\n columns = [Col,]\n \n # Add all the other stored fields.\n for field_name in self.field.iterkeys() :\n Col = pyfits.Column(name=field_name,\n format=self.formats[field_name],\n array=self.field[field_name])\n columns.append(Col)\n coldefs = pyfits.ColDefs(columns)\n # Creat fits header data units, one for the table and the mandatory\n # primary.\n tbhdu = pyfits.new_table(coldefs)\n prihdu = pyfits.PrimaryHDU()\n # Add the write history.\n fname_abbr = ku.abbreviate_file_path(file_name)\n self.history.add('Written to file.', ('File name: ' + fname_abbr,))\n # Add the history to the header.\n bf.write_history_header(prihdu.header, self.history)\n\n # Combine the HDUs and write to file.\n hdulist = pyfits.HDUList([prihdu, tbhdu])\n hdulist.writeto(file_name, clobber=True)\n if self.feedback > 0 :\n print 'Wrote data to file: ' + fname_abbr", "def __write_to_file(output_dir, p_values, nans, fname):\n fname = output_dir + \"/\" + fname\n \n f = open(fname, 'w')\n f.write('name\\tp-val\\tenrinched in\\n')\n p_values.sort()\n \n for tp in p_values:\n pval = (\"%.12f\" % __round_sig(tp[0])).rstrip('0')\n attr_name = str(tp[1])\n 
enriched_in = str(tp[2])\n f.write(attr_name + \"\\t\" + pval + \"\\t\" + enriched_in + \"\\n\")\n\n for n in nans:\n attr_name = str(n[1])\n f.write(attr_name + \"\\tn/a\\n\")\n\n f.close()", "def write(filename, t):\n _cm._write(filename, t._tensor)", "def write_results(filename, scop_homology):\n with open(filename, \"w\") as f:\n for (p1, p2), value in scop_homology.items():\n f.write(\"\\t\".join([p1, p2, value]) + \"\\n\")", "def write_results(filename):", "def put_data2(file,variables,variables2,header='',format='',format2='',append='no'): \n if type(variables)<>type((2,)):\n raise 'Need a tuple of variables'\n if type(variables2)<>type((2,)):\n raise 'Need a tuple of variables2'\n if format=='' : format='%.6g '*len(variables)\n if format2=='' : format2='%s '*len(variables2)\n if append=='yes': f=open(file,'a')\n else: f=open(file,'w')\n if header<>\"\":\n if header[0] <>'#' : header='#'+header\n if header[-1]<>'\\n': header=header+'\\n'\n f.write(header)\n for i in range(len(variables[0])):\n cosas=[]\n cosas2=[]\n for j in range(len(variables)):\n cosas.append(variables[j][i])\n #pdb.set_trace()\n line=format % tuple(cosas)\n for j in range(len(variables2)):\n cosas2.append(variables2[j][i])\n #pdb.set_trace()\n line2=format2 % tuple(cosas2)\n #Outline=line+\" \"+line2\n #f.write(\"\\t\"+line+'\\n')\n f.write(\"\\t\"+line+\" \"+line2+'\\n')\n f.close()" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Run 'runtimeconfigs waiters create'.
def Run(self, args):
  waiter_client = util.WaiterClient()
  messages = util.Messages()
  waiter_resource = util.ParseWaiterName(args.name, args)
  project = waiter_resource.projectsId
  config = waiter_resource.configsId

  success = messages.EndCondition(
      cardinality=messages.Cardinality(
          path=args.success_cardinality_path,
          number=args.success_cardinality_number,
      )
  )
  if args.failure_cardinality_path:
    failure = messages.EndCondition(
        cardinality=messages.Cardinality(
            path=args.failure_cardinality_path,
            number=args.failure_cardinality_number,
        )
    )
  else:
    failure = None

  result = waiter_client.Create(
      messages.RuntimeconfigProjectsConfigsWaitersCreateRequest(
          parent=util.ConfigPath(project, config),
          waiter=messages.Waiter(
              name=waiter_resource.RelativeName(),
              timeout='{0}s'.format(args.timeout),
              success=success,
              failure=failure,
          )
      )
  )
  log.CreatedResource(waiter_resource)

  if args.async:
    # In async mode, we return the current waiter representation.
    # The waiter resource exists immediately after creation; the
    # operation resource returned from CreateWaiter only tracks the
    # waiting process.
    self._async_resource = waiter_resource
    request = (waiter_client.client.MESSAGES_MODULE
               .RuntimeconfigProjectsConfigsWaitersGetRequest(
                   name=waiter_resource.RelativeName()))
    result = waiter_client.Get(request)
  else:
    self._async_resource = None
    result = util.WaitForWaiter(waiter_resource)
    if util.IsFailedWaiter(result):
      self.exit_code = 2  # exit with code 2 if the result waiter failed.

  return util.FormatWaiter(result)
[ "def generate_runtime_container(self):\n for version in self.versions:\n self.display('docker build -f {}/dockerfiles/{}_{}.d -t {} {}'.format(\n self.tmp, self.runtime, version, 'continuous:{}_{}'.format(self.runtime, version), self.tmp), \"yellow\")\n self.exec('docker build -f {}/dockerfiles/{}_{}.d -t {} {}'.format(\n self.tmp, self.runtime, version, 'continuous:{}_{}'.format(self.runtime, version), self.tmp), not self.verbose)", "def create_eb_environment():\n creation_response = client.create_environment(\n ApplicationName=app_name,\n EnvironmentName=environment_name,\n Description=\"Manheim test deployment\",\n CNAMEPrefix=environment_name,\n Tier={\n 'Name': 'WebServer',\n 'Type': 'Standard'\n },\n SolutionStackName=solution_stack,\n OptionSettings=[\n {\n 'Namespace': 'aws:autoscaling:asg',\n 'OptionName': 'Custom Availability Zones',\n 'ResourceName': 'AWSEBAutoScalingGroup',\n 'Value': 'us-east-1a'\n },\n {\n 'Namespace': 'aws:autoscaling:asg',\n 'OptionName': 'MaxSize',\n 'ResourceName': 'AWSEBAutoScalingGroup',\n 'Value': '3'\n },\n {\n 'Namespace': 'aws:autoscaling:asg',\n 'OptionName': 'MinSize',\n 'ResourceName': 'AWSEBAutoScalingGroup',\n 'Value': '1'\n },\n {\n 'Namespace': 'aws:autoscaling:launchconfiguration',\n 'OptionName': 'InstanceType',\n 'Value': 't2.micro'\n },\n {\n 'Namespace': 'aws:autoscaling:trigger',\n 'OptionName': 'BreachDuration',\n 'ResourceName': 'AWSEBCloudwatchAlarmLow',\n 'Value': '1'\n },\n {\n u'Namespace': 'aws:autoscaling:trigger',\n u'OptionName': 'EvaluationPeriods',\n u'ResourceName': 'AWSEBCloudwatchAlarmLow',\n u'Value': '1'\n },\n {\n u'Namespace': 'aws:autoscaling:trigger',\n u'OptionName': 'LowerBreachScaleIncrement',\n u'ResourceName': 'AWSEBAutoScalingScaleDownPolicy',\n u'Value': '-1'\n },\n {\n u'Namespace': 'aws:autoscaling:trigger',\n u'OptionName': 'LowerThreshold',\n u'ResourceName': 'AWSEBCloudwatchAlarmLow',\n u'Value': '25'\n },\n {\n 'Namespace': 'aws:autoscaling:trigger',\n 'OptionName': 'MeasureName',\n 'ResourceName': 'AWSEBCloudwatchAlarmLow',\n 'Value': 'CPUUtilization'\n },\n {\n 'Namespace': 'aws:autoscaling:trigger',\n 'OptionName': 'Period',\n 'ResourceName': 'AWSEBCloudwatchAlarmLow',\n 'Value': '1'\n },\n {\n 'Namespace': 'aws:autoscaling:trigger',\n 'OptionName': 'Statistic',\n 'ResourceName': 'AWSEBCloudwatchAlarmLow',\n 'Value': 'Average'\n },\n {\n 'Namespace': 'aws:autoscaling:trigger',\n 'OptionName': 'Unit',\n 'ResourceName': 'AWSEBCloudwatchAlarmLow',\n 'Value': 'Percent'\n },\n {\n 'Namespace': 'aws:autoscaling:trigger',\n 'OptionName': 'UpperBreachScaleIncrement',\n 'ResourceName': 'AWSEBAutoScalingScaleUpPolicy',\n 'Value': '1'\n },\n {\n 'Namespace': 'aws:autoscaling:trigger',\n 'OptionName': 'UpperThreshold',\n 'ResourceName': 'AWSEBCloudwatchAlarmHigh',\n 'Value': '85'\n },\n {\n 'Namespace': 'aws:autoscaling:updatepolicy:rollingupdate',\n 'OptionName': 'RollingUpdateEnabled',\n 'ResourceName': 'AWSEBAutoScalingGroup',\n 'Value': 'false'\n },\n {\n 'Namespace': 'aws:autoscaling:updatepolicy:rollingupdate',\n 'OptionName': 'RollingUpdateType',\n 'ResourceName': 'AWSEBAutoScalingGroup',\n 'Value': 'Time'\n },\n {\n 'Namespace': 'aws:elasticbeanstalk:command',\n 'OptionName': 'BatchSize',\n 'Value': '50'\n },\n {\n 'Namespace': 'aws:elasticbeanstalk:command',\n 'OptionName': 'BatchSizeType',\n 'Value': 'Percentage'\n },\n {\n 'Namespace': 'aws:elasticbeanstalk:command',\n 'OptionName': 'DeploymentPolicy',\n 'Value': 'Rolling'\n },\n {\n 'Namespace': 'aws:elasticbeanstalk:command',\n 'OptionName': 
'IgnoreHealthCheck',\n 'Value': 'false'\n },\n {\n 'Namespace': 'aws:elasticbeanstalk:command',\n 'OptionName': 'Timeout',\n 'Value': '600'\n },\n {\n 'Namespace': 'aws:elasticbeanstalk:container:python',\n 'OptionName': 'WSGIPath',\n 'Value': application_path\n }\n ]\n )\n return creation_response", "def workflow_config(name: str,\n partition=['sequana_cpu', 'sequana_cpu',\n 'sequana_cpu', 'sequana_cpu_long'],\n nodes=[1, 4, 1, 1],\n cores_per_node=[48, 48, 48, 48],\n walltime=['03:00:00', '04:00:00', '06:00:00', '06:00:00'],\n interval=30,\n monitor=False) -> parsl.config.Config:\n\n parsl.set_stream_logger(level=logging.ERROR)\n parsl.set_file_logger(f'{name}_script.output', level=logging.DEBUG)\n\n logging.info('Configuring Parsl Workflow Infrastructure')\n\n # Read where datasets are...\n env_str = str()\n with open('parsl.env', 'r') as reader:\n env_str = reader.read()\n\n logging.info(f'Task Environment {env_str}')\n\n mon_hub = parsl.monitoring.monitoring.MonitoringHub(\n workflow_name=name,\n hub_address=address_by_interface('ib0'),\n hub_port=60001,\n resource_monitoring_enabled=True,\n monitoring_debug=False,\n resource_monitoring_interval=interval,\n ) if monitor else None\n\n return parsl.config.Config(\n executors=[\n HighThroughputExecutor(\n label='single_thread',\n # Optional: The network interface on node 0 which compute nodes can communicate with.\n # address=address_by_interface('enp4s0f0' or 'ib0')\n address=address_by_interface('ib0'),\n max_workers=int(cores_per_node[0]),\n cores_per_worker=1,\n worker_debug=False,\n interchange_port_range=(50000, 55000),\n provider=SlurmProvider(\n partition=partition[0],\n # scheduler_options='',\n parallelism=1,\n init_blocks=1,\n max_blocks=1,\n nodes_per_block=nodes[0],\n cmd_timeout=120,\n worker_init=env_str,\n move_files=False,\n walltime=walltime[0],\n launcher=SrunLauncher(overrides=f'-c {cores_per_node[0]}'),\n ),\n ),\n HighThroughputExecutor(\n label=f'raxml',\n # Optional: The network interface on node 0 which compute nodes can communicate with.\n # address=address_by_interface('enp4s0f0' or 'ib0')\n address=address_by_interface('ib0'),\n max_workers=int(cores_per_node[1]),\n cores_per_worker=6,\n worker_debug=False,\n interchange_port_range=(55000, 60000),\n provider=SlurmProvider(\n partition=partition[1],\n # scheduler_options='',\n parallelism=1,\n init_blocks=1,\n max_blocks=1,\n nodes_per_block=nodes[1],\n cmd_timeout=120,\n worker_init=env_str,\n move_files=False,\n walltime=walltime[1],\n launcher=SrunLauncher(overrides=f'-c {cores_per_node[1]}'),\n ),\n ),\n HighThroughputExecutor(\n label=f'snaq',\n # Optional: The network interface on node 0 which compute nodes can communicate with.\n # address=address_by_interface('enp4s0f0' or 'ib0')\n address=address_by_interface('ib0'),\n max_workers=int(cores_per_node[2]),\n cores_per_worker=6,\n worker_debug=False,\n interchange_port_range=(40000, 45000),\n provider=SlurmProvider(\n partition=partition[2],\n # scheduler_options='',\n parallelism=1,\n init_blocks=1,\n max_blocks=1,\n nodes_per_block=nodes[2],\n cmd_timeout=120,\n worker_init=env_str,\n move_files=False,\n walltime=walltime[2],\n launcher=SrunLauncher(overrides=f'-c {cores_per_node[2]}'),\n ),\n ),\n HighThroughputExecutor(\n label=f'snaq_l',\n # Optional: The network interface on node 0 which compute nodes can communicate with.\n # address=address_by_interface('enp4s0f0' or 'ib0')\n address=address_by_interface('ib0'),\n max_workers=int(cores_per_node[3]),\n cores_per_worker=6,\n worker_debug=False,\n 
interchange_port_range=(45000, 50000),\n provider=SlurmProvider(\n partition=partition[3],\n scheduler_options='',\n parallelism=1,\n init_blocks=1,\n max_blocks=1,\n nodes_per_block=nodes[3],\n cmd_timeout=120,\n worker_init=env_str,\n move_files=False,\n walltime=walltime[3],\n launcher=SrunLauncher(overrides=f'-c {cores_per_node[3]}'),\n ),\n ),\n ],\n monitoring=mon_hub,\n strategy=None,\n )", "def build_configs():\n # Pull in the master config as dict\n user_config = yaml.load(open(\"cluster_config.yaml\", \"r\"))\n my_token = \"\"\n if 'token' in user_config['kubeadm']:\n my_token = user_config['kubeadm']['token']\n else:\n my_token = gen_token()\n\n LOG.info('Using Token: {0}'.format(my_token))\n build_master(user_config, my_token)\n build_all_nodes(user_config, my_token)\n LOG.info('Configs are generated')", "def SetupBuildersAndSchedulers(c, builders, slaves, ActiveMaster):\n # List of dicts for every builder.\n builder_dicts = []\n\n # Builder names by scheduler.\n builders_by_scheduler = {s: [] for s in SCHEDULERS}\n # Maps a triggering builder to its triggered builders.\n triggered_builders = collections.defaultdict(list)\n\n def process_builder(builder, is_trybot=False):\n \"\"\"Create a dict for the given builder and place its name in the\n appropriate scheduler list.\n \"\"\"\n builder_name = builder['name']\n if is_trybot:\n builder_name = builder_name_schema.TrybotName(builder_name)\n\n # Categorize the builder based on its role.\n try:\n category = builder_name_schema.DictForBuilderName(builder_name)['role']\n subcategory = builder_name.split(builder_name_schema.BUILDER_NAME_SEP)[1]\n category = '|'.join((category, subcategory))\n except ValueError:\n # Assume that all builders whose names don't play by our rules are named\n # upstream and are therefore canaries.\n category = builder_name_schema.BUILDER_ROLE_CANARY\n\n properties = builder.get('properties', {})\n cc = builder.get('cc')\n if cc:\n if isinstance(cc, basestring):\n cc = [cc]\n properties['owners'] = cc\n builder_dict = {\n 'name': builder_name,\n 'auto_reboot': builder.get('auto_reboot', DEFAULT_AUTO_REBOOT),\n 'slavenames': slaves.GetSlavesName(builder=builder['name']),\n 'category': category,\n 'recipe': builder.get('recipe', DEFAULT_RECIPE),\n 'remote_run': builder.get('remote_run', DEFAULT_REMOTE_RUN),\n 'properties': properties,\n 'mergeRequests': builder.get('can_merge_requests', CanMergeBuildRequests),\n 'slavebuilddir': SLAVE_WORKDIR,\n }\n builder_dicts.append(builder_dict)\n\n parent_builder = builder.get('triggered_by')\n if parent_builder is not None:\n assert builder.get('scheduler') is None\n if is_trybot:\n parent_builder = builder_name_schema.TrybotName(parent_builder)\n triggered_builders[parent_builder].append(builder_name)\n elif is_trybot:\n builders_by_scheduler[FAKE_TRY_SCHEDULER_NAME].append(builder_name)\n else:\n scheduler = builder.get('scheduler', BUILDBUCKET_SCHEDULER_NAME)\n # Setting the scheduler to BUILDBUCKET_SCHEDULER_NAME indicates that\n # BuildBucket is the only way to schedule builds for this bot; just\n # pretend to add a scheduler in those cases.\n builders_by_scheduler[scheduler].append(builder_name)\n\n # Create builders and trybots.\n for builder in builders:\n if builder.get('trybot_only', DEFAULT_TRYBOT_ONLY):\n # trybot_only=True should only be used in combination with do_trybot=True\n # Also, the buildername then needs to already have the '-Trybot' suffix.\n assert builder.get('do_trybot', DEFAULT_DO_TRYBOT)\n assert builder['name'] == 
builder_name_schema.TrybotName(builder['name'])\n else:\n process_builder(builder)\n if builder.get('do_trybot', DEFAULT_DO_TRYBOT):\n process_builder(builder, is_trybot=True)\n\n # Verify that all parent builders exist.\n buildbucket_builders = set(\n builders_by_scheduler[BUILDBUCKET_SCHEDULER_NAME]\n )\n trybots = set(builders_by_scheduler[FAKE_TRY_SCHEDULER_NAME])\n trigger_parents = set(triggered_builders.keys())\n nonexistent_parents = trigger_parents - buildbucket_builders - trybots\n if nonexistent_parents:\n raise Exception('Could not find parent builders: %s' %\n ', '.join(nonexistent_parents))\n\n # Create the schedulers.\n skia_master_only_change_filter = change_filter.ChangeFilter(\n project='skia', repository=ActiveMaster.repo_url, branch=MASTER_BRANCH)\n\n c['schedulers'] = []\n\n s = Scheduler(\n name=MASTER_ONLY_SCHEDULER_NAME,\n treeStableTimer=60,\n change_filter=skia_master_only_change_filter,\n builderNames=builders_by_scheduler[MASTER_ONLY_SCHEDULER_NAME])\n c['schedulers'].append(s)\n\n s = timed.Nightly(\n name=PERIODIC_15MINS_SCHEDULER_NAME,\n branch=MASTER_BRANCH,\n builderNames=builders_by_scheduler[PERIODIC_15MINS_SCHEDULER_NAME],\n minute=[i*15 for i in xrange(60/15)],\n hour='*',\n dayOfMonth='*',\n month='*',\n dayOfWeek='*')\n c['schedulers'].append(s)\n\n s = timed.Nightly(\n name=NIGHTLY_SCHEDULER_NAME,\n branch=MASTER_BRANCH,\n builderNames=builders_by_scheduler[NIGHTLY_SCHEDULER_NAME],\n minute=0,\n hour=22,\n dayOfMonth='*',\n month='*',\n dayOfWeek='*')\n c['schedulers'].append(s)\n\n s = timed.Nightly(\n name=WEEKLY_SCHEDULER_NAME,\n branch=MASTER_BRANCH,\n builderNames=builders_by_scheduler[WEEKLY_SCHEDULER_NAME],\n minute=0,\n hour=0,\n dayOfMonth='*',\n month='*',\n dayOfWeek=6) # Sunday (Monday = 0).\n c['schedulers'].append(s)\n\n # Don't add triggerable schedulers for triggered_builders; triggers are now\n # handled on the slave-side through buildbucket.\n\n # Create the BuildFactorys.\n annotator = annotator_factory.AnnotatorFactory(ActiveMaster)\n\n for builder_dict in builder_dicts:\n if builder_dict['remote_run']:\n factory = remote_run_factory.RemoteRunFactory(\n active_master=ActiveMaster,\n repository='https://chromium.googlesource.com/chromium/tools/build.git',\n recipe=builder_dict['recipe'],\n factory_properties={'path_config': 'kitchen'})\n else:\n factory = annotator.BaseFactory(\n builder_dict['recipe'],\n timeout=2400)\n factory.properties.update(builder_dict['properties'], 'BuildFactory')\n builder_dict['factory'] = factory\n\n # Finished!\n c['builders'] = builder_dicts", "def do_init(args):\n template_files = [\"task.py\", \"task.yaml\"]\n exp = Experiment(args.root)\n\n for fname in template_files:\n if exp.exists(fname):\n logger.info(\"Skipping creating task.py because it already \"\n \"exists. Please delete or rename it if you would like to \"\n \"restore the template version\")\n else:\n _run(f\"cp {TEMPLATE_DIR}/{fname} {args.root}\")\n logger.info(\"Ready to start running experiments! 
Run `new` to create a new batch.\")", "def create(type_):\n\n rev = type_.create_rev()\n click.echo(\"Created {}\".format(rev.revision))\n click.echo(\n (\n \"You must create a virtual environment in {} \"\n \"before you can run any jobs.\"\n ).format(rev.venv_path)\n )", "def create_pipelines_environments(self):\n for env,details in self.merged_config['pipelines-environments'].items():\n click.secho(\"Creating '{}' environment...\".format(env), fg='green', nl=False)\n r = requests.post(\n f\"{self.repo_endpoint}/environments/\",\n headers = {'Content-Type': 'application/json'},\n data = json.dumps(details),\n auth = (self.login, self.passwd)\n )\n if str(r.status_code).startswith('20'):\n click.secho(\"done.\", fg='green')\n else:\n click.secho(\"HTTP {}\".format(r.status_code), fg='bright_red')", "def run_startup(self, wait: bool) -> None:\n for cmd in self.startup:\n try:\n self.node.cmd(cmd, wait=wait)\n except CoreCommandError as e:\n raise ConfigServiceBootError(\n f\"node({self.node.name}) service({self.name}) failed startup: {e}\"\n )", "def create(ctx,\n # Mandatory main parameter\n initiators,\n # Mandatory subparameter of a mandatory main parameter (Not fully decomposed)\n name,\n # Non mandatory subparameter of a mandatory main parameter (not fully decomposed)\n alias = None,\n # Non mandatory subparameter of a mandatory main parameter (not fully decomposed)\n volumeaccessgroupid = None,\n # Non mandatory subparameter of a mandatory main parameter (not fully decomposed)\n attributes = None,\n # Non mandatory subparameter of a mandatory main parameter (not fully decomposed)\n requirechap = None,\n # Non mandatory subparameter of a mandatory main parameter (not fully decomposed)\n chapusername = None,\n # Non mandatory subparameter of a mandatory main parameter (not fully decomposed)\n initiatorsecret = None,\n # Non mandatory subparameter of a mandatory main parameter (not fully decomposed)\n targetsecret = None,\n # Non mandatory subparameter of a mandatory main parameter (not fully decomposed)\n virtualnetworkids = None):\n \"\"\"aliases and attributes. 
When you use CreateInitiators to create new initiators, you can also add them to volume access groups.\"\"\"\n \"\"\"If CreateInitiators fails to create one of the initiators provided in the parameter, the method returns an error and does not create\"\"\"\n \"\"\"any initiators (no partial completion is possible).\"\"\"\n \n cli_utils.establish_connection(ctx)\n\n # Converting the virtualnetworkids to list of lists.\n if virtualnetworkids[0] is not None:\n len_virtualnetworkids = len(virtualnetworkids)\n modified_virtualnetworkids = []\n for virtualnetworkid in range(0,len_virtualnetworkids):\n modified_virtualnetworkids.append(virtualnetworkids[virtualnetworkid].split(','))\n virtualnetworkids = modified_virtualnetworkids\n \n # If we have a submember that's an attributes array, we need to handle it.\n attributes_json = [simplejson.loads(v) if v is not None else None for v in attributes]\n \n initiatorsArray = None\n if len(initiators) == 1 and name[0] is None and alias[0] is None and volumeaccessgroupid[0] is None and attributes_json[0] is None and requirechap[0] is None and chapusername[0] is None and initiatorsecret[0] is None and targetsecret[0] is None and virtualnetworkids[0] is None:\n initiatorsArray = []\n elif(initiators is not None and initiators != ()):\n initiatorsArray = []\n try:\n for i, _initiators in enumerate(initiators):\n attributes_json = None\n if attributes[i] != None:\n attributes_json = simplejson.loads(attributes[i])\n initiatorsArray.append(CreateInitiator(name=name[i], alias=alias[i], volume_access_group_id=volumeaccessgroupid[i], attributes=attributes_json, require_chap=requirechap[i], chap_username=chapusername[i], initiator_secret=initiatorsecret[i], target_secret=targetsecret[i], virtual_network_ids=virtualnetworkids[i], ))\n except Exception as e:\n ctx.logger.error(e.__str__())\n exit(1)\n \n \n ctx.logger.info(\"\"\": \"\"\"\"\"\"initiators = \"\"\" + str(initiatorsArray)+\"\"\";\"\"\"+\"\")\n try:\n _CreateInitiatorsResult = ctx.element.create_initiators(initiators=initiatorsArray)\n except common.ApiServerError as e:\n ctx.logger.error(e.message)\n exit()\n except BaseException as e:\n ctx.logger.error(e.__str__())\n exit()\n if ctx.json:\n print(simplejson.dumps(simplejson.loads(_CreateInitiatorsResult), indent=4))\n return\n else:\n cli_utils.print_result(_CreateInitiatorsResult, ctx.logger, as_json=ctx.json, as_pickle=ctx.pickle, depth=ctx.depth, filter_tree=ctx.filter_tree)", "def test_containers_are_built(self,\n mocked_redis_runner,\n mocked_consul_runner,\n mocked_postgres_runner,\n mocked_gunicorn_runner,\n mocked_registrator_runner,\n mocked_solr_runner\n ):\n\n config = {}\n services = config.setdefault('services', [\n {\n 'name': 'adsws',\n 'repository': 'adsabs',\n 'tag': '0596971c755855ff3f9caed2f96af7f9d5792cc2'\n }\n ])\n\n dependencies = config.setdefault('dependencies', [\n {\n \"name\": \"redis\",\n \"image\": \"redis:2.8.9\",\n },\n {\n \"name\": \"postgres\",\n \"image\": \"postgres:9.3\",\n },\n {\n \"name\": \"registrator\",\n \"image\": \"gliderlabs/registrator:latest\"\n },\n {\n \"name\": \"solr\",\n \"image\": \"adsabs/montysolr:v48.1.0.3\"\n },\n {\n \"name\": \"consul\",\n \"image\": \"adsabs/consul:v1.0.0\",\n }\n ])\n\n instance_gunicorn_runner = mocked_gunicorn_runner.return_value\n instance_redis_runner = mocked_redis_runner.return_value\n instance_consul_runner = mocked_consul_runner.return_value\n instance_postgres_runner = mocked_postgres_runner.return_value\n instance_registrator_runner = 
mocked_registrator_runner.return_value\n instance_solr_runner = mocked_solr_runner.return_value\n\n instance_list = [\n instance_gunicorn_runner,\n instance_redis_runner,\n instance_consul_runner,\n instance_postgres_runner,\n instance_registrator_runner,\n instance_solr_runner\n ]\n\n for instance in instance_list:\n instance.start.return_value = None\n instance.provision.return_value = None\n\n instance_registrator_runner.start.return_value = None\n instance_registrator_runner.provision.return_value = None\n\n start_test_environment(test_id=None, config=config)\n\n for instance in instance_list:\n\n self.assertTrue(\n instance.start.called,\n msg='Instance {} was not called'.format(instance)\n )\n instance.provision.has_calls(\n [call(callback=s['name']) for s in services]\n )", "def construct_env_configs(config: Config) -> List[Config]:\n\n config.freeze()\n num_processes = config.NUM_PROCESSES\n configs = []\n dataset = habitat.make_dataset(config.DATASET.TYPE)\n scenes = dataset.get_scenes_to_load(config.DATASET)\n\n # scenes = [\n # \"sT4fr6TAbpF\",\n # \"HxpKQynjfin\",\n # \"8WUmhLawc2A\",\n # \"r47D5H71a5s\",\n # \"Pm6F8kyY3z2\",\n # \"17DRP5sb8fy\",\n # \"Vvot9Ly1tCj\",\n # \"GdvgFV5R1Z5\",\n # \"sT4fr6TAbpF\",\n # \"HxpKQynjfin\",\n # \"8WUmhLawc2A\",\n # \"r47D5H71a5s\",\n # \"Pm6F8kyY3z2\",\n # \"17DRP5sb8fy\",\n # \"Vvot9Ly1tCj\",\n # \"GdvgFV5R1Z5\",\n # \"sT4fr6TAbpF\",\n # \"HxpKQynjfin\",\n # \"8WUmhLawc2A\",\n # \"r47D5H71a5s\",\n # \"Pm6F8kyY3z2\",\n # \"17DRP5sb8fy\",\n # \"Vvot9Ly1tCj\",\n # \"GdvgFV5R1Z5\",\n # ]\n\n # scenes = ['rPc6DW4iMge', 'e9zR4mvMWw7', 'uNb9QFRL6hY', 'sKLMLpTHeUy', 's8pcmisQ38h', '759xd9YjKW5',\n # 'XcA2TqTSSAj', 'SN83YJsR3w2', '8WUmhLawc2A', 'JeFG25nYj2p', '17DRP5sb8fy', 'Uxmj2M2itWa',\n # 'b8cTxDM8gDG', 'sT4fr6TAbpF', 'S9hNv5qa7GM', '82sE5b5pLXE', 'pRbA3pwrgk9', 'aayBHfsNo7d',\n # 'cV4RVeZvu5T', 'i5noydFURQK', 'jh4fc5c5qoQ', 'VVfe2KiqLaN', '29hnd4uzFmX', 'Pm6F8kyY3z2',\n # 'JF19kD82Mey', 'GdvgFV5R1Z5', 'HxpKQynjfin']\n\n # scenes = ['rPc6DW4iMge', 'e9zR4mvMWw7', 'uNb9QFRL6hY', 'qoiz87JEwZ2', 'sKLMLpTHeUy', 's8pcmisQ38h', '759xd9YjKW5',\n # '5q7pvUzZiYa', 'XcA2TqTSSAj', 'SN83YJsR3w2', '8WUmhLawc2A', 'JeFG25nYj2p', '17DRP5sb8fy', 'Uxmj2M2itWa',\n # 'D7N2EKCX4Sj', 'b8cTxDM8gDG', 'sT4fr6TAbpF', 'S9hNv5qa7GM', '82sE5b5pLXE', 'pRbA3pwrgk9', 'aayBHfsNo7d',\n # 'cV4RVeZvu5T', 'i5noydFURQK', 'YmJkqBEsHnH', 'jh4fc5c5qoQ', 'VVfe2KiqLaN', '29hnd4uzFmX', 'Pm6F8kyY3z2',\n # 'JF19kD82Mey', 'GdvgFV5R1Z5', 'HxpKQynjfin', 'vyrNrziPKCB']\n\n # scenes = ['29hnd4uzFmX', 'i5noydFURQK', 'cV4RVeZvu5T', '82sE5b5pLXE', 'JeFG25nYj2p', '8WUmhLawc2A', 'VFuaQ6m2Qom',\n # 'rPc6DW4iMge', '29hnd4uzFmX', 'i5noydFURQK', 'cV4RVeZvu5T', '82sE5b5pLXE',\n # 'JeFG25nYj2p', '8WUmhLawc2A', 'VFuaQ6m2Qom', 'rPc6DW4iMge']\n\n if len(scenes) > 0:\n # random.shuffle(scenes)\n\n assert len(scenes) >= num_processes, (\n \"reduce the number of processes as there \" \"aren't enough number of scenes\"\n )\n\n scene_splits: List[List] = [[] for _ in range(num_processes)]\n for idx, scene in enumerate(scenes):\n scene_splits[idx % len(scene_splits)].append(scene)\n\n assert sum(map(len, scene_splits)) == len(scenes)\n\n for i in range(num_processes):\n\n task_config = config.clone()\n task_config.defrost()\n if len(scenes) > 0:\n task_config.DATASET.CONTENT_SCENES = scene_splits[i]\n\n task_config.SIMULATOR.HABITAT_SIM_V0.GPU_DEVICE_ID = config.SIMULATOR_GPU_IDS[\n i % len(config.SIMULATOR_GPU_IDS)\n ]\n\n task_config.freeze()\n\n configs.append(task_config.clone())\n\n return configs", "def 
main():\n\n azure_platform_lib = get_platform_lib(Platform.AZURE)\n\n if azure_platform_lib.check_manager_exists(ci_env['GITHUB_RUN_ID']):\n print(\"There is an existing manager vm for this CI workflow:\")\n print(azure_platform_lib.get_manager_metadata_string(ci_env['GITHUB_RUN_ID']))\n sys.exit(0)\n\n credential = DefaultAzureCredential()\n\n resource_client = ResourceManagementClient(credential, ci_env['AZURE_SUBSCRIPTION_ID'])\n rg_result = resource_client.resource_groups.get(ci_env['AZURE_RESOURCE_GROUP'])\n print(f\"Provisioned resource group {rg_result.name} in the {rg_result.location} region\")\n\n with open(ci_env['GITHUB_WORKSPACE'] + \"/scripts/machine-launch-script.sh\", \"r\") as file:\n ml_file_raw = file.read().encode('utf-8')\n ml_file_encoded = base64.b64encode(ml_file_raw).decode('latin-1')\n\n workflow_id = ci_env['GITHUB_RUN_ID']\n\n # Networking related variables\n ip_name = workflow_id + \"-ip\"\n ip_config_name = ip_name + \"-config\"\n nic_name = workflow_id + \"-nic\"\n\n # VM-relate\n vm_name = workflow_id + \"-vm\"\n username = \"centos\"\n image_name = \"xilinx_alveo_u250_deployment_vm_centos78_032321\"\n vm_size = \"Standard_E8ds_v5\" #8 vcpus, 64 gb should be sufficient for CI purposes\n\n tags = azure_platform_lib.get_manager_tag_dict(ci_env['GITHUB_SHA'], ci_env['GITHUB_RUN_ID'])\n\n network_client = NetworkManagementClient(credential, ci_env['AZURE_SUBSCRIPTION_ID'])\n poller = network_client.public_ip_addresses.begin_create_or_update(ci_env['AZURE_RESOURCE_GROUP'],\n ip_name,\n {\n \"location\": ci_env['AZURE_DEFAULT_REGION'],\n \"tags\": tags,\n \"sku\": {\"name\": \"Standard\"},\n \"public_ip_allocation_method\": \"Static\",\n \"public_ip_address_version\" : \"IPV4\"\n }\n )\n ip_address_result = poller.result()\n print(f\"Provisioned public IP address {ip_address_result.name} with address {ip_address_result.ip_address}\")\n poller = network_client.network_interfaces.begin_create_or_update(ci_env['AZURE_RESOURCE_GROUP'],\n nic_name,\n {\n \"location\": ci_env['AZURE_DEFAULT_REGION'],\n \"tags\": tags,\n \"ip_configurations\": [ {\n \"name\": ip_config_name,\n \"subnet\": { \"id\": ci_env['AZURE_CI_SUBNET_ID'] },\n \"properties\" : {\n \"publicIPAddress\" : {\n \"id\" : ip_address_result.id,\n \"properties\" : {\n \"deleteOption\" : \"Delete\" # deletes IP when NIC is deleted\n }\n }\n }\n }],\n \"networkSecurityGroup\": {\n \"id\": ci_env['AZURE_CI_NSG_ID']\n }\n }\n )\n nic_result = poller.result()\n print(f\"Provisioned network interface client {nic_result.name}\")\n\n print(f\"Provisioning virtual machine {vm_name}; this operation might take a few minutes.\")\n compute_client = ComputeManagementClient(credential, ci_env['AZURE_SUBSCRIPTION_ID'])\n poller = compute_client.virtual_machines.begin_create_or_update(ci_env['AZURE_RESOURCE_GROUP'], vm_name,\n {\n \"location\": ci_env['AZURE_DEFAULT_REGION'],\n \"tags\": tags,\n \"plan\": {\n \"name\": image_name,\n \"publisher\": \"xilinx\",\n \"product\": image_name\n },\n \"storage_profile\": {\n \"image_reference\": {\n \"publisher\": 'xilinx',\n \"offer\": image_name,\n \"sku\": image_name,\n \"version\": \"latest\"\n },\n \"osDisk\": {\n \"diskSizeGB\": 300,\n \"createOption\": \"FromImage\",\n \"deleteOption\": \"Delete\" # deletes OS Disk when VM is deleted\n }\n },\n \"hardware_profile\": {\n \"vm_size\": vm_size\n },\n \"os_profile\": {\n \"computer_name\": vm_name,\n \"admin_username\": username,\n \"linux_configuration\": {\n \"disable_password_authentication\": True,\n \"ssh\": {\n 
\"public_keys\": [{\n \"path\": f\"/home/{username}/.ssh/authorized_keys\",\n \"key_data\": ci_env['FIRESIM_PEM_PUBLIC'] # use some public key, like firesim.pem, from github secrets\n }]\n }\n },\n \"custom_data\": ml_file_encoded\n },\n \"network_profile\": {\n \"network_interfaces\": [{\n \"id\": nic_result.id,\n \"properties\": { \"deleteOption\": \"Delete\" } # deletes NIC when VM is deleted\n }]\n }\n }\n )\n vm_result = poller.result()\n print(f\"Provisioned virtual machine {vm_result.name}\")", "def create_run_config(cpu_cluster, docker_proc_type, conda_env_file):\n\n # runconfig with max_run_duration_seconds did not work, check why:\n # run_amlcompute = RunConfiguration(max_run_duration_seconds=60*30)\n run_amlcompute = RunConfiguration()\n run_amlcompute.target = cpu_cluster\n run_amlcompute.environment.docker.enabled = True\n run_amlcompute.environment.docker.base_image = docker_proc_type\n\n # Use conda_dependencies.yml to create a conda environment in\n # the Docker image for execution\n # False means the user will provide a conda file for setup\n # True means the user will manually configure the environment\n run_amlcompute.environment.python.user_managed_dependencies = False\n run_amlcompute.environment.python.conda_dependencies = CondaDependencies(\n conda_dependencies_file_path=conda_env_file\n )\n return run_amlcompute", "def generateAllConfigs():\n generateStepperConfig()\n generateProgramConfig()", "def create_or_resume(name, spec, **_):\n\n # deploy mysql for placement\n utils.ensure_mysql_cluster(\"placement\", spec[\"mysql\"])\n\n # deploy placement api\n utils.create_or_update('placement/daemonset.yml.j2', spec=spec)\n utils.create_or_update('placement/service.yml.j2', spec=spec)\n\n # Create application credential\n identity.ensure_application_credential(name=\"placement\")\n\n url = None\n if \"ingress\" in spec:\n utils.create_or_update('placement/ingress.yml.j2',\n name=name, spec=spec)\n url = spec[\"ingress\"][\"host\"]\n\n if \"endpoint\" not in spec:\n spec[\"endpoint\"] = True\n if spec[\"endpoint\"]:\n identity.ensure_service(name=\"placement\", service_type=\"placement\",\n url=url, desc=\"Placement Service\")", "def init_v1(config: NsJailConfig) -> None:\n # If the config doesn't \"have\" a value, then it's set to the default value, which means the\n # controller is not being used.\n if config.HasField(\"cgroup_cpu_ms_per_sec\"):\n pids = Path(config.cgroup_cpu_mount, config.cgroup_cpu_parent)\n pids.mkdir(parents=True, exist_ok=True)\n\n if (\n config.HasField(\"cgroup_mem_max\")\n or config.HasField(\"cgroup_mem_memsw_max\")\n or config.HasField(\"cgroup_mem_swap_max\")\n ):\n mem = Path(config.cgroup_mem_mount, config.cgroup_mem_parent)\n mem.mkdir(parents=True, exist_ok=True)\n\n if config.HasField(\"cgroup_net_cls_classid\"):\n net_cls = Path(config.cgroup_net_cls_mount, config.cgroup_net_cls_parent)\n net_cls.mkdir(parents=True, exist_ok=True)\n\n if config.HasField(\"cgroup_pids_max\"):\n pids = Path(config.cgroup_pids_mount, config.cgroup_pids_parent)\n pids.mkdir(parents=True, exist_ok=True)", "def _ready_to_apply_runtime_config(\n self, context, personalities=None, host_uuids=None,\n filter_classes=None, filter_files=None):\n\n if filter_classes is None:\n filter_classes = set()\n if filter_files is None:\n filter_files = set()\n\n if personalities is None:\n personalities = []\n if host_uuids is None:\n host_uuids = []\n\n check_required = False\n if constants.CONTROLLER in personalities:\n check_required = True\n if constants.WORKER in 
personalities and cutils.is_aio_system(self.dbapi):\n check_required = True\n if host_uuids and self.host_uuid not in host_uuids:\n check_required = False\n\n if not check_required and not filter_classes:\n return True\n\n if not os.path.exists(constants.SYSINV_REPORTED):\n LOG.info(\"_ready_to_apply_runtime_config path does not exist: %s\" %\n constants.SYSINV_REPORTED)\n return False\n\n # check if needed to wait for filter class\n for filter_class in filter_classes:\n if filter_class == self.PUPPET_RUNTIME_CLASS_ROUTES:\n if not self._check_ready_route_runtime_config():\n LOG.info(\"config type %s filter_mapping %s False (wait)\" %\n (CONFIG_APPLY_RUNTIME_MANIFEST, filter_class))\n return False\n if filter_class == self.PUPPET_RUNTIME_CLASS_DOCKERDISTRIBUTION:\n if self.check_restoring_apps_in_progress():\n LOG.info(\"config type %s filter_mapping %s False (wait)\" %\n (CONFIG_APPLY_RUNTIME_MANIFEST, filter_class))\n return False\n LOG.info(\"config type %s filter_mapping %s True (continue)\" %\n (CONFIG_APPLY_RUNTIME_MANIFEST, filter_class))\n\n # check if needed to wait for filter files\n for filter_file in filter_files:\n if filter_file in self.PUPPET_FILTER_FILES_RESTORING_APPS:\n if self.check_restoring_apps_in_progress():\n LOG.info(\"config type %s filter_mapping %s False (wait)\" %\n (CONFIG_UPDATE_FILE, filter_file))\n return False\n LOG.info(\"config type %s filter_mapping %s True (continue)\" %\n (CONFIG_UPDATE_FILE, filter_file))\n\n return True", "def create_training_run(self, run: DkubeTraining, wait_for_completion=True):\n\n assert (\n type(run) == DkubeTraining\n ), \"Invalid type for run, value must be instance of rsrcs:DkubeTraining class\"\n valid_fw = False\n fw_opts = [\"custom\"]\n if run.executor_dkube_framework.choice == \"custom\":\n valid_fw = True\n else:\n fws = self.get_training_capabilities()\n for fw in fws:\n for v in fw[\"versions\"]:\n if (\n run.executor_dkube_framework.choice == fw[\"name\"]\n and run.dkube_framework_details.version == v[\"name\"]\n ):\n valid_fw = True\n break\n else:\n name = fw[\"name\"] + \"_\" + v[\"name\"]\n fw_opts.append(name)\n if valid_fw == True:\n break\n\n assert valid_fw == True, (\n \"Invalid choice for framework, select oneof(\" + str(fw_opts) + \")\"\n )\n\n super().update_tags(run.training_def)\n super().create_run(run)\n while wait_for_completion:\n status = {}\n try:\n status = super().get_run(\n \"training\", run.user, run.name, fields=\"status\"\n )\n except ValueError as ve:\n ve_without_num = \"\".join(i for i in str(ve) if not i.isdigit())\n if \"Invalid value for `state` (Waiting for gpu(s))\" in ve_without_num:\n num = \"\".join(i for i in str(ve) if i.isdigit())\n status[\"state\"] = \"Waiting for {} gpu(s)\".format(num)\n status[\"reason\"] = \"\"\n else:\n raise ve\n state, reason = status[\"state\"], status[\"reason\"]\n if state.lower() in [\"complete\", \"failed\", \"error\", \"stopped\", \"created\"]:\n print(\n \"run {} - completed with state {} and reason {}\".format(\n run.name, state, reason\n )\n )\n break\n else:\n print(\n \"run {} - waiting for completion, current state {}\".format(\n run.name, state\n )\n )\n time.sleep(self.wait_interval)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Return a list of people in this classification, filtered by the current context
def getPeople(self):
    secman = getSecurityManager()
    #There *has* to be a better way to do this...
    localPeople = self.getReferences(relationship='classifications_people')
    #Get the intersection of people referenced to this classification and people within/referenced to the parent
    classificationPeople = list(set(localPeople) & set(self.aq_parent.getPeople()))
    #Determine the valid people to show
    visiblePeople = []
    currentDateTime = DateTime()
    for person in classificationPeople:
        if currentDateTime >= person.getEffectiveDate() and (currentDateTime < person.getExpirationDate() or person.getExpirationDate() is None):
            if secman.checkPermission(View, person):
                visiblePeople.append(person)
    #Return only the visible people
    return visiblePeople
[ "def people(self):\n\n if not self.content.get('people'):\n return None\n\n people = OrderedDict(self.content['people'])\n\n query = PersonCollection(object_session(self)).query()\n query = query.filter(Person.id.in_(people.keys()))\n\n result = []\n\n for person in query.all():\n person.context_specific_function = people[person.id.hex]\n result.append(person)\n\n order = list(people.keys())\n result.sort(key=lambda p: order.index(p.id.hex))\n\n return result", "def people(self):\n\n if not self.content.get('people'):\n return None\n\n people = OrderedDict(self.content['people'])\n\n query = PersonCollection(object_session(self)).query()\n query = query.filter(Person.id.in_(people.keys()))\n\n result = []\n\n for person in query.all():\n function, show_function = people[person.id.hex]\n person.context_specific_function = function\n person.display_function_in_person_directory = show_function\n result.append(person)\n\n order = list(people.keys())\n result.sort(key=lambda p: order.index(p.id.hex))\n\n return result", "def people(self):\n return self.items", "def get_people(self):\n \n return set(self.people)", "def detect_people(self, img_filename):\n\n results = detect(self.net, self.meta, bytes(img_filename, 'utf-8'))\n\n people = []\n\n for cat, score, bounds in results:\n if cat == b'person':\n\n x, y, w, h = bounds\n\n center = (int(x), int(y))\n size = (w, h)\n top_left = (int(x - w / 2), int(y - h / 2))\n bottom_right = (int(x + w / 2), int(y + h / 2))\n target = (int(x), int(y-h/6))\n\n people.append(dict(\n category=cat.decode(\"utf-8\"), \n score=score, \n center=center,\n top_left=top_left,\n bottom_right=bottom_right,\n target=target,\n ))\n\n return people", "def getPeople(self):\n organizer = self.store.findUnique(people.Organizer, default=None)\n if organizer is None:\n return iter(())\n return iter(self.store.query(\n people.Person, sort=people.Person.name.ascending))", "def get_candidate_persons(organization=None):\n if not organization:\n organization = getActiveDatabaseUser().get_active_organization()\n #no \"or\" in filtering so get two lists and combine.\n query = Teacher.all(keys_only=True)\n query.filter(\"organization =\", organization)\n keys = query.fetch(500)\n query = Administrator.all(keys_only=True)\n query.filter(\"organization =\", organization)\n keys.extend(query.fetch(500))\n #list now includes all teachers and administrators in the organization\n # now choose only those that have no current reference\n candidates = []\n persons = db.get(keys)\n for person in persons:\n try:\n if not person.databaseuser_set.get():\n candidates.append(person)\n except:\n pass\n candidate_selection_list = []\n for person in candidates:\n name = person.full_name_lastname_first()\n entry = {\"value\":name, \"label\":name, \"key\": str(person.key())}\n candidate_selection_list.append(entry)\n return candidate_selection_list", "def get_all_family_persons(self):\n print('ALL PERSONS IN FAMILY')\n family_db = JsonDatabase(self.DATABASE_NAME, self.DATABASE_DIR)\n pp(family_db.fetch_all())\n self.print_line()", "def study_people(self):\n if not self._people:\n self._people = []\n # add the overall_contact\n if self.overall_contact:\n self._people.append(self.overall_contact)\n if self.overall_contact_backup:\n self._people.append(self.overall_contact_backup)\n if self.overall_officials:\n for official in self.overall_officials:\n self._people.append(official)\n for location in self.locations:\n # load the location people\n if location.investigators:\n for investigator in 
location.investigators:\n if investigator not in self.study_people:\n self._people.append(investigator)\n if location.contact and location.contact not in self.study_people:\n self._people.append(location.contact)\n if (\n location.contact_backup\n and location.contact_backup not in self.study_people\n ):\n self._people.append(location.contact_backup)\n return self._people", "def filter_by(criteria):\n\n employees = []\n [employees.append(v) for k, v in directory.items() if criteria(v) == True]\n\n return employees", "def get_queryset(self):\n user = self.request.user\n preferences = models.UserPref.objects.get(user=user.id)\n dogs = models.Dog.objects.filter(\n gender__in=preferences.gender.split(','),\n size__in=preferences.size.split(','),\n age__in=convert_dog_age(preferences.age),\n ).order_by('pk')\n return dogs", "def get_people(self) -> List[PersonEntity]:\n url = \"{}/people\".format(self.BASE_URL)\n params = {\"fields\": \"id,name,films\", \"limit\": 250}\n response = requests.get(url, params=params)\n\n if response.status_code != 200:\n # OP: use proper logger here, throw error and catch in caller\n return []\n return response.json()", "def persons_at_home(self, home_id=None):\n home_data = self.homeById(home_id)\n atHome = []\n for p in home_data[\"persons\"]:\n # Only check known personshome\n if \"pseudo\" in p:\n if not p[\"out_of_sight\"]:\n atHome.append(p[\"pseudo\"])\n return atHome", "def find_all():\n return ItopapiPrototype.find_all(ItopapiPerson)", "def get_all_users(self):\n return self.byte_decode_search_results(\n self.search(\n self.domain_base, \"(&(objectCategory=person)(objectClass=user))\"\n )\n )", "def people(age):\n if age in ages_to_names:\n return ages_to_names[age]\n return []", "def get_people(self, company_id, only_disabled=False):\n company = self.__find_company(company_id)\n if company is None:\n return None\n company_name = company[1].get('cn')[0]\n active = 'FALSE' if only_disabled else 'TRUE'\n ldap_filter = ldap.filter.filter_format('(&(o=%s)(active=%s))', [company_name, active])\n ldap_response = self.ldap_connection.search_ext_s(LdapService.LDAP_BASES['people'],\n ldap.SCOPE_SUBTREE, ldap_filter)\n\n if ldap_response is None:\n return []\n people = map_ldap_response(ldap_response, 'people')\n\n return sorted(people, key=lambda k: k['name'].lower())", "def findUsers(self, searchFilter):\n raise NotImplementedError", "def find_all_users(self):\n token = self.config.get('PEOPLE_GATEWAY_APP_TOKEN')\n headers = {'app_token': token}\n\n url = '%s/cit/api/v2/people' % self.config.get('PEOPLE_GATEWAY_HOST')\n\n logger.debug('Retreive all users')\n logger.debug('url = %s' %url)\n response = requests.get(url=url, headers=headers)\n\n logger.info('status %s' % response.status_code)\n\n return response.json()" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Return a list of people, sorted by SortableName
def getSortedPeople(self):
    people = self.getPeople()
    return sorted(people, cmp=lambda x,y: cmp(x.getSortableName(), y.getSortableName()))
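Note that the cmp= keyword of sorted() and the cmp() builtin exist only in Python 2. A Python 3-compatible sketch, assuming the same getSortableName() accessor on each person object, would use a key function instead:

def getSortedPeople(self):
    people = self.getPeople()
    # key= replaces the removed cmp=; sorting order is unchanged
    return sorted(people, key=lambda person: person.getSortableName())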
[ "def sort_by_name(people):\n return sorted(people, key=get_name)", "def print_by_names():\n print \"\\nSorted by names:\"\n new_contacts = sorted(contacts)\n for i in new_contacts:\n print i + \" : \" + contacts[i]", "def sort_by_name(fathers_of_the_founders):\n sorting = sorted(fathers_of_the_founders.items(), key=lambda t: t[0])\n return print(sorting)", "def _get_names(self, persons, name_displayer):\n return [name_displayer.display(person) for person in persons]", "def SortNames(self, nameList):\n nameList.sort()", "def listPersons():\n\n print 'Directory'\n sql.execute('SELECT * FROM persons ORDER BY name')\n allRows = sql.fetchall()\n for row in allRows:\n print row[0], row[1]", "def people(self):\n\n if not self.content.get('people'):\n return None\n\n people = OrderedDict(self.content['people'])\n\n query = PersonCollection(object_session(self)).query()\n query = query.filter(Person.id.in_(people.keys()))\n\n result = []\n\n for person in query.all():\n function, show_function = people[person.id.hex]\n person.context_specific_function = function\n person.display_function_in_person_directory = show_function\n result.append(person)\n\n order = list(people.keys())\n result.sort(key=lambda p: order.index(p.id.hex))\n\n return result", "def sort_by_surname_desc(names):\r\n sorted_names = dedup_and_title_case_names(names)\r\n function_get_surname = (lambda full_name: full_name.split()[1])\r\n sorted_names.sort(key=function_get_surname, reverse=True)\r\n return sorted_names", "def sort_by_surname_desc(names):\n\n names = dedup_and_title_case_names(names)\n return sorted(names, key=lambda x: x.split(\" \")[-1], reverse=True)", "def people(age):\n if age in ages_to_names:\n return ages_to_names[age]\n return []", "def get_presenters(self):\r\n return self.presenters.order_by('-leader', 'last_name')", "def people(self):\n return self.items", "def sorted_organisms(organisms, sort_key=\"name\"):\n organisms = sorted(organisms, key=attrgetter(sort_key))\n return sorted(organisms, key=lambda organism: str(organism.status), reverse=True)", "def people(self):\n\n if not self.content.get('people'):\n return None\n\n people = OrderedDict(self.content['people'])\n\n query = PersonCollection(object_session(self)).query()\n query = query.filter(Person.id.in_(people.keys()))\n\n result = []\n\n for person in query.all():\n person.context_specific_function = people[person.id.hex]\n result.append(person)\n\n order = list(people.keys())\n result.sort(key=lambda p: order.index(p.id.hex))\n\n return result", "def sort_people_world(self):\n people_ids = np.array([person.id for person in self.world.people])\n ids_sorted_idx = np.argsort(people_ids)\n self.world.people.people = np.array(self.world.people)[ids_sorted_idx]", "def sort_species(species_dict):\n # first sort by Genus name using the \"sort\" attribute\n sorted_list = list(species_dict.items())\n sorted_list.sort()\n return sorted_list", "def get_presenters_print(self):\r\n return self.presenters.order_by('last_name')", "def sortByTitle(self,keys_list=None):\n\t\tif not keys_list:\n\t\t\tkeys_list = self.getEntryList()\n\t\t\t\n\t\tr_list = self.searchObjTypeDerive(keys_list,query_objType=\".obj.pub\")\n\t\tr_list.sort(key = lambda x: self.entries[x].title )\n\t\treturn r_list", "def title_owners(record, last_only=False, filter_companies=False):\n\n all_names = record['owners'].split(', ')\n result = set()\n for name in all_names:\n if filter_companies and name_is_company(name):\n continue\n\n if last_only:\n name = last_name(name)\n\n 
result.add(name)\n\n\n return list(result)", "def sortByFirstAuthor(self,keys_list=None):\n\t\tif not keys_list:\n\t\t\tkeys_list = self.getEntryList()\n\t\t\t\n\t\tr_list = self.searchObjTypeDerive(keys_list,query_objType=\".obj.pub\")\n\t\tr_list.sort(key = lambda x: self.entries[x].authors[0] )\n\t\treturn r_list" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Retrieve a message from IMAP and return it as an email object
def fetch_message(imap, imap_msg_id):
    typ, msgdata = imap.fetch(imap_msg_id, '(RFC822)') 
    encoded_msg = msgdata[0][1]
    return email.message_from_string(encoded_msg)
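A hedged usage sketch follows; the host, credentials and mailbox name are placeholders, not from this snippet. The imap argument is assumed to be a logged-in imaplib connection with a mailbox selected, and the message IDs come from a prior SEARCH. The helper calls email.message_from_string, which matches Python 2; under Python 3 the fetched payload arrives as bytes, so email.message_from_bytes would be the drop-in replacement.

import email
import imaplib

# Assumed setup -- connection details are placeholders.
imap = imaplib.IMAP4_SSL('imap.example.com')
imap.login('user@example.com', 'app-password')
imap.select('Notes')

typ, data = imap.search(None, 'ALL')   # message IDs as a space-separated bytes blob
for msg_id in data[0].split():
    msg = fetch_message(imap, msg_id)  # returns an email.message.Message
    print(msg['Subject'])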
[ "def email_fetch(imap, uid):\n message = None\n status, response = imap.fetch(str(uid), '(BODY.PEEK[])')\n\n if status != 'OK':\n return status, response\n\n for i in response:\n if isinstance(i, tuple):\n s = i[1]\n if isinstance(s, bytes):\n s = s.decode('utf-8')\n message = email.message_from_string(s)\n\n return status, message", "def fetch_message_body(self, id):\n\n status, response = self.imap.uid('fetch', id, '(RFC822)')\n if response[0] is None:\n return None\n\n return response[0][1]", "def get_imap(mailbox='INBOX'):\n imap = IMAP4_SSL(settings.EMAIL_HOST)\n imap.login(settings.EMAIL_USER, settings.EMAIL_PASSWORD)\n imap.select(mailbox)\n return imap", "def get_email_by_id(self, message_id, user=\"me\") -> dict:\n return self.gmail.users().messages().get(userId=\"me\", id=message_id).execute()", "def get_inbox(self):\n resp, data = self.mail.select(pub_config['gmail']['mailbox'])\n self.check_resp(resp)\n return data", "def fetch_message_bytes(self, msg_id):\n try:\n message = self.service.users().messages().get(\n userId='me',\n id=msg_id,\n format='raw').execute()\n return base64.urlsafe_b64decode(message['raw'].encode('ASCII'))\n except errors.HttpError as error:\n raise GMailError(f'An error occurred: {error}')", "def get_unread_messages(self):\n resp, msgs = self.mail.search(None, '(UNSEEN)')\n self.check_resp(resp)\n for msg_num in msgs[0].split():\n resp, msg = self.mail.fetch(msg_num, '(RFC822)')\n self.check_resp(resp)\n # contains the subject, body, and a bunch of other data\n msg_info = email.message_from_bytes(msg[0][1])\n yield msg_info", "def getMessages(imap, newOnly=True):\n messages = []\n errors = []\n\n # Set search type based on what we're expecting to find\n if newOnly:\n criterion = \"NOT SEEN\"\n else:\n criterion = \"ALL\"\n\n # Get messages to look at\n ok, result = imap.search(None, criterion)\n if ok != OK:\n errors.append(\"getMessages search result: {}: {}\".format(ok, result))\n return (messages, errors)\n\n # Iterate through messages and grab the subjects\n for messageNumber in result[0].split():\n ok, data = imap.fetch(messageNumber, '(RFC822.HEADER)')\n if ok != OK:\n errors.append(\"failed to get message {}: {}\".format(messageNumber,\n data))\n # There's some work to decode these...\n for item in data:\n if len(item) < 2: # the closing bit is too short, skip it\n continue\n fromline = \"\"\n subject = \"\"\n for line in item[1].decode(\"utf-8\").splitlines():\n if line.strip().startswith(SUBJECT):\n subject = decodeHeader(line[len(SUBJECT):])\n if line.strip().startswith(FROMLINE):\n fromline = decodeHeader(line[len(FROMLINE):])\n messages.append(fromline + \": \" + subject)\n # And go home\n return messages, errors", "def fetch_from_imap(conn: imaplib.IMAP4, *args, cmd='fetch', **kwargs) -> \\\n Tuple[bytes, List[bytes]]:\n # first lets execute CHECK IMAP command. 
It will flush any pending\n # operation in session\n conn.check()\n status, data = getattr(conn, cmd)(*args, **kwargs)\n if status != 'OK':\n raise Exception('Failed to execute fetch command: {}'.format(data))\n return build_imap_response_line(data)", "async def get_email_by_id(self, email_id):\n\n async with self.postgres.acquire() as conn:\n async with conn.cursor() as curs:\n await curs.execute('''\n SELECT * FROM mailitem\n LEFT JOIN body ON body.id = mailitem.bodyid\n WHERE mailitem.id=%s;\n ''',\n (email_id,))\n email = await curs.fetchone()\n return {\n \"_id\": email[0],\n \"date_sent\": email[1],\n \"subject\": email[2],\n \"from_address\": email[3],\n \"recipients\": await self.get_email_recipients(email[0]),\n \"body_id\": email[4],\n \"body_sha256\": email[6],\n \"body_content\": email[7],\n \"attachments\": await self.get_email_attachments(email_id)\n }", "def get_email_message(self):\n msg = self.get_message_parser()\n to = self.to() or mailparser_utils.get_addresses(msg.to)\n cc = self.cc() or mailparser_utils.get_addresses(msg.cc)\n bcc = self.bcc()\n\n # Process headers, but ignore address headers - these are processed explicitly.\n headers = {\n header: value\n for header, value in msg.headers.items()\n if header.lower() not in PARSED_HEADERS_TO_IGNORE\n }\n\n Email = EmailMultiAlternatives if msg.text_html else EmailMessage\n email = Email(\n subject=msg.subject,\n body='\\n'.join(msg.text_plain),\n from_email=mailparser_utils.get_address(msg.from_),\n to=to,\n bcc=bcc,\n headers=headers,\n cc=cc,\n reply_to=mailparser_utils.get_addresses(msg.reply_to),\n )\n\n # set the multipart subtype\n content_type = msg.headers[\"Content-Type\"].split(\";\", 1)[0] # discard boundary\n main_type, subtype = content_type.split(\"/\", 1)\n if main_type == \"multipart\":\n email.mixed_subtype = subtype\n\n # NOTE - mailparser only supports text and HTML, any other content types are\n # considered not_managed.\n if msg.text_html:\n email.attach_alternative('<br>'.join(msg.text_html), mimetype='text/html')\n\n # attachment is a dict with fixed keys:\n # filename, payload, binary, mail_content_type, content-id, content-disposition,\n # charset and content_transfer_encoding\n #\n # This performs generic handling of attachments, respecting the original various\n # ways the attachment can be used.\n for attachment in msg.attachments:\n basetype, subtype = attachment[\"mail_content_type\"].split(\"/\", 1)\n binary = attachment[\"binary\"]\n content = attachment['payload']\n transfer_encoding = attachment[\"content_transfer_encoding\"]\n\n mime_attachment = MIMEBase(basetype, subtype)\n mime_attachment.set_payload(content)\n if not binary:\n Encoders.encode_base64(mime_attachment)\n else:\n mime_attachment.add_header(\"Content-Transfer-Encoding\", transfer_encoding)\n for header in (\"Content-ID\", \"Content-Disposition\"):\n value = attachment[header.lower()]\n if value:\n mime_attachment.add_header(header, value)\n email.attach(mime_attachment)\n\n return email", "def get_full_message(self):\n if self.got_substance:\n return self\n\n assert self.id, \"Cannot get substance of message without an id\"\n assert self.uid, \"Cannot get substance of message without an UID\"\n assert self.imap_server, \"Cannot do anything without IMAP connection\"\n\n # First, try to select the folder that the message is in.\n try:\n self.imap_server.SelectFolder(self.folder.name)\n except BadIMAPResponseError:\n # Can't select the folder, so getting the substance will not\n # work.\n self.could_not_retrieve = True\n 
print(\"Could not select folder %s for message \" \\\n \"%s (uid %s)\" % (self.folder.name, self.id, self.uid), file=sys.stderr)\n return self\n\n # Now try to fetch the substance of the message.\n try:\n response = self.imap_server.uid(\"FETCH\", self.uid,\n self.rfc822_command)\n except MemoryError:\n # Really big messages can trigger a MemoryError here.\n # The problem seems to be line 311 (Python 2.3) of socket.py,\n # which has \"return \"\".join(buffers)\". This has also caused\n # problems with Mac OS X 10.3, which apparently is very stingy\n # with memory (the malloc calls fail!). The problem then is\n # line 301 of socket.py which does\n # \"data = self._sock.recv(recv_size)\".\n # We want to handle this gracefully, although we can't really\n # do what we do later, and rewrite the message, since we can't\n # load it in the first place. Maybe an elegant solution would\n # be to get the message in parts, or just use the first X\n # characters for classification. For now, we just carry on,\n # warning the user and ignoring the message.\n self.could_not_retrieve = True\n print(\"MemoryError with message %s (uid %s)\" % \\\n (self.id, self.uid), file=sys.stderr)\n return self\n\n command = \"uid fetch %s\" % (self.uid,)\n response_data = self.imap_server.check_response(command, response)\n data = self.imap_server.extract_fetch_data(response_data)\n # The data will be a dictionary - hopefully with only one element,\n # but maybe more than one. The key is the message number, which we\n # do not have (we use the UID instead). So we look through the\n # message and use the first data of the right type we find.\n rfc822_data = None\n for msg_data in data.values():\n if self.rfc822_key in msg_data:\n rfc822_data = msg_data[self.rfc822_key]\n break\n if rfc822_data is None:\n raise BadIMAPResponseError(\"FETCH response\", response_data)\n\n try:\n new_msg = email.message_from_string(rfc822_data, IMAPMessage)\n # We use a general 'except' because the email package doesn't\n # always return email.errors (it can return a TypeError, for\n # example) if the email is invalid. In any case, we want\n # to keep going, and not crash, because we might leave the\n # user's mailbox in a bad state if we do. Better to soldier on.\n except:\n # Yikes! Barry set this to return at this point, which\n # would work ok for training (IIRC, that's all he's\n # using it for), but for filtering, what happens is that\n # the message ends up blank, but ok, so the original is\n # flagged to be deleted, and a new (almost certainly\n # unsure) message, *with only the spambayes headers* is\n # created. 
The nice solution is still to do what sb_server\n # does and have a X-Spambayes-Exception header with the\n # exception data and then the original message.\n self.invalid = True\n text, details = message.insert_exception_header(\n rfc822_data, self.id)\n self.invalid_content = text\n self.got_substance = True\n\n # Print the exception and a traceback.\n print(details, file=sys.stderr)\n\n return self\n\n new_msg.folder = self.folder\n new_msg.previous_folder = self.previous_folder\n new_msg.rfc822_command = self.rfc822_command\n new_msg.rfc822_key = self.rfc822_key\n new_msg.imap_server = self.imap_server\n new_msg.uid = self.uid\n new_msg.setId(self.id)\n new_msg.got_substance = True\n\n if options[\"Headers\", \"mailid_header_name\"] not in new_msg:\n new_msg[options[\"Headers\", \"mailid_header_name\"]] = self.id\n\n if options[\"globals\", \"verbose\"]:\n sys.stdout.write(chr(8) + \"*\")\n return new_msg", "def get_mailbox_contents(self, mailbox):\n # Open mailbox\n typ, dat = self._conn.select(mailbox)\n if typ != 'OK':\n raise Exception(f\"Could not open mailbox {mailbox}\")\n\n # Get all emails from mailbox\n typ, dat = self._conn.search(None, \"ALL\")\n if typ != 'OK':\n log.debug(f\"Mailbox {mailbox} is empty.\")\n return []\n\n emails = list()\n email_ids = dat[0]\n email_id_list = email_ids.split()\n for email_id in email_id_list:\n # Try to fetch email from the mailbox\n typ, dat = self._conn.fetch(email_id, '(UID RFC822)')\n if typ != 'OK':\n raise Exception(f\"Failed to get message {email_id}\")\n\n # Extract the UID from the email\n uid = re.findall(r\"(?<=UID )(\\d+)\", str(dat[0][0]))[0]\n # Add email content to the emails list\n emails.append((uid, email.message_from_bytes(dat[0][1])))\n\n return emails", "def fetch_mail(self):\n self.logger.debug(\"Checking mail for: %s\" % self.mailing_list.name)\n pop_client = poplib.POP3_SSL(self.mailing_list.pop_host, self.mailing_list.pop_port)\n try:\n response = pop_client.user(self.mailing_list.username).decode(\"utf-8\")\n if not response.startswith('+OK'):\n raise Exception('Username not accepted: %s' % response)\n try:\n response = pop_client.pass_(self.mailing_list.password).decode(\"utf-8\")\n if not response.startswith('+OK'):\n raise Exception('Password not accepted: %s' % response)\n except poplib.error_proto as e:\n # We get this back a lot, and we don't want it to flood our logs:\n # error_proto('-ERR [IN-USE] Unable to lock maildrop: Mailbox is locked by POP server',)\n if 'IN-USE' not in str(e):\n raise e\n self.logger.debug(\"Ignoring locked mailbox\")\n return\n\n stats = pop_client.stat()\n if stats[0] == 0:\n self.logger.debug(\"No mail\")\n return []\n\n results = []\n self.logger.info(\"Processing %d %s messages\" % (stats[0], self.mailing_list.name))\n for i in range(stats[0]):\n try:\n response, mail, _size = pop_client.retr(i + 1)\n parser = email.parser.BytesFeedParser()\n parser.feed(b'\\n'.join(mail))\n message = parser.close()\n\n # Delete and ignore auto responses\n if message['Auto-Submitted'] and message['Auto-Submitted'] != 'no':\n pop_client.dele(i + 1)\n continue\n\n # Delete and ignore messages sent from any list to avoid loops\n if message['List-ID']:\n pop_client.dele(i + 1)\n continue\n\n # TODO Delete and ignore soft bounces\n results.append(self.mailing_list.create_incoming(message))\n pop_client.dele(i + 1)\n except Exception as e:\n self.logger.error(\"Exception while processing email\")\n # self.logger.error(\"Message: \" + str(message))\n self.logger.error(\"Exception: \" + str(e))\n\n 
finally:\n pop_client.quit()\n\n return results", "def getCommand():\r\n command=\"error\"\r\n M = imaplib.IMAP4_SSL('imap.gmail.com')\r\n try:\r\n M.login('mass.checker@gmail.com', pswd)\r\n except imaplib.IMAP4.error:\r\n print \"LOGIN FAILED!!! \"\r\n rv, mailboxes = M.list()\r\n rv, data = M.select(\"INBOX\")\r\n if rv == 'OK':\r\n print \"Waiting for command ...\\n\"\r\n rv, data = M.search(None, \"(UNSEEN)\")\r\n if rv != 'OK':\r\n print \"No messages found!\"\r\n return\r\n for num in data[0].split():\r\n rv, data = M.fetch(num, '(RFC822)')\r\n if rv != 'OK':\r\n print \"ERROR getting message\", num\r\n return\r\n msg = em.message_from_string(data[0][1])\r\n command=msg['Subject']\r\n M.close()\r\n M.logout()\r\n if(command is None):\r\n command=\"error\"\r\n return(command)", "def get_attachement(detach_dir:str):\n # if 'attachments' not in os.listdir(detach_dir):\n # os.mkdir(detach_dir + \"/\" + 'attachments')\n # detach_dir = detach_dir + \"/attachments\"\n\n # user_name = input('Enter your GMail user_name:')\n user_name = os.environ.get(\"RESPONSYS_GMAIL\")\n # passwd = getpass.getpass('Enter your password: ')\n passwd = os.environ.get(\"RESPONSYS_GMAIL_PASSWD\")\n\n try:\n imap_session = imaplib.IMAP4_SSL('imap.gmail.com')\n return_code, account_details = imap_session.login(user_name, passwd)\n if return_code != 'OK':\n print('Not able to sign in!')\n raise\n \n labels = imap_session.list()[1]\n # imap_session.select('[Gmail]/All Mail')\n for l in labels:\n print(l)\n imap_session.select('INBOX')\n # return_code, data = imap_session.search(None, 'ALL')\n return_code, data = imap_session.search(None, '(UNSEEN)')\n if return_code != 'OK':\n print('Error searching Inbox.')\n raise\n \n # Iterating over all emails\n for msgId in data[0].split():\n return_code, message_parts = imap_session.fetch(msgId, '(RFC822)')\n if return_code != 'OK':\n print('Error fetching mail.')\n raise\n\n email_body = message_parts[0][1]\n mail = email.message_from_bytes(email_body)\n for part in mail.walk():\n if part.get_content_maintype() == 'multipart':\n # print(part.as_string())\n continue\n if part.get('Content-Disposition') is None:\n # print(part.as_string())\n continue\n file_name = part.get_filename()\n\n if bool(file_name):\n # print(\"Raw Date: \", mail[\"Date\"])\n # 'Sun, 15 Jul 2018 08:07:08 +0000'\n date_tuple = email.utils.parsedate_tz(mail['Date'])\n local_date = datetime.datetime.fromtimestamp(email.utils.mktime_tz(date_tuple))\n date_suffix = local_date.strftime(\"%Y-%m-%d\")\n file_name = file_name + \".\" + date_suffix\n\n file_path = os.path.join(detach_dir, file_name)\n if not os.path.isfile(file_path):\n print(file_name)\n fp = open(file_path, 'wb')\n fp.write(part.get_payload(decode=True))\n fp.close()\n else:\n print(file_name + \" was already downloaded.\")\n imap_session.close()\n imap_session.logout()\n call('pwd')\n call('/Users/charliezhu/work/bin/email_metrics_load.sh')\n except (Exception) as error:\n print(error)\n print('Not able to download all attachments.')", "def unread_mail(self):\n em = email.Email()\n emails = em.request(today = False, prnt = True)\n if emails:\n for lines in emails:\n self.log.info(lines)\n else:\n mws.recover()\n tc_fail(\"Failed requesting Unread Mail\")", "def parse_email(message):\n\n pass", "def get_email_content(self, email):\r\n\r\n if email.is_multipart():\r\n self.log('Extracting email contents from multipart message')\r\n\r\n magic_type = 'multipart/alternative'\r\n payload_types = dict((p.get_content_type(), i) for i, p in 
enumerate(email.get_payload()))\r\n if magic_type in payload_types.keys():\r\n self.log('Found magic content type: %s' % magic_type)\r\n index = payload_types[magic_type]\r\n payload = email.get_payload()[index].get_payload()\r\n else:\r\n payload = email.get_payload()\r\n\r\n for pl in payload:\r\n if pl.get_filename() is not None:\r\n # it's an attached file\r\n continue\r\n\r\n if pl.get_content_type() in ACCEPTABLE_TYPES:\r\n return pl.get_payload()\r\n else:\r\n return email.get_payload()\r\n\r\n return None" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Display content of a single Note (specified by IMAP message ID)
def show_note(args):
    imap = connect_to_imap_server(args)
    msg = fetch_message(imap, args.messageId)
    print(msg.as_string())
[ "def get_note(self, note_id):\n return self.__get_object('notes', None, note_id)", "def get_note(self, id):\n response = requests.get(self.notes_url, params = {'id':id}, headers = self.headers)\n response = self.__handle_response(response)\n n = response.json()['notes'][0]\n return Note.from_json(n)", "def fetch_message(imap, imap_msg_id):\n typ, msgdata = imap.fetch(imap_msg_id, '(RFC822)') \n encoded_msg = msgdata[0][1]\n return email.message_from_string(encoded_msg)", "def snippet_from_id(ID):\n results = service.users().messages().get(userId='me', id=ID).execute()\n \n info = dict()\n \n # Extract emails subject to find out which type of charge was performed\n headers = results['payload']['headers']\n subject = list(filter(lambda header: header['name'] == 'Subject', headers))\n info['type'] = get_type(subject[0]['value'])\n \n return results['snippet'], info", "def show(request, pMessageSerial):\n lCurrentUser = User.objects.filter(username=request.user.username)[0]\n try:\n lMessage = Message.objects.filter(id=pMessageSerial).filter(to_user=lCurrentUser)[0]\n except:\n raise Http404()\n lMessage.read = True\n lMessage.save()\n return render_auth(request, 'messages/show.html', {\"Message\":lMessage})", "def get_one_note(self,idt):\n q=\"select * from note where id=%d\"%(idt)\n try:\n NoteDB.cursor.execute(q)\n result=NoteDB.cursor.fetchall()\n obj=Note(idt=result[0],msg=result[1],time=result[2])\n return obj\n except Exception as e:\n raise", "def fetch_message_body(self, id):\n\n status, response = self.imap.uid('fetch', id, '(RFC822)')\n if response[0] is None:\n return None\n\n return response[0][1]", "async def viewnotes(self, ctx, user: discord.Member):\n notes = await self.config.guild(ctx.guild).notes()\n userid = str(user.id)\n if userid not in notes:\n return await ctx.send(\"No note for {}.\".format(user.display_name))\n else:\n return await ctx.send(\"Notes: {}\".format(\" / \".join(str(x) for x in notes[userid])))", "def view(request, message_id, template_name='django_messages/view.html',\n extra_context=None):\n message = get_object_or_404(Message, pk=message_id, owner=request.user)\n if message.is_unread():\n message.mark_read()\n message.save()\n ctx = extra_context or {}\n ctx.update({\n 'message': message,\n })\n return render_to_response(template_name, RequestContext(request, ctx))", "async def edit_reminder_content(self, ctx: Context, id_: int, *, content: str | None = None) -> None:\n if not content:\n content = await self.try_get_content_from_reply(ctx)\n if not content:\n # Message doesn't have a reply to get content from\n return\n await self.edit_reminder(ctx, id_, {\"content\": content})", "def __callShowNote(self): \r\n idSubject=input(\" Give id subject:\") \r\n try:\r\n sub=self.__table.searchSubject(idSubject)\r\n marks=self.__notes.getSubNote(sub)\r\n if len(marks)>0:\r\n newlist = mergeSort(marks, key=lambda k: k['note']) \r\n for el in newlist:\r\n print(el['name'] + \" - \" + str(el['note'])) \r\n else:\r\n print(\"No students.\") \r\n except IdNotFound as ex:\r\n print(ex.getErrors())", "def display_submit_note(inbox, topic):\r\n if not storage.Inbox.does_exist(inbox):\r\n abort(404)\r\n elif not storage.Inbox.is_enabled(inbox):\r\n abort(404)\r\n\r\n fake_name = get_full_name()\r\n topic_string = topic\r\n if topic_string:\r\n topic_string = \" about \" + topic\r\n return render_template(\r\n 'submit_note.htm.j2',\r\n user=inbox,\r\n topic=topic_string,\r\n fake_name=fake_name)", "def fetch_note(server, uuid, *, session=None):\n r = 
session.get(f\"{server}/api/node/{uuid}/note\")\n r.raise_for_status()\n return r.json()[\"note\"]", "def get_note_by_id(self, nId):\n return self.notes_dict[nId]", "def get_info_about_message(message_id):\n data = get_data()\n for channel in data[\"channels\"]:\n for message in channel[\"messages\"]:\n if message[\"message_id\"] == message_id:\n return message\n\n return None", "def get_detail(request, issue_id):\n detailissue = get_object_or_404(Issue, pk=issue_id)\n comments = Comment.objects.all()\n return render(request, 'issue/detail.html', {'issue': detailissue})", "def get(self, realtor_id, message_id):\n for realtor in REALTORS:\n if realtor[\"id\"] == realtor_id:\n break\n else:\n abort(404, message=\"Realtor not found.\")\n\n for message in MESSAGES:\n if message[\"id\"] == message_id:\n return message\n else:\n abort(404, message=\"Message not found.\")", "def download_notes(self):\r\n \r\n UI = self._input_ui.get()\r\n friendname = self._lastselectedfriend\r\n participants = [UI, friendname]\r\n\r\n msg=['download notes', participants]\r\n encoded = json.dumps(msg) \r\n self._client._s.send(encoded)\r\n\r\n encoded_chat = self._client._s.recv(4096)\r\n unencoded = json.loads(encoded_chat)\r\n self._localnotes = unencoded\r\n self._notearea.delete(\"1.0\", tk.END)\r\n self._notearea.insert(tk.END, self._localnotes)\r\n self._notearea.see(tk.END)", "def get_content(self, notification_id):\n url = \"%s/%s/content\" % (self.uri, notification_id)\n\n resp = self.session.get(url)\n\n if resp.status_code >= 400:\n raise CourierAPIException(resp)\n\n return resp.json()" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Sets tgt[key] to src[key] if key in src and is nonempty, else sets it to default
def _set_header(src, tgt, key, default):
    if key in src and src[key]:
        tgt[key] = src[key]
    else:
        tgt[key] = default
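A minimal illustration of the fallback behaviour, shown with plain dicts for simplicity (in the script the arguments are email message objects); the header names other than X-Mail-Created-Date are hypothetical:

src = {'X-Mail-Created-Date': 'Mon, 01 Jan 2024 00:00:00 +0000', 'X-Empty': ''}
tgt = {}
_set_header(src, tgt, 'X-Mail-Created-Date', 'fallback')  # present and non-empty: copied from src
_set_header(src, tgt, 'X-Empty', 'fallback')              # present but empty: default used
_set_header(src, tgt, 'X-Missing', 'fallback')            # missing key: default used
# tgt == {'X-Mail-Created-Date': 'Mon, 01 Jan 2024 00:00:00 +0000',
#         'X-Empty': 'fallback', 'X-Missing': 'fallback'}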
[ "def _remember_source(self, src_key: str, dest_key: str) -> None:\n\n src_map = self.source_map.setdefault(src_key, {})\n src_map[dest_key] = True", "def deepupdate(target, src):\n print(\"Target:\",target)\n print(\"SRC:\",src)\n for k, v in src.items():\n if type(v) == list:\n print(k,'k',target, '\\n\\n')\n target[k] = copy.copy(v)\n #if not k in target:\n # target[k] = copy.deepcopy(v)\n #else:\n # target[k].extend(v)\n elif type(v) == dict:\n if not k in target:\n target[k] = copy.deepcopy(v)\n else:\n deepupdate(target[k], v)\n elif type(v) == set:\n if not k in target:\n target[k] = v.copy()\n else:\n target[k].update(v.copy())\n else:\n target[k] = copy.copy(v)", "def __merge_dict(self, src, target, ignore_pubs=True):\n\n for p in src:\n if p not in target:\n target[p] = src[p]\n continue\n\n def build_dic(arr):\n \"\"\"Builds a dictionary of fmri:action entries\"\"\"\n dic = {}\n for (pfmri, action) in arr:\n if pfmri in dic:\n dic[pfmri].append(action)\n else:\n dic[pfmri] = [action]\n return dic\n\n src_dic = build_dic(src[p])\n targ_dic = build_dic(target[p])\n\n for src_pfmri in src_dic:\n # we want to remove entries deemed older than\n # src_pfmri from targ_dic.\n for targ_pfmri in targ_dic.copy():\n sname = src_pfmri.get_name()\n tname = targ_pfmri.get_name()\n if lint_fmri_successor(src_pfmri,\n targ_pfmri,\n ignore_pubs=ignore_pubs):\n targ_dic.pop(targ_pfmri)\n targ_dic.update(src_dic)\n l = []\n for pfmri in targ_dic:\n for action in targ_dic[pfmri]:\n l.append((pfmri, action))\n target[p] = l", "def replace_media(orig: Dict, new: Dict) -> None:\n orig[\"lysrc\"] = copy.deepcopy(new[\"lysrc\"])\n orig[\"text\"] = new[\"text\"] if \"text\" in new else None", "def __merge_keys(\n self, kv_src_bucket, kv_dest_bucket, kvs_num=1, filter_exp=None):\n valid_keys_src, deleted_keys_src = kv_src_bucket[\n kvs_num].key_set()\n valid_keys_dest, deleted_keys_dest = kv_dest_bucket[\n kvs_num].key_set()\n\n self.log.info(\"src_kvstore has %s valid and %s deleted keys\"\n % (len(valid_keys_src), len(deleted_keys_src)))\n self.log.info(\"dest kvstore has %s valid and %s deleted keys\"\n % (len(valid_keys_dest), len(deleted_keys_dest)))\n\n if filter_exp:\n filtered_src_keys = [key for key in valid_keys_src if re.search(str(filter_exp), key) is not None]\n valid_keys_src = filtered_src_keys\n self.log.info(\n \"{0} keys matched the filter expression {1}\".format(\n len(valid_keys_src),\n filter_exp))\n\n for key in valid_keys_src:\n # replace/add the values for each key in src kvs\n if key not in deleted_keys_dest:\n partition1 = kv_src_bucket[kvs_num].acquire_partition(key)\n partition2 = kv_dest_bucket[kvs_num].acquire_partition(key)\n # In case of lww, if source's key timestamp is lower than\n # destination than no need to set.\n # with Synchronized(dict(part1)) as partition1, Synchronized(dict(part2)) as partition2:\n if self.get_lww() and partition1.get_timestamp(\n key) < partition2.get_timestamp(key):\n continue\n key_add = partition1.get_key(key)\n partition2.set(\n key,\n key_add[\"value\"],\n key_add[\"expires\"],\n key_add[\"flag\"])\n kv_src_bucket[kvs_num].release_partition(key)\n kv_dest_bucket[kvs_num].release_partition(key)\n\n for key in deleted_keys_src:\n if key not in deleted_keys_dest:\n partition1 = kv_src_bucket[kvs_num].acquire_partition(key)\n partition2 = kv_dest_bucket[kvs_num].acquire_partition(key)\n # In case of lww, if source's key timestamp is lower than\n # destination than no need to delete.\n # with Synchronized(dict(part1)) as partition1, 
Synchronized(dict(part2)) as partition2:\n if self.__lww and partition1.get_timestamp(\n key) < partition2.get_timestamp(key):\n continue\n partition2.delete(key)\n kv_src_bucket[kvs_num].release_partition(key)\n kv_dest_bucket[kvs_num].release_partition(key)\n\n\n valid_keys_dest, deleted_keys_dest = kv_dest_bucket[\n kvs_num].key_set()\n self.log.info(\"After merging: destination bucket's kv_store now has {0}\"\n \" valid keys and {1} deleted keys\".\n format(len(valid_keys_dest), len(deleted_keys_dest)))", "def copy_attrs(self, src, overwrite=False):\n # check src.tile.lgtattrs\n if len(src.tile.lgtattrs) == 0:\n src.tile._get_lgt_attrs()\n for attr_name in src.tile.lgtattrs:\n self.set(attr_name, src.tile.get(attr_name), overwrite=overwrite)", "def add_merged_src(self, src, conf, optional_events=False, **kwargs):\n\n if not isinstance(conf, self.__class__):\n conf = self.__class__(conf=conf)\n\n def merge_conf(key, val, path):\n new = _merge_conf(key, val, path)\n try:\n existing = get_nested_key(self, path + [key])\n except KeyError:\n return (True, new)\n else:\n # Only add to the source if the result is different than what\n # is already set\n return (existing != new, new)\n\n def _merge_conf(key, val, path):\n\n def non_mergeable(key):\n if self.get(key, val) == val:\n return val\n else:\n raise KeyError(f'Cannot merge key \"{key}\": incompatible values specified: {self[key]} != {val}')\n\n if key == 'functions':\n return sorted(set(val) | set(self.get(key, [])))\n elif key == 'events-namespaces':\n # We already applied the namespaces to the events so the result\n # can be cleanly merged according to the original meaning.\n return []\n elif key == 'events':\n if not isinstance(val, TraceEventCheckerBase):\n val = AndTraceEventChecker.from_events(val)\n if optional_events:\n val = OptionalTraceEventChecker([val])\n\n # Merging has to take into account defaults, as we will then\n # set the namespace to be empty (None, )\n def get(conf, key):\n try:\n return conf.get(key)\n except KeyError:\n return conf.DEFAULT_SRC.get(key)\n\n val = val.expand_namespaces(\n namespaces=get(conf, 'events-namespaces')\n )\n\n self_val = self.get(key, [])\n if not isinstance(self_val, TraceEventCheckerBase):\n self_val = AndTraceEventChecker.from_events(self_val)\n\n self_val = self_val.expand_namespaces(\n namespaces=get(self, 'events-namespaces')\n )\n\n return AndTraceEventChecker([val, self_val])\n elif key == 'buffer-size':\n return max(val, self.get(key, 0))\n elif key == 'trace-clock':\n return non_mergeable(key)\n elif key == 'saved-cmdlines-nr':\n return max(val, self.get(key, 0))\n elif key == 'tracer':\n return non_mergeable(key)\n elif key == 'modules':\n return merge_level(val, path + [key])\n elif key == 'auto-load':\n return non_mergeable(key)\n else:\n raise KeyError(f'Cannot merge key \"{key}\"')\n\n def merge_level(conf, path=[]):\n return {\n k: v\n for k, (keep, v) in (\n (k, merge_conf(k, v, path))\n for k, v in conf.items()\n )\n if keep\n }\n\n merged = merge_level(conf)\n # Namespaces were expanded in events directly so we want to ensure they\n # will not undergo expansion again if there are cascading merges.\n merged['events-namespaces'] = []\n\n # We merge some keys with their current value in the conf\n return self.add_src(src, conf=merged, **kwargs)", "def merge(dst, src, filter=None, flags=MERGE_ADDITIVE, _path=\"\"):\n\n def _check_typesafe(obj1, obj2, key, path):\n if ( (flags & MERGE_TYPESAFE) and (type(obj1[key]) != type(obj2[key]))):\n raise TypeError(\"Cannot merge 
objects of type {} and {} at {}\"\n \"\".format(type(obj1[key]), type(obj2[key]), path))\n\n\n def _filter_assign(obj, key, value):\n if (not filter) or (filter and filter(value)):\n obj[key] = value\n\n if isinstance(src, dict):\n for (i, v) in enumerate(src):\n _check_typesafe(dst, src, v, \"/\".join([_path, str(v)]))\n\n if not v in dst:\n if not isinstance(src[v], (dict, list)):\n _filter_assign(dst, v, src[v])\n else:\n dst[v] = src[v]\n else:\n if not isinstance(src[v], (dict, list)):\n _filter_assign(dst, v, src[v])\n else:\n merge(dst[v], src[v], filter=filter, flags=flags,\n _path=\"/\".join([_path, str(v)]))\n elif isinstance(src, list):\n for (i, v) in enumerate(src):\n _check_typesafe(dst, src, i, \"/\".join([_path, str(i)]))\n\n dsti = i\n if ( flags & MERGE_ADDITIVE):\n dsti += len(src)\n if dsti >= len(dst):\n dst += [None] * (dsti - len(dst) + 1)\n if dst[dsti] == None:\n _filter_assign(dst, dsti, src[i])\n else:\n if not isinstance(src[i], (dict, list)):\n _filter_assign(dst, dsti, src[i])\n else:\n merge(dst[i], src[i], filter=filter, flags=flags,\n _path=\"/\".join(_path, v))", "def update_json(src, key, value):\n if key:\n src[key[0]] = update_json(src[key[0]], key[1:], value)\n else:\n # We've found the value we want to replace regardless of whether or not\n # the object we are replacing is another copmlicated data structure.\n src = value\n return src", "def override(left, right, key, default):\n return right.get(key, left.get(key, default))", "def merge_metadata(dst, src):\n for section in src.sections():\n for option in src.options(section):\n src_value = src.get(section, option)\n try:\n dst_value = dst.get(section, option)\n except ConfigParser.NoSectionError:\n dst.add_section(section)\n dst.set(section, option, src_value)\n except ConfigParser.NoOptionError:\n dst.set(section, option, src_value)\n else:\n if src_value != dst_value:\n log.warn(\"section={}, option={}, src value={}, dst value={}\"\n .format(section, option, src_value, dst_value))\n dst.set(section, option, src_value) \n return dst", "def update_node_helper(source, target):\n for node in target:\n if node not in source:\n continue\n target.node[node].update(source.node[node])", "def _ensure_source(self, conf, source):\n if source not in conf[K_SOURCES]:\n conf[K_SOURCES][source] = deepcopy(self._sources_schema)", "def test_default_route_set_src(duthosts, tbinfo):\n duthost = find_duthost_on_role(\n duthosts, get_upstream_neigh_type(tbinfo['topo']['type']), tbinfo)\n asichost = duthost.asic_instance(0 if duthost.is_multi_asic else None)\n\n config_facts = asichost.config_facts(\n host=duthost.hostname, source=\"running\")['ansible_facts']\n\n lo_ipv4 = None\n lo_ipv6 = None\n los = config_facts.get(\"LOOPBACK_INTERFACE\", {})\n logger.info(\"Loopback IPs: {}\".format(los))\n for k, v in list(los.items()):\n if k == \"Loopback0\":\n for ipstr in list(v.keys()):\n ip = ipaddress.ip_interface(ipstr)\n if ip.version == 4:\n lo_ipv4 = ip\n elif ip.version == 6:\n lo_ipv6 = ip\n\n pytest_assert(lo_ipv4, \"cannot find ipv4 Loopback0 address\")\n pytest_assert(lo_ipv6, \"cannot find ipv6 Loopback0 address\")\n\n rtinfo = asichost.get_ip_route_info(ipaddress.ip_network(\"0.0.0.0/0\"))\n pytest_assert(rtinfo['set_src'],\n \"default route do not have set src. 
{}\".format(rtinfo))\n pytest_assert(rtinfo['set_src'] == lo_ipv4.ip,\n \"default route set src to wrong IP {} != {}\".format(rtinfo['set_src'], lo_ipv4.ip))\n\n rtinfo = asichost.get_ip_route_info(ipaddress.ip_network(\"::/0\"))\n pytest_assert(\n rtinfo['set_src'], \"default v6 route do not have set src. {}\".format(rtinfo))\n pytest_assert(rtinfo['set_src'] == lo_ipv6.ip,\n \"default v6 route set src to wrong IP {} != {}\".format(rtinfo['set_src'], lo_ipv6.ip))", "def copy_keys_if_present(source, destination, keys):\n for key in keys:\n try:\n value = source[key]\n except KeyError:\n pass\n else:\n destination[key] = value", "def _iso_inv_map(d):\n _d = {}\n for src, d2 in d.items():\n for tgt, data in d2.items():\n if tgt is not None and src != tgt:\n if tgt not in _d:\n _d[tgt] = {}\n _d[tgt][src] = '--' + data\n for k, d2 in _d.items():\n d[k].update(d2)", "def populate_row(src_table, dest_table, row, key = None):\n if key:\n record = dest_table(key_name = str(key))\n else:\n record = dest_table()\n\n for field in src_table.properties():\n setattr(record, field, getattr(row, field))\n\n record.put()", "def add_defaults(self, source):\n for key, value in source.items():\n if key not in self:\n self.set_default(key, value)", "def env_getitem_transform(orig_graph, key, default):\n rel = getattr(key, \"node\", key)\n graph = transformable_clone(orig_graph, relation=f\"[{rel}]\")\n out = graph.output\n while out.is_apply(P.env_setitem):\n _, out, key2, value = out.inputs\n if key == key2.value:\n graph.output = value\n break\n else:\n with untested():\n graph.output = graph.apply(P.env_getitem, out, key, default)\n graph.return_.abstract = key.abstract\n return graph" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Open $EDITOR to edit note_msg
def _edit_note(username, note_msg):
    # write out a temporary file containing the subject and body
    # of note_msg
    filename = ""
    with tempfile.NamedTemporaryFile(delete=False) as f:
        temp_note = email.message.Message()
        temp_note['Subject'] = note_msg['Subject']
        temp_note.set_payload(note_msg.get_payload())
        f.write(temp_note.as_string())
        filename = f.name
    if len(filename) == 0:
        exit("Couldn't create temporary file")

    # launch $EDITOR against temporary file
    edit_cmd = "vi"
    if 'EDITOR' in os.environ:
        edit_cmd = os.environ['EDITOR']
    os.system("%s %s" % (edit_cmd, filename))

    # read the edited message and remove the temporary file
    edited_message = ""
    with open(filename) as f:
        edited_message = f.read()
    os.remove(filename)
    if len(edited_message.strip()) == 0:
        exit("The edited note was empty - nothing to do!")

    # return a new message consisting of the edited message merged
    # with headers from the input message
    now = now_in_rfc_format()
    msg = email.message_from_string(edited_message)
    if 'Subject' not in msg or not msg['Subject']:
        msg['Subject'] = "Note"
    msg['From'] = username
    msg['To'] = username
    msg['Content-Type'] = "text/html; charset=utf-8"
    msg['Date'] = now
    _set_header(note_msg, msg, 'X-Uniform-Type-Identifier', 'com.apple.mail-note')
    _set_header(note_msg, msg, 'X-Mail-Created-Date', now)
    _set_header(note_msg, msg, 'X-Universally-Unique-Identifier', str(uuid.uuid4()).upper())
    return msg
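A rough way to exercise it, assuming the surrounding script's imports and its now_in_rfc_format() helper are available and that $EDITOR points at a real editor; the subject and payload below are made-up values:

import email.message

seed = email.message.Message()
seed['Subject'] = 'Shopping list'
seed.set_payload('milk, eggs')
updated = _edit_note('user@example.com', seed)   # blocks while $EDITOR is open
print(updated['Subject'], updated['X-Universally-Unique-Identifier'])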
[ "def open(ctx, note):\n directory = ctx.obj[\"config\"][\"owner\"][\"dir\"]\n note = Note(directory, note)\n click.edit(filename=note.path)", "def _open_note(notes_dir, notes_file, editor):\n _makedir(notes_dir)\n\n editor_cmd = editor + [notes_file]\n try:\n subprocess.run(editor_cmd)\n except subprocess.CalledProcessError as error:\n raise BranchNotesError(f\"Failed to run editor: {error}\")", "def edit(self):\n self.msg_store = get_text_from_editor(postpend=self.msg_store)", "def edit(self, viewmode):\n\n if self.actions['split'].isChecked():\n self.actions['split'].setChecked(False)\n self.notesView.setVisible(not viewmode)\n self.notesEdit.setVisible(viewmode)\n\n # Gives the keyboard input focus to notesEdit/notesView.\n # Without this, keyboard input may change note text even when\n # notesEdit is invisible.\n if viewmode:\n self.notesEdit.setFocus()\n else:\n self.notesView.setFocus()\n\n self.saveCurrentNote()\n self.actions['insertImage'].setEnabled(viewmode)\n #self.actionLeftAndRight.setEnabled(True)\n #self.actionUpAndDown.setEnabled(True)\n\n # Render the note text as it is.\n self.notesView.updateView()", "def editor_download_editing(self):\n download_for_note(ask_user=True, note=self.note, editor=self)\n # Fix for issue #10.\n self.stealFocus = True\n self.loadNote()\n self.stealFocus = False", "def edit_note(session, title, text, note_id, user):\n note = get_note(session, note_id, user)\n\n check_permission(session, PermissionType.EDIT, user, note)\n\n note.title = title\n note.text = text", "def edit(citekey):\n config = Configuration.load_config()\n\n try:\n bbt = BetterBibtex(config)\n except BetterBibtexNotRunning as e:\n click.echo(e)\n sys.exit()\n\n if citekey:\n match = citekey_regex.match(citekey)\n if match is None:\n click.echo(\"The citekey provided is not valid\")\n sys.exit()\n else:\n citekey = bbt.citation_picker()\n if citekey is None:\n sys.exit()\n\n # Write output file\n notes_dir = Path(config[\"notes\"])\n outfile = notes_dir / f\"{citekey}.md\"\n\n if outfile.exists():\n os.system(f\"{config['editor']} {str(outfile)}\")\n else:\n choice = click.confirm(\"File does not exist yet. 
Create now?\")\n if choice:\n create_note(citekey, config)\n else:\n sys.exit()", "def open_with_editor(filepath):\n editor_cmd = editor().split()\n try:\n subprocess.call(editor_cmd + [filepath])\n except OSError:\n die('Could not launch ' + editor())", "def edit_note_selector_view(note_list, google_notes):\n global continue_printing_row\n\n title_list = []\n for i in range(len(note_list)):\n title_list.append(re.sub(\"[│]\", '', str(note_list[i][1])).rstrip(' ').lstrip(' '))\n\n title_list.append(consts.GO_BACK)\n\n list_prompt = [\n {\n 'type': 'list',\n 'name': 'options',\n 'message': consts.PICK_NOTE,\n 'choices': title_list\n }]\n\n list_selection = prompt(list_prompt)\n\n if list_selection.get('options') == consts.GO_BACK:\n note_view()\n\n notes = []\n for i in range(len(note_list)):\n for index in range(len(note_list[i])):\n title = list(filter(lambda x: list_selection.get('options') in note_list[i][index], note_list[i][index]))\n title_string = ''\n for y in range(len(title)):\n title_string += title[y]\n\n if title_string != '':\n notes.append(title_string)\n\n note_to_edit = []\n\n for index in range(len(note_list)):\n for i in range(len(note_list[index])):\n try:\n if len(note_list) == 1:\n note_to_edit.append(note_list[0])\n index_of_note = 0\n else:\n test_note_existence = note_list[i][index].index(notes[0])\n note_to_edit.append(note_list[i])\n index_of_note = i\n except:\n pass\n\n delete_white_space = False\n\n note_to_edit_accumulator = []\n\n for note in range(len(note_to_edit)):\n for item in range(len(note_to_edit[note])):\n if not delete_white_space:\n note_to_edit_accumulator.append(note_to_edit[note][item])\n if '┘' in note_to_edit[note][item]:\n delete_white_space = True\n else:\n pass\n\n note_to_edit = [note_to_edit_accumulator]\n note_edit_view(note_to_edit, google_notes, note_list, index_of_note)", "def handle(self, *args, **options):\n logger.info(\"launch joplin to edit the note %s\" % options['note_id'])\n\n joplin = JoplinCmdApi()\n out, err, exitcode = joplin.editnote(options['note_id'],\n options['parent_id'],\n options['title'],\n options['body'],\n options['is_todo'])\n return out.decode()", "def on_edit_todo(self, new_todo_line):\n self.todo_file.edit_todo(self.todo_file.todo_position, new_todo_line)", "def note(self, msg, raw=False):\n self._msg(('' if raw else 'NOTE: ') + str(msg), self.NOTE, raw)", "def newNote(self):\n self.note_ref = str(\"note_%d\" % StickyNotes.note_id)\n StickyNotes().show()\n StickyNotes.note_id += 1", "def get_notes(self):\n\t\tself.notes = input(\"Notes (Press enter if None): \")", "def edit_data(self):\n note_list = self.read_data()\n if note_list is not None:\n try:\n num = int(input(\"Enter ID of Note to Edit: \"))\n # get id from serial number\n for note in note_list:\n if note[\"S/N\"] == num:\n id = note[\"_id\"]\n\n # get note using id\n response = Note.find_one(id)\n note = Note(**response)\n\n # get key match that will be used for edit\n current_title = note.title\n current_content = note.content\n\n # prompt user to edit title or content\n ans = input(\"Edit\\n1. Title\\n2. 
Content\\n\")\n ans = ans.strip()\n\n # edit title or content based on user choice\n if ans == \"1\":\n title = input(\"Enter new title: \")\n note.title = title\n note.update_mongo(match={\"title\": current_title})\n print(\"Title successfully edited\")\n elif ans == \"2\":\n content = input(\"Enter new content: \")\n note.content = content\n note.update_mongo(match={\"content\": current_content})\n print(\"Content successfully edited\")\n else:\n print(\"Invalid option selected\")\n except:\n print(\"Error occurred during edit, check Id and try again\")", "def update_note(self,note):\n q=\"update note set msg='%s' where id=%d\"%(note.get_msg(),note.get_idt())\n try:\n NoteDB.cursor.execute(q)\n NoteDB.db.commit()\n except Exception as e:\n \n NoteDB.db.rollback()\n raise", "def open(self) -> None:\n\t\t#TODO check OS type\n self.topWindow.setStatus('Calling external editor...')\n subprocess.run('npp/notepad++.exe '+self.settingsFile)\n self.topWindow.setStatus('Returned from external editor.')", "def note_view():\n google_notes = keep.all()\n\n os.system('clear')\n print(consts.YELLOW_BOLD)\n print(fig.renderText('Keep...'))\n\n if len(google_notes) == 0:\n print(consts.RED, end='')\n print('You don\\'t have any notes!'.center(consts.WIDTH))\n # choices = [\n # '✎ Make a New Note ✎',\n # '✎ Make a New List ✎',\n # '⛔ Exit ⛔'\n # ]\n choices = [\n consts.MAKE_NOTE,\n consts.MAKE_LIST,\n consts.EXIT\n ]\n note_list = []\n else:\n global continue_printing_row\n\n note_list = NoteGrid.listify_google_notes(google_notes)\n note_list = NoteGrid.wrap_text(note_list)\n note_list = NoteGrid.add_list_border(note_list)\n NoteGrid.print_grid(note_list, continue_printing_row)\n print('\\n')\n continue_printing_row = True\n # choices = [\n # '✎ Make a New Note ✎',\n # '✎ Make a New List ✎',\n # 'Edit a Note',\n # '⛔ Exit ⛔']\n choices = [\n consts.MAKE_NOTE,\n consts.MAKE_LIST,\n consts.EDIT_NOTE,\n consts.EXIT\n ]\n\n initial_prompt = [\n {\n 'type': 'list',\n 'name': 'options',\n 'message': consts.SELECT_OPTION,\n 'choices': choices\n }]\n initial_selection = prompt(initial_prompt)\n\n if initial_selection.get('options') == consts.MAKE_NOTE:\n make_a_note(note_list)\n elif initial_selection.get('options') == consts.MAKE_LIST:\n make_a_list(note_list)\n elif initial_selection.get('options') == consts.EDIT_NOTE:\n edit_note_selector_view(note_list, google_notes)\n elif initial_selection.get('options') == consts.EXIT:\n raise SystemExit", "def emacs_snippet(m) -> str:" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Return the Manhattan distance + any turn moves needed to put target ahead of current heading
def manhattan_distance_with_heading(current, target):
    md = abs(current[0] - target[0]) + abs(current[1] - target[1])
    if current[2] == 0:   # heading north
        # Since the agent is facing north, "side" here means
        # whether the target is in a row above or below (or
        # the same) as the agent.
        # (Same idea is used if agent is heading south)
        side = (current[1] - target[1])
        if side > 0:
            md += 2     # target is behind: need two turns to turn around
        elif side <= 0 and current[0] != target[0]:
            md += 1     # target is ahead but not directly: just need to turn once
        # note: if target straight ahead (curr.x == tar.x), no turning required
    elif current[2] == 1: # heading west
        # Now the agent is heading west, so "side" means
        # whether the target is in a column to the left or right
        # (or the same) as the agent.
        # (Same idea is used if agent is heading east)
        side = (current[0] - target[0])
        if side < 0:
            md += 2     # target is behind
        elif side >= 0 and current[1] != target[1]:
            md += 1     # target is ahead but not directly
    elif current[2] == 2: # heading south
        side = (current[1] - target[1])
        if side < 0:
            md += 2     # target is behind
        elif side >= 0 and current[0] != target[0]:
            md += 1     # target is ahead but not directly
    elif current[2] == 3: # heading east
        side = (current[0] - target[0])
        if side > 0:
            md += 2     # target is behind
        elif side <= 0 and current[1] != target[1]:
            md += 1     # target is ahead but not directly
    return md
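A quick hand-worked sanity check of the heuristic above (added for illustration, not part of the original record; headings follow the 0=north, 1=west, 2=south, 3=east encoding used in the function, and the expected values were computed by hand from it):

assert manhattan_distance_with_heading((0,0,3), (2,3)) == 6  # md=5; east-facing, target ahead but not directly, so one extra turn
assert manhattan_distance_with_heading((2,0,0), (2,3)) == 3  # target straight ahead of a north-facing agent, no turn penalty
assert manhattan_distance_with_heading((0,3,0), (0,0)) == 5  # md=3; target directly behind a north-facing agent, two extra turns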
[ "def manhattan_heuristic(pos, problem):\n # print(\"mahattan\")\n return abs(pos[0] - problem.goal_pos[0]) + abs(pos[1] - problem.goal_pos[1])", "def manhattan_distance_to(self, x: int, y: int) -> int:\n return abs(self.location[0] - x) + abs(self.location[1] - y)", "def manhattan_distance_heuristic(nodes, possible_moves):\n\n raise NotImplementedError", "def manhattan_distance(row0, col0, row1, col1):\n return (abs(row0 - row1)) + (abs(col0 - col1))", "def get_manhattan_distance(self, otherPoint):\n return abs(self.x - otherPoint.x) + abs(self.y - otherPoint.y)", "def min_manhattan_distance(self):\n return self.span_remaining", "def _manhattan_distance_to_closest_ghost(self, state, row, col):\n\n \treturn self.distances[row][col]", "def manhattanDistance(loc1, loc2):\n # BEGIN_YOUR_CODE\n return abs(loc1[0]-loc2[0])+abs(loc1[1]-loc2[1])\n # use 'abs'\n # END_YOUR_CODE", "def distManhattan(p1, p2):\n if len(p1) == 3:\n (x1, y1, t1) = p1\n else:\n (x1, y1) = p1\n if len(p2) == 3:\n (x2, y2, t2) = p2\n else:\n (x2, y2) = p2\n return abs(x1 - x2) + abs(y1 - y2)", "def manhattan_distance_heuristic(state, puzzle_size):\n heuristic = 0\n for i, row in enumerate(state):\n for j, tile in enumerate(row):\n # Find the i, j values of where this tile should actually be.\n # Don't do this for the gap tile\n if tile == GAP:\n continue\n goal_i, goal_j = int((tile-1)//puzzle_size), (tile-1)%puzzle_size # -1 to account for 0-indexing\n # Add difference to heuristic\n heuristic += abs(goal_i - i) + abs(goal_j -j)\n\n return heuristic", "def manhattan(k):\n x, y = coord(k)\n return abs(x) + abs(y)", "def distManhattan(p1, p2):\n if len(p1) == 3:\n (x1, y1, t1) = p1\n else:\n (x1, y1) = p1\n if len(p2) == 3:\n (x2, y2, t2) = p2\n else:\n (x2, y2) = p2\n return abs(x1 - x2) + abs(y1 - y2)\n # (x1, y1) = p1\n # (x2, y2) = p2\n # return abs(x1 - x2) + abs(y1 - y2)", "def manhattan_distance(start, end):\n return sum(abs(e - s) for s, e in zip(start, end))", "def manhattan_distance(x, y):\n\n return sum(abs(a - b) for a, b in zip(x, y))", "def manhattan_heuristic(board, problem=None):\r\n return weight_heuristic(board, calculate_manhattan_distance_helper)", "def manhattan(self,n, m):\r\n # Define the coordinates in 3X3 board of each position in a tuple of size 9. 
\r\n coordinates = {0:(0,0), 1:(0,1), 2:(0,2),\r\n 3:(1,0), 4:(1,1), 5:(1,2),\r\n 6:(2,0), 7:(2,1), 8:(2,2)}\r\n \r\n x1,y1 = coordinates[n]\r\n x2,y2 = coordinates[m]\r\n return abs(x1-x2) + abs(y1-y2)", "def distance(board: np.array) -> int:\n total_distance = 0\n boxes_pos = np.argwhere(board == TYPE_LOOKUP['box not on target'])\n targets_pos = np.argwhere(board == TYPE_LOOKUP['box target']).tolist()\n\n for box in boxes_pos:\n distance_from_each_target = []\n for target in targets_pos:\n # Compute Manhattan distance with every empty target\n distance_from_each_target.append(np.sum(abs(box - target)))\n targets_pos.remove(targets_pos[np.argmin(distance_from_each_target)])\n total_distance += np.min(distance_from_each_target)\n\n return total_distance", "def manhattanDistance(loc1, loc2):\n # BEGIN_YOUR_ANSWER (our solution is 1 lines of code, but don't worry if you deviate from this)\n return sum(abs(comp1 - comp2) for comp1, comp2 in zip(loc1, loc2))\n # END_YOUR_ANSWER", "def manhattan_distance(self, X1, X2):\n return np.array([np.sum(np.absolute(X2 - instance), axis=1) for instance in X1])", "def min_manhattan_braced_distance(self):\n bn_ndx = list(self.get_braced_nodes(check_damaged=True))\n span_remaining = min([self._nodes[n_i].target_dist for n_i in bn_ndx])\n\n return span_remaining" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
The 'expected initial states and solution pairs' below are provided as a sanity check, showing what the PlanRouteProblem solution is expected to produce. Provide the 'initial state' tuple as the argument to test_PRP; the associated solution list of actions is the expected result. The test assumes the goals are [(2,3),(3,2)] and that the heuristic fn defined in PlanRouteProblem uses manhattan_distance_with_heading().
def test_PRP(initial):
    return plan_route((initial[0],initial[1]), initial[2],
                      # Goals:
                      [(2,3),(3,2)],
                      # Allowed locations:
                      [(0,0),(0,1),(0,2),(0,3),
                       (1,0),(1,1),(1,2),(1,3),
                       (2,0),      (2,3),
                       (3,0),(3,1),(3,2),(3,3)])
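plan_route itself is not shown in this record; as a hedged sketch of what test_PRP calls, and assuming it mirrors the plan_shot wrapper that appears later in this file (PlanRouteProblem, search.astar_search, and the argument shapes are taken from that code, not invented here), it would look roughly like:

def plan_route(current, heading, goals, allowed):
    # Sketch only: wrap the route-planning problem in A* search, as plan_shot does.
    if goals and allowed:
        prp = PlanRouteProblem((current[0], current[1], heading), goals, allowed)
        node = search.astar_search(prp)
        if node:
            return node.solution()
    # no route can be found, return empty list
    return []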
[ "def more_pour_problem(capacities, goal, start=None):\n # your code here\n def pp_is_goal(state):\n return goal in state\n\n def psuccessors(state):\n items = []\n for i in range(0, len(state)):\n if state[i] < capacities[i]:\n tpl = update_tuple(state, i, capacities[i])\n items.append((tpl, ('fill', i)))\n if state[i] > 0:\n tpl = update_tuple(state, i, 0)\n items.append((tpl, ('empty', i)))\n for j in range(0, len(state)):\n if i == j:\n continue\n if state[i] > 0:\n available_j = capacities[j] - state[j]\n if available_j <= 0:\n continue\n val_j = state[j] + state[i] if available_j >= state[i] else state[j] + available_j\n val_i = 0 if available_j >= state[i] else state[i] - available_j\n tpl = update_tuple(state, j, val_j)\n tpl = update_tuple(tpl, i, val_i)\n items.append((tpl, ('pour', i, j)))\n return dict(items)\n\n start = tuple([0 for x in range(0, len(capacities))]) if start == None else start\n return shortest_path_search(start, psuccessors, pp_is_goal) # <== your arguments here", "def manhattan_distance_heuristic(state, puzzle_size):\n heuristic = 0\n for i, row in enumerate(state):\n for j, tile in enumerate(row):\n # Find the i, j values of where this tile should actually be.\n # Don't do this for the gap tile\n if tile == GAP:\n continue\n goal_i, goal_j = int((tile-1)//puzzle_size), (tile-1)%puzzle_size # -1 to account for 0-indexing\n # Add difference to heuristic\n heuristic += abs(goal_i - i) + abs(goal_j -j)\n\n return heuristic", "def foodGhostLogicPlan(problem):\n \"*** YOUR CODE HERE ***\"\n DIRS = [game.Directions.NORTH, game.Directions.SOUTH, game.Directions.EAST, game.Directions.WEST]\n startPos, startGrid = problem.getStartState()\n minTime, maxTime = 0, 51 # change these numbers for debugging\n for x in range(1, problem.getWidth()+1):\n for y in range(1, problem.getHeight()+1):\n if startGrid[x][y]: minTime += 1\n\n for T in range(minTime, maxTime):\n #print(\"Trying to find a solution of length %d\" % T)\n\n clauses = [] # one action per time step\n clauses2 = [] # whether food is at (x,y) at start\n clauses3 = [] # whether food is at (x,y) at goal (should all be false)\n clauses4 = [] # action at specific time and place implies the same action at that time overall\n clauses5 = [] # action at specific time and place implies position at that time and place\n clauses6 = [] # action at specific time and place implies next position\n clauses7 = [] # only one forward action per position per time step\n clauses8 = [] # only one possible action to get to this position per time step\n clauses9 = [] # exactly one goal position\n clauses10 = [] # position at specific time and place implies no food at that time and place\n clauses12 = [] # food at specific time and place implies food prior in same place\n clauses13 = [] # food at specific time and place and no Pacman at that place in the next step implies food still there\n clauses14 = [] # JESUS whether ghost/s are at (x,y) at start\n clauses15 = [] # JESUS turnAround ghosts\n clauses16 = [] # JESUS nextGhostW >> ~currAct\n clauses17 = [] # JESUS nextGhostE >> ~currAct\n clauses18 = [] # JESUS ghost go east\n clauses19 = [] # JESUS ghost go weast\n clauses20 = [] # JESUS ghost \n clauses21 = [] # JESUS ghost \n\n clauses9.append(logic.PropSymbolExpr(\"Pos\", startPos[0], startPos[1], 0))\n\n #JESUS\n for ghost in problem.getGhostStartStates():\n clauses14.append(logic.PropSymbolExpr(\"Ge\", ghost.getPosition()[0], ghost.getPosition()[1], 0))\n\n for t in range(T+1):\n if t < T:\n 
clauses.extend(exactlyOneAsList([logic.PropSymbolExpr(direc, t) for direc in DIRS]))\n\n # Pacman must be in one of the initial food pellet positions in the end\n goalPositionAxioms = []\n goalPositions = []\n\n for x in range(1, problem.getWidth()+1):\n for y in range(1, problem.getHeight()+1):\n state = (x,y)\n\n #JESUS; All the Way East\n if (problem.isWall((x+1, y))):\n borderGhostE = logic.PropSymbolExpr(\"Ge\", x, y, t)\n turnAroundE = logic.PropSymbolExpr(\"Gw\", x - 1, y, t + 1)\n clauses15.append(logic.to_cnf(borderGhostE >> turnAroundE)) # Ge[3, 2, 3] >> Gw[2, 2, 4] if at east wall\n else:\n eastG = logic.PropSymbolExpr(\"Ge\", x, y, t)\n easterG = logic.PropSymbolExpr(\"Ge\", x + 1, y, t + 1)\n clauses18.append(logic.to_cnf(eastG >> easterG))\n\n #JESUS; All the Way West\n if (problem.isWall((x-1, y))):\n borderGhostW = logic.PropSymbolExpr(\"Gw\", x, y, t)\n turnAroundW = logic.PropSymbolExpr(\"Ge\", x + 1, y, t + 1)\n clauses15.append(logic.to_cnf(borderGhostW >> turnAroundW)) # Gw[1, 2, 3] >> Gw[2, 2, 4] if at west wall\n else:\n westG = logic.PropSymbolExpr(\"Gw\", x, y, t)\n westerG = logic.PropSymbolExpr(\"Gw\", x - 1, y, t + 1)\n clauses19.append(logic.to_cnf(westG >> westerG))\n\n # only add these axioms once\n if t == 0:\n if startGrid[x][y]:\n clauses2.append(logic.PropSymbolExpr(\"Food\", x, y, 0)) # food here initially\n goalPositionAxioms.append(logic.PropSymbolExpr(\"Pos\", x, y, T)) # Pacman could end here\n goalPositions.append(state)\n else:\n clauses2.append(~logic.PropSymbolExpr(\"Food\", x, y, 0)) # no food here initially\n clauses3.append(~logic.PropSymbolExpr(\"Food\", x, y, T)) # no food at every position in the end\n\n # if state is not a wall, we calculate all forward and backward implications for position AND food\n if not problem.isWall(state):\n currPos = logic.PropSymbolExpr(\"Pos\", x, y, t) # i.e. Pos[3,6,5]\n currPosNextTime = logic.PropSymbolExpr(\"Pos\", x, y, t+1) # i.e. Pos[3,6,6]\n currFood = logic.PropSymbolExpr(\"Food\", x, y, t) # i.e. Food[3,6,5]\n nextFood = logic.PropSymbolExpr(\"Food\", x, y, t+1) # i.e. Food[3,6,6]\n forwardActions = problem.actions((state, startGrid))\n actionsToGetHere = []\n\n if t < T:\n for action in forwardActions:\n (((nx, ny), nfood), cost) = problem.result((state, startGrid), action)\n rawAct = logic.PropSymbolExpr(action, t)\n currAct = logic.PropSymbolExpr(action, x, y, t)\n nextPos = logic.PropSymbolExpr(\"Pos\", nx, ny, t+1)\n\n nextGhostE = logic.PropSymbolExpr(\"Ge\", nx, ny, t) # JESUS Ge[3, 6, 5]\n nextGhostW = logic.PropSymbolExpr(\"Gw\", nx, ny, t) # JESUS Gw[3, 6, 5]\n waitingGhostNextE = logic.PropSymbolExpr(\"Gw\", nx, ny, t + 1)\n waitingGhostNextW = logic.PropSymbolExpr(\"Ge\", nx, ny, t + 1)\n clauses16.append(logic.to_cnf(nextGhostW >> ~currAct)) # JESUS Gw[3, 6, 5] >> ~rawAct\n clauses17.append(logic.to_cnf(nextGhostE >> ~currAct)) # JESUS GE[3, 6 ,5] >> ~rawAct\n\n clauses20.append(logic.to_cnf(waitingGhostNextW >> ~currAct)) # JESUS Gw[3, 6, 5] >> ~rawAct\n clauses21.append(logic.to_cnf(waitingGhostNextE >> ~currAct)) # JESUS Gw[3, 6, 5] >> ~rawAct\n\n axiom1 = currAct >> rawAct # i.e. West[3,6,5] implies West[5]\n axiom5 = currAct >> currPos # i.e. West[3,6,5] implies Pos[3,6,5]\n axiom2 = currAct >> nextPos # i.e. 
West[3,6,5] implies Pos[2,6,6]\n axiom1, axiom2, axiom5 = logic.to_cnf(axiom1), logic.to_cnf(axiom2), logic.to_cnf(axiom5)\n clauses4.append(axiom1)\n clauses5.append(axiom5)\n clauses6.append(axiom2)\n onlyOneForwardAction = exactlyOne([logic.PropSymbolExpr(direc, x, y, t) for direc in forwardActions])\n axiom3 = currPos >> onlyOneForwardAction # i.e. Pos[3,6,5] implies North[3,6,5] or West[3,6,5] if South and East are walls\n axiom8 = nextFood >> currFood # i.e. Food[3,6,5] implies Food[3,6,4]\n axiom9 = (currFood & ~currPosNextTime) >> nextFood # i.e. Food[3,6,5] and not Pos[3,6,6] implies Food[3,6,6]\n axiom3 = logic.to_cnf(axiom3)\n axiom8 = logic.to_cnf(axiom8)\n axiom9 = logic.to_cnf(axiom9)\n clauses7.append(axiom3)\n clauses12.append(axiom8)\n clauses13.append(axiom9)\n\n axiom6 = currPos >> ~currFood # i.e. Pos[3,6,5] implies not Food[3,6,5]\n axiom6 = logic.to_cnf(axiom6)\n clauses10.append(axiom6)\n\n # we have to consider t in [1,T] for backward implications, instead of t in [0,T-1]\n if t > 0:\n for action in forwardActions:\n (((px, py), pfood), cost) = problem.result((state, startGrid), action)\n if not isReachableFromStart((px, py), startPos, t-1):\n continue\n willAppend = True\n for goalPos in goalPositions:\n if not isReachableFromGoal((px, py), goalPos, t-1, T):\n willAppend = False\n if willAppend:\n actionsToGetHere.append(logic.PropSymbolExpr(game.Directions.REVERSE[action], px, py, t-1))\n onlyOneActionToGetHere = exactlyOne(actionsToGetHere)\n axiom4 = currPos >> onlyOneActionToGetHere # i.e. Pos[3,6,5] implies South[3,7,4] or East[2,6,4] if South and East are walls\n axiom4 = logic.to_cnf(axiom4)\n clauses8.append(axiom4)\n\n # Pacman must be in one of the initial food pellet positions in the end\n if t == 0:\n clauses9.extend(exactlyOneAsList(goalPositionAxioms))\n\n clauses += clauses2 + clauses3 + clauses4 + clauses5 + clauses6 + clauses7 + clauses8 + clauses9 + clauses10 + clauses12 + clauses13 + clauses14 + clauses15 + clauses16 + clauses17 + clauses18 + clauses19 + clauses20 + clauses21\n model = logic.pycoSAT(clauses)\n if model:\n plan = extractActionSequence(model, DIRS)\n return plan\n\n print(\"NO PLAN, THIS SHOULD NOT BE HAPPENING\")\n return [] # should never get here if maze has solution", "def solve_puzzle(initial_state):\n queue = PriorityQueue()\n visited_states = set()\n\n queue.put((0, uuid.uuid4(), StateWithParent(state=initial_state, parent=None)))\n\n while not queue.empty():\n parent_cost, _, current_state_with_parent = queue.get()\n\n current_state = current_state_with_parent.state\n visited_states.add(state_to_tuple(current_state))\n\n actions = get_available_actions(current_state)\n successor_states = map(lambda action: action(current_state), actions)\n\n for state in successor_states:\n if state_to_tuple(state) not in visited_states:\n new_state_with_parent = StateWithParent(state=state,\n parent=current_state_with_parent)\n cost = heuristic_cost(state)\n if cost == 0:\n # If the heuristic cost of the given state equals 0, then\n # the goal state is found and we can return it immediately.\n return new_state_with_parent\n total_cost = cost + parent_cost\n\n queue.put((total_cost, uuid.uuid4(), new_state_with_parent))\n\n return None", "def trajOptRRT(self, state_initial, state_final, goal=False, verbose=False):\n # TODO: reconcile trajOpt and trajOptRRT (shouldn't take long)\n\n # stopwatch for solver time\n tsolve_pre = time.time()\n\n # number of knot points - proportional to x-distance seems to work well\n N = int(max([np.floor(0.8 * 
np.abs(state_final[0] - state_initial[0])), 6]))\n\n # optimization problem: variables t_f, u[k], x[k]\n mp = MathematicalProgram()\n\n # variable for time to reach goal\n t_f = mp.NewContinuousVariables(1, \"t_f\")\n dt = t_f[0] / N\n\n k = 0\n u = mp.NewContinuousVariables(2, \"u_%d\" % k)\n input_trajectory = u\n\n x = mp.NewContinuousVariables(6, \"x_%d\" % k)\n state_trajectory = x\n\n for k in range(1, N):\n u = mp.NewContinuousVariables(2, \"u_%d\" % k)\n x = mp.NewContinuousVariables(6, \"x_%d\" % k)\n input_trajectory = np.vstack((input_trajectory, u))\n state_trajectory = np.vstack((state_trajectory, x))\n\n x = mp.NewContinuousVariables(6, \"x_%d\" % N)\n state_trajectory = np.vstack((state_trajectory, x))\n\n if verbose:\n print \"Number of decision vars\", mp.num_vars()\n\n # cost function: penalize electric energy use and overall control effort\n thrust = input_trajectory[:, 0]\n elev = input_trajectory[:, 1]\n vel = state_trajectory[:, 2]\n allvars = np.hstack((t_f[0], thrust, elev, vel))\n def totalcost(X):\n dt = X[0] / N\n u0 = X[1:N + 1]\n u1 = X[N + 1:2 * N + 1]\n v = X[2 * N + 1:3 * N + 1]\n return dt * (1.0 * u0.dot(u0) + 1.0 * u1.dot(u1) + 10.0 * X[0] * (u0.dot(v)))\n\n mp.AddCost(totalcost, allvars)\n\n # initial state constraint\n for i in range(len(state_initial)):\n mp.AddLinearConstraint(state_trajectory[0, i] == state_initial[i])\n\n # final state constraints\n if goal:\n # final state constraint (x position)\n mp.AddLinearConstraint(state_trajectory[-1, 0] == 0.0)\n\n # final state constraint (z position) NOTE: range is acceptable\n mp.AddLinearConstraint(state_trajectory[-1, 1] <= 1.5)\n mp.AddLinearConstraint(state_trajectory[-1, 1] >= 0.5)\n\n # final state constraint (velocity) NOTE: range is acceptable\n mp.AddLinearConstraint(state_trajectory[-1, 2] <= 9.0)\n mp.AddLinearConstraint(state_trajectory[-1, 2] >= 6.0)\n\n # final state constraint (flight path angle) NOTE: small range here\n mp.AddLinearConstraint(state_trajectory[-1, 3] <= 1.0 * np.pi / 180.0)\n mp.AddLinearConstraint(state_trajectory[-1, 3] >= - 1.0 * np.pi / 180.0)\n\n # final state constraint (pitch rate)\n mp.AddLinearConstraint(state_trajectory[-1, 5] == 0.0)\n else:\n for i in range(len(state_initial)):\n mp.AddLinearConstraint(state_trajectory[-1, i] == state_final[i])\n\n # input constraints\n for i in range(len(input_trajectory[:, 0])):\n mp.AddLinearConstraint(input_trajectory[i, 0] >= 0.0)\n mp.AddLinearConstraint(input_trajectory[i, 0] <= 1.2 * self.m * self.g)\n mp.AddLinearConstraint(input_trajectory[i, 1] >= -30.0)\n mp.AddLinearConstraint(input_trajectory[i, 1] <= 30.0)\n\n # state constraints\n for i in range(len(state_trajectory[:, 0])):\n # x position\n mp.AddLinearConstraint(state_trajectory[i, 0] >= state_initial[0])\n mp.AddLinearConstraint(state_trajectory[i, 0] <= state_final[0])\n # z position\n mp.AddLinearConstraint(state_trajectory[i, 1] >= 0.3)\n mp.AddLinearConstraint(state_trajectory[i, 1] <= 2.0)\n # velocity\n mp.AddLinearConstraint(state_trajectory[i, 2] >= 2.0)\n mp.AddLinearConstraint(state_trajectory[i, 2] <= 18.0)\n # flight path angle\n mp.AddLinearConstraint(state_trajectory[i, 3] >= -30.0 * np.pi / 180.0)\n mp.AddLinearConstraint(state_trajectory[i, 3] <= 30.0 * np.pi / 180.0)\n # pitch angle\n mp.AddLinearConstraint(state_trajectory[i, 4] >= -20.0 * np.pi / 180.0)\n mp.AddLinearConstraint(state_trajectory[i, 4] <= 40.0 * np.pi / 180.0)\n # pitch rate\n mp.AddLinearConstraint(state_trajectory[i, 5] >= -20.0 * np.pi / 180.0)\n 
mp.AddLinearConstraint(state_trajectory[i, 5] <= 20.0 * np.pi / 180.0)\n\n # dynamic constraints (direct transcription)\n for j in range(1, N + 1):\n dynamic_prop = dt * self.airplaneLongDynamics(state_trajectory[j - 1, :], input_trajectory[j - 1, :])\n for k in range(len(state_initial)):\n mp.AddConstraint(state_trajectory[j, k] == state_trajectory[j - 1, k] + dynamic_prop[k])\n\n # initial guess for time\n t_guess = np.abs(state_final[0] - state_initial[0]) / (0.5 * (state_final[2] + state_initial[2]))\n mp.SetInitialGuess(t_f[0], t_guess)\n\n # initial guesses for state\n if goal:\n state_final_dummy = np.array(state_final)\n state_final_dummy[1] = state_initial[1]\n state_final_dummy[4] = state_initial[4]\n for i in range(len(state_trajectory[:, 0])):\n state_guess = ((N - i) / N) * state_initial + (i / N) * state_final_dummy\n for j in range(len(state_guess)):\n mp.SetInitialGuess(state_trajectory[i, j], state_guess[j])\n else:\n for i in range(len(state_trajectory[:, 0])):\n state_guess = ((N - i) / N) * state_initial + (i / N) * state_final\n for j in range(len(state_guess)):\n mp.SetInitialGuess(state_trajectory[i, j], state_guess[j])\n\n # initial guesses for input\n for i in range(N):\n mp.SetInitialGuess(input_trajectory[i, 0], self.m * self.g / 3.5)\n mp.SetInitialGuess(input_trajectory[i, 1], 0.01)\n\n # time constraints\n mp.AddLinearConstraint(t_f[0] <= 2.0 * t_guess)\n mp.AddLinearConstraint(t_f[0] >= 0.5 * t_guess)\n\n # set SNOPT iteration limit\n it_limit = int(max(20000, 40 * mp.num_vars()))\n mp.SetSolverOption(SolverType.kSnopt, 'Iterations limit', it_limit)\n\n if verbose:\n print(\"** solver begin with N = %d **\" % N)\n # solve nonlinear optimization problem (w/SNOPT)\n result = mp.Solve()\n if verbose:\n print result\n\n # convert from symbolic to float\n utraj = mp.GetSolution(input_trajectory)\n t_f = mp.GetSolution(t_f)\n xtraj = mp.GetSolution(state_trajectory)\n ttraj = t_f[0] * np.linspace(0.0, 1.0, (N + 1))\n\n tsolve_post = time.time()\n tsolve = tsolve_post - tsolve_pre\n\n solver_id = mp.GetSolverId()\n\n if verbose:\n print (\"** %s solver finished in %.1f seconds **\\n\" % (solver_id.name(), tsolve))\n print (\"t_f computed: %.3f seconds\" % t_f[0])\n\n cost = -1\n # get total cost of solution\n if result == SolutionResult.kSolutionFound:\n thrust = utraj[:, 0]\n elev = utraj[:, 1]\n vel = xtraj[:, 2]\n allvars = np.hstack((t_f[0], thrust, elev, vel))\n cost = totalcost(allvars)\n if verbose:\n print (\"cost computed: %.3f\" % cost)\n\n return utraj, xtraj, ttraj, result, cost", "def heuristic(self,state):\n return self.heur(state.assignment, self.goal)", "def test_set_goals_ht_input(self):\r\n\r\n self.DUT.mission_time = 10.0\r\n self.DUT.hazard_rate_goal = 0.00015\r\n self.DUT.goal_measure = 2\r\n\r\n self.assertFalse(self.DUT.calculate_goals())\r\n self.assertAlmostEqual(self.DUT.reliability_goal, 0.99850112)\r\n self.assertAlmostEqual(self.DUT.mtbf_goal, 6666.66666666)", "def defineFirstGoal(self, gameState):\n\n #The tuples below represent points along the bottom and top walls\n if gameState.isOnRedTeam(self.index):\n return (21, 1)\n else:\n return (10, 14)", "def test():\n assert csuccessors((2, 2, 1, 0, 0, 0)) == {(2, 1, 0, 0, 1, 1): 'C->',\n (1, 2, 0, 1, 0, 1): 'M->',\n (0, 2, 0, 2, 0, 1): 'MM->',\n (1, 1, 0, 1, 1, 1): 'MC->',\n (2, 0, 0, 0, 2, 1): 'CC->'}\n assert csuccessors((1, 1, 0, 4, 3, 1)) == {(1, 2, 1, 4, 2, 0): '<-C',\n (2, 1, 1, 3, 3, 0): '<-M',\n (3, 1, 1, 2, 3, 0): '<-MM',\n (1, 3, 1, 4, 1, 0): '<-CC',\n (2, 2, 1, 3, 2, 0): 
'<-MC'}\n assert csuccessors((1, 0, 0, 4, 1, 1)) == {(1, 1, 1, 4, 0, 0): '<-C',\n (2, 0, 1, 3, 1, 0): '<-M',\n (2, 1, 1, 3, 0, 0): '<-MC',\n (3, 0, 1, 2, 1, 0): '<-MM'}\n assert csuccessors((1, 4, 1, 2, 2, 0)) == {}\n assert mc_problem((1, 0, 1, 0, 0, 0)) == [(1, 0, 1, 0, 0, 0), 'M->', (0, 0, 0, 1, 0, 1)]\n assert mc_problem((1, 1, 1, 0, 0, 0)) == [(1, 1, 1, 0, 0, 0), 'MC->', (0, 0, 0, 1, 1, 1)]\n assert mc_problem((2, 1, 1, 0, 0, 0)) == [(2, 1, 1, 0, 0, 0), 'MC->', (1, 0, 0, 1, 1, 1), '<-C', (1, 1, 1, 1, 0, 0), 'MC->', (0, 0, 0, 2, 1, 1)]\n assert mc_problem((1, 2, 1, 0, 0, 0)) == None\n return 'tests pass'", "def minimax(self, state):\n\n self.init_food_list = state.getFood().asList()\n closed = set()\n\n # first step necessary to have different path depending\n # on the type of ghost. We go to the position the most distant of the\n # ghost . The reaction of the ghost will be different\n # depending of it type (smarty and dumby / greedy)\n if state.getGhostDirection(1) == 'Stop':\n successors = state.generatePacmanSuccessors()\n max_dist = -math.inf\n chosen_action = 'Stop'\n for next_state, action in successors:\n dist = manhattanDistance(\n state.getGhostPosition(1), state.getPacmanPosition())\n if max_dist < dist:\n max_dist = dist\n chosen_action = action\n return [chosen_action]\n\n final_score, final_path = self.minimax_rec(state, 0, 0, closed)\n\n return final_path", "def test_set_goals_rt_input(self):\r\n\r\n self.DUT.mission_time = 10.0\r\n self.DUT.reliability_goal = 0.975\r\n self.DUT.goal_measure = 1\r\n\r\n self.assertFalse(self.DUT.calculate_goals())\r\n self.assertAlmostEqual(self.DUT.hazard_rate_goal, 0.00253178)\r\n self.assertAlmostEqual(self.DUT.mtbf_goal, 394.97890205)", "def heuristic(state):\n # First fill the state with 1s, and count the connect fours for player 1.\n # Then fill the state with -1s and count the connect fours for player 2.\n state_1 = np.array(state)\n state_1 = np.where(state_1 == 0, 1, state_1)\n line_sums = ConnectFour._calculate_line_sums(state_1)\n num_wins_1 = np.sum(line_sums == 4)\n\n state_2 = np.array(state)\n state_2 = np.where(state_2 == 0, -1, state_2)\n line_sums = ConnectFour._calculate_line_sums(state_2)\n num_wins_2 = np.sum(line_sums == -4)\n\n return num_wins_1 - num_wins_2", "def solvable(self, domain, initial_state, goal_state):\n last_state = set([])\n reachable_literals = set(initial_state)\n positive_goals = set(goal_state[0])\n actions = domain\n\n positive_effects = set([])\n negative_effects = set([])\n for a in actions:\n positive_effects = positive_effects.union(set(a.add_effects))\n negative_effects = negative_effects.union(set(a.del_effects))\n # First check the obvious stuff\n for p in goal_state[0]:\n if p not in reachable_literals and p not in positive_effects:\n return False\n for p in goal_state[1]:\n if p in reachable_literals and p not in negative_effects:\n return False\n\n while last_state != reachable_literals:\n last_state = reachable_literals.copy()\n if positive_goals.issubset(reachable_literals):\n return True\n for a in actions:\n if a.applicable(reachable_literals):\n reachable_literals = reachable_literals.union(a.add_effects)\n\n return False", "def biDirectionalAStarSearch(problem, heuristic):\n # Initialiate necessary data structures for following algorithm\n pq1, pq2 = [PriorityQueue() for _ in range(2)]\n visited_states1, visited_states2 = [dict() for _ in range(2)]\n\n pq1.push((problem.getStartState(), [], 0), heuristic(problem.getStartState(), problem, ))\n pq2.push((problem.goal, [], 0), 
heuristic(problem.goal, problem, ))\n # Key is the state and the item stores all the actions to get the corresponding state\n visited_states1[problem.getStartState()] = []\n visited_states2[problem.goal] = []\n\n while not pq1.isEmpty() and not pq2.isEmpty():\n # Start process from the start state\n current_state, path, current_g = pq1.pop()\n\n # When there is a matching state, we then return the path\n if problem.isGoalState(current_state, visited_states2):\n return path + reverseActions(visited_states2[current_state])\n\n for successor_state, action, cost in problem.getSuccessors(current_state):\n if successor_state in visited_states1:\n continue\n next_path = path + [action]\n next_g = current_g + cost\n next_f = next_g + heuristic(successor_state, problem, 'goal')\n pq1.push((successor_state, next_path, next_g), next_f)\n visited_states1[successor_state] = next_path\n\n # Process from the goal state\n current_state, path, current_g = pq2.pop()\n if problem.isGoalState(current_state, visited_states1):\n return visited_states1[current_state] + reverseActions(path)\n\n for successor_state, action, cost in problem.getSuccessors(current_state):\n if successor_state in visited_states2:\n continue\n next_path = path + [action]\n next_g = current_g + cost\n next_f = next_g + heuristic(successor_state, problem, 'start')\n pq2.push((successor_state, next_path, next_g), next_f)\n visited_states2[successor_state] = next_path\n\n return []", "def parse_planning_problem(self) -> None:\n assert isinstance(self.planningProblem, PlanningProblem), \"Given planning problem is not valid!\"\n\n # get lanelet id of the initial state\n self.list_ids_lanelets_initial = self.scenario.lanelet_network.find_lanelet_by_position(\n [self.planningProblem.initial_state.position])[0]\n\n # get lanelet id of the goal region, which can be of different types\n self.list_ids_lanelets_goal = None\n if hasattr(self.planningProblem.goal.state_list[0], 'position'):\n if hasattr(self.planningProblem.goal.state_list[0].position, 'center'):\n self.list_ids_lanelets_goal = self.scenario.lanelet_network.find_lanelet_by_position(\n [self.planningProblem.goal.state_list[0].position.center])[0]\n\n elif hasattr(self.planningProblem.goal.state_list[0].position, 'shapes'):\n self.list_ids_lanelets_goal = self.scenario.lanelet_network.find_lanelet_by_position(\n [self.planningProblem.goal.state_list[0].position.shapes[0].center])[0]\n self.planningProblem.goal.state_list[0].position.center = \\\n self.planningProblem.goal.state_list[0].position.shapes[0].center\n\n # set attributes with given planning problem\n if hasattr(self.planningProblem.goal.state_list[0], 'time_step'):\n self.time_desired = self.planningProblem.goal.state_list[0].time_step\n else:\n self.time_desired = Interval(0, np.inf)\n\n if hasattr(self.planningProblem.goal.state_list[0], 'position'):\n if hasattr(self.planningProblem.goal.state_list[0].position, 'vertices'):\n self.position_desired = self.calc_goal_interval(self.planningProblem.goal.state_list[0].position.vertices)\n else:\n self.position_desired = None\n else:\n self.position_desired = None\n\n if hasattr(self.planningProblem.goal.state_list[0], 'velocity'):\n self.velocity_desired = self.planningProblem.goal.state_list[0].velocity\n else:\n self.velocity_desired = Interval(0, np.inf)\n\n if hasattr(self.planningProblem.goal.state_list[0], 'orientation'):\n self.orientation_desired = self.planningProblem.goal.state_list[0].orientation\n else:\n self.orientation_desired = Interval(-math.pi, math.pi)\n\n # 
create necessary attributes\n if hasattr(self.planningProblem.goal.state_list[0], 'position'):\n self.distance_initial = SearchBaseClass.distance(self.planningProblem.initial_state.position,\n self.planningProblem.goal.state_list[0].position.center)\n else:\n self.distance_initial = 0", "def solve_missionaries_cannibals_prob():\n\n def h(state: tuple):\n \"\"\"\n h(x) heuristic which simply counts the number of missionaries and canibals on the\n destination left bank.\n \"\"\"\n return 6 - state[0] - state[1]\n\n def mc_expand(state: tuple):\n \"\"\"\n Generates child nodes by evaluating all possible moves, then filtering by\n rules of the puzzle.\n \"\"\"\n\n # Boat state is at index 2 of tuple; 1 indicates boat is on left bank.\n boat = state[2]\n\n # Missionaries on left bank.\n m_left = state[0]\n\n # Missionaries on right bank.\n m_right = 3 - m_left\n\n # Canibals on left bank.\n c_left = state[1]\n\n # Canibals on right bank.\n c_right = 3 - c_left\n\n # Next look at all posibilities of missionaries and canibals traveling by boat\n # across river with boat that has capacity for only 2 people.\n children = list()\n\n # Case 1: boat is on right bank.\n if boat is 0:\n children = [\n (m_left + 2, c_left, 1), # 2M go to left bank.\n (m_left + 1, c_left + 1, 1), # 1M & 1C go to left bank.\n (m_left, c_left + 2, 1), # 2C go to left bank.\n (m_left + 1, c_left, 1), # 1M goes to left bank.\n (m_left, c_left + 1, 1) # 1C goes to left bank.\n ]\n\n # Case 2: boat is on left bank.\n else:\n children = [\n (m_left - 2, c_left, 0), # 2M go to right bank.\n (m_left - 1, c_left - 1, 0), # 1M & 1C go to right bank.\n (m_left, c_left - 2, 0), # 2C go to right bank.\n (m_left - 1, c_left, 0), # 1M goes to right bank.\n (m_left, c_left - 1, 0) # 1C goes to right bank.\n ]\n\n # Rule A: number of M and C must be non-negative and add up to 3.\n def non_negative_rule(state: tuple):\n return (state[0] >= 0 and\n state[1] >= 0 and\n state[0] <= 3 and\n state[1] <= 3)\n\n # Rule B: if a bank has M > 0, M >= C otherwise C eats M.\n def dont_eat_me_rule(state: tuple):\n m_left = state[0]\n m_right = 3 - m_left\n c_left = state[1]\n c_right = 3 - c_left\n return ((m_left >= c_left or m_left is 0) and\n (m_right >= c_right or m_right is 0))\n\n # Filter to feasible child states by applying rule A and B.\n return list(filter(lambda s: non_negative_rule(s)\n and dont_eat_me_rule(s),\n children))\n\n # Initial state of puzzle is 3 missionaries, 3 canibals, and boat on right bank.\n init_state = (0, 0, 0)\n\n # Run A* search algorithm and return profiling metrics to caller.\n return astar.astar_search(init_state=init_state,\n expand_fn=mc_expand,\n heuristic_fn=h)", "def construct_optimized_traj(self, initial_state, desired_state, objects, walls):\n best_traj = []\n best_traj_cost = self.LARGE_NUMBER\n\n start_time = time.perf_counter()\n end_time = start_time + self.PLAN_TIME_BUDGET\n MAX_TRYS = 10\n cnt = 0\n\n while (time.perf_counter() < end_time and cnt < MAX_TRYS):\n end_time_per_trial = min(end_time, time.perf_counter() + (self.PLAN_TIME_BUDGET/2))\n traj, traj_dist = self.construct_traj(initial_state, desired_state, objects, walls, end_time_per_trial)\n if(traj_dist == self.LARGE_NUMBER):\n cnt+= 0.5\n print(\"NO PATHS FOUND (Generate Optimized Trajectory)\")\n if(traj_dist < best_traj_cost): \n cnt+=1\n best_traj = traj \n best_traj_cost = traj_dist \n # print(\"tries\", cnt)\n\n return best_traj, best_traj_cost", "def actions(state, goals, direction_dict):\n actions = []\n for piece in state:\n if piece 
in goals:\n new_state = state.difference(set([piece]))\n actions.append((new_state, f\"EXIT from {piece}.\"))\n continue\n for move, jump in direction_dict[piece]:\n if move:\n if move not in state:\n new_state = state.difference(\n set([piece])).union(set([move]))\n actions.append(\n (new_state, f\"MOVE from {piece} to {move}.\"))\n elif jump and jump not in state:\n new_state = state.difference(\n set([piece])).union(set([jump]))\n actions.append(\n (new_state, f\"JUMP from {piece} to {jump}.\"))\n elif jump and jump not in state:\n new_state = state.difference(set([piece])).union(set([jump]))\n actions.append((new_state, f\"JUMP from {piece} to {jump}.\"))\n return actions", "def test_steps_matching_solution(self):\n issue = Issue(failed_assertion=self.assertion_meta)\n issue.save()\n\n test_solution_1 = Solution(plan=[\n ('notify_client_it', {\n 'subject': 'foo',\n 'message': 'bar',\n }),\n ('do_email', {})\n ])\n test_solution_1.save()\n isr_1 = ResolutionStep.objects.create(solution=test_solution_1, issue=issue)\n\n test_solution_2 = Solution.objects.create(plan=[\n ('notify_client_it', {\n 'subject': 'foo_2',\n 'message': 'bar_2',\n }),\n ('do_email', {})\n ])\n test_solution_2.save()\n isr_2 = ResolutionStep.objects.create(solution=test_solution_2, issue=issue)\n\n test_solution_3 = Solution.objects.create(plan=[\n ('do_something_else', {}),\n ])\n test_solution_3.save()\n ResolutionStep.objects.create(solution=test_solution_3, issue=issue)\n\n ResolutionStep.objects.create(action_type=ResolutionStepActionType.PASS, issue=issue)\n\n # Example plan to match\n target_solution = Solution.objects.create(plan=[\n ('notify_client_it', {}),\n ('do_email', {})\n ])\n\n self.assertEqual(\n set(issue.steps_matching_plan(target_solution.get_plan())),\n set([isr_1, isr_2]),\n 'There should only be two matching issue resolution steps')", "def is_solved(self):\n return (khun := self.sorted_pieces()[0]).x() == self.goal[0] and khun.y() == self.goal[1]" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Plan route to nearest location with heading directed toward one of the possible wumpus locations (in goals), then append shoot action.
def plan_shot(current, heading, goals, allowed):
    if goals and allowed:
        psp = PlanShotProblem((current[0], current[1], heading), goals, allowed)
        node = search.astar_search(psp)
        if node:
            plan = node.solution()
            plan.append(action_shoot_str(None))
            # HACK:
            # since the wumpus_alive axiom asserts that a wumpus is no longer alive
            # when on the previous round we perceived a scream, we
            # need to enforce waiting so that time elapses and knowledge of
            # "dead wumpus" can then be inferred...
            plan.append(action_wait_str(None))
            return plan

    # no route can be found, return empty list
    return []
[ "def move_arm_a_to_b(goal): #move very short distance\n\n rospy.loginfo('goal')\n\n waypoints = []\n wpose = group.get_current_pose().pose\n wpose.position.x += 0.0001\n waypoints.append(copy.deepcopy(wpose))\n wpose.position.x = goal[0]\n wpose.position.y = goal[1] # First move up (z)\n wpose.position.z = goal[2] # and sideways (y)\n quaternion = tf.transformations.quaternion_from_euler(goal[3], goal[4], goal[5]) #(roll, pitch, yaw)\n wpose.orientation.x = quaternion[0]\n wpose.orientation.y = quaternion[1]\n wpose.orientation.z = quaternion[2]\n wpose.orientation.w = quaternion[3]\n waypoints.append(copy.deepcopy(wpose))\n # rospy.loginfo(\"waypoitns\")\n # rospy.loginfo(waypoints)\n group.set_planning_time(4)\n (plan, fraction) = group.compute_cartesian_path(\n waypoints, # waypoints to follow\n 0.001, # eef_step - LOWER THIS WHEN using GAZEBO ROBOT\n 2) # jump_threshold\n # rospy.loginfo(goal)\n\n return plan", "def move_to(self):\n #self.find_wall()\n \n t = self.find_best_way()\n if t:\n click(t)\n else:\n click(random.choice(locations))", "def route_on_completion(self) -> None:\n for idx, agent in enumerate(self.agents):\n if not self.is_agent_at_goal(idx):\n self.route(idx, self.agent_goals[idx])", "def walk_to_place(self, level, goal):\n DX = self.pos[0] - goal[0]\n DY = self.pos[1] - goal[1]\n total_length = (DX**2 + DY**2)**0.5\n dx = -1 * self.speed / total_length * DX\n dy = -1 * self.speed / total_length * DY\n self.change_direction(dx, dy)\n self.collision_move(level, dx, dy)", "def straightShotAccuracy(fromY=SHOOT_POSITIONS, goalX=0, goalY=GOAL_POST_Y, z=1.05):\n def executor():\n # aim and shoot via motionPlanning::shootAtTarget (blocking tst service)\n robot.shootAt(float(goalX), float(goalY), float(z))\n \n for y in fromY:\n # shoot 3 times from the same position\n for _ in range(3):\n _singleShot(0, y, None, None, executor)", "def add_goal(self) -> None:\n\n self.grid[self.random_row_goal][self.random_column_goal] = self.goal\n self.all_locations.append((self.random_column_goal, self.random_row_goal))", "def traveling(self):\n\n# self.path[0] should be the monster's current square.\n# self.path[1] should be the square the monster wants to move to.\n# self.path[-1] should be the monster's ultimate destination.\n\n assert self.path != None, \"Despite the monster being in state TRAVELING, the path variable is null.\"\n\n if self.currentLevel.player in self.fov:\n self.state = ais.FIGHTING\n return self.fighting()\n else:\n path_is_invalid = False\n\n if len(self.path) == 0:\n assert False # This shouldn't happen!\n path_is_invalid = True\n elif self.coords != self.path[0]:\n# Something has moved the monster since its last turn.\n path_is_invalid = True\n elif len(self.path) == 1:\n# Since self.coords == self.path[0], the monster has reached its destination!\n self.state = ais.WANDERING\n return self.wandering()\n elif not self.canMove(self.path[1]):\n path_is_invalid = True\n\n if path_is_invalid:\n if len(self.path) == 0:\n# If the path is completely empty, something has gone wrong.\n assert False\n# Just give up and return to being stationary.\n self.state = ais.RESTING\n return self.resting()\n else:\n destination = self.path[-1]\n self.path = pf.find_shortest_path(self.currentLevel, self.coords, destination, True)\n if len(self.path) == 0:\n# There simply is no path to the destination!\n# Set self.path to only contain the destination, so that next turn, this code\n# attempts to find another path.\n self.path = [destination]\n return action.Wait(self)\n elif 
len(self.path) == 1:\n# This should not happen!\n assert False\n return action.Wait(self)\n\n if self.canMove(self.path[1]):\n move_direction = coordinates.subtract(self.path[1], self.coords)\n self.path.pop(0)\n return action.Move(self, move_direction)\n else:\n assert False, \"The supposedly legal path contains an illegal move!\"\n return action.Wait(self)", "def move_arm_handler(req):\n\n goal = [req.x,req.y,req.z,req.rot_x,req.rot_y,req.rot_z]\n # goal = [0.5,-0.5,0.5,0,3.14,0]\n\n group.set_goal_position_tolerance(0.001)\n group.set_goal_orientation_tolerance(0.01)\n\n via_points = plan_cartesian_path(goal,resolution = 1) #res can be changed\n\n for point in via_points:\n plan = move_arm_a_to_b(point) #\n #Publish this plan at my own speed\n if not real_panda:\n group.execute(plan, wait=False)\n print(\"EXECUTING PLAN\")\n\n execute(plan)\n else: #Running on real panada\n plan = slow_down(plan)\n print(\"EXECUTING PLAN ON REAL ROBOT\")\n\n group.execute(plan, wait=True)\n\n group.stop()\n group.clear_pose_targets()\n\n return True", "def _SendNewNavGoal(self):\n if self.execute_path.simple_state != actionlib.SimpleGoalState.DONE:\n self.execute_path.cancel_goal()\n # We need to wait until the goal was really canceled\n if not self.execute_path.wait_for_result(rospy.Duration(2)):\n rospy.logwarn('Cancellation of goal took more than 2 seconds. Continuing anyway.')\n self.execute_path.send_goal(\n navigation_waypoints_server.msg.ExecutePathGoal(\n waypoints=[wp.pose for wp in self._waypoints.GetWaypoints()],\n continue_on_error=True),\n done_cb=self._NavigationDoneCallback,\n feedback_cb=self._NavigationFeedbackCallback)", "def run_ant_tour(self):\n\n\t\twhile self.allowed_locations:\n\t\t\tnext_node = self._choose_next_node()\n\t\t\tself._move_to_node(self.current_location, next_node)\n\n\t\t# finally go back to the initial node (to complete circuit)\n\t\t# self.allowed_locations.append(self.route_taken[0])\n\t\t# next_node = self.route_taken[0]\n\t\t# self._move_to_node(self.current_location, next_node)\n\t\tself.tour_completed = True", "def on_point_goal(self, *args: list) -> None:\n self.goals.points.text = f'Points goal: {self.point_goal:,}'\n set_text_to_fit(self.goals.points)\n if self.parent:\n solo_game = [screen for screen in self.parent.screens if screen.name == 'solo'][0]\n solo_game.point_goal = int(self.point_goal)\n if self.turn_limit and self.point_goal:\n self.set_difficulty()", "def on_move_hit(self, user, move, battle):", "def controller_action(self, obs:Dict, take_action:bool=True, DEBUG:bool=False):\n grip_pos = obs['observation'][:3]\n object_pos = obs['observation'][3:6]\n object_rel_pos = obs['observation'][6:9]\n goal_pos = obs['desired_goal']\n\n disp = 0.005 # extra distance to accellerate\n gripper_dim = 0.029 # maximum planar distance across the gripper\n\n # lift the hand little from the table vertically\n if not self.hand_higher:\n action = [0,0,1,0]\n if grip_pos[2]-object_pos[2] > 0.05:\n if take_action:\n self.hand_higher = True\n if DEBUG:\n print('Hand lifted from the table')\n # once above, move it above the puck\n if self.hand_higher and not self.hand_behind:\n goal_grip_pos = object_pos + (disp + gripper_dim + self.r)*(object_pos - goal_pos)/np.linalg.norm(object_pos - goal_pos)\n action_pos = list(self.kp*(goal_grip_pos - grip_pos))\n action = action_pos[:2] + [0,0]\n if np.linalg.norm(grip_pos[:2]-goal_grip_pos[:2]) < 0.001:\n if take_action:\n self.hand_behind = True\n if DEBUG:\n print('Hand has moved behind')\n # now move the hand down\n if 
self.hand_behind and not self.hand_down:\n action = [0,0,-1,0]\n if grip_pos[2]-object_pos[2] <0.01:\n self.start_time = self.fetch_env.env.sim.data.time # start the time once we are ready to hit\n if take_action:\n self.hand_down = True\n if DEBUG:\n print('Ready to HIT')\n\n # define helper functions\n def calc_direction(pos, goal):\n # calculates unit vector direction from position to goal\n pos = pos[:-1]; goal = goal[:-1]\n return (goal - pos)/np.linalg.norm(goal - pos)\n\n # set the goal speed\n if self.hand_down and not self.set_goal_speed:\n self.dist_to_goal = np.linalg.norm(object_pos[:-1]-goal_pos[:-1])\n self.goal_speed = np.sqrt(2*self.mu*self.g*self.dist_to_goal)\n self.a = (self.goal_speed**2)/(2*disp)\n #print('this is dist to goal ' +str(self.dist_to_goal))\n #print('this is mu ' +str(self.mu))\n #print('this is the goal speed ' + str(self.goal_speed))\n #print('this is the timestep ' + str(self.dt))\n #print('this is a ' + str(self.a))\n #print('this is 0.025+self.r ' +str(0.025+self.r))\n #print('this is the distance between gripper pos and object pos ' +str(np.linalg.norm(object_pos-grip_pos)))\n self.prev_time = self.start_time\n if take_action:\n self.set_goal_speed = True\n\n # slap the puck\n if self.hand_down and self.set_goal_speed:\n time = self.fetch_env.env.sim.data.time\n dtime = time - self.prev_time\n if np.linalg.norm(goal_pos[:-1] - grip_pos[:-1]) > self.dist_to_goal:\n if DEBUG:\n print('this is the distance ' + str(np.linalg.norm(goal_pos[:-1] - grip_pos[:-1])))\n #print(self.prev_speed)\n #print(next_speed)\n next_speed = self.prev_speed + (self.a * dtime)\n action_pos = list((dtime*(next_speed+self.prev_speed)/2)*calc_direction(object_pos,goal_pos))\n self.prev_speed = next_speed\n self.prev_time = time\n else:\n action_pos = [0,0]\n action = action_pos[:2] + [0,0]\n if DEBUG:\n print('commanded action = ' + str(np.linalg.norm(action[0:2])))\n\n # added clipping here\n return np.clip(action, -1, 1)", "def move_to_target(self):\n # Set threshold\n threshold = 0.08\n # Accuracy for keeping in line\n turn_for_accuracy = 0.2\n\n move_msg = Twist()\n move_msg.linear.x = self.forward_speed\n move_msg.angular.z = turn_for_accuracy\n # Calibrate robot according to current path\n direction = get_robot_direction()\n self.calibrate_yaw(direction)\n\n for point in self.path:\n x_c, y_c, wanted_yaw = get_robot_location()\n # Get next direction\n next_direction = self.get_next_direction((x_c, y_c), point)\n # Check if we need to turn\n if next_direction != direction:\n # Turn and calibrate\n self.turn_around(direction, next_direction)\n direction = next_direction\n self.calibrate_yaw(direction)\n\n x_c, y_c, yaw = get_robot_location()\n x_g, y_g = point\n turn_around_dir = 1\n fix_round_move = False\n # Move robot to wanted point\n while not ((x_g - threshold < x_c < x_g + threshold) and (y_g - threshold < y_c < y_g + threshold)):\n direction = get_robot_direction()\n # Check if we need to fix robot yaw\n if fix_round_move:\n move_msg.angular.z = turn_for_accuracy * turn_around_dir\n\n # Send publish to move robot\n self.command_pub.publish(move_msg)\n x_c, y_c, yaw = get_robot_location()\n # Check to which direction we want to fix yaw\n if yaw < convert_direction_to_angle(next_direction):\n turn_around_dir = 1\n fix_round_move = True\n else:\n turn_around_dir = -1\n fix_round_move = True", "def chooseAction(self, gameState):\n\n #Check if our agent has died, if so create a new actionList, and update the goal\n if self.hasDied(gameState):\n self.actionList = []\n 
self.updateGoalState(self.getClosestFood(gameState), gameState)\n\n enemies = self.getOpponents(gameState)\n\n #This block checks for enemies within 6 map spaces of our agent, if one is found\n #It will find the closest friendly space to the agent, update the goal, and find the BFS path to the goal\n for enemy in enemies:\n enemyPosition = gameState.getAgentPosition(enemy)\n if enemyPosition != None:\n enemyDist = self.getMazeDistance(gameState.getAgentPosition(self.index), enemyPosition)\n\n #If the enemy is within 6 spaces and our agent is pacman and the enemy is a ghost\n #We are in enemy territory and are being chased\n if enemyDist <= 6 and (gameState.getAgentState(self.index).isPacman and\n not gameState.getAgentState(enemy).isPacman):\n\n #Obtain the cells that border the enemy territory and find the closest one\n borderCells = self.getBorderCells(gameState)\n bestDist = 9999\n bestCell = borderCells[0]\n currentPos = gameState.getAgentPosition(self.index)\n for cell in borderCells:\n dist = self.getMazeDistance(currentPos, cell)\n if dist < bestDist:\n bestDist = dist\n bestCell = cell\n #Update the goal with the closest friendly cell\n self.goal = bestCell\n self.actionList = []\n #Find the actions that will take us to the cell\n self.actionList = self.breadthFirstSearch(gameState)\n\n #By Default, if the action list is empty, find the nearest food to the agent,\n #Update the goal with the food location and repopulate the action list with the BFS path to the food\n if len(self.actionList) == 0:\n self.updateGoalState(self.getClosestFood(gameState), gameState)\n self.actionList = []\n self.actionList = self.breadthFirstSearch(gameState)\n\n return self.actionList.pop(0)", "def do_move(self, world, friendly_unit, enemy_units):\n\n initial_translation = 11\n nums_turns_since_kill = 20\n print(self.outbound)\n\n # Initialize initial quadrant\n if self.turn_count == 0:\n if friendly_unit.position == (3, 3):\n self.initial_quadrant = 1\n if friendly_unit.position == (26, 3):\n self.initial_quadrant = 2\n if friendly_unit.position == (3, 26):\n self.initial_quadrant = 3\n if friendly_unit.position == (26, 26):\n self.initial_quadrant = 4\n\n self.turn_count += 1\n\n # if unit is dead, stop making moves.\n if friendly_unit.status == 'DISABLED':\n print(\"Turn {0}: Disabled - skipping move.\".format(str(self.turn_count)))\n self.target = None\n self.outbound = True\n return\n\n # HARD CODE THE INITIAL TURNS TO GET A GOOD POSITION\n if self.turn_count <= 4*(initial_translation - 1):\n if self.initial_quadrant == 1:\n self.outbound = True\n self.target = world.position_to_tile_map[(3, 3 + initial_translation)]\n if self.turn_count == initial_translation:\n self.initial_ai_moves += 1\n elif self.initial_ai_moves == 1:\n self.target = world.position_to_tile_map[(3 + initial_translation, 3 + initial_translation)]\n if self.turn_count == (2 * initial_translation)-1:\n self.initial_ai_moves += 1\n elif self.initial_ai_moves == 2:\n self.outbound = False\n self.target = world.util.get_closest_friendly_territory_from(friendly_unit.position,\n friendly_unit.snake)\n if self.turn_count == (4 * initial_translation)-1:\n self.initial_ai_moves += 1\n\n if self.initial_quadrant == 2:\n self.outbound = True\n self.target = world.position_to_tile_map[(26, 3 + initial_translation)]\n if self.turn_count == initial_translation:\n self.initial_ai_moves += 1\n elif self.initial_ai_moves == 1:\n self.target = world.position_to_tile_map[(26 - initial_translation, 3 + initial_translation)]\n if self.turn_count == 
(2 * initial_translation)-1:\n self.initial_ai_moves += 1\n elif self.initial_ai_moves == 2:\n self.outbound = False\n self.target = world.util.get_closest_friendly_territory_from(friendly_unit.position,\n friendly_unit.snake)\n if self.turn_count == (4 * initial_translation)-1:\n self.initial_ai_moves += 1\n\n if self.initial_quadrant == 3:\n self.outbound = True\n self.target = world.position_to_tile_map[(3, 26 - initial_translation)]\n if self.turn_count == initial_translation:\n self.initial_ai_moves += 1\n elif self.initial_ai_moves == 1:\n self.target = world.position_to_tile_map[(3 + initial_translation, 26 - initial_translation)]\n if self.turn_count == (2 * initial_translation) - 1:\n self.initial_ai_moves += 1\n elif self.initial_ai_moves == 2:\n self.outbound = False\n self.target = world.util.get_closest_friendly_territory_from(friendly_unit.position,\n friendly_unit.snake)\n if self.turn_count == (4 * initial_translation) - 1:\n self.initial_ai_moves += 1\n\n if self.initial_quadrant == 4:\n self.outbound = True\n self.target = world.position_to_tile_map[(26 - initial_translation, 26)]\n if self.turn_count == initial_translation:\n self.initial_ai_moves += 1\n elif self.initial_ai_moves == 1:\n self.target = world.position_to_tile_map[(26 - initial_translation, 26 - initial_translation)]\n if self.turn_count == (2 * initial_translation)-1:\n self.initial_ai_moves += 1\n elif self.initial_ai_moves == 2:\n self.outbound = False\n self.target = world.util.get_closest_friendly_territory_from(friendly_unit.position,\n friendly_unit.snake)\n if self.turn_count == (4 * initial_translation)-1:\n self.initial_ai_moves += 1\n\n else:\n print(self.turns_since_kill)\n self.turns_since_kill += 1\n # LOOK FOR KILL, WHEN KILL RETURN HOME, IF NO KILL, THEN NEVER GO HOME ):\n if self.has_killed is False:\n # Just assume killed and run away\n if self.turns_since_kill == nums_turns_since_kill - 1:\n self.has_killed = True\n self.turns_since_kill = 0\n self.outbound = False\n # If there arent any bodies to take, go to closest territory\n if world.util.get_closest_enemy_body_from(friendly_unit.position, None) is None:\n self.outbound = True\n self.target = world.util.get_closest_enemy_territory_from(friendly_unit.position,\n friendly_unit.snake)\n else:\n self.outbound = True\n self.target = world.util.get_closest_enemy_body_from(friendly_unit.position, friendly_unit.snake)\n\n else:\n self.target = world.util.get_closest_friendly_territory_from(friendly_unit.position, None)\n if friendly_unit.position in friendly_unit.territory:\n self.has_killed = False\n self.outbound = True\n self.outbound = False\n\n # if unit reaches the target point, reverse outbound boolean and set target back to None\n # if self.target is not None and friendly_unit.position == self.target.position:\n # self.outbound = not self.outbound\n # self.target = None\n #\n\n # # if outbound and no target set, set target as the closest capturable tile at least 1 tile away\n # from your territory's edge.\n if self.outbound and self.target is None:\n self.target = world.position_to_tile_map((14, 14))\n #\n # # else if inbound and no target set, set target as the closest friendly tile\n elif not self.outbound and self.target is None:\n self.target = world.util.get_closest_friendly_territory_from(friendly_unit.position, None)\n #\n # # set next move as the next point in the path to target\n # Construct the set to avoid\n avoid = set()\n avoid.union(friendly_unit.territory)\n avoid.union(friendly_unit.snake)\n # If going outbound, try 
to avoid own territory\n if self.outbound:\n next_move = world.path.get_shortest_path(friendly_unit.position, self.target.position, avoid)[0]\n else:\n next_move = world.path.get_shortest_path(friendly_unit.position, self.target.position,\n friendly_unit.snake)[0]\n\n # move!\n friendly_unit.move(next_move)\n self.units_from_territory_edge += 1\n print(\"Turn {0}: currently at {1}, making {2} move to {3}.\".format(\n str(self.turn_count),\n str(friendly_unit.position),\n 'outbound' if self.outbound else 'inbound',\n str(self.target.position)\n ))", "def shootArrow(self, x, y, direction):\n\t\thit = False\n\t\t\n\t\t#find the square with the wumpus\n\t\t(wx, wy) = filter( lambda c: self.map[c][1], self.map ).pop()\n\t\t\n\t\t#check the directions to see if the arrow hit the wumpus\n\t\tif direction == NORTH:\n\t\t\thit = x == wx and wy > y\n\t\t\n\t\telif direction == SOUTH:\n\t\t\thit = x == wx and wy < y\n\t\t\n\t\telif direction == EAST:\n\t\t\thit = x < wx and wy == y\n\t\t\n\t\telse:\n\t\t\thit = x > wx and wy == y\n\t\n\t\tif hit:\n\t\t\tself.wumpusDead = True\n\n\t\treturn hit", "def set_goal(self, robot_id, task): \n pub_names = self.goal_pubs.keys()\n pub_objs = self.goal_pubs.values()\n for i in range(len(pub_names)):\n if robot_id == int(pub_names[i]):\n Goal = MoveBaseActionGoal()\n Goal.header.stamp = rospy.Time.now()\n Goal.header.frame_id = ''\n Goal.goal_id.stamp = rospy.Time.now()\n Goal.goal_id.id = str(int(task[0]))\n Goal.goal.target_pose.header.stamp = rospy.Time.now()\n Goal.goal.target_pose.header.frame_id = 'map'\n Goal.goal.target_pose.pose.position.x = task[1]\n Goal.goal.target_pose.pose.position.y = task[2]\n z_rot_rad = task[3] * np.pi / 180\n q = quaternion_from_euler(0, 0, z_rot_rad)\n Goal.goal.target_pose.pose.orientation.z = q[2]\n Goal.goal.target_pose.pose.orientation.w = q[3]\n pub_obj = pub_objs[i]\n pub_obj.publish(Goal)\n break\n else:\n pass", "def go_dropoff(ship, game, commands, map_game):\n\n target = closest_dropoff(ship, game)\n\n if not target[1]:\n\n target = get_position_shipyard(game)\n \n else: \n\n target = target[1].position\n\n movement = get_movement(ship.position, target, map_game, commands)\n\n return ship.move(movement)", "def run_step(self):\n\n if self._cv_image is not None:\n cv2.imshow(\"\", self._cv_image)\n cv2.waitKey(1)\n\n if self._reached_goal:\n # Reached the goal, so stop\n velocity = carla.Vector3D(0, 0, 0)\n self._actor.set_target_velocity(velocity)\n return\n\n self._reached_goal = False\n\n if not self._waypoints:\n # No waypoints are provided, so we have to create a list of waypoints internally\n # get next waypoints from map, to avoid leaving the road\n self._reached_goal = False\n\n map_wp = None\n if not self._generated_waypoint_list:\n map_wp = CarlaDataProvider.get_map().get_waypoint(CarlaDataProvider.get_location(self._actor))\n else:\n map_wp = CarlaDataProvider.get_map().get_waypoint(self._generated_waypoint_list[-1].location)\n while len(self._generated_waypoint_list) < 50:\n map_wps = map_wp.next(3.0)\n if map_wps:\n self._generated_waypoint_list.append(map_wps[0].transform)\n map_wp = map_wps[0]\n else:\n break\n\n direction_norm = self._set_new_velocity(self._generated_waypoint_list[0].location)\n if direction_norm < 2.0:\n self._generated_waypoint_list = self._generated_waypoint_list[1:]\n else:\n # When changing from \"free\" driving without pre-defined waypoints to a defined route with waypoints\n # it may happen that the first few waypoints are too close to the ego vehicle for obtaining a\n # 
reasonable control command. Therefore, we drop these waypoints first.\n while self._waypoints and self._waypoints[0].location.distance(self._actor.get_location()) < 0.5:\n self._waypoints = self._waypoints[1:]\n\n self._reached_goal = False\n direction_norm = self._set_new_velocity(self._waypoints[0].location)\n if direction_norm < 4.0:\n self._waypoints = self._waypoints[1:]\n if not self._waypoints:\n self._reached_goal = True" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
The 'expected initial states and solution pairs' below are provided as a sanity check, showing what the PlanShotProblem solution is expected to produce. Provide the 'initial state' tuple as the argument to test_PRP, and the associated solution list of actions is expected as the result. The test assumes the goals are [(2,3),(3,2)] and that the heuristic fn defined in PlanShotProblem uses the manhattan_distance_with_heading()
def test_PSP(initial = (0,0,3)):
    return plan_shot((initial[0],initial[1]), initial[2],
                     # Goals:
                     [(2,3),(3,2)],
                     # Allowed locations:
                     [(0,0),(0,1),(0,2),(0,3),
                      (1,0),(1,1),(1,2),(1,3),
                      (2,0),            (2,3),
                      (3,0),(3,1),(3,2),(3,3)])
[ "def more_pour_problem(capacities, goal, start=None):\n # your code here\n def pp_is_goal(state):\n return goal in state\n\n def psuccessors(state):\n items = []\n for i in range(0, len(state)):\n if state[i] < capacities[i]:\n tpl = update_tuple(state, i, capacities[i])\n items.append((tpl, ('fill', i)))\n if state[i] > 0:\n tpl = update_tuple(state, i, 0)\n items.append((tpl, ('empty', i)))\n for j in range(0, len(state)):\n if i == j:\n continue\n if state[i] > 0:\n available_j = capacities[j] - state[j]\n if available_j <= 0:\n continue\n val_j = state[j] + state[i] if available_j >= state[i] else state[j] + available_j\n val_i = 0 if available_j >= state[i] else state[i] - available_j\n tpl = update_tuple(state, j, val_j)\n tpl = update_tuple(tpl, i, val_i)\n items.append((tpl, ('pour', i, j)))\n return dict(items)\n\n start = tuple([0 for x in range(0, len(capacities))]) if start == None else start\n return shortest_path_search(start, psuccessors, pp_is_goal) # <== your arguments here", "def foodGhostLogicPlan(problem):\n \"*** YOUR CODE HERE ***\"\n DIRS = [game.Directions.NORTH, game.Directions.SOUTH, game.Directions.EAST, game.Directions.WEST]\n startPos, startGrid = problem.getStartState()\n minTime, maxTime = 0, 51 # change these numbers for debugging\n for x in range(1, problem.getWidth()+1):\n for y in range(1, problem.getHeight()+1):\n if startGrid[x][y]: minTime += 1\n\n for T in range(minTime, maxTime):\n #print(\"Trying to find a solution of length %d\" % T)\n\n clauses = [] # one action per time step\n clauses2 = [] # whether food is at (x,y) at start\n clauses3 = [] # whether food is at (x,y) at goal (should all be false)\n clauses4 = [] # action at specific time and place implies the same action at that time overall\n clauses5 = [] # action at specific time and place implies position at that time and place\n clauses6 = [] # action at specific time and place implies next position\n clauses7 = [] # only one forward action per position per time step\n clauses8 = [] # only one possible action to get to this position per time step\n clauses9 = [] # exactly one goal position\n clauses10 = [] # position at specific time and place implies no food at that time and place\n clauses12 = [] # food at specific time and place implies food prior in same place\n clauses13 = [] # food at specific time and place and no Pacman at that place in the next step implies food still there\n clauses14 = [] # JESUS whether ghost/s are at (x,y) at start\n clauses15 = [] # JESUS turnAround ghosts\n clauses16 = [] # JESUS nextGhostW >> ~currAct\n clauses17 = [] # JESUS nextGhostE >> ~currAct\n clauses18 = [] # JESUS ghost go east\n clauses19 = [] # JESUS ghost go weast\n clauses20 = [] # JESUS ghost \n clauses21 = [] # JESUS ghost \n\n clauses9.append(logic.PropSymbolExpr(\"Pos\", startPos[0], startPos[1], 0))\n\n #JESUS\n for ghost in problem.getGhostStartStates():\n clauses14.append(logic.PropSymbolExpr(\"Ge\", ghost.getPosition()[0], ghost.getPosition()[1], 0))\n\n for t in range(T+1):\n if t < T:\n clauses.extend(exactlyOneAsList([logic.PropSymbolExpr(direc, t) for direc in DIRS]))\n\n # Pacman must be in one of the initial food pellet positions in the end\n goalPositionAxioms = []\n goalPositions = []\n\n for x in range(1, problem.getWidth()+1):\n for y in range(1, problem.getHeight()+1):\n state = (x,y)\n\n #JESUS; All the Way East\n if (problem.isWall((x+1, y))):\n borderGhostE = logic.PropSymbolExpr(\"Ge\", x, y, t)\n turnAroundE = logic.PropSymbolExpr(\"Gw\", x - 1, y, t + 1)\n 
clauses15.append(logic.to_cnf(borderGhostE >> turnAroundE)) # Ge[3, 2, 3] >> Gw[2, 2, 4] if at east wall\n else:\n eastG = logic.PropSymbolExpr(\"Ge\", x, y, t)\n easterG = logic.PropSymbolExpr(\"Ge\", x + 1, y, t + 1)\n clauses18.append(logic.to_cnf(eastG >> easterG))\n\n #JESUS; All the Way West\n if (problem.isWall((x-1, y))):\n borderGhostW = logic.PropSymbolExpr(\"Gw\", x, y, t)\n turnAroundW = logic.PropSymbolExpr(\"Ge\", x + 1, y, t + 1)\n clauses15.append(logic.to_cnf(borderGhostW >> turnAroundW)) # Gw[1, 2, 3] >> Gw[2, 2, 4] if at west wall\n else:\n westG = logic.PropSymbolExpr(\"Gw\", x, y, t)\n westerG = logic.PropSymbolExpr(\"Gw\", x - 1, y, t + 1)\n clauses19.append(logic.to_cnf(westG >> westerG))\n\n # only add these axioms once\n if t == 0:\n if startGrid[x][y]:\n clauses2.append(logic.PropSymbolExpr(\"Food\", x, y, 0)) # food here initially\n goalPositionAxioms.append(logic.PropSymbolExpr(\"Pos\", x, y, T)) # Pacman could end here\n goalPositions.append(state)\n else:\n clauses2.append(~logic.PropSymbolExpr(\"Food\", x, y, 0)) # no food here initially\n clauses3.append(~logic.PropSymbolExpr(\"Food\", x, y, T)) # no food at every position in the end\n\n # if state is not a wall, we calculate all forward and backward implications for position AND food\n if not problem.isWall(state):\n currPos = logic.PropSymbolExpr(\"Pos\", x, y, t) # i.e. Pos[3,6,5]\n currPosNextTime = logic.PropSymbolExpr(\"Pos\", x, y, t+1) # i.e. Pos[3,6,6]\n currFood = logic.PropSymbolExpr(\"Food\", x, y, t) # i.e. Food[3,6,5]\n nextFood = logic.PropSymbolExpr(\"Food\", x, y, t+1) # i.e. Food[3,6,6]\n forwardActions = problem.actions((state, startGrid))\n actionsToGetHere = []\n\n if t < T:\n for action in forwardActions:\n (((nx, ny), nfood), cost) = problem.result((state, startGrid), action)\n rawAct = logic.PropSymbolExpr(action, t)\n currAct = logic.PropSymbolExpr(action, x, y, t)\n nextPos = logic.PropSymbolExpr(\"Pos\", nx, ny, t+1)\n\n nextGhostE = logic.PropSymbolExpr(\"Ge\", nx, ny, t) # JESUS Ge[3, 6, 5]\n nextGhostW = logic.PropSymbolExpr(\"Gw\", nx, ny, t) # JESUS Gw[3, 6, 5]\n waitingGhostNextE = logic.PropSymbolExpr(\"Gw\", nx, ny, t + 1)\n waitingGhostNextW = logic.PropSymbolExpr(\"Ge\", nx, ny, t + 1)\n clauses16.append(logic.to_cnf(nextGhostW >> ~currAct)) # JESUS Gw[3, 6, 5] >> ~rawAct\n clauses17.append(logic.to_cnf(nextGhostE >> ~currAct)) # JESUS GE[3, 6 ,5] >> ~rawAct\n\n clauses20.append(logic.to_cnf(waitingGhostNextW >> ~currAct)) # JESUS Gw[3, 6, 5] >> ~rawAct\n clauses21.append(logic.to_cnf(waitingGhostNextE >> ~currAct)) # JESUS Gw[3, 6, 5] >> ~rawAct\n\n axiom1 = currAct >> rawAct # i.e. West[3,6,5] implies West[5]\n axiom5 = currAct >> currPos # i.e. West[3,6,5] implies Pos[3,6,5]\n axiom2 = currAct >> nextPos # i.e. West[3,6,5] implies Pos[2,6,6]\n axiom1, axiom2, axiom5 = logic.to_cnf(axiom1), logic.to_cnf(axiom2), logic.to_cnf(axiom5)\n clauses4.append(axiom1)\n clauses5.append(axiom5)\n clauses6.append(axiom2)\n onlyOneForwardAction = exactlyOne([logic.PropSymbolExpr(direc, x, y, t) for direc in forwardActions])\n axiom3 = currPos >> onlyOneForwardAction # i.e. Pos[3,6,5] implies North[3,6,5] or West[3,6,5] if South and East are walls\n axiom8 = nextFood >> currFood # i.e. Food[3,6,5] implies Food[3,6,4]\n axiom9 = (currFood & ~currPosNextTime) >> nextFood # i.e. 
Food[3,6,5] and not Pos[3,6,6] implies Food[3,6,6]\n axiom3 = logic.to_cnf(axiom3)\n axiom8 = logic.to_cnf(axiom8)\n axiom9 = logic.to_cnf(axiom9)\n clauses7.append(axiom3)\n clauses12.append(axiom8)\n clauses13.append(axiom9)\n\n axiom6 = currPos >> ~currFood # i.e. Pos[3,6,5] implies not Food[3,6,5]\n axiom6 = logic.to_cnf(axiom6)\n clauses10.append(axiom6)\n\n # we have to consider t in [1,T] for backward implications, instead of t in [0,T-1]\n if t > 0:\n for action in forwardActions:\n (((px, py), pfood), cost) = problem.result((state, startGrid), action)\n if not isReachableFromStart((px, py), startPos, t-1):\n continue\n willAppend = True\n for goalPos in goalPositions:\n if not isReachableFromGoal((px, py), goalPos, t-1, T):\n willAppend = False\n if willAppend:\n actionsToGetHere.append(logic.PropSymbolExpr(game.Directions.REVERSE[action], px, py, t-1))\n onlyOneActionToGetHere = exactlyOne(actionsToGetHere)\n axiom4 = currPos >> onlyOneActionToGetHere # i.e. Pos[3,6,5] implies South[3,7,4] or East[2,6,4] if South and East are walls\n axiom4 = logic.to_cnf(axiom4)\n clauses8.append(axiom4)\n\n # Pacman must be in one of the initial food pellet positions in the end\n if t == 0:\n clauses9.extend(exactlyOneAsList(goalPositionAxioms))\n\n clauses += clauses2 + clauses3 + clauses4 + clauses5 + clauses6 + clauses7 + clauses8 + clauses9 + clauses10 + clauses12 + clauses13 + clauses14 + clauses15 + clauses16 + clauses17 + clauses18 + clauses19 + clauses20 + clauses21\n model = logic.pycoSAT(clauses)\n if model:\n plan = extractActionSequence(model, DIRS)\n return plan\n\n print(\"NO PLAN, THIS SHOULD NOT BE HAPPENING\")\n return [] # should never get here if maze has solution", "def test_set_goals_ht_input(self):\r\n\r\n self.DUT.mission_time = 10.0\r\n self.DUT.hazard_rate_goal = 0.00015\r\n self.DUT.goal_measure = 2\r\n\r\n self.assertFalse(self.DUT.calculate_goals())\r\n self.assertAlmostEqual(self.DUT.reliability_goal, 0.99850112)\r\n self.assertAlmostEqual(self.DUT.mtbf_goal, 6666.66666666)", "def defineFirstGoal(self, gameState):\n\n #The tuples below represent points along the bottom and top walls\n if gameState.isOnRedTeam(self.index):\n return (21, 1)\n else:\n return (10, 14)", "def manhattan_distance_heuristic(state, puzzle_size):\n heuristic = 0\n for i, row in enumerate(state):\n for j, tile in enumerate(row):\n # Find the i, j values of where this tile should actually be.\n # Don't do this for the gap tile\n if tile == GAP:\n continue\n goal_i, goal_j = int((tile-1)//puzzle_size), (tile-1)%puzzle_size # -1 to account for 0-indexing\n # Add difference to heuristic\n heuristic += abs(goal_i - i) + abs(goal_j -j)\n\n return heuristic", "def heuristic(self,state):\n return self.heur(state.assignment, self.goal)", "def test_set_goals_rt_input(self):\r\n\r\n self.DUT.mission_time = 10.0\r\n self.DUT.reliability_goal = 0.975\r\n self.DUT.goal_measure = 1\r\n\r\n self.assertFalse(self.DUT.calculate_goals())\r\n self.assertAlmostEqual(self.DUT.hazard_rate_goal, 0.00253178)\r\n self.assertAlmostEqual(self.DUT.mtbf_goal, 394.97890205)", "def trajOptRRT(self, state_initial, state_final, goal=False, verbose=False):\n # TODO: reconcile trajOpt and trajOptRRT (shouldn't take long)\n\n # stopwatch for solver time\n tsolve_pre = time.time()\n\n # number of knot points - proportional to x-distance seems to work well\n N = int(max([np.floor(0.8 * np.abs(state_final[0] - state_initial[0])), 6]))\n\n # optimization problem: variables t_f, u[k], x[k]\n mp = MathematicalProgram()\n\n # variable 
for time to reach goal\n t_f = mp.NewContinuousVariables(1, \"t_f\")\n dt = t_f[0] / N\n\n k = 0\n u = mp.NewContinuousVariables(2, \"u_%d\" % k)\n input_trajectory = u\n\n x = mp.NewContinuousVariables(6, \"x_%d\" % k)\n state_trajectory = x\n\n for k in range(1, N):\n u = mp.NewContinuousVariables(2, \"u_%d\" % k)\n x = mp.NewContinuousVariables(6, \"x_%d\" % k)\n input_trajectory = np.vstack((input_trajectory, u))\n state_trajectory = np.vstack((state_trajectory, x))\n\n x = mp.NewContinuousVariables(6, \"x_%d\" % N)\n state_trajectory = np.vstack((state_trajectory, x))\n\n if verbose:\n print \"Number of decision vars\", mp.num_vars()\n\n # cost function: penalize electric energy use and overall control effort\n thrust = input_trajectory[:, 0]\n elev = input_trajectory[:, 1]\n vel = state_trajectory[:, 2]\n allvars = np.hstack((t_f[0], thrust, elev, vel))\n def totalcost(X):\n dt = X[0] / N\n u0 = X[1:N + 1]\n u1 = X[N + 1:2 * N + 1]\n v = X[2 * N + 1:3 * N + 1]\n return dt * (1.0 * u0.dot(u0) + 1.0 * u1.dot(u1) + 10.0 * X[0] * (u0.dot(v)))\n\n mp.AddCost(totalcost, allvars)\n\n # initial state constraint\n for i in range(len(state_initial)):\n mp.AddLinearConstraint(state_trajectory[0, i] == state_initial[i])\n\n # final state constraints\n if goal:\n # final state constraint (x position)\n mp.AddLinearConstraint(state_trajectory[-1, 0] == 0.0)\n\n # final state constraint (z position) NOTE: range is acceptable\n mp.AddLinearConstraint(state_trajectory[-1, 1] <= 1.5)\n mp.AddLinearConstraint(state_trajectory[-1, 1] >= 0.5)\n\n # final state constraint (velocity) NOTE: range is acceptable\n mp.AddLinearConstraint(state_trajectory[-1, 2] <= 9.0)\n mp.AddLinearConstraint(state_trajectory[-1, 2] >= 6.0)\n\n # final state constraint (flight path angle) NOTE: small range here\n mp.AddLinearConstraint(state_trajectory[-1, 3] <= 1.0 * np.pi / 180.0)\n mp.AddLinearConstraint(state_trajectory[-1, 3] >= - 1.0 * np.pi / 180.0)\n\n # final state constraint (pitch rate)\n mp.AddLinearConstraint(state_trajectory[-1, 5] == 0.0)\n else:\n for i in range(len(state_initial)):\n mp.AddLinearConstraint(state_trajectory[-1, i] == state_final[i])\n\n # input constraints\n for i in range(len(input_trajectory[:, 0])):\n mp.AddLinearConstraint(input_trajectory[i, 0] >= 0.0)\n mp.AddLinearConstraint(input_trajectory[i, 0] <= 1.2 * self.m * self.g)\n mp.AddLinearConstraint(input_trajectory[i, 1] >= -30.0)\n mp.AddLinearConstraint(input_trajectory[i, 1] <= 30.0)\n\n # state constraints\n for i in range(len(state_trajectory[:, 0])):\n # x position\n mp.AddLinearConstraint(state_trajectory[i, 0] >= state_initial[0])\n mp.AddLinearConstraint(state_trajectory[i, 0] <= state_final[0])\n # z position\n mp.AddLinearConstraint(state_trajectory[i, 1] >= 0.3)\n mp.AddLinearConstraint(state_trajectory[i, 1] <= 2.0)\n # velocity\n mp.AddLinearConstraint(state_trajectory[i, 2] >= 2.0)\n mp.AddLinearConstraint(state_trajectory[i, 2] <= 18.0)\n # flight path angle\n mp.AddLinearConstraint(state_trajectory[i, 3] >= -30.0 * np.pi / 180.0)\n mp.AddLinearConstraint(state_trajectory[i, 3] <= 30.0 * np.pi / 180.0)\n # pitch angle\n mp.AddLinearConstraint(state_trajectory[i, 4] >= -20.0 * np.pi / 180.0)\n mp.AddLinearConstraint(state_trajectory[i, 4] <= 40.0 * np.pi / 180.0)\n # pitch rate\n mp.AddLinearConstraint(state_trajectory[i, 5] >= -20.0 * np.pi / 180.0)\n mp.AddLinearConstraint(state_trajectory[i, 5] <= 20.0 * np.pi / 180.0)\n\n # dynamic constraints (direct transcription)\n for j in range(1, N + 1):\n dynamic_prop = dt * 
self.airplaneLongDynamics(state_trajectory[j - 1, :], input_trajectory[j - 1, :])\n for k in range(len(state_initial)):\n mp.AddConstraint(state_trajectory[j, k] == state_trajectory[j - 1, k] + dynamic_prop[k])\n\n # initial guess for time\n t_guess = np.abs(state_final[0] - state_initial[0]) / (0.5 * (state_final[2] + state_initial[2]))\n mp.SetInitialGuess(t_f[0], t_guess)\n\n # initial guesses for state\n if goal:\n state_final_dummy = np.array(state_final)\n state_final_dummy[1] = state_initial[1]\n state_final_dummy[4] = state_initial[4]\n for i in range(len(state_trajectory[:, 0])):\n state_guess = ((N - i) / N) * state_initial + (i / N) * state_final_dummy\n for j in range(len(state_guess)):\n mp.SetInitialGuess(state_trajectory[i, j], state_guess[j])\n else:\n for i in range(len(state_trajectory[:, 0])):\n state_guess = ((N - i) / N) * state_initial + (i / N) * state_final\n for j in range(len(state_guess)):\n mp.SetInitialGuess(state_trajectory[i, j], state_guess[j])\n\n # initial guesses for input\n for i in range(N):\n mp.SetInitialGuess(input_trajectory[i, 0], self.m * self.g / 3.5)\n mp.SetInitialGuess(input_trajectory[i, 1], 0.01)\n\n # time constraints\n mp.AddLinearConstraint(t_f[0] <= 2.0 * t_guess)\n mp.AddLinearConstraint(t_f[0] >= 0.5 * t_guess)\n\n # set SNOPT iteration limit\n it_limit = int(max(20000, 40 * mp.num_vars()))\n mp.SetSolverOption(SolverType.kSnopt, 'Iterations limit', it_limit)\n\n if verbose:\n print(\"** solver begin with N = %d **\" % N)\n # solve nonlinear optimization problem (w/SNOPT)\n result = mp.Solve()\n if verbose:\n print result\n\n # convert from symbolic to float\n utraj = mp.GetSolution(input_trajectory)\n t_f = mp.GetSolution(t_f)\n xtraj = mp.GetSolution(state_trajectory)\n ttraj = t_f[0] * np.linspace(0.0, 1.0, (N + 1))\n\n tsolve_post = time.time()\n tsolve = tsolve_post - tsolve_pre\n\n solver_id = mp.GetSolverId()\n\n if verbose:\n print (\"** %s solver finished in %.1f seconds **\\n\" % (solver_id.name(), tsolve))\n print (\"t_f computed: %.3f seconds\" % t_f[0])\n\n cost = -1\n # get total cost of solution\n if result == SolutionResult.kSolutionFound:\n thrust = utraj[:, 0]\n elev = utraj[:, 1]\n vel = xtraj[:, 2]\n allvars = np.hstack((t_f[0], thrust, elev, vel))\n cost = totalcost(allvars)\n if verbose:\n print (\"cost computed: %.3f\" % cost)\n\n return utraj, xtraj, ttraj, result, cost", "def test():\n assert csuccessors((2, 2, 1, 0, 0, 0)) == {(2, 1, 0, 0, 1, 1): 'C->',\n (1, 2, 0, 1, 0, 1): 'M->',\n (0, 2, 0, 2, 0, 1): 'MM->',\n (1, 1, 0, 1, 1, 1): 'MC->',\n (2, 0, 0, 0, 2, 1): 'CC->'}\n assert csuccessors((1, 1, 0, 4, 3, 1)) == {(1, 2, 1, 4, 2, 0): '<-C',\n (2, 1, 1, 3, 3, 0): '<-M',\n (3, 1, 1, 2, 3, 0): '<-MM',\n (1, 3, 1, 4, 1, 0): '<-CC',\n (2, 2, 1, 3, 2, 0): '<-MC'}\n assert csuccessors((1, 0, 0, 4, 1, 1)) == {(1, 1, 1, 4, 0, 0): '<-C',\n (2, 0, 1, 3, 1, 0): '<-M',\n (2, 1, 1, 3, 0, 0): '<-MC',\n (3, 0, 1, 2, 1, 0): '<-MM'}\n assert csuccessors((1, 4, 1, 2, 2, 0)) == {}\n assert mc_problem((1, 0, 1, 0, 0, 0)) == [(1, 0, 1, 0, 0, 0), 'M->', (0, 0, 0, 1, 0, 1)]\n assert mc_problem((1, 1, 1, 0, 0, 0)) == [(1, 1, 1, 0, 0, 0), 'MC->', (0, 0, 0, 1, 1, 1)]\n assert mc_problem((2, 1, 1, 0, 0, 0)) == [(2, 1, 1, 0, 0, 0), 'MC->', (1, 0, 0, 1, 1, 1), '<-C', (1, 1, 1, 1, 0, 0), 'MC->', (0, 0, 0, 2, 1, 1)]\n assert mc_problem((1, 2, 1, 0, 0, 0)) == None\n return 'tests pass'", "def is_goal(state):\n return sum(sum(state, [])) == 1", "def minimax(self, state):\n\n self.init_food_list = state.getFood().asList()\n closed = set()\n\n # 
first step necessary to have different path depending\n # on the type of ghost. We go to the position the most distant of the\n # ghost . The reaction of the ghost will be different\n # depending of it type (smarty and dumby / greedy)\n if state.getGhostDirection(1) == 'Stop':\n successors = state.generatePacmanSuccessors()\n max_dist = -math.inf\n chosen_action = 'Stop'\n for next_state, action in successors:\n dist = manhattanDistance(\n state.getGhostPosition(1), state.getPacmanPosition())\n if max_dist < dist:\n max_dist = dist\n chosen_action = action\n return [chosen_action]\n\n final_score, final_path = self.minimax_rec(state, 0, 0, closed)\n\n return final_path", "def chooseAction(self, gameState):\n pacmanPosition = gameState.getPacmanPosition()\n legal = [a for a in gameState.getLegalPacmanActions() if a != Directions.STOP]\n #Danish starts\n livingGhosts = gameState.getLivingGhosts()\n livingGhostPositionDistributions = [beliefs for i,beliefs\n in enumerate(self.ghostBeliefs)\n if livingGhosts[i+1]]\n #Danish ends\n \"*** YOUR CODE HERE ***\"\n #Danish starts\n closestGhostDistance = float(\"inf\")\n closestGhostPosition = None\n\n for distribution in livingGhostPositionDistributions:\n probability = 0\n maxProbPos = None\n for d in distribution:\n if distribution[d] > probability:\n probability = distribution[d]\n maxProbPos = d\n currDist = self.distancer.getDistance(pacmanPosition, maxProbPos)\n if currDist < closestGhostDistance:\n closestGhostDistance = currDist\n closestGhostPosition = maxProbPos\n\n \"Now that we have the probabalistic closest ghost position, we need to pick a move\"\n \"that minimizes the gap\"\n minDist = closestGhostDistance\n bestMove = None\n \"this loop takes a random best move (in case multiple happen to close the distance equally\"\n for action in legal:\n succPos = Actions.getSuccessor(pacmanPosition, action)\n succDist = self.distancer.getDistance(succPos, closestGhostPosition)\n if succDist < minDist:\n minDist = succDist\n bestMove = action\n elif succDist == minDist:\n bestMove = random.choice([bestMove, action])\n\n #Danish ends\n return bestMove\n\n util.raiseNotDefined()", "def construct_optimized_traj(self, initial_state, desired_state, objects, walls):\n best_traj = []\n best_traj_cost = self.LARGE_NUMBER\n\n start_time = time.perf_counter()\n end_time = start_time + self.PLAN_TIME_BUDGET\n MAX_TRYS = 10\n cnt = 0\n\n while (time.perf_counter() < end_time and cnt < MAX_TRYS):\n end_time_per_trial = min(end_time, time.perf_counter() + (self.PLAN_TIME_BUDGET/2))\n traj, traj_dist = self.construct_traj(initial_state, desired_state, objects, walls, end_time_per_trial)\n if(traj_dist == self.LARGE_NUMBER):\n cnt+= 0.5\n print(\"NO PATHS FOUND (Generate Optimized Trajectory)\")\n if(traj_dist < best_traj_cost): \n cnt+=1\n best_traj = traj \n best_traj_cost = traj_dist \n # print(\"tries\", cnt)\n\n return best_traj, best_traj_cost", "def solve_missionaries_cannibals_prob():\n\n def h(state: tuple):\n \"\"\"\n h(x) heuristic which simply counts the number of missionaries and canibals on the\n destination left bank.\n \"\"\"\n return 6 - state[0] - state[1]\n\n def mc_expand(state: tuple):\n \"\"\"\n Generates child nodes by evaluating all possible moves, then filtering by\n rules of the puzzle.\n \"\"\"\n\n # Boat state is at index 2 of tuple; 1 indicates boat is on left bank.\n boat = state[2]\n\n # Missionaries on left bank.\n m_left = state[0]\n\n # Missionaries on right bank.\n m_right = 3 - m_left\n\n # Canibals on left bank.\n c_left = 
state[1]\n\n # Canibals on right bank.\n c_right = 3 - c_left\n\n # Next look at all posibilities of missionaries and canibals traveling by boat\n # across river with boat that has capacity for only 2 people.\n children = list()\n\n # Case 1: boat is on right bank.\n if boat is 0:\n children = [\n (m_left + 2, c_left, 1), # 2M go to left bank.\n (m_left + 1, c_left + 1, 1), # 1M & 1C go to left bank.\n (m_left, c_left + 2, 1), # 2C go to left bank.\n (m_left + 1, c_left, 1), # 1M goes to left bank.\n (m_left, c_left + 1, 1) # 1C goes to left bank.\n ]\n\n # Case 2: boat is on left bank.\n else:\n children = [\n (m_left - 2, c_left, 0), # 2M go to right bank.\n (m_left - 1, c_left - 1, 0), # 1M & 1C go to right bank.\n (m_left, c_left - 2, 0), # 2C go to right bank.\n (m_left - 1, c_left, 0), # 1M goes to right bank.\n (m_left, c_left - 1, 0) # 1C goes to right bank.\n ]\n\n # Rule A: number of M and C must be non-negative and add up to 3.\n def non_negative_rule(state: tuple):\n return (state[0] >= 0 and\n state[1] >= 0 and\n state[0] <= 3 and\n state[1] <= 3)\n\n # Rule B: if a bank has M > 0, M >= C otherwise C eats M.\n def dont_eat_me_rule(state: tuple):\n m_left = state[0]\n m_right = 3 - m_left\n c_left = state[1]\n c_right = 3 - c_left\n return ((m_left >= c_left or m_left is 0) and\n (m_right >= c_right or m_right is 0))\n\n # Filter to feasible child states by applying rule A and B.\n return list(filter(lambda s: non_negative_rule(s)\n and dont_eat_me_rule(s),\n children))\n\n # Initial state of puzzle is 3 missionaries, 3 canibals, and boat on right bank.\n init_state = (0, 0, 0)\n\n # Run A* search algorithm and return profiling metrics to caller.\n return astar.astar_search(init_state=init_state,\n expand_fn=mc_expand,\n heuristic_fn=h)", "def solve_puzzle(initial_state):\n queue = PriorityQueue()\n visited_states = set()\n\n queue.put((0, uuid.uuid4(), StateWithParent(state=initial_state, parent=None)))\n\n while not queue.empty():\n parent_cost, _, current_state_with_parent = queue.get()\n\n current_state = current_state_with_parent.state\n visited_states.add(state_to_tuple(current_state))\n\n actions = get_available_actions(current_state)\n successor_states = map(lambda action: action(current_state), actions)\n\n for state in successor_states:\n if state_to_tuple(state) not in visited_states:\n new_state_with_parent = StateWithParent(state=state,\n parent=current_state_with_parent)\n cost = heuristic_cost(state)\n if cost == 0:\n # If the heuristic cost of the given state equals 0, then\n # the goal state is found and we can return it immediately.\n return new_state_with_parent\n total_cost = cost + parent_cost\n\n queue.put((total_cost, uuid.uuid4(), new_state_with_parent))\n\n return None", "def heuristic(state):\n # First fill the state with 1s, and count the connect fours for player 1.\n # Then fill the state with -1s and count the connect fours for player 2.\n state_1 = np.array(state)\n state_1 = np.where(state_1 == 0, 1, state_1)\n line_sums = ConnectFour._calculate_line_sums(state_1)\n num_wins_1 = np.sum(line_sums == 4)\n\n state_2 = np.array(state)\n state_2 = np.where(state_2 == 0, -1, state_2)\n line_sums = ConnectFour._calculate_line_sums(state_2)\n num_wins_2 = np.sum(line_sums == -4)\n\n return num_wins_1 - num_wins_2", "def solvable(self, domain, initial_state, goal_state):\n last_state = set([])\n reachable_literals = set(initial_state)\n positive_goals = set(goal_state[0])\n actions = domain\n\n positive_effects = set([])\n negative_effects = set([])\n for 
a in actions:\n positive_effects = positive_effects.union(set(a.add_effects))\n negative_effects = negative_effects.union(set(a.del_effects))\n # First check the obvious stuff\n for p in goal_state[0]:\n if p not in reachable_literals and p not in positive_effects:\n return False\n for p in goal_state[1]:\n if p in reachable_literals and p not in negative_effects:\n return False\n\n while last_state != reachable_literals:\n last_state = reachable_literals.copy()\n if positive_goals.issubset(reachable_literals):\n return True\n for a in actions:\n if a.applicable(reachable_literals):\n reachable_literals = reachable_literals.union(a.add_effects)\n\n return False", "def actions(state, goals, direction_dict):\n actions = []\n for piece in state:\n if piece in goals:\n new_state = state.difference(set([piece]))\n actions.append((new_state, f\"EXIT from {piece}.\"))\n continue\n for move, jump in direction_dict[piece]:\n if move:\n if move not in state:\n new_state = state.difference(\n set([piece])).union(set([move]))\n actions.append(\n (new_state, f\"MOVE from {piece} to {move}.\"))\n elif jump and jump not in state:\n new_state = state.difference(\n set([piece])).union(set([jump]))\n actions.append(\n (new_state, f\"JUMP from {piece} to {jump}.\"))\n elif jump and jump not in state:\n new_state = state.difference(set([piece])).union(set([jump]))\n actions.append((new_state, f\"JUMP from {piece} to {jump}.\"))\n return actions", "def test_set_goals_rt_zero_input(self):\r\n\r\n self.DUT.mission_time = 10.0\r\n self.DUT.reliability_goal = 0.0\r\n self.DUT.goal_measure = 1\r\n\r\n self.assertTrue(self.DUT.calculate_goals())", "def test_goal_is_same_as_given(self):\n goal = 30\n b = cs.Board(ladders=[(1, 4)], chutes=[(9, 2)], goal=goal)\n assert b.goal == goal" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Creates a pandas dataframe with information about libraries from the specified platform on Libraries.io.
def get_lib_info(self, libraries, platform, save_to=None):

        libs_info = pd.DataFrame()
        projects_path = os.path.join(self._librariesio_path, projects_filename)
        self._log.info("Looking for libraries info...")
        for chunk in pd.read_csv(projects_path, chunksize=LibrariesIOFetcher.CHUNKSIZE,
                                 index_col=False, dtype=object):
            for lib_name in libraries:
                indexes = (chunk["Name"] == lib_name)
                if platform != "":
                    indexes = indexes & (chunk["Platform"] == platform)
                if libraries[lib_name] != "":
                    indexes = indexes & ((chunk["Repository URL"] == libraries[lib_name]) |
                                         pd.isnull(chunk["Repository URL"]))
                res = chunk[indexes]
                if len(res) > 0:
                    self._log.info("%s library entry is found!", lib_name)
                    libs_info = pd.concat([libs_info, res])
        if save_to:
            libs_info.to_csv(save_to, index=False)

        return libs_info
[ "def createLibraryImportMenu(self):\n from . import Tools\n\n sel_env = Tools.getEnvironment()\n\n file = \"platformio_boards.json\"\n data = self.getTemplateMenu(file_name=file, user_path=True)\n data = json.loads(data)\n\n # check current platform\n try:\n platform = data[sel_env]['platform'].lower()\n except:\n platform = 'all'\n\n library_paths = Paths.getLibraryFolders(platform)\n added_lib = [[_(\"select_library\").upper()]]\n check_list = []\n\n # get preset\n for library_dir in library_paths:\n # add separator\n sub_path = glob.glob(library_dir)\n # search in sub path\n for library in sub_path:\n # Add core libraries\n if '__cores__' in library:\n core_subs = os.path.join(library, '*')\n core_subs = glob.glob(core_subs)\n for core_sub in core_subs:\n core_sub_subs = os.path.join(core_sub, '*')\n core_sub_subs = glob.glob(core_sub_subs)\n for core_lib in core_sub_subs:\n if caption not in check_list:\n caption = os.path.basename(core_lib)\n added_lib.append([caption, library])\n check_list.append(caption)\n\n # the rest of the libraries\n caption = os.path.basename(library)\n\n # get library name from json file\n pio_libs = os.path.join('platformio', 'lib')\n if pio_libs in library:\n # get library json details\n json_file = os.path.join(library, 'library.json')\n if not os.path.exists(json_file):\n json_file = os.path.join(library, 'library.properties')\n\n # when there´s json content, read it\n data = JSONFile(json_file)\n data = data.getData()\n if (data != {}):\n caption = data['name']\n\n if caption not in added_lib and '__cores__' not in caption and caption not in check_list:\n added_lib.append([caption, library])\n check_list.append(caption)\n\n if(len(added_lib) <= 1):\n added_lib = [[_(\"menu_not_libraries\")]]\n\n return added_lib", "def get_for_platform(\n self, target_platform: str = get_system().lower()\n ) -> Dict[str, KindOfData]:", "def list_libraries(self, runtime_uid=None, limit=None):\n runtime_uid = str_type_conv(runtime_uid)\n Runtimes._validate_type(runtime_uid, u'runtime_uid', STR_TYPE, False)\n\n if runtime_uid is None:\n details = self.get_library_details()\n\n resources = details[u'resources']\n values = [(m[u'metadata'][u'guid'], m[u'entity'][u'name'], m[u'entity'][u'version'], m[u'metadata'][u'created_at'],\n m[u'entity'][u'platform']['name'], m[u'entity'][u'platform'][u'versions']) for m in\n resources]\n\n self._list(values, [u'GUID', u'NAME', u'VERSION', u'CREATED', u'PLATFORM NAME', u'PLATFORM VERSIONS'], limit, 50)\n else:\n details = self.get_details(runtime_uid)\n\n if 'custom_libraries' not in details['entity'] or len(details['entity']['custom_libraries']) == 0:\n print('No libraries found for this runtime.')\n return\n\n values = [(m[u'url'].split('/')[-1], m[u'name'], m['version']) for m in details['entity']['custom_libraries']]\n\n values = sorted(sorted(values, key=lambda x: x[2], reverse=True), key=lambda x: x[1])\n\n from tabulate import tabulate\n\n header = [u'GUID', u'NAME', u'VERSION']\n table = tabulate([header] + values)\n\n print(table)", "def show_available_platforms_and_devices():\n platforms = _cl.get_platforms()\n for platform_index, platform in enumerate(platforms):\n print(str(platform_index) + \": \" + platform.get_info(_cl.platform_info.NAME))\n devices = platform.get_devices()\n for device_index, device in enumerate(devices):\n print(\n 4 * \" \"\n + str(device_index)\n + \": \"\n + device.get_info(_cl.device_info.NAME)\n )", "def create_df(data_lst=read_data()):\n titles = [line.strip('\\\"') for line in 
get_needed_data('titles')]\n years = [line.strip('(') for line in get_needed_data('years')]\n locations = [line.strip('\\t').split('\\t')[0] for line in get_needed_data('locations')]\n df = pd.DataFrame({'title': titles,\n 'air_year': years,\n 'location': locations})\n return df", "def repo_information(self):\n\n data = [[repo.git_dir,\n repo.repo.branches,\n repo.repo.bare,\n repo.repo.remotes,\n repo.repo.description,\n repo.repo.references,\n repo.repo.heads,\n repo.repo.submodules,\n repo.repo.tags,\n repo.repo.active_branch] for repo in self.repos]\n\n df = pd.DataFrame(data, columns=[\n 'local_directory',\n 'branches',\n 'bare',\n 'remotes',\n 'description',\n 'references',\n 'heads',\n 'submodules',\n 'tags',\n 'active_branch'\n ])\n\n return df", "def get_platform_info():\n\n systemType = \"Unknown\"\n osName = \"\"\n processor = \"\"\n osInfo = \"\"\n\n try:\n import platform\n systemType = platform.system()\n if (systemType == \"Windows\" or systemType == \"Microsoft\"):\n systemType = \"Windows\"\n if(python_version < 3.0):\n import _winreg\n handle = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, \"SOFTWARE\\\\Microsoft\\\\Windows NT\\\\CurrentVersion\")\n (osName, type) = _winreg.QueryValueEx(handle, \"ProductName\")\n _winreg.CloseKey(handle)\n handle = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, \"SYSTEM\\\\ControlSet001\\\\Control\\\\Session Manager\\\\Environment\")\n (processor, type) = _winreg.QueryValueEx(handle, \"PROCESSOR_ARCHITECTURE\")\n _winreg.CloseKey(handle)\n else:\n import winreg\n handle = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, \"SOFTWARE\\\\Microsoft\\\\Windows NT\\\\CurrentVersion\")\n (osName, type) = winreg.QueryValueEx(handle, \"ProductName\")\n winreg.CloseKey(handle)\n handle = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, \"SYSTEM\\\\ControlSet001\\\\Control\\\\Session Manager\\\\Environment\")\n (processor, type) = winreg.QueryValueEx(handle, \"PROCESSOR_ARCHITECTURE\")\n winreg.CloseKey(handle)\n osInfo = osName + \" \" + processor\n else:\n import os\n if (systemType == \"Linux\"):\n import re\n pipe = \"\"\n if os.path.isfile(\"/etc/SuSE-release\"):\n pipe = os.popen('head -n 1 /etc/SuSE-release')\n else:\n pipe = os.popen(\"head -n 1 /etc/issue\")\n osName = pipe.readline()\n pipe.close()\n osName = osName.rstrip()\n m = re.search(\"(.*?) 
\\(.*?\\)\", osName)\n if m:\n osName = m.groups()[0]\n pipe = os.popen('uname -p')\n processor = pipe.readline()\n pipe.close()\n processor = processor.rstrip()\n osInfo = osName + \" \" + processor\n elif (systemType == 'SunOS'):\n pipe = os.popen('uname -srp')\n unameInfo = pipe.readline()\n pipe.close()\n unameInfo = unameInfo.rstrip()\n pipe = os.popen('isainfo -b')\n isaInfo = pipe.readline()\n pipe.close()\n isaInfo = isaInfo.rstrip()\n isaInfo += \"-bit\"\n osInfo = unameInfo + \" \" + isaInfo\n elif (systemType == 'HP-UX'):\n pipe = os.popen('uname -srm')\n osInfo = pipe.readline()\n pipe.close()\n osInfo = osInfo.rstrip()\n elif (systemType == 'FreeBSD'):\n pipe = os.popen('uname -srm')\n osInfo = pipe.readline()\n pipe.close()\n osInfo = osInfo.rstrip()\n else:\n osInfo = systemType\n except:\n osInfo = systemType\n return osInfo", "def test_get_platforms_usage(self):\n pass", "def SupportedPlatforms(self):\n return self.platform_infos.keys()", "def _convert_library(self, design):\n\n for _cc in design.components.components:\n _libid = 'default'\n _compname = _cc\n _tech = []\n _attrs = []\n if -1 != _cc.find(':'):\n _libid, _compname = _cc.split(':')\n\n _lib = None\n _libnid = -1\n for _li, _ll in enumerate(self.libraries):\n if _libid == _ll.name:\n _lib = _ll\n _libnid = 1 + _li # numbered from 1\n break\n else:\n _lib = Eagle.Library(name=_libid)\n _libnid = len(self.libraries) # numbered from 1\n self.libraries.append(_lib)\n\n# checking if symbols / devsets / packages are in the library already\n# (adding them if not)\n _co = design.components.components[_cc]\n\n if 0 == len(_lib.devsets):\n _lib.devsets.append(Eagle.DeviceSetHeader(name='default'))\n\n for _di, _dd in enumerate(_lib.devsets[0].shapesets):\n if _compname == _dd.name:\n _dset = _dd\n break\n else:\n _prefix = 'xC'\n _desc = 'n/a'\n if 'prefix' in _co.attributes:\n _prefix = _co.attributes['prefix']\n if 'description' in _co.attributes:\n _desc = _co.attributes['description']\n _dset = Eagle.DeviceSet(name=_compname, prefix=_prefix, \n description=_desc, uservalue=False)\n\n _lib.devsets[0].shapesets.append(_dset)\n\n if 0 == len(_lib.symbols):\n _lib.symbols.append(Eagle.SymbolHeader(name='default'))\n\n for _si, _ss in enumerate(_lib.symbols[0].shapesets):\n if _compname == _ss.name:\n _symbol = _ss\n _symnid = 1 + _si # numbered from 1\n break\n else: # no such symbol yet\n _symbol = Eagle.Symbol(libid=_libnid, name=_compname)\n _symnid = len(_lib.symbols[0].shapesets) # numbered from 1\n\n for _css in _co.symbols:\n for _cbb in _css.bodies:\n\n for _ci in design.component_instances:\n if _cc != _ci.library_id:\n continue\n for _xaa in _ci.attributes:\n if 'technology' == _xaa:\n _tech.append(_ci.attributes[_xaa])\n elif _xaa in ('prefix', 'description'):\n pass\n else:\n _attrs.append((_xaa, _ci.attributes[_xaa]))\n for _sa in _ci.symbol_attributes:\n for _an, _aa in enumerate(_sa.annotations):\n _val = 'n/a'\n if 0 == _an:\n _val = '>NAME'\n elif 1 == _an:\n _val = '>VALUE'\n\n _rot = self.Shape.rotate2strings(_aa.rotation)\n\n _symbol.shapes.append(Eagle.Text(\n value=_val,\n x=_aa.x - _sa.x,\n y=_aa.y - _sa.y,\n size=1.778, layer=95, \n rotate=_rot, font=None,\n ratio=10))\n\n for _cpp in _cbb.pins:\n\n _name = None\n if None != _cpp.label:\n _name = _cpp.label.text\n\n _visible = None\n if 'visible' in _cpp.attributes:\n _visible = _cpp.attributes['visible']\n\n _dir = None\n if 'direction' in _cpp.attributes:\n _dir = _cpp.attributes['direction']\n\n _rot = None\n\n _len = 'short'\n if 'length' in 
_cpp.attributes:\n _len = _cpp.attributes['length']\n \n _func = None\n if 'function' in _cpp.attributes:\n _func = _cpp.attributes['function']\n \n _swap = 0\n if 'swaplevel' in _cpp.attributes:\n _swap = _cpp.attributes['swaplevel']\n \n _symbol.shapes.append(Eagle.Pin(name=_name,\n x=_cpp.p2.x, y=_cpp.p2.y, visible=_visible,\n direction=_dir, rotate=_rot, length=_len,\n function=_func, swaplevel=_swap))\n for _cff in _cbb.shapes:\n\n _layer = 94\n if 'label' in _cff.attributes:\n _layer = _cff.attributes['layer']\n\n if isinstance(_cff, Line):\n _style = 'Continuous'\n if 'style' in _cff.attributes:\n _style = _cff.attributes['style']\n\n _width = 0.254\n if 'width' in _cff.attributes:\n _width = _cff.attributes['width']\n\n _symbol.shapes.append(Eagle.Wire(\n x1=_cff.p1.x, y1=_cff.p1.y,\n x2=_cff.p2.x, y2=_cff.p2.y,\n style=_style, layer=_layer, width=_width))\n elif isinstance(_cff, Rectangle):\n _symbol.shapes.append(Eagle.Rectangle(\n x1=_cff.x, y1=_cff.y,\n x2=(_cff.x + _cff.width), \n y2=(_cff.y - _cff.height),\n rotate=None, layer=_layer))\n elif isinstance(_cff, Arc):\n _style = 'Continuous'\n if 'style' in _cff.attributes:\n _style = _cff.attributes['style']\n\n _width = 0.254\n if 'width' in _cff.attributes:\n _width = _cff.attributes['width']\n\n _layer = 91 # usually Nets\n\n _dir = ('counterclockwise' \n if _cff.start_angle < _cff.end_angle\n else 'clockwise')\n _symbol.shapes.append(Eagle.Arc( # _cff's angles're in radians\n x1=_cff.x + _cff.radius * math.cos(_cff.start_angle), # sign is ok\n y1=_cff.y + _cff.radius * math.sin(_cff.start_angle),\n x2=_cff.x + _cff.radius * math.cos(_cff.end_angle),\n y2=_cff.y + _cff.radius * math.sin(_cff.end_angle),\n style=_style, \n layer=_layer, width=_width,\n curve=math.degrees(abs(_cff.start_angle - _cff.end_angle)),\n cap=None, \n direction=_dir))\n elif isinstance(_cff, BezierCurve):\n# raise NotImplementedError(\"BezierCurve isn't implemented for Eagle yet\")\n# TODO curve approximation with arcs\n _style = 'Continuous'\n if 'style' in _cff.attributes:\n _style = _cff.attributes['style']\n\n _width = 0.254\n if 'width' in _cff.attributes:\n _width = _cff.attributes['width']\n\n _symbol.shapes.append(Eagle.Wire(\n x1=_cff.p1.x, y1=_cff.p1.y,\n x2=_cff.p2.x, y2=_cff.p2.y,\n style=_style, layer=_layer, width=_width))\n elif isinstance(_cff, Circle):\n _width = 0.254\n if 'width' in _cff.attributes:\n _width = _cff.attributes['width']\n\n _symbol.shapes.append(Eagle.Circle(\n x=_cff.x, y=_cff.y,\n radius=_cff.radius, \n width=_width, layer=_layer))\n elif isinstance(_cff, Polygon):\n _width = 0.254\n if 'width' in _cff.attributes:\n _width = _cff.attributes['width']\n\n _style = 'Continuous'\n if 'style' in _cff.attributes:\n _style = _cff.attributes['style']\n\n _symbol.shapes.append(Eagle.Polygon(\n width=_width, layer=_layer,\n numofshapes=len(_cff.points),\n shapes=[ # lines from points\n Eagle.Wire(\n x1=p1.x, y1=p1.y,\n x2=p2.x, y2=p2.y,\n style=_style, layer=_layer, \n width=_width)\n for p1, p2 in zip(_cff.points, \n _cff.points[1:]+[_cff.points[0],])\n ]))\n elif isinstance(_cff, Label):\n _layer = 95 # usually Names\n if 'label' in _cff.attributes:\n _layer = _cff.attributes['layer']\n\n _rot = self.Shape.rotate2strings(_cff.rotation)\n\n _symbol.shapes.append(Eagle.Text(\n value=_cff.text,\n x=_cff.x, y=_cff.y,\n size=1.778, font=None, ratio=10,\n rotate=_rot, layer=_layer))\n else:\n raise ValueError(\"cannot process \" + _cff.__class__.__name__)\n\n _lib.symbols[0].shapesets.append(_symbol)\n\n 
_dset.shapes.append(Eagle.Gate(name='G$1', x=0., y=0., \n sindex=_symnid, addlevel=False))\n _dset.connblocks.append(Eagle.ConnectionHeader(name='default', \n attributes=_attrs, technologies=_tech,\n sindex=_symnid))\n \n if 0 == len(_lib.packages):\n _lib.packages.append(Eagle.PackageHeader(name='default'))\n # TODO to load from a library file\n return", "def get_host_plat_info(self):\n ret_plat_dict = {}\n handler = self.get_handler()\n try:\n host_ref = handler.xenapi.host.get_all()[0]\n bios_info = handler.xenapi.host.get_bios_strings(host_ref)\n ret_plat_dict['vendor_name'] = bios_info.get('system-manufacturer', \"\")\n ret_plat_dict['product_name'] = bios_info.get('system-product-name', \"\")\n ret_plat_dict['serial_number'] = bios_info.get('system-serial-number', \"\")\n except Exception as error:\n log.error(\"Exception when get host platform infor:%s\", error)\n\n return ret_plat_dict", "def df_from_files(self):\n print('Creating dataframe...')\n num = len([name for name in os.listdir(self.raw) if not name[0] == '.'])\n files = os.path.join(self.raw, '~.info.json') # This is a weird hack\n files = files.replace('~', '{:05d}') # It allows path joining to work on Windows\n data = [json.load(open(files.format(i))) for i in range(1, num + 1)]\n\n columns = ['formats', 'tags', 'categories', 'thumbnails']\n lists = [[], [], [], []]\n deletes = {k: v for k, v in zip(columns, lists)}\n for dt in data:\n for col, ls in deletes.items():\n ls.append(dt[col])\n del dt[col]\n\n self.df = pd.DataFrame(data)\n self.df['upload_date'] = pd.to_datetime(self.df['upload_date'], format='%Y%m%d')\n self.df.to_csv(os.path.join(self.ran, 'df.csv'))\n\n self.tags = deletes['tags']\n pickle.dump(self.tags, open(os.path.join(self.ran, 'tags.txt'), 'wb'))", "def test_package_list_with_platform(self):\n rid1 = self._create_release(platforms=['platformOne'])\n self._create_package(rid1, name='packageOne')\n\n rid2 = self._create_release(platforms=['platformTwo'])\n self._create_package(rid2, name='packageTwo')\n\n result = orlo.queries.package_list(platform='platformOne').all()\n self.assertEqual(len(result), 1)\n packages = [r[0] for r in result]\n self.assertIn('packageOne', packages)\n self.assertNotIn('packageTwo', packages)", "def get_platform_list(self):\n platform_list = self.dal.get_platforms()\n return make_response(True, platform_list)", "def generate_databag():\n print(\"Pulling from OS@CC...\")\n github_client = Github(GITHUB_TOKEN)\n cc = github_client.get_organization(GITHUB_ORGANIZATION)\n repos = list(cc.get_repos())\n if not repos:\n raise ScriptError(\n \"Unable to setup the Github Client to get the requested\"\n \" Github organization and the repos of that organization\"\n )\n repos.sort(key=lambda repo: repo.name)\n data = []\n for repo in repos:\n data.append({\"name\": repo.name, \"languages\": repo.get_languages()})\n return data", "def metadata(datasets):\n def iter_table_config(cldf):\n for table in cldf.tables:\n try:\n name = cldf.get_tabletype(table)\n except (KeyError, ValueError):\n name = None\n name = name or str(table.url)\n cfg = {}\n try:\n _ = cldf[table, 'name']\n cfg['label_column'] = 'cldf_name'\n except KeyError:\n pass\n if name == 'EntryTable':\n cfg['label_column'] = 'cldf_headword'\n if name == 'SenseTable':\n cfg['label_column'] = 'cldf_description'\n if name == 'ExampleTable':\n cfg['label_column'] = 'cldf_primaryText'\n yield name, cfg\n\n return {\n \"title\": \"\",\n \"description_html\": \"<dl>{0}</dl>\".format(''.join([\n 
'<dt><strong>{0}</strong></dt><dd><em>{1}</em></dd><dd>{2}</dd>'.format(\n dbname, cldf_ds.module, cldf_ds.properties.get('dc:title'))\n for dbname, cldf_ds in datasets.items()\n ])),\n \"plugins\": {\n \"datasette-cluster-map\": {\n \"latitude_column\": \"cldf_latitude\",\n \"longitude_column\": \"cldf_longitude\"\n }\n },\n \"databases\": {\n dbname: {\n \"description\": cldf_ds.properties.get('dc:title'),\n \"source\": cldf_ds.properties.get('dc:bibliographicCitation'),\n \"source_url\": cldf_ds.properties.get('dc:identifier'),\n \"license\": cldf_ds.properties.get('dc:license'),\n \"tables\": dict(iter_table_config(cldf_ds)),\n }\n for dbname, cldf_ds in datasets.items()\n },\n }", "def as_data_frame(self, **kwargs):\n try:\n import pandas as pd\n except ImportError:\n raise ImportError(\"What are you doing trying to export a Layout \"\n \"as a pandas DataFrame when you don't have \"\n \"pandas installed? Eh? Eh?\")\n if kwargs:\n files = self.get(return_type='file', **kwargs)\n else:\n files = self.files.values()\n data = pd.DataFrame.from_records([f.entities for f in files])\n data.insert(0, 'path', [f.path for f in files])\n return data", "def create_library_sensors():\n for library in plexserver.library.sections():\n sensors.append(PlexLibrarySectionSensor(hass, plexserver, library))", "def list_platforms_ext(self):\n result = {}\n mbeds = self.list_mbeds()\n for i, val in enumerate(mbeds):\n platform_name = str(val['platform_name'])\n if platform_name not in result:\n result[platform_name] = 1\n else:\n result[platform_name] += 1\n return result" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Creates a pandas dataframe with all information about dependent repositories for the given libraries.
def get_dependent_reps(self, libs_info, save_to=None):

        self._log.info("Creating list of dependent repos...")
        if hasattr(libs_info["ID"], "tolist"):
            lib_id2name = dict(zip(libs_info["ID"].tolist(), libs_info["Name"].tolist()))
        else:
            lib_id2name = {libs_info["ID"]: libs_info["Name"]}
        pd_result = []
        dependencies_path = os.path.join(self._librariesio_path, dependencies_filename)
        for chunk in progress_bar(pd.read_csv(dependencies_path,
                                              chunksize=LibrariesIOFetcher.CHUNKSIZE,
                                              index_col=False),
                                  self._log, expected_size=100):
            for lib_id in lib_id2name:
                res = chunk[chunk["Dependency Project ID"] == int(lib_id)]
                if len(res) > 0:
                    pd_result.append(res)
        pd_result = pd.concat(pd_result)
        pd_result["url"] = "https://" + \
                           pd_result["Host Type"].map(LibrariesIOFetcher.HOST2LINK) + \
                           pd_result["Repository Name with Owner"]
        if save_to:
            pd_result.to_csv(save_to, index=False)

        return pd_result
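A self-contained sketch of the URL-building step in get_dependent_reps above: map each row's host type to a base domain and concatenate the owner/name slug. The HOST2LINK values below are assumptions for illustration; the real LibrariesIOFetcher.HOST2LINK mapping may differ.

import pandas as pd

# Assumed mapping, not taken from the original class.
HOST2LINK = {"GitHub": "github.com/", "GitLab": "gitlab.com/", "Bitbucket": "bitbucket.org/"}

deps = pd.DataFrame({
    "Host Type": ["GitHub", "GitLab"],
    "Repository Name with Owner": ["pandas-dev/pandas", "some-group/some-repo"],
})
deps["url"] = "https://" + deps["Host Type"].map(HOST2LINK) + deps["Repository Name with Owner"]
print(deps["url"].tolist())
# ['https://github.com/pandas-dev/pandas', 'https://gitlab.com/some-group/some-repo']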
[ "def repo_information(self):\n\n data = [[repo.git_dir,\n repo.repo.branches,\n repo.repo.bare,\n repo.repo.remotes,\n repo.repo.description,\n repo.repo.references,\n repo.repo.heads,\n repo.repo.submodules,\n repo.repo.tags,\n repo.repo.active_branch] for repo in self.repos]\n\n df = pd.DataFrame(data, columns=[\n 'local_directory',\n 'branches',\n 'bare',\n 'remotes',\n 'description',\n 'references',\n 'heads',\n 'submodules',\n 'tags',\n 'active_branch'\n ])\n\n return df", "def repo_name(self):\n\n ds = [[x.repo_name] for x in self.repos]\n df = pd.DataFrame(ds, columns=['repository'])\n return df", "def get_lib_info(self, libraries, platform, save_to=None):\n\n libs_info = pd.DataFrame()\n projects_path = os.path.join(self._librariesio_path, projects_filename)\n self._log.info(\"Looking for libraries info...\")\n for chunk in pd.read_csv(projects_path, chunksize=LibrariesIOFetcher.CHUNKSIZE,\n index_col=False, dtype=object):\n for lib_name in libraries:\n indexes = (chunk[\"Name\"] == lib_name)\n if platform != \"\":\n indexes = indexes & (chunk[\"Platform\"] == platform)\n if libraries[lib_name] != \"\":\n indexes = indexes & ((chunk[\"Repository URL\"] == libraries[lib_name]) |\n pd.isnull(chunk[\"Repository URL\"]))\n res = chunk[indexes]\n if len(res) > 0:\n self._log.info(\"%s library entry is found!\", lib_name)\n libs_info = pd.concat([libs_info, res])\n if save_to:\n libs_info.to_csv(save_to, index=False)\n\n return libs_info", "def repos_to_columns(repos):\n data = [[\"NAME\", \"URL\", \"HOMEPAGE\", \"DESCRIPTION\", \"CREATED_AT\"]]\n for repo in repos:\n data.append(\n [\n repo[\"full_name\"],\n repo[\"html_url\"],\n repo[\"homepage\"],\n repo[\"description\"] or \"\",\n repo[\"created_at\"],\n ]\n )\n return data", "def generate_databag():\n print(\"Pulling from OS@CC...\")\n github_client = Github(GITHUB_TOKEN)\n cc = github_client.get_organization(GITHUB_ORGANIZATION)\n repos = list(cc.get_repos())\n if not repos:\n raise ScriptError(\n \"Unable to setup the Github Client to get the requested\"\n \" Github organization and the repos of that organization\"\n )\n repos.sort(key=lambda repo: repo.name)\n data = []\n for repo in repos:\n data.append({\"name\": repo.name, \"languages\": repo.get_languages()})\n return data", "def coverage(self):\n\n df = pd.DataFrame(columns=['filename', 'lines_covered', 'total_lines', 'coverage', 'repository'])\n\n for repo in self.repos:\n try:\n cov = repo.coverage()\n cov['repository'] = repo.repo_name\n df = df.append(cov)\n except GitCommandError:\n print('Warning! 
Repo: %s seems to not have coverage' % (repo, ))\n\n df.reset_index()\n\n return df", "def is_bare(self):\n\n ds = [[x.repo_name, x.is_bare()] for x in self.repos]\n df = pd.DataFrame(ds, columns=['repository', 'is_bare'])\n return df", "def tabular_output(repos):\n table_headers = [\"URL\", \"Language\", \"Stars\", \"Forks\", \"Watches\"]\n repositories = [\n [\n repo[\"html_url\"],\n repo[\"language\"],\n repo[\"stargazers_count\"],\n repo[\"forks_count\"],\n repo[\"watchers_count\"],\n ]\n for repo in repos\n ]\n print(tabulate(repositories, headers=table_headers, tablefmt=\"fancy_grid\"))", "def df_from_files(self):\n print('Creating dataframe...')\n num = len([name for name in os.listdir(self.raw) if not name[0] == '.'])\n files = os.path.join(self.raw, '~.info.json') # This is a weird hack\n files = files.replace('~', '{:05d}') # It allows path joining to work on Windows\n data = [json.load(open(files.format(i))) for i in range(1, num + 1)]\n\n columns = ['formats', 'tags', 'categories', 'thumbnails']\n lists = [[], [], [], []]\n deletes = {k: v for k, v in zip(columns, lists)}\n for dt in data:\n for col, ls in deletes.items():\n ls.append(dt[col])\n del dt[col]\n\n self.df = pd.DataFrame(data)\n self.df['upload_date'] = pd.to_datetime(self.df['upload_date'], format='%Y%m%d')\n self.df.to_csv(os.path.join(self.ran, 'df.csv'))\n\n self.tags = deletes['tags']\n pickle.dump(self.tags, open(os.path.join(self.ran, 'tags.txt'), 'wb'))", "def package_report(root_packages: List[str]):\n root_packages.sort(reverse=True)\n root_packages_list = []\n for m in pkg_resources.working_set:\n if m.project_name.lower() in root_packages:\n root_packages_list.append([m.project_name, m.version])\n \n display(pd.DataFrame(\n root_packages_list,\n columns=[\"package\", \"version\"]\n ).set_index(\"package\").transpose())", "def get_futures_info():\r\n df = pd.read_csv('https://raw.githubusercontent.com/haobruce/SysTrade/master/SysTrade_FuturesContracts.csv')\r\n return df", "def get_installed_and_missing_repository_dependencies(self, repository):\n missing_repository_dependencies = {}\n installed_repository_dependencies = {}\n has_repository_dependencies = repository.has_repository_dependencies\n if has_repository_dependencies:\n # The repository dependencies container will include only the immediate repository\n # dependencies of this repository, so the container will be only a single level in depth.\n metadata = repository.metadata\n installed_rd_tups = []\n missing_rd_tups = []\n for tsr in repository.repository_dependencies:\n prior_installation_required = self.set_prior_installation_required(repository, tsr)\n only_if_compiling_contained_td = self.set_only_if_compiling_contained_td(repository, tsr)\n rd_tup = [tsr.tool_shed,\n tsr.name,\n tsr.owner,\n tsr.changeset_revision,\n prior_installation_required,\n only_if_compiling_contained_td,\n tsr.id,\n tsr.status]\n if tsr.status == self.app.install_model.ToolShedRepository.installation_status.INSTALLED:\n installed_rd_tups.append(rd_tup)\n else:\n # We'll only add the rd_tup to the missing_rd_tups list if the received repository\n # has tool dependencies that are not correctly installed. This may prove to be a\n # weak check since the repository in question may not have anything to do with\n # compiling the missing tool dependencies. 
If we discover that this is a problem,\n # more granular checking will be necessary here.\n if repository.missing_tool_dependencies:\n if not self.repository_dependency_needed_only_for_compiling_tool_dependency(repository, tsr):\n missing_rd_tups.append(rd_tup)\n else:\n missing_rd_tups.append(rd_tup)\n if installed_rd_tups or missing_rd_tups:\n # Get the description from the metadata in case it has a value.\n repository_dependencies = metadata.get('repository_dependencies', {})\n description = repository_dependencies.get('description', None)\n # We need to add a root_key entry to one or both of installed_repository_dependencies dictionary and the\n # missing_repository_dependencies dictionaries for proper display parsing.\n root_key = container_util.generate_repository_dependencies_key_for_repository(repository.tool_shed,\n repository.name,\n repository.owner,\n repository.installed_changeset_revision,\n prior_installation_required,\n only_if_compiling_contained_td)\n if installed_rd_tups:\n installed_repository_dependencies['root_key'] = root_key\n installed_repository_dependencies[root_key] = installed_rd_tups\n installed_repository_dependencies['description'] = description\n if missing_rd_tups:\n missing_repository_dependencies['root_key'] = root_key\n missing_repository_dependencies[root_key] = missing_rd_tups\n missing_repository_dependencies['description'] = description\n return installed_repository_dependencies, missing_repository_dependencies", "def file_detail(self, rev='HEAD', committer=True, ignore_globs=None, include_globs=None):\n\n df = None\n\n for repo in self.repos:\n try:\n if df is None:\n df = repo.file_detail(ignore_globs=ignore_globs, include_globs=include_globs, committer=committer, rev=rev)\n df['repository'] = repo.repo_name\n else:\n chunk = repo.file_detail(ignore_globs=ignore_globs, include_globs=include_globs, committer=committer, rev=rev)\n chunk['repository'] = repo.repo_name\n df = df.append(chunk)\n except GitCommandError:\n print('Warning! 
Repo: %s couldnt be inspected' % (repo, ))\n\n df = df.reset_index(level=-1)\n df = df.set_index(['file', 'repository'])\n return df", "def divide_packages(packages):\n stds = []\n repos = []\n for p in packages:\n if (not 'repository' in p):\n stds = stds + [p]\n elif (p['repository']==None or p['repository']==''):\n stds = stds + [p]\n else:\n repos = repos +[p]\n return stds, repos", "def kodi_repos(repos):\n \n # Get list of repository objects and wrap in RepoDetail class\n details = OrderedDict([\n (repo.name, RepoDetail(repo)) for repo in sorted(repos, key=lambda r:r.name)\n ])\n \n for repo_det in details.values():\n # Get latest version\n tags = repo_tags(repo_det.repo)\n repo_det.tags = tags\n repo_det.tagnames = {vers:tag.name for vers,tag in tags.items()}\n \n releases = repo_releases(repo_det.repo, tags)\n repo_det.releases = releases\n\n downloads = repo_downloads(repo_det.repo, releases, tags)\n repo_det.downloads = downloads\n\n version, newest_tag = newest_repo_version(tags)\n repo_det.newest_version = version\n repo_det.newest_tagname = newest_tag.name\n\n # Grab a copy of addon.xml from the latest version\n addon_xml_handle = repo_det.repo.contents('addon.xml',repo_det.newest_tagname)\n if addon_xml_handle.encoding == 'base64':\n addon_xml = base64.b64decode(addon_xml_handle.content)\n else:\n addon_xml = addon_xml_handle.content\n _log.warning('Unexpected encoding (%s) on file: %s' % (addon_xml_handle.encoding, addon_xml_handle.name))\n repo_det.addon_xml = addon_xml\n\n return details", "def GetupwardRepoList(self):\n # Skip population if the repo is already found with dependencies.\n if self.keyword in self._all_dependent_repo_names:\n pprint('Repo detail for %s was already included.' % self.keyword)\n query_output, _ = util.GitURLOpener(self._repo_url)\n pprint('Getting keyword repo detail through %d found repos ...' % (\n min(query_output['total_count'], 100)))\n # pylint: disable=undefined-loop-variable\n for item in query_output['items']:\n item_name = item['name']\n if item_name == self.keyword:\n pprint('Repo detail found for %s.' % item_name)\n break\n # Break function if no repo detail is found.\n if not query_output['total_count'] or item_name != self.keyword:\n pprint('%s is not found with repo detail.' % self.keyword)\n return\n\n all_unique_dependencies = self.GetDependency(item['full_name'])\n # If the matched dependency file is already created earlier in other\n # depth, do not keep further nodes to prevent future search.\n for repo_name in all_unique_dependencies:\n if repo_name in self._all_dependent_repo_names:\n all_unique_dependencies[repo_name] = ''\n item['all_dependencies'] = all_unique_dependencies\n self._tree_dict[self.keyword] = item\n self._all_dependent_repo_names.add(item_name)\n\n util.PickleTree(self._tree_dict, self._raw_data_file_name)\n util.PickleTree(self._all_dependent_repo_names, self._all_repos_file_name)\n pprint('%d dependencies are found for %s.' 
% (\n len(all_unique_dependencies), self.keyword))", "def get_dependencies_for_repository(self, tool_shed_url, repo_info_dict, includes_tool_dependencies, updating=False):\n rdim = repository_dependency_manager.RepositoryDependencyInstallManager(self.app)\n repository = None\n installed_rd = {}\n installed_td = {}\n missing_rd = {}\n missing_td = {}\n name = next(iter(repo_info_dict))\n repo_info_tuple = repo_info_dict[name]\n description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \\\n repository_util.get_repo_info_tuple_contents(repo_info_tuple)\n if tool_dependencies:\n if not includes_tool_dependencies:\n includes_tool_dependencies = True\n # Inspect the tool_dependencies dictionary to separate the installed and missing tool dependencies.\n # We don't add to installed_td and missing_td here because at this point they are empty.\n installed_td, missing_td = self.get_installed_and_missing_tool_dependencies_for_repository(tool_dependencies)\n # In cases where a repository dependency is required only for compiling a dependent repository's\n # tool dependency, the value of repository_dependencies will be an empty dictionary here.\n if repository_dependencies:\n # We have a repository with one or more defined repository dependencies.\n if not repository:\n repository = repository_util.get_repository_for_dependency_relationship(self.app,\n tool_shed_url,\n name,\n repository_owner,\n changeset_revision)\n if not updating and repository and repository.metadata:\n installed_rd, missing_rd = self.get_installed_and_missing_repository_dependencies(repository)\n else:\n installed_rd, missing_rd = \\\n self.get_installed_and_missing_repository_dependencies_for_new_or_updated_install(repo_info_tuple)\n # Discover all repository dependencies and retrieve information for installing them.\n all_repo_info_dict = rdim.get_required_repo_info_dicts(tool_shed_url, util.listify(repo_info_dict))\n has_repository_dependencies = all_repo_info_dict.get('has_repository_dependencies', False)\n has_repository_dependencies_only_if_compiling_contained_td = \\\n all_repo_info_dict.get('has_repository_dependencies_only_if_compiling_contained_td', False)\n includes_tools_for_display_in_tool_panel = all_repo_info_dict.get('includes_tools_for_display_in_tool_panel', False)\n includes_tool_dependencies = all_repo_info_dict.get('includes_tool_dependencies', False)\n includes_tools = all_repo_info_dict.get('includes_tools', False)\n required_repo_info_dicts = all_repo_info_dict.get('all_repo_info_dicts', [])\n # Display tool dependencies defined for each of the repository dependencies.\n if required_repo_info_dicts:\n required_tool_dependencies = {}\n for rid in required_repo_info_dicts:\n for name, repo_info_tuple in rid.items():\n description, repository_clone_url, changeset_revision, ctx_rev, \\\n repository_owner, rid_repository_dependencies, rid_tool_dependencies = \\\n repository_util.get_repo_info_tuple_contents(repo_info_tuple)\n if rid_tool_dependencies:\n for td_key, td_dict in rid_tool_dependencies.items():\n if td_key not in required_tool_dependencies:\n required_tool_dependencies[td_key] = td_dict\n if required_tool_dependencies:\n # Discover and categorize all tool dependencies defined for this repository's repository dependencies.\n required_installed_td, required_missing_td = \\\n self.get_installed_and_missing_tool_dependencies_for_repository(required_tool_dependencies)\n if required_installed_td:\n if not includes_tool_dependencies:\n 
includes_tool_dependencies = True\n for td_key, td_dict in required_installed_td.items():\n if td_key not in installed_td:\n installed_td[td_key] = td_dict\n if required_missing_td:\n if not includes_tool_dependencies:\n includes_tool_dependencies = True\n for td_key, td_dict in required_missing_td.items():\n if td_key not in missing_td:\n missing_td[td_key] = td_dict\n else:\n # We have a single repository with (possibly) no defined repository dependencies.\n all_repo_info_dict = rdim.get_required_repo_info_dicts(tool_shed_url, util.listify(repo_info_dict))\n has_repository_dependencies = all_repo_info_dict.get('has_repository_dependencies', False)\n has_repository_dependencies_only_if_compiling_contained_td = \\\n all_repo_info_dict.get('has_repository_dependencies_only_if_compiling_contained_td', False)\n includes_tools_for_display_in_tool_panel = all_repo_info_dict.get('includes_tools_for_display_in_tool_panel', False)\n includes_tool_dependencies = all_repo_info_dict.get('includes_tool_dependencies', False)\n includes_tools = all_repo_info_dict.get('includes_tools', False)\n required_repo_info_dicts = all_repo_info_dict.get('all_repo_info_dicts', [])\n dependencies_for_repository_dict = \\\n dict(changeset_revision=changeset_revision,\n has_repository_dependencies=has_repository_dependencies,\n has_repository_dependencies_only_if_compiling_contained_td=has_repository_dependencies_only_if_compiling_contained_td,\n includes_tool_dependencies=includes_tool_dependencies,\n includes_tools=includes_tools,\n includes_tools_for_display_in_tool_panel=includes_tools_for_display_in_tool_panel,\n installed_repository_dependencies=installed_rd,\n installed_tool_dependencies=installed_td,\n missing_repository_dependencies=missing_rd,\n missing_tool_dependencies=missing_td,\n name=name,\n repository_owner=repository_owner)\n return dependencies_for_repository_dict", "def create_commits_dataframe_functions(self):\n\n columns = []\n\n pbar = tqdm.tqdm(total=self.total_commits)\n for commit in self.repository_mining.traverse_commits():\n\n columns.append(commit.hash)\n\n pbar.update(1)\n pbar.close()\n\n\n dataframe_list = []\n index = []\n\n\n cwd = os.getcwd()\n os.chdir(self.repo_folder)\n\n with open('./gitattributes', 'a') as f:\n f.write('*.py diff=python\\n')\n\n print(os.listdir('./'))\n \n\n # Print analyzing all the lines of the repo\n print('Print analyzing all the lines of the repo')\n file_methods = []\n \n\n for file_path in tqdm.tqdm(self.repo_files_path):\n\n if file_path[-3:] == '.py':\n\n print(file_path)\n # Get path to file and count number of lines\n complete_file_path = self.repo_folder + '\\\\' + file_path\n methods = self.find_methods_in_python_file(complete_file_path)\n\n for method in methods:\n file_methods.append((file_path, method))\n\n with concurrent.futures.ThreadPoolExecutor(max_workers=100) as executor:\n future_to_method = {executor.submit(self.analyze_method, file_method): file_method for file_method in file_methods}\n\n pbar = tqdm.tqdm(total=len(file_methods))\n for future in concurrent.futures.as_completed(future_to_method):\n file_method = future_to_method[future]\n try:\n \n modified_in_commits = future.result()\n modified_in_commits = [commit[1:-1] for commit in modified_in_commits]\n row_name = f'{file_method[0]}:{file_method[1]}'\n if row_name not in index:\n index.append(f'{file_method[0]}:{file_method[1]}')\n file_method_commits = []\n for commit in columns:\n if commit in modified_in_commits:\n file_method_commits.append(1)\n else:\n 
file_method_commits.append(0)\n dataframe_list.append(file_method_commits)\n except Exception as exc:\n print(f'Error during execution : {exc}')\n pbar.update(1)\n pbar.close()\n\n\n os.chdir(cwd)\n\n return pd.DataFrame(dataframe_list, index=index, columns=columns)", "def get_installed_and_missing_repository_dependencies_for_new_or_updated_install(self, repo_info_tuple):\n missing_repository_dependencies = {}\n installed_repository_dependencies = {}\n missing_rd_tups = []\n installed_rd_tups = []\n (description, repository_clone_url, changeset_revision, ctx_rev,\n repository_owner, repository_dependencies, tool_dependencies) = repository_util.get_repo_info_tuple_contents(repo_info_tuple)\n if repository_dependencies:\n description = repository_dependencies['description']\n root_key = repository_dependencies['root_key']\n # The repository dependencies container will include only the immediate repository dependencies of\n # this repository, so the container will be only a single level in depth.\n for key, rd_tups in repository_dependencies.items():\n if key in ['description', 'root_key']:\n continue\n for rd_tup in rd_tups:\n tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \\\n common_util.parse_repository_dependency_tuple(rd_tup)\n # Updates to installed repository revisions may have occurred, so make sure to locate the\n # appropriate repository revision if one exists. We need to create a temporary repo_info_tuple\n # that includes the correct repository owner which we get from the current rd_tup. The current\n # tuple looks like: ( description, repository_clone_url, changeset_revision, ctx_rev, repository_owner,\n # repository_dependencies, installed_td )\n tmp_clone_url = common_util.generate_clone_url_from_repo_info_tup(self.app, rd_tup)\n tmp_repo_info_tuple = (None, tmp_clone_url, changeset_revision, None, owner, None, None)\n repository, installed_changeset_revision = repository_util.repository_was_previously_installed(self.app,\n tool_shed,\n name,\n tmp_repo_info_tuple,\n from_tip=False)\n if repository:\n new_rd_tup = [tool_shed,\n name,\n owner,\n changeset_revision,\n prior_installation_required,\n only_if_compiling_contained_td,\n repository.id,\n repository.status]\n if repository.status == self.install_model.ToolShedRepository.installation_status.INSTALLED:\n if new_rd_tup not in installed_rd_tups:\n installed_rd_tups.append(new_rd_tup)\n else:\n # A repository dependency that is not installed will not be considered missing if its value\n # for only_if_compiling_contained_td is True This is because this type of repository dependency\n # will only be considered at the time that the specified tool dependency is being installed, and\n # even then only if the compiled binary of the tool dependency could not be installed due to the\n # unsupported installation environment.\n if not util.asbool(only_if_compiling_contained_td):\n if new_rd_tup not in missing_rd_tups:\n missing_rd_tups.append(new_rd_tup)\n else:\n new_rd_tup = [tool_shed,\n name,\n owner,\n changeset_revision,\n prior_installation_required,\n only_if_compiling_contained_td,\n None,\n 'Never installed']\n if not util.asbool(only_if_compiling_contained_td):\n # A repository dependency that is not installed will not be considered missing if its value for\n # only_if_compiling_contained_td is True - see above...\n if new_rd_tup not in missing_rd_tups:\n missing_rd_tups.append(new_rd_tup)\n if installed_rd_tups:\n installed_repository_dependencies['root_key'] = 
root_key\n installed_repository_dependencies[root_key] = installed_rd_tups\n installed_repository_dependencies['description'] = description\n if missing_rd_tups:\n missing_repository_dependencies['root_key'] = root_key\n missing_repository_dependencies[root_key] = missing_rd_tups\n missing_repository_dependencies['description'] = description\n return installed_repository_dependencies, missing_repository_dependencies" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Extract and save the URLs of dependent repositories.
def get_dependent_rep_urls(self, libraries, platform, output):
    libraries_info = self.get_lib_info(libraries, platform)
    dependent_reps = self.get_dependent_reps(libraries_info)
    self.save_urls_only(dependent_reps, libraries_info, save_to=output)
[ "def get_dependent_reps(self, libs_info, save_to=None):\n self._log.info(\"Creating list of dependent repos...\")\n if hasattr(libs_info[\"ID\"], \"tolist\"):\n lib_id2name = dict(zip(libs_info[\"ID\"].tolist(), libs_info[\"Name\"].tolist()))\n else:\n lib_id2name = {libs_info[\"ID\"]: libs_info[\"Name\"]}\n pd_result = []\n dependencies_path = os.path.join(self._librariesio_path, dependencies_filename)\n for chunk in progress_bar(pd.read_csv(dependencies_path,\n chunksize=LibrariesIOFetcher.CHUNKSIZE,\n index_col=False), self._log, expected_size=100):\n for lib_id in lib_id2name:\n res = chunk[chunk[\"Dependency Project ID\"] == int(lib_id)]\n if len(res) > 0:\n pd_result.append(res)\n\n pd_result = pd.concat(pd_result)\n pd_result[\"url\"] = \"https://\" + \\\n pd_result[\"Host Type\"].map(LibrariesIOFetcher.HOST2LINK) + \\\n pd_result[\"Repository Name with Owner\"]\n if save_to:\n pd_result.to_csv(save_to, index=False)\n\n return pd_result", "def GetupwardRepoList(self):\n # Skip population if the repo is already found with dependencies.\n if self.keyword in self._all_dependent_repo_names:\n pprint('Repo detail for %s was already included.' % self.keyword)\n query_output, _ = util.GitURLOpener(self._repo_url)\n pprint('Getting keyword repo detail through %d found repos ...' % (\n min(query_output['total_count'], 100)))\n # pylint: disable=undefined-loop-variable\n for item in query_output['items']:\n item_name = item['name']\n if item_name == self.keyword:\n pprint('Repo detail found for %s.' % item_name)\n break\n # Break function if no repo detail is found.\n if not query_output['total_count'] or item_name != self.keyword:\n pprint('%s is not found with repo detail.' % self.keyword)\n return\n\n all_unique_dependencies = self.GetDependency(item['full_name'])\n # If the matched dependency file is already created earlier in other\n # depth, do not keep further nodes to prevent future search.\n for repo_name in all_unique_dependencies:\n if repo_name in self._all_dependent_repo_names:\n all_unique_dependencies[repo_name] = ''\n item['all_dependencies'] = all_unique_dependencies\n self._tree_dict[self.keyword] = item\n self._all_dependent_repo_names.add(item_name)\n\n util.PickleTree(self._tree_dict, self._raw_data_file_name)\n util.PickleTree(self._all_dependent_repo_names, self._all_repos_file_name)\n pprint('%d dependencies are found for %s.' % (\n len(all_unique_dependencies), self.keyword))", "def get_repo_url(paths, config):\n\n result = None\n for path in paths:\n for upath, url in config:\n if path.startswith(upath):\n if result and result != url:\n raise EtherHookError(\"get_repo_url got 2 different results: \"\n \"%s and %s\" % (result, url))\n if not result:\n result = url\n break\n if not result:\n raise EtherHookError(\"get_repo_url: Can't get repo url from %s\" \\\n % str(paths))\n return result", "def getRepos(self, urls, path='.'):\n repos_path = path\n if repos_path == '.' 
or repos_path == './': repos_path = ''\n types = ['git', 'svn', 'fs'] # if omitted, defaults to first item\n urls = urls.split(',')\n for url in urls:\n url = url.strip() # remove trailing spaces\n repo_name = url.split('/')[-1].split('.git')[0]\n path = repos_path + repo_name\n if url.split(' ')[0] in types: # user specified type\n typ = url.split(' ')[0] # get type\n url = ' '.join(url.split(' ')[1:]) # remove type of url\n else:\n typ = types[0] # default to first type\n\n if not url.startswith('http') and not typ=='fs':\n url = 'http://' + url\n\n if typ=='git':\n os.system('git clone ' + url + ' ' + path)\n elif typ=='svn':\n os.system('svn co ' + url + ' ' + path)\n elif typ=='fs':\n os.system('cp -r ' + url + ' ' + path)", "def _getDependencies(self, url, artifact):\n self._fileManager.setStatus(artifact, StatusTypes.gettingDependencies)\n dependencies = self.fetchDependencies(url, artifact)\n\n if len(dependencies) == 0:\n self._fileManager.writeDependency(artifact, 'None')\n\n self._saveDependencies(artifact, dependencies)\n self._fileManager.setStatus(artifact, StatusTypes.doneDependencies)\n print('There are {} dependencies in the artifact {}' .format(len(dependencies), artifact))", "def _saveDependencies(self, artifact, dependencies):\n self._fileManager.addArtifact(artifact)\n for dependency in dependencies:\n self._fileManager.addArtifact(dependency)\n self._fileManager.addLinks(artifact, dependencies)\n\n print('Updated nodes and links')", "def fetchDependencies(self, url, artifact):\n soup = UrlHandler.getSoup(url)\n\n found = False\n scope = False\n dependencies = []\n\n for tag in soup.find_all('a'):\n link = tag.get('href')\n \n if scope:\n if 'twitter' in link:\n scope = False\n if '/artifact/' in link:\n if not tag.get('class'):\n found = False\n if not found and tag.get('class') and len(tag.get('class')) > 1 and tag.get('class')[0] == 'vbtn':\n dependencies.append(link[10:])\n if self._fileManager:\n self._fileManager.writeDependency(artifact, link[10:])\n found = True\n\n if '#buildr' in link:\n scope = True\n\n return dependencies", "def fetch_authors_other_work(self):\r\n sumGPS = gitProfileSet(\"inverse_\"+self.name)\r\n repoList = []\r\n \r\n for author in tqdm(self.authors.values()):\r\n repoList.extend([repo.clone_url for repo in author.getRepos()])\r\n\r\n return repoList", "def artifact_urls(self):\n data = self._api.get_api_data()\n artifacts_node = data['artifacts']\n retval = []\n\n for node in artifacts_node:\n url = urllib_parse.urljoin(\n self._api.url, \"artifact/\" + node['fileName'])\n retval.append(url)\n\n return retval", "def check_py_dependencies() -> Iterable[str]:\n print(\"Checking Python dependencies\")\n\n print(\"Creating venv\")\n run_command(\"python3 -m venv .venv\", \"make_venv.log\")\n run_command(\".venv/bin/python3 -m pip install -U pip\", \"pip_upgrade.log\")\n print(\"Downloading packages\")\n run_command(\".venv/bin/python3 -m pip download --dest files -r base.txt\", \"pip_download.log\")\n\n urls = set()\n for url in parallel_map(repo_url_from_wheel, Path(\"files\").glob(\"*.whl\"), \"Examining wheels\"):\n if url:\n urls.add(canonical_url(url))\n\n for url in parallel_map(repo_url_from_tgz, Path(\"files\").glob(\"*.tar.gz\"), \"Examining tar.gz\"):\n if url:\n urls.add(canonical_url(url))\n\n with open(\"base.txt\") as fbase:\n for line in fbase:\n if match := re.search(r\"https://github.com[^@ #]*(\\.git)?\", line):\n urls.add(canonical_url(match[0]))\n\n real_urls = set()\n for url in parallel_map(find_real_url, urls, 
\"Getting real URLs\"):\n if url:\n real_urls.add(url)\n\n write_list(\"repo_urls.txt\", sorted(real_urls))\n return real_urls", "def process_directory():\n repo_name = Path.cwd().name\n repo_work = WORK_DIR / repo_name\n repo_work.mkdir(parents=True, exist_ok=True)\n repo_urls = set()\n if (js_reqs := Path(\"package-lock.json\")).exists():\n shutil.copyfile(js_reqs, repo_work / \"package-lock.json\")\n with change_dir(repo_work):\n repo_urls.update(check_js_dependencies())\n if (py_reqs := find_py_reqs()):\n shutil.copyfile(py_reqs, repo_work / \"base.txt\")\n with change_dir(repo_work):\n repo_urls.update(check_py_dependencies())\n return repo_urls", "def download_dep(env, name, repo_type, repo_url):\n commands = [\n \"svn co \" + repo_url + \" deps/\" + name,\n \"cd ./deps/\"+name + \"; ./configure\", \"touch deps/\"+name+\"/Configure.marker\",\n \"cd ./deps/\"+name + \"; make\", \"touch deps/\"+name+\"/Makefile.marker\"\n ]\n i1 = env.Command(\"deps/\"+\"mxml\", \"\", commands);\n return i1", "def __saveHistory(self):\n url = self.vcsUrlPicker.text()\n vcsUrlHistory = self.vcsUrlPicker.getPathItems()\n if url not in vcsUrlHistory:\n vcsUrlHistory.insert(0, url)\n \n # max. list sizes is hard coded to 20 entries\n newVcsUrlHistory = [url for url in vcsUrlHistory if url]\n if len(newVcsUrlHistory) > 20:\n newVcsUrlHistory = newVcsUrlHistory[:20]\n \n self.__vcs.getPlugin().setPreferences(\n \"RepositoryUrlHistory\", newVcsUrlHistory)", "def get_host_repo_for_link(cls, repo):\n hostname = None\n # return repo modified to result of extraction\n if repo.startswith(('https://', 'http://')):\n # parse hostname for passing to whatever holder selected\n url_parts = repo.split('/')\n hostname = url_parts[2]\n offset = 3 + cls.REPO_URL_PROJECT_OFFSET\n repo = \"/\".join(url_parts[offset:offset + cls.REPO_URL_PROJECT_COMPONENTS])\n return hostname, repo", "def url_to_repo(self, url):\n tokens = re.split(r'://|/', url)\n owner, repo_name = tokens[3], tokens[4]\n return self.repo_root_url.format(owner=owner, repo_name=repo_name)", "def get_url(self, what_to_grade, student):\n repo = URL_FOR_SVN_REPOS + what_to_grade.course.students_repo \\\n + '-' + student + '/' + what_to_grade.project_name\n return repo", "def download( self ):\n urls = [ self.root_url ]\n\n while len( urls ) > 0:\n url = urls.pop()\n if self.is_raw_file( url ):\n print( \"is raw {}\".format( url ) )\n self.download_raw_file( url )\n elif self.is_folder( url ):\n for f in self.parse_folder( url ):\n if f.startswith( \"https://\" ) or f.startswith( \"http://\" ):\n urls.append( f )\n else: urls.append( \"https://github.com%s\" % f )\n else:\n self.download_raw_file( url )", "def add_github_url(*_):\n from account.models import Profile\n from tools.base import update_model\n ps = Profile.objects.select_related('user').all()\n for p in ps:\n if not p.github_url:\n update_model(p, **{'github_url': 'https://github.com/' + p.user.username})", "def get_repo_url():\n default_repo = 's3://gluonnlp-numpy-data'\n repo_url = os.environ.get('GLUONNLP_REPO_URL', default_repo)\n if repo_url[-1] != '/':\n repo_url = repo_url + '/'\n return repo_url" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
From uvmodel.py: bins velocity field vector data (for both the u and v components of velocity). Given the x, y, z positions and the x and y bins, return the bin-center locations (xb, yb) and the binned mean of the data (zb_mean).
def sh_bindata(x, y, z, xbins, ybins):
    ix = np.digitize(x, xbins)
    iy = np.digitize(y, ybins)
    xb = 0.5*(xbins[:-1] + xbins[1:])  # bin x centers
    yb = 0.5*(ybins[:-1] + ybins[1:])  # bin y centers
    zb_mean = np.empty((len(xbins)-1, len(ybins)-1), dtype=z.dtype)
    for iix in range(1, len(xbins)):
        for iiy in range(1, len(ybins)):
            k, = np.where((ix == iix) & (iy == iiy))
            zb_mean[iix-1, iiy-1] = np.mean(z[k])
    return xb, yb, zb_mean
[ "def bin_YbyX (Vy,Vx,bins=[],bin_min=0,bin_max=1,bin_spc=1,wgt=[],keep_time=False):\n #----------------------------------------------------------------------------\n # use min, max, and spc (i.e. stride) to define bins \n nbin = np.round( ( bin_max - bin_min + bin_spc )/bin_spc ).astype(np.int)\n bins = np.linspace(bin_min,bin_max,nbin)\n bin_coord = xr.DataArray( bins )\n #----------------------------------------------------------------------------\n # create output data arrays\n nlev = len(Vy['lev']) if 'lev' in Vy.dims else 1\n ntime = len(Vy['time']) if 'time' in Vy.dims else 1\n if ntime==1 and keep_time==True : keep_time = False\n\n shape,dims,coord = (nbin,),'bin',[('bin', bin_coord)]\n if nlev >1 and keep_time==False : shape,coord,dims = (nbin,nlev), [('bin',bin_coord),('lev',Vy['lev'])], ['bin','lev'] \n if nlev==1 and keep_time==False : shape,dims,coord = (nbin,),'bin',[('bin',bin_coord)]\n \n mval = np.nan\n bin_val = xr.DataArray( np.full(shape,mval,dtype=Vy.dtype), coords=coord, dims=dims )\n bin_std = xr.DataArray( np.full(shape,mval,dtype=Vy.dtype), coords=coord, dims=dims )\n bin_cnt = xr.DataArray( np.zeros(shape, dtype=Vy.dtype), coords=coord, dims=dims )\n #----------------------------------------------------------------------------\n levchk = False\n if 'lev' in Vy.dims and len(Vy.lev)>1 : levchk = True\n\n if levchk :\n avg_dims = ['ncol']\n if 'time' in Vy.dims : avg_dims = ['time','ncol']\n avg_dims_wgt = ['ncol']\n\n val_chk = np.isfinite(Vx.values)\n #----------------------------------------------------------------------------\n # Loop through bins\n for b in range(nbin):\n bin_bot = bin_min - bin_spc/2. + bin_spc*(b )\n bin_top = bin_min - bin_spc/2. + bin_spc*(b+1)\n\n condition = xr.DataArray( np.full(Vx.shape,False,dtype=bool), coords=Vx.coords )\n condition.values = ( np.where(val_chk,Vx.values,bin_bot-1e3) >=bin_bot ) \\\n &( np.where(val_chk,Vx.values,bin_bot-1e3) <bin_top )\n \n if np.sum(condition)>0 :\n if levchk :\n if len(wgt)==0 : \n bin_val[b,:] = Vy.where(condition,drop=True).mean( dim=avg_dims, skipna=True )\n else:\n if wgt.dims != Vy.dims : \n wgt, *__ = xr.broadcast(wgt, Vy) \n if 'time' in Vy.dims :\n wgt = wgt.transpose('time','lev','ncol')\n else :\n wgt = wgt.transpose('lev','ncol')\n if 'time' in Vy.dims : \n bin_val[b,:] = ( (Vy*wgt).where(condition,drop=True).sum( dim='ncol', skipna=True ) \\\n / wgt.where(condition,drop=True).sum( dim='ncol', skipna=True ) ).mean(dim='time', skipna=True )\n else:\n bin_val[b,:] = ( (Vy*wgt).where(condition,drop=True).sum( dim='ncol', skipna=True ) \\\n / wgt.where(condition,drop=True).sum( dim='ncol', skipna=True ) )\n bin_std[b,:] = Vy.where(condition,drop=True).std( dim=avg_dims, skipna=True )\n bin_cnt[b,:] = Vy.where(condition,drop=True).count(dim=avg_dims)\n else:\n bin_val[b] = Vy.where(condition).mean(skipna=True)\n bin_std[b] = Vy.where(condition).std()\n bin_cnt[b] = np.sum( condition )\n #----------------------------------------------------------------------------\n # use a dataset to hold all the output\n dims = ('bins','lev') if levchk else ('bins',)\n bin_ds = xr.Dataset()\n bin_ds['bin_val'] = (dims, bin_val )\n bin_ds['bin_std'] = (dims, bin_std )\n bin_ds['bin_cnt'] = (dims, bin_cnt )\n bin_ds['bin_pct'] = (dims, bin_cnt/bin_cnt.sum()*1e2 )\n bin_ds.coords['bins'] = ('bins',bin_coord)\n if levchk : bin_ds.coords['lev'] = ( 'lev', xr.DataArray(Vy['lev']) )\n #----------------------------------------------------------------------------\n return bin_ds", "def 
_update_intermediate_vel_bc_(self, u, w, mask, time, _bc):\n\n # Interior boundaries\n # Apply no-slip boundary conditions to obstacles.\n # Setup masks that are 0 where velocities need to be updated,\n # and 1 where they stay unmodified.\n # Note that (mask & 1) has 1 in the ghost cells.\n u_mask = ( mask[:-1,:] | mask[1:,:] ) & 1\n w_mask = ( mask[:,:-1] | mask[:,1:] ) & 1\n\n # zero velocity inside and on the boundary of obstacles\n u[:,:] *= ( mask[:-1,:] & mask[1:,:] & 1 )\n # negate velocities inside obstacles\n u[:,1:-2] -= ( 1 - u_mask[:,1:-2] ) * u[:,2:-1]\n u[:,2:-1] -= ( 1 - u_mask[:,2:-1] ) * u[:,1:-2]\n\n # zero velocity inside and on the boundary of obstacles\n w[:,:] *= ( mask[:,:-1] & mask[:,1:] & 1 )\n # nullify velocities inside obstacles\n w[1:-2,:] -= ( 1 - w_mask[1:-2,:] ) * w[2:-1,:]\n w[2:-1,:] -= ( 1 - w_mask[2:-1,:] ) * w[1:-2,:] \n\n # top boundary\n _bc_ = _bc[self.UP]\n if 'w' in _bc_:\n fun_ = _bc_['w']\n if callable(fun_):\n for i in range(w.shape[0]):\n node = self.grid[i-0.5, w.shape[1]-1]\n w[i,-1] = fun_(node[0], node[1], time) * (mask[i,-2] & 1)\n else:\n w[:,-1] = fun_ \n\n # bottom boundary\n _bc_ = _bc[self.DOWN]\n if 'w' in _bc_:\n fun_ = _bc_['w']\n if callable(fun_):\n for i in range(w.shape[0]):\n node = self.grid[i-0.5, 0]\n w[i,0] = fun_(node[0], node[1], time) * (mask[i,1] & 1)\n else:\n w[:,0] = fun_ \n\n # left boundary\n _bc_ = _bc[self.LEFT]\n if 'u' in _bc_:\n fun_ = _bc_['u']\n if callable(fun_):\n for i in range(u.shape[1]):\n node = self.grid[u.shape[0]-1, i-0.5]\n u[-1,i] = fun_(node[0], node[1], time) * (mask[-2,i] & 1)\n else:\n u[-1,:] = fun_\n\n # west boundary\n _bc_ = _bc[self.RIGHT]\n if 'u' in _bc_:\n fun_ = _bc_['u']\n if callable(fun_):\n for i in range(u.shape[1]):\n node = self.grid[0, i-0.5]\n u[0,i] = fun_(node[0], node[1], time) * (mask[1,i] & 1)\n else:\n u[0,:] = fun_", "def velocity_bamber(args, nc_insar, nc_base, trans):\n insar_y = nc_insar.variables['y']\n insar_ny = insar_y[:].shape[0]\n\n insar_x = nc_insar.variables['x']\n insar_nx = insar_x[:].shape[0]\n\n base_data = np.ndarray( (trans.ny,trans.nx) )\n\n\n for vv in ['vy','vx','ey','ex'] :\n insar_data[:,:] = 0.\n base_data[:,:] = 0.\n \n insar_var = nc_insar.variables[ vv ]\n insar_data = np.ma.masked_values( nc_bamber.variables[var_list[1]][:,:], -2.e9)\n data_min = insar_data.min() \n data_max = insar_data.max() \n\n\n speak.verbose(args,\" Interpolating \"+vv+\".\")\n insar_to_base = scipy.interpolate.RectBivariateSpline( insar_y[:], insar_x[:], insar_data, kx=1, ky=1, s=0) # regular 2d linear interp. 
but faster\n\n for ii in range(0, trans.nx):\n base_data[:,ii] = insar_to_base.ev(trans.y_grid[:,ii], trans.x_grid[:,ii] )\n \n base_data[base_data < data_min] = -2.e9\n base_data[base_data > data_max] = -2.e9\n \n speak.verbose(args,\" Writing \"+vv+\" to base.\")\n base_var = nc_base.createVariable( vv, 'f4', ('y','x',) )\n base_var[:,:] = base_data[:,:]\n copy_atts(insar_var, base_var)", "def get_mean_uv(uv_forward, uv_backward):\n u_forward = (uv_forward[0, :, :] - uv_backward[0, :, :])/2.0\n v_forward = (uv_forward[1, :, :] - uv_backward[1, :, :])/2.0\n u_backward = (uv_backward[0, :, :] - uv_forward[0, :, :])/2.0\n v_backward = (uv_backward[1, :, :] - uv_forward[1, :, :])/2.0\n uv_forward = numpy.asarray([u_forward, v_forward])\n uv_backward = numpy.asarray([u_backward, v_backward])\n return uv_forward, uv_backward", "def get_xy_velocity(posa,posb,v):\n rest = posa-posb\n m = magnitude(rest)\n vx = (v * rest[0])/m\n vy = (v * rest[1])/m\n if m < scout_near:\n return vx * scout_velocity_decay*m/scout_near,vy * scout_velocity_decay*m/scout_near\n return vx,vy", "def compute_v_estimates(self):\n # momentary result, based on the activity in one time bin\n self.vx_avg = np.zeros(self.n_bins) \n self.vy_avg = np.zeros(self.n_bins)\n # ---> gives theta_avg \n\n # based on the activity in several time bins\n self.vx_moving_avg = np.zeros((self.n_bins, 2))\n self.vy_moving_avg = np.zeros((self.n_bins, 2))\n\n # non linear transformation of vx_avg\n self.vx_non_linear = np.zeros(self.n_bins)\n self.vy_non_linear = np.zeros(self.n_bins)\n\n trace_length = 100 # [ms] window length for moving average \n trace_length_in_bins = int(round(trace_length / self.time_binsize))\n # ---> gives theta_moving_avg\n\n # # # # # # # # # # # # # # # # # # # # # # \n # S P E E D P R E D I C T I O N \n # # # # # # # # # # # # # # # # # # # # # # \n self.vx_confidence_binned = self.nspikes_binned_normalized[self.sorted_indices_vx]\n self.vy_confidence_binned = self.nspikes_binned_normalized[self.sorted_indices_vy]\n vx_prediction_trace = np.zeros((self.n_cells, self.n_bins, 2)) # _trace: prediction based on the momentary and past activity (moving average, and std) --> trace_length\n vy_prediction_trace = np.zeros((self.n_cells, self.n_bins, 2)) # _trace: prediction based on the momentary and past activity (moving average, and std) --> trace_length\n for i in xrange(self.n_bins):\n\n # 1) momentary vote\n # take the weighted average for v_prediction (weight = normalized activity)\n vx_pred = self.vx_confidence_binned[:, i] * self.vx_tuning\n vy_pred = self.vy_confidence_binned[:, i] * self.vy_tuning\n self.vx_avg[i] = np.sum(vx_pred)\n self.vy_avg[i] = np.sum(vy_pred)\n\n # 2) moving average\n past_bin = max(0, min(0, i-trace_length_in_bins))\n for cell in xrange(self.n_cells):\n vx_prediction_trace[cell, i, 0] = self.vx_confidence_binned[cell, past_bin:i].mean()\n vx_prediction_trace[cell, i, 1] = self.vx_confidence_binned[cell, past_bin:i].std()\n vy_prediction_trace[cell, i, 0] = self.vy_confidence_binned[cell, past_bin:i].mean()\n vy_prediction_trace[cell, i, 1] = self.vy_confidence_binned[cell, past_bin:i].std()\n self.vx_moving_avg[i, 0] = np.sum(vx_prediction_trace[:, i, 0] * self.vx_tuning)\n self.vx_moving_avg[i, 1] = np.std(vx_prediction_trace[:, i, 1] * self.vx_tuning)\n self.vy_moving_avg[i, 0] = np.sum(vy_prediction_trace[:, i, 0] * self.vy_tuning)\n self.vy_moving_avg[i, 1] = np.std(vy_prediction_trace[:, i, 1] * self.vy_tuning)\n\n # 3)\n # rescale activity to negative values\n vx_shifted = 
self.nspikes_binned[self.sorted_indices_vx, i] - self.nspikes_binned[self.sorted_indices_vx, i].max()\n vy_shifted = self.nspikes_binned[self.sorted_indices_vy, i] - self.nspikes_binned[self.sorted_indices_vy, i].max()\n # exp --> mapping to range(0, 1)\n vx_exp = np.exp(vx_shifted)\n vy_exp = np.exp(vy_shifted)\n # normalize and vote\n vx_votes = (vx_exp / vx_exp.sum()) * self.vx_tuning\n vy_votes = (vy_exp / vy_exp.sum()) * self.vy_tuning\n self.vx_non_linear[i] = vx_votes.sum()\n self.vy_non_linear[i] = vy_votes.sum()\n\n # in the first step the trace can not have a standard deviation --> avoid NANs \n self.vx_moving_avg[0, 0] = np.sum(self.vx_confidence_binned[self.sorted_indices_vx, 0].mean() * self.vx_tuning)\n self.vy_moving_avg[0, 0] = np.sum(self.vy_confidence_binned[self.sorted_indices_vy, 0].mean() * self.vy_tuning)\n self.vx_moving_avg[0, 1] = 0\n self.vy_moving_avg[0, 1] = 0\n\n # ---> time INdependent estimates: based on activity of the full run\n\n # compute the marginalized (over all positions) vx, vy estimates and bin them in a grid\n self.vx_grid = np.linspace(np.min(self.vx_tuning), np.max(self.vx_tuning), self.n_vx_bins, endpoint=True)\n self.vy_grid = np.linspace(np.min(self.vy_tuning), np.max(self.vy_tuning), self.n_vy_bins, endpoint=True)\n self.vx_marginalized_binned = np.zeros(self.n_vx_bins)\n self.vy_marginalized_binned = np.zeros(self.n_vy_bins)\n self.vx_marginalized_binned_nonlinear = np.zeros(self.n_vx_bins)\n self.vy_marginalized_binned_nonlinear = np.zeros(self.n_vy_bins)\n\n for gid in xrange(self.n_cells):\n vx_cell, vy_cell = self.tuning_prop[gid, 2], self.tuning_prop[gid, 3] # cell properties\n vx_grid_pos, vy_grid_pos = utils.get_grid_pos(vx_cell, vy_cell, self.vx_grid, self.vy_grid)\n self.vx_marginalized_binned[vx_grid_pos] += self.nspikes_normalized[gid]\n self.vy_marginalized_binned[vy_grid_pos] += self.nspikes_normalized[gid]\n self.vx_marginalized_binned_nonlinear[vx_grid_pos] += self.nspikes_normalized_nonlinear[gid]\n self.vy_marginalized_binned_nonlinear[vy_grid_pos] += self.nspikes_normalized_nonlinear[gid]", "def getUVIndexAndValue(*args, **kwargs):\n \n pass", "def UV_B(self):\n h = reduce(lambda x,y:x&y,(B(g,self.width-1) for g in self))\n return UV_B(h, self.width)", "def _map_velocity_to_bar_location(self, velocity):\n # (x - in_min) * (out_max - out_min) / (in_max - in_min) + out_min --> map one spectrum to another\n if velocity < self._velocity_bar_min_velocity:\n return self._velocity_bar_min_pixel\n if velocity > self._velocity_bar_max_velocity:\n return self._velocity_bar_max_pixel\n pixel = (velocity - self._velocity_bar_min_velocity) * (self._velocity_bar_max_pixel - self._velocity_bar_min_pixel) \\\n / (self._velocity_bar_max_velocity - self._velocity_bar_min_velocity) \\\n + self._velocity_bar_min_pixel\n self._logger.debug(\"mapping location to {}\".format(pixel))\n return int(pixel)", "def translate_into_fbz(df):\n # First, find all the vectors defining the boundary\n coords = df[['kx [1/A]', 'ky [1/A]', 'kz [1/A]']]\n b1, b2, b3 = c.b1, c.b2, c.b3\n b1pos = 0.5 * b1[:, np.newaxis]\n b2pos = 0.5 * b2[:, np.newaxis]\n b3pos = 0.5 * b3[:, np.newaxis]\n lpos = 0.5 * (b1 + b2 + b3)[:, np.newaxis]\n b1neg = -1 * b1pos\n b2neg = -1 * b2pos\n b3neg = -1 * b3pos\n lneg = -1 * lpos\n xpos = -0.5 * (b1 + b3)[:, np.newaxis]\n ypos = 0.5 * (b2 + b3)[:, np.newaxis]\n zpos = 0.5 * (b1 + b2)[:, np.newaxis]\n xneg = -1 * xpos\n yneg = -1 * ypos\n zneg = -1 * zpos\n\n # Place them into octants to avoid problems when finding points\n # (naming 
is based on positive or negative for coordinate so octpmm means x+ y- z-. p=plus, m=minus)\n vecs_ppp = np.concatenate((b2pos, xpos, ypos, zpos), axis=1)[:, :, np.newaxis]\n vecs_ppm = np.concatenate((b1neg, xpos, ypos, zneg), axis=1)[:, :, np.newaxis]\n vecs_pmm = np.concatenate((lneg, xpos, yneg, zneg), axis=1)[:, :, np.newaxis]\n vecs_mmm = np.concatenate((b2neg, xneg, yneg, zneg), axis=1)[:, :, np.newaxis]\n vecs_mmp = np.concatenate((b1pos, xneg, yneg, zpos), axis=1)[:, :, np.newaxis]\n vecs_mpp = np.concatenate((lpos, xneg, ypos, zpos), axis=1)[:, :, np.newaxis]\n vecs_mpm = np.concatenate((b3pos, xneg, ypos, zneg), axis=1)[:, :, np.newaxis]\n vecs_pmp = np.concatenate((b3neg, xpos, yneg, zpos), axis=1)[:, :, np.newaxis]\n # Construct matrix which is 3 x 4 x 8 where we have 3 Cartesian coordinates, 4 vectors per octant, and 8 octants\n allvecs = np.concatenate((vecs_ppp, vecs_ppm, vecs_pmm, vecs_mmm, vecs_mmp, vecs_mpp, vecs_mpm, vecs_pmp), axis=2)\n\n # Since the number of points in each octant is not equal, can't create array of similar shape. Instead the 'octant'\n # array below is used as a boolean map where 1 (true) indicates positive, and 0 (false) indicates negative\n octants = np.array([[1, 1, 1],\n [1, 1, 0],\n [1, 0, 0],\n [0, 0, 0],\n [0, 0, 1],\n [0, 1, 1],\n [0, 1, 0],\n [1, 0, 1]])\n\n fbzcoords = coords.copy(deep=True).values\n exitvector = np.zeros((8, 1))\n iteration = 0\n while not np.all(exitvector): # don't exit until all octants have points inside\n exitvector = np.zeros((8, 1))\n for i in range(8):\n oct_vecs = allvecs[:, :, i]\n whichoct = octants[i, :]\n if whichoct[0]:\n xbool = fbzcoords[:, 0] >= 0\n else:\n xbool = fbzcoords[:, 0] <= 0\n if whichoct[1]:\n ybool = fbzcoords[:, 1] >= 0\n else:\n ybool = fbzcoords[:, 1] <= 0\n if whichoct[2]:\n zbool = fbzcoords[:, 2] >= 0\n else:\n zbool = fbzcoords[:, 2] <= 0\n octindex = np.logical_and(np.logical_and(xbool, ybool), zbool)\n octcoords = fbzcoords[octindex, :]\n allplanes = 0\n for j in range(oct_vecs.shape[1]):\n diffvec = octcoords[:, :] - np.tile(oct_vecs[:, j], (octcoords.shape[0], 1))\n dist2plane = np.dot(diffvec, oct_vecs[:, j]) / np.linalg.norm(oct_vecs[:, j])\n outside = dist2plane[:] > 0\n if np.any(outside):\n octcoords[outside, :] = octcoords[outside, :] - \\\n (2 * np.tile(oct_vecs[:, j], (np.count_nonzero(outside), 1)))\n # Times 2 because the vectors that define FBZ are half of the full recip latt vectors\n # print('number outside this plane is %d' % np.count_nonzero(outside))\n else:\n allplanes += 1\n if allplanes == 4:\n exitvector[i] = 1\n fbzcoords[octindex, :] = octcoords\n iteration += 1\n print('Finished %d iterations of bringing points into FBZ' % iteration)\n uniqkx = np.sort(np.unique(fbzcoords[:, 0]))\n deltakx = np.diff(uniqkx)\n smalldkx = np.concatenate((deltakx < (np.median(deltakx) * 1E-2), [False]))\n if np.any(smalldkx):\n for kxi in np.nditer(np.nonzero(smalldkx)):\n kx = uniqkx[kxi]\n fbzcoords[fbzcoords[:, 0] == kx, 0] = uniqkx[kxi+1]\n print('Shifted points that were slightly misaligned in kx.\\n')\n df[['kx [1/A]', 'ky [1/A]', 'kz [1/A]']] = fbzcoords\n print('Done bringing points into FBZ!')\n\n return df", "def measurement_vector(self, sensor: Ping):\n x0 = self.x + sensor.r * sin(self.alpha + sensor.beta)\n y0 = self.y + sensor.r * cos(self.alpha + sensor.beta)\n xv = sensor.d * sin(self.alpha + sensor.gamma)\n yv = sensor.d * cos(self.alpha + sensor.gamma)\n return x0, y0, xv, yv", "def surface_bottom_velocity(self, Xin, Zin, Uin, Win):\r\n \r\n #### find the index 
at each x grid\r\n #### Xin values at each x grid are the same\r\n ind_xgrid = [np.argwhere(Xin==xtem).flatten() for xtem in np.unique(Xin)]\r\n \r\n ## find surface and bottom velocity at each x grid\r\n X_surface = []\r\n Z_surface = []\r\n U_surface = []\r\n \r\n X_bottom = []\r\n Z_bottom = []\r\n U_bottom = []\r\n \r\n \r\n Nx = len(ind_xgrid) -1 ### Number of x grid points, note the last point has no velocity\r\n \r\n for i in range(Nx):\r\n #for i in [35]:\r\n \r\n ## index at each x grid point\r\n ind_x_tem = ind_xgrid[i] \r\n \r\n #### find surface and bottom index\r\n mask = np.logical_or(Uin[ind_x_tem]!=self.mask_value, Win[ind_x_tem]!=self.mask_value) ## find invalid value \r\n ind_surface = Zin[ind_x_tem][mask].argsort()[-3] ## maximum (or second maximum) Z\r\n ind_bottom = Zin[ind_x_tem][mask].argsort()[2] ## minimum (or second minimum) Z, note velocity at the bottom index\r\n ## = 1 (second minimum) may have zero values, so make it 2 (third minimum)\r\n \r\n #### Note, there might some bugs for bottom velocity. \r\n #### If velocity is zero, the particle will not move a lot, simply move with the dispersion ~O(10 m)\r\n #### But at branch 5, U_bottom[t][12] is always 0, so particles get stagnant at ~10447 m, (X_bottom[t][12] = 10676 m)\r\n #### so double check: if at this segment, the bottom velocity is zero, if 0, take one layer up\r\n #### because at segment 12 (branch 5), the velocities at the three bottom layers are always zero\r\n icount = 2\r\n if i != Nx-1:\r\n while Uin[ind_x_tem][ind_bottom] == 0:\r\n ind_bottom = Zin[ind_x_tem][mask].argsort()[icount] \r\n icount += 1\r\n \r\n \r\n ## surface\r\n X_surface_tem = Xin[ind_x_tem][ind_surface]\r\n Z_surface_tem = Zin[ind_x_tem][ind_surface]\r\n U_surface_tem = Uin[ind_x_tem][ind_surface]\r\n \r\n X_surface.append(X_surface_tem)\r\n Z_surface.append(Z_surface_tem)\r\n U_surface.append(U_surface_tem)\r\n \r\n ## bottom\r\n X_bottom_tem = Xin[ind_x_tem][ind_bottom]\r\n Z_bottom_tem = Zin[ind_x_tem][ind_bottom]\r\n U_bottom_tem = Uin[ind_x_tem][ind_bottom]\r\n \r\n X_bottom.append(X_bottom_tem)\r\n Z_bottom.append(Z_bottom_tem)\r\n U_bottom.append(U_bottom_tem)\r\n \r\n #pdb.set_trace()\r\n \r\n return X_surface, Z_surface, U_surface, X_bottom, Z_bottom, U_bottom", "def _offsets_to_vector(data: sc.DataArray, xy: List[float], graph: dict) -> sc.Variable:\n u = data.coords['position'].unit\n # Get two vectors that define the plane normal to the beam\n coords = data.transform_coords(\n ['cyl_x_unit_vector', 'cyl_y_unit_vector'], graph=graph\n ).coords\n center = xy[0] * coords['cyl_x_unit_vector'] + xy[1] * coords['cyl_y_unit_vector']\n center.unit = u\n return center", "def value(self,x,xlist,ylist):\r\n\tdef bin(x,xlist):\r\n\t \"\"\" returns the bin index in which boundaries the value of x lies in the xlist\r\n\t \"\"\"\r\n\t x = float(x)\r\n\t if (x<=xlist[0]): return 0,0\r\n\t if (x>=xlist[-1]): return self.size-1,self.size-1 \r\n\t for i in range(self.size):\r\n\t\tif x < xlist[i]:\r\n\t\t return max(0,i-1),min(self.size-1,i)\r\n\t#print x\r\n\tx = float(x)\r\n\t#print x\r\n\tww = bin(x,xlist)\r\n\t#print ww\r\n\tif not \"__len__\" in dir(ww):\r\n\t\tprint \"Crazy, \" ,x, xlist[0], xlist[-1]\r\n\r\n\ti,j = ww\r\n\tx0 = xlist[i]\r\n\ty0 = ylist[i] \r\n\tdx = xlist[j]-x0\r\n\tdy = ylist[j]-y0\r\n\tdydx = 0.\r\n\tif (i != j): dydx = dy/dx # ???????\r\n\ty = y0+dydx*(x-x0)\r\n\treturn y", "def get_velocity( b ):\n v = []\n for i in range(1,len(b)-1):\n D2 = b[i+1] - 2.0*b[i] + b[i-1]\n D1 = (b[i+1] - b[i-1])/2.0\n D1norm2 = 
D1[0]**2.0 + D1[1]**2.0\n v.append( D2/D1norm2 )\n return np.array(v)", "def _bin_vectors(self, vecs):\r\n x_bins = (vecs[:, 0] / self._bin_dx).floor()\r\n y_bins = (vecs[:, 1] / self._bin_dy).floor()\r\n output = y_bins * self._nx + x_bins\r\n return output.long()", "def point_xyz_to_uv(self, xyz):\n return rs.SurfaceClosestPoint(self.guid, xyz) + (0.,)", "def getUVArea(*args, **kwargs):\n \n pass", "def bottom_stress(u, v):\n \n nx = len(u[0,:,0])\n ny = len(u[0,0,:])\n nz = 2\n Bx = numpy.zeros(((nz,nx,ny)))\n By = numpy.zeros(((nz,nx,ny)))\n k = 0.01\n Bx[0,:,:]= -k*u[0,:,:]*numpy.sqrt((u[0,:,:]**2)+(v[0,:,:]**2))\n By[0,:,:]= -k*v[0,:,:]*numpy.sqrt((u[0,:,:]**2)+(v[0,:,:]**2))\n return Bx, By", "def xyz_from_vuv(vuv):\n\n xi,yi,zi = vuv[:,0:3].T\n return xi,yi,zi" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Return an array shrunk to fit a specified shape by trimming or averaging. a = shrink(array, shape): array is a numpy ndarray, and shape is a tuple (e.g., from array.shape). a is the input array shrunk such that its maximum dimensions are given by shape. If shape has more dimensions than array, the last dimensions of shape are fit. as, bs = shrink(a, b): if the second argument is also an array, both a and b are shrunk to the dimensions of each other. The input arrays must have the same number of dimensions, and the resulting arrays will have the same shape.
def shrink(a, b):
    if isinstance(b, np.ndarray):
        if not len(a.shape) == len(b.shape):
            raise Exception('input arrays must have the same number of dimensions')
        a = shrink(a, b.shape)
        b = shrink(b, a.shape)
        return (a, b)
    if isinstance(b, int):
        b = (b,)
    if len(a.shape) == 1:                    # 1D array is a special case
        dim = b[-1]
        while a.shape[0] > dim:              # only shrink a
            if (a.shape[0] - dim) >= 2:      # trim off edges evenly
                a = a[1:-1]
            else:                            # or average adjacent cells
                a = 0.5*(a[1:] + a[:-1])
    else:
        for dim_idx in range(-(len(a.shape)), 0):
            dim = b[dim_idx]
            a = a.swapaxes(0, dim_idx)       # put working dim first
            while a.shape[0] > dim:          # only shrink a
                if (a.shape[0] - dim) >= 2:  # trim off edges evenly
                    a = a[1:-1, :]
                if (a.shape[0] - dim) == 1:  # or average adjacent cells
                    a = 0.5*(a[1:, :] + a[:-1, :])
            a = a.swapaxes(0, dim_idx)       # swap working dim back
    return a
[ "def _get_shrunk_array(self, f, ind, shrink_size=(1.0, 1.0, 1.0)):\n dim = f[ind].shape\n if (f[ind].ndim == 2):\n return np.array(f[ind][:int(round(shrink_size[0] * dim[0])),\n :int(round(shrink_size[1] * dim[1]))])\n elif (f[ind].ndim == 3):\n return np.array(f[ind][:int(round(shrink_size[0] * dim[0])),\n :int(round(shrink_size[1] * dim[1])),\n :int(round(shrink_size[2] * dim[2]))])", "def zoom_array(in_array, final_shape, same_sum=False,\n zoom_function=partial(zoom, order=3), **zoom_kwargs):\n in_array = np.asarray(in_array, dtype=np.double)\n in_shape = in_array.shape\n assert len(in_shape) == len(final_shape)\n mults = [] # multipliers for the final coarsegraining\n for i in range(len(in_shape)):\n if final_shape[i] < in_shape[i]:\n mults.append(int(np.ceil(in_shape[i] / final_shape[i])))\n else:\n mults.append(1)\n # shape to which to blow up\n temp_shape = tuple([i * j for i, j in zip(final_shape, mults)])\n\n # stupid zoom doesn't accept the final shape. Carefully crafting the\n # multipliers to make sure that it will work.\n zoom_multipliers = np.array(temp_shape) / np.array(in_shape) + 0.0000001\n assert zoom_multipliers.min() >= 1\n\n # applying scipy.ndimage.zoom\n rescaled = zoom_function(in_array, zoom_multipliers, **zoom_kwargs)\n\n for ind, mult in enumerate(mults):\n if mult != 1:\n sh = list(rescaled.shape)\n assert sh[ind] % mult == 0\n newshape = sh[:ind] + [sh[ind] // mult, mult] + sh[ind + 1:]\n rescaled.shape = newshape\n rescaled = np.mean(rescaled, axis=ind + 1)\n assert rescaled.shape == final_shape\n\n if same_sum:\n extra_size = np.prod(final_shape) / np.prod(in_shape)\n rescaled /= extra_size\n return rescaled", "def broadcast_to(array, shape):\n def _check(shape_a, shape):\n if not _check_can_broadcast_to(shape_a, shape):\n _raise_value_error('cannot broadcast with ', shape)\n shape_a = F.shape(array)\n _check(shape_a, shape)\n return _broadcast_to_shape(array, shape)", "def reshape(array, newshape):\n if is_numpy_array(array):\n return np.reshape(array, newshape)\n elif is_torch_tensor(array):\n return array.reshape(*newshape)\n elif is_tf_tensor(array):\n import tensorflow as tf\n\n return tf.reshape(array, newshape)\n elif is_jax_tensor(array):\n return jnp.reshape(array, newshape)\n else:\n raise ValueError(f\"Type not supported for reshape: {type(array)}.\")", "def pad_or_crop_to_shape(array, target_shape):\n if array.shape == target_shape:\n return array\n\n lx, ly = array.shape\n lx_w, ly_w = target_shape\n border_x = xp.abs(lx - lx_w) // 2\n border_y = xp.abs(ly - ly_w) // 2\n\n if (lx < lx_w) or (ly < ly_w):\n _log.debug(\"Array shape \" + str(array.shape) + \" is smaller than desired shape \" + str(\n [lx_w, ly_w]) + \"; will attempt to zero-pad the array\")\n\n resampled_array = xp.zeros(shape=(lx_w, ly_w), dtype=array.dtype)\n resampled_array[border_x:border_x + lx, border_y:border_y + ly] = array\n _log.debug(\" Padded with a {:d} x {:d} border to \"\n \" match the desired shape\".format(border_x, border_y))\n\n else:\n _log.debug(\"Array shape \" + str(array.shape) + \" is larger than desired shape \" + str(\n [lx_w, ly_w]) + \"; will crop out just the center part.\")\n resampled_array = array[border_x:border_x + lx_w, border_y:border_y + ly_w]\n _log.debug(\" Trimmed a border of {:d} x {:d} pixels \"\n \"to match the desired shape\".format(border_x, border_y))\n return resampled_array", "def broadcast_rule(shape_a, shape_b):\n\tassert (isinstance(shape_a, tuple))\n\tassert (isinstance(shape_b, tuple))\n\tif len(shape_a) > 
len(shape_b):\n\t\tlonger_shape, shorter_shape = shape_a, shape_b\n\telse:\n\t\tlonger_shape, shorter_shape = shape_b, shape_a\n\tlen_diff = len(longer_shape) - len(shorter_shape)\n\tfor i in range(len_diff):\n\t\t# pad with leading 1s\n\t\tshorter_shape = (1,) + shorter_shape\n\tassert len(shorter_shape) == len(longer_shape)\n\toutput_shape = list(longer_shape)\n\tfor i in range(len(output_shape)):\n\t\tassert (shorter_shape[i] == longer_shape[i]) \\\n\t\t\t or (shorter_shape[i] == 1) \\\n\t\t\t or (longer_shape[i] == 1)\n\t\toutput_shape[i] = max(shorter_shape[i], longer_shape[i])\n\treturn tuple(output_shape)", "def _reshape(arg, shape):\n\n if isinstance(arg, tuple):\n raise TypeError(\"Composite batches not supported.\")\n\n assert not isinstance(arg, list)\n\n if isinstance(arg, (np.ndarray, theano.tensor.TensorVariable)):\n return arg.reshape(shape)\n elif isinstance(arg, theano.sparse.SparseVariable):\n warnings.warn(\"Using pylearn2.space._reshape(), which is a \"\n \"memory-inefficient hack for reshaping sparse tensors. \"\n \"Do not use this on large tensors. This will eventually \"\n \"be replaced by a proper Theano Op for sparse \"\n \"reshaping, once that is written.\")\n dense = theano.sparse.dense_from_sparse(arg)\n dense = dense.reshape(shape)\n if arg.format == 'csr':\n return theano.sparse.csr_from_dense(dense)\n elif arg.format == 'csc':\n return theano.sparse.csc_from_dense(dense)\n else:\n raise ValueError('Unexpected sparse format \"%s\".' % arg.format)\n else:\n raise TypeError('Unexpected batch type \"%s\"' % str(type(arg)))", "def crop_to_shape(x: torch.Tensor, shape: Tuple[int, int]) -> torch.Tensor:\n h, w = x.shape[-2:]\n\n if h > shape[0]:\n x = x[:, :, : shape[0], :]\n if w > shape[1]:\n x = x[:, :, :, : shape[1]]\n return x", "def crop_to_shape(x: np.ndarray, shape: AxesLike, axis: AxesLike = None, ratio: AxesParams = 0.5) -> np.ndarray:\n if not hasattr(x, 'ndim') or not hasattr(x, 'shape'):\n x = np.asarray(x)\n\n axis = resolve_deprecation(axis, x.ndim, shape)\n shape, ratio = broadcast_to_axis(axis, shape, ratio)\n\n old_shape, new_shape = np.array(x.shape), np.array(fill_by_indices(x.shape, shape, axis))\n if (old_shape < new_shape).any():\n raise ValueError(f'The resulting shape cannot be greater than the original one: {old_shape} vs {new_shape}')\n\n ndim = len(x.shape)\n ratio = fill_by_indices(np.zeros(ndim), ratio, axis)\n start = ((old_shape - new_shape) * ratio).astype(int)\n\n return x[build_slices(start, start + new_shape)]", "def resize(self, shape):\n if not isshape(shape):\n raise TypeError(\"dimensions must be a 2-tuple of positive\"\n \" integers\")\n newM, newN = shape\n M, N = self.shape\n if newM < M or newN < N:\n # Remove all elements outside new dimensions\n for (i, j) in list(self.keys()):\n if i >= newM or j >= newN:\n del self[i, j]\n self._shape = shape", "def broadcast_array(array, shape):\n a_shape = _numpy_shape(array)\n if a_shape == shape:\n return array\n\n tile = [(m if n == 1 else 1) for n, m in zip(a_shape[::-1], shape[::-1])]\n tile = shape[0 : len(shape) - len(a_shape)] + tuple(tile[::-1])\n\n return _numpy_tile(array, tile)", "def scale_to_shape(x: np.ndarray, shape: Sequence, axes: Sequence = None, order: int = 1) -> np.ndarray:\n old_shape = np.array(x.shape, 'float64')\n new_shape = np.array(compute_shape_from_spatial(x.shape, shape, axes), 'float64')\n\n return ndimage.zoom(x, new_shape / old_shape, order=order)", "def broadcast_to(x, shape, **kw):\n if isinstance(x, StructuredArray):\n return 
x.broadcast_to(shape, **kw)\n else:\n return np.broadcast_to(x, shape, **kw)", "def arr_resize(arr, newdim=None, padval=np.nan):\n\n assert len(np.asarray(arr).shape) == 2, \"Function arr_resize expects a 2D array\"\n\n arr = np.asarray(arr)\n dimy, dimx = arr.shape\n\n ####################################################################\n # Default new size is at least as large as old size, and is a\n # multiple of 500 plus 1. Must be odd.\n ####################################################################\n\n if newdim is None:\n newdim = ((np.max([dimy, dimx]) - 2) // 500 + 1) * 500 + 1\n else:\n try:\n newdim = np.absolute(newdim // 2) * 2 + 1\n except:\n print \"Error: new dimension in arr_resize must be an integer.\"\n return None\n try:\n padval = float(padval)\n except:\n print \"Error: padding value in arr_resize must be a float.\"\n return None\n \n ####################################################################\n # Return a subarray of the original array is possible, otherwise\n # make a larger array and copy into it.\n ####################################################################\n\n dx1 = dimx // 2\n dy1 = dimy // 2\n dx2 = dy2 = newdim // 2\n\n if newdim < dimy and newdim < dimx:\n return arr[dy1 - dy2:dy1 + dy2 + 1,\n dx1 - dx2:dx1 + dx2 + 1]\n else:\n newarr = np.ones((newdim, newdim), dtype=arr.dtype) * padval\n if newdim > dimy and newdim > dimx:\n newarr[dy2 - dy1:dy2 + dy1 + 1,\n dx2 - dx1:dx2 + dx1 + 1] = arr\n elif newdim > dimy:\n newarr[dy2 - dy1:dy2 + dy1 + 1, :] = \\\n arr[:, dx1 - dx2:dx1 + dx2 + 1]\n else:\n newarr[:, dx2 - dx1:dx2 + dx1 + 1] = \\\n arr[dy1 - dy2:dy1 + dy2 + 1]\n return newarr", "def revert_trim_reshape (preds):\n \n if original_size == None:\n raise ValueError('original_size has not been set')\n if len(preds.shape) != 3:\n raise ValueError('preds array must be 3D argmax (batch_size, height, width)')\n if trim == False and reshape == False:\n return preds\n new_preds = np.zeros((preds.shape[0], original_size[1], original_size[0]), dtype=np.uint8)\n for i, pred in enumerate(preds):\n if reshape and trim:\n pred = cv2.resize(pred, (original_size[0], trim_ind[1]-trim_ind[0]), interpolation=cv2.INTER_NEAREST)\n elif reshape:\n pred = cv2.resize(pred, original_size, interpolation=cv2.INTER_NEAREST)\n if trim:\n new_preds[i, trim_ind[0]:trim_ind[1]] = pred\n else:\n new_preds[i] = pred\n return new_preds", "def broadcast_to(input, shape, name=None):\n return array_ops.broadcast_to(input, shape, name=name)", "def _rfft_tile_reshape(shape_a):\n reshape_a = list(shape_a)\n reshape_a[-2] = 1\n reshape_a[-1] = 1\n return tuple(reshape_a)", "def scale_shape(dest_shape, shape_points):\n max_p = np.amax(shape_points, axis=0)\n if max_p[1] == 0 or max_p[0] == 0:\n return shape_points\n n, m = (dest_shape[1] - 4) / max_p[0], (dest_shape[0] - 4) / max_p[1]\n resized_x = np.trunc(shape_points[:, 0] * n)\n resized_y = np.trunc(shape_points[:, 1] * m)\n resized_x = resized_x[np.newaxis, :].transpose()\n resized_y = resized_y[np.newaxis, :].transpose()\n # if max_p[1] > dest_shape[0] or max_p[0] > dest_shape[1]:\n\n return np.hstack((resized_x.astype(np.int), resized_y.astype(np.int))).clip(0)", "def _to_be_squeezed(self, array, always_return_1d=True):\n squeeze = array.ndim == 2 and array.shape[1] == 1\n if not always_return_1d:\n squeeze = squeeze and self._training_dim == 1\n return squeeze" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
From Rich Signell's blog: plots the ROMS velocity field for a single time point.
def plot_ROMS_velocity_field():
    url = 'http://tds.marine.rutgers.edu/thredds/dodsC/roms/doppio/2017_da/his/runs/History_RUN_2018-05-15T00:00:00Z'
    nc = netCDF4.Dataset(url)
    lon_rho = nc.variables['lon_rho'][:]
    lat_rho = nc.variables['lat_rho'][:]
    #bbox = [-71., -63.0, 41., 44.]  #GoM
    bbox = [-67.35, -64.72, 44.23, 45.33]  #BoF
    i0, i1, j0, j1 = bbox2ij(lon_rho, lat_rho, bbox)
    tvar = nc.variables['ocean_time']  # usual ROMS
    #tvar = nc.variables['time']  # USGS COAWST FMRC Aggregation
    h = nc.variables['h'][j0:j1, i0:i1]
    lon = lon_rho[j0:j1, i0:i1]
    lat = lat_rho[j0:j1, i0:i1]
    land_mask = 1 - nc.variables['mask_rho'][j0:j1, i0:i1]
    #start = datetime.datetime(2012,1,1,0,0)
    #start = datetime.datetime.utcnow()
    #tidx = netCDF4.date2index(start, tvar, select='nearest')  # get nearest index to now
    tidx = -1
    #timestr = netCDF4.num2date(stats.tvar[tidx], stats.tvar.units).strftime('%b %d, %Y %H:%M')  #BRING BACK
    zlev = -1  # last layer is surface layer in ROMS
    u = nc.variables['u'][tidx, zlev, j0:j1, i0:(i1-1)]
    v = nc.variables['v'][tidx, zlev, j0:(j1-1), i0:i1]
    lon_u = nc.variables['lon_u'][j0:j1, i0:(i1-1)]
    lon_v = nc.variables['lon_v'][j0:(j1-1), i0:i1]
    lat_u = nc.variables['lat_u'][j0:j1, i0:(i1-1)]
    lat_v = nc.variables['lat_v'][j0:(j1-1), i0:i1]
    lon = lon_rho[(j0+1):(j1-1), (i0+1):(i1-1)]
    lat = lat_rho[(j0+1):(j1-1), (i0+1):(i1-1)]
    mask = 1 - nc.variables['mask_rho'][(j0+1):(j1-1), (i0+1):(i1-1)]
    ang = nc.variables['angle'][(j0+1):(j1-1), (i0+1):(i1-1)]
    # average u,v to central rho points
    u = shrink(u, mask.shape)
    v = shrink(v, mask.shape)
    # rotate grid_oriented u,v to east/west u,v
    u, v = rot2d(u, v, ang)
    basemap = Basemap(projection='merc', llcrnrlat=44, urcrnrlat=46,
                      llcrnrlon=-68, urcrnrlon=-64, lat_ts=30, resolution='i')
    fig1 = plt.figure(figsize=(10, 8))
    ax = fig1.add_subplot(111)
    basemap.drawcoastlines()
    basemap.fillcontinents()
    basemap.drawcountries()
    basemap.drawstates()
    x_rho, y_rho = basemap(lon, lat)
    spd = np.sqrt(u*u + v*v)
    #h1 = basemap.pcolormesh(x_rho, y_rho, spd, vmin=0, vmax=1.0, shading='nearest')  #add color
    nsub = 2
    scale = 0.03
    basemap.quiver(x_rho[::nsub, ::nsub], y_rho[::nsub, ::nsub],
                   u[::nsub, ::nsub], v[::nsub, ::nsub],
                   scale=1.0/scale, zorder=1e35, width=0.002)
    #basemap.colorbar(h1, location='right', pad='5%')  #add colorbar
    title('COAWST Surface Current: ROMS Velocity Field')  #BRING BACK
    plt.savefig('ROMS_velocity_field_BoF05152018.png')
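The function above leans on imports and helpers defined elsewhere in its source (netCDF4, numpy as np, matplotlib.pyplot as plt, Basemap, plus bbox2ij, shrink, and rot2d). The two small helpers are sketched below as plausible reconstructions in the style of Rich Signell's ROMS examples; treat them as assumptions rather than the exact originals.

    import numpy as np

    def rot2d(x, y, ang):
        # Rotate vector components (x, y) counter-clockwise by ang (radians),
        # e.g. from grid-oriented u/v to east/north u/v.
        xr = x * np.cos(ang) - y * np.sin(ang)
        yr = x * np.sin(ang) + y * np.cos(ang)
        return xr, yr

    def bbox2ij(lon, lat, bbox):
        # Return index bounds (i0, i1, j0, j1) of the grid points that fall
        # inside bbox = [lon_min, lon_max, lat_min, lat_max]; +1 on the upper
        # bounds so that i0:i1 / j0:j1 slicing covers the whole box.
        inside = ((lon >= bbox[0]) & (lon <= bbox[1]) &
                  (lat >= bbox[2]) & (lat <= bbox[3]))
        jj, ii = np.where(inside)
        return ii.min(), ii.max() + 1, jj.min(), jj.max() + 1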
[ "def PlotVoltage(cell):\n # check before if \"recordAll\" was TRUE\n t = np.asarray(cell.record['time']) * .001\n\n v = np.asarray(cell.record['voltage']) * .001\n plt.plot(t, v)\n plt.xlabel('Time (s)')\n plt.ylabel('Voltage (mV)')", "def VelocityChart(request):\n kwargs = {\n 'status': 'Accepted',\n 'startDate__gte': '2016-06-01 00:00:00',\n }\n return _drawVelocity(request, kwargs, 'radabo/velocity.html')", "def plot_velocity(self, x, uu, figname):\r\n \r\n #pdb.set_trace()\r\n ## 120 days\r\n uu = uu[:self.period]\r\n \r\n umin = -0.04\r\n umax = 0.04\r\n #unew[unew<umin] = umin\r\n #unew[unew>umax] = umax\r\n \r\n ## this step is only needed for visualizing the extremly large positive and negative velocities\r\n for i in range(len(uu)):\r\n for j in range(len(uu[i])):\r\n if uu[i][j] > umax:\r\n uu[i][j] = umax\r\n elif uu[i][j] < umin:\r\n uu[i][j] = umin\r\n \r\n \r\n tt = np.arange(len(uu)) + 1\r\n \r\n lx = max(map(len, x))\r\n for i in range(len(x)):\r\n if len(x[i]) == lx:\r\n y = x[i]\r\n exit\r\n \r\n #y = np.array([[None]*(lx-len(xi)) + xi for xi in x])\r\n unew = np.array([[None]*(lx-len(xi)) + xi for xi in uu])\r\n \r\n plt.rcParams.update({'font.size': 18})\r\n fig = plt.figure(figsize=(9.5,8))\r\n ax = fig.add_subplot(111)\r\n \r\n \r\n \r\n levels = np.linspace(umin, umax, 100)\r\n cmap = plt.set_cmap('bwr')\r\n CS = ax.contourf(tt, y, unew.T, cmap=cmap, levels=levels)\r\n ax.set_ylim(ax.get_ylim()[::-1])\r\n ax.set_xlabel('Time (day)')\r\n ax.set_ylabel('Distance from upstream (m)')\r\n \r\n cb = fig.colorbar(CS, orientation='vertical')\r\n cb.set_label('Velocity (m/s)', fontsize=16)\r\n #plt.show()\r\n plt.savefig(figname)\r\n plt.close()", "def plot_vel_redshift_evo(sim):\n halo = myname.get_name(sim, True)\n vels = {}\n for snap in (1,3,5):\n hspec0 = ps.VWPlotSpectra(snap, halo)\n (vbin, vels[snap]) = hspec0.vel_width_hist(\"Si\", 2)\n mm = np.min([np.size(vel) for vel in vels.values()])\n #Normalised by z=3\n plt.semilogx(vbin[:mm], vels[5][:mm]/vels[3][:mm], color=\"black\",ls=\"--\")\n plt.semilogx(vbin[:mm], vels[1][:mm]/vels[3][:mm], color=\"grey\",ls=\"-\")\n plt.xlim(10, 1000)\n plt.ylim(0.5,1.5)\n save_figure(path.join(outdir,\"cosmo\"+str(sim)+\"_zz_evol\"))\n plt.clf()", "def phase_plane(self):\n plt.figure(figsize=(8, 5))\n plt.plot(self.V, self.W, color='cornflowerblue')\n plt.plot(self.V, self.V - (self.V**3)/3 + self.I, color=\"slateblue\")\n plt.plot(self.V, (self.V + self.a)/(self.b), color=\"red\")\n plt.xlabel('Voltage [V]', fontsize=12)\n plt.ylabel('Recovery [W]', fontsize=12)\n plt.grid(alpha=0.3)", "def OldVelocityChart(request):\n kwargs = {\n 'status': 'Accepted',\n 'startDate__gte': '2015-09-01 00:00:00',\n 'endDate__lte': '2016-06-10 00:00:00',\n }\n return _drawVelocity(request, kwargs, 'radabo/month_velocity.html')", "def plot_velocities(self, LAXIS, xbl, xbr, ybu, ybd, ilg):\n\n bconv = self.bconv\n tconv = self.tconv\n super_ad_i = self.super_ad_i\n super_ad_o = self.super_ad_o\n\n # check supported geometries\n if self.ig != 1 and self.ig != 2:\n print(\"ERROR(VelocitiesMLTturb.py):\" + self.errorGeometry(self.ig))\n sys.exit()\n\n # load x GRID\n grd1 = self.xzn0\n\n # load DATA to plot\n plt1 = self.ux\n plt2 = self.vexp1\n plt3 = self.vexp2\n plt4 = self.vturb\n plt5 = self.vmlt_1 # vmlt_1 = fhh / (alphae * dd * fht_cp * tt_rms) - REFERENCE NEEDED\n plt6 = self.vmlt_2 # vmlt_2 = gg * betaT * (nabla - nabla_ad) * ((lbd ** 2.) / (8. 
* Hp)) - REFERENCE NEEDED\n plt7 = self.vmlt_3 # THIS IS FROM TYCHO's initial model\n plt8 = self.vrms\n\n # create FIGURE\n plt.figure(figsize=(7, 6))\n\n # format AXIS, make sure it is exponential\n plt.gca().yaxis.get_major_formatter().set_powerlimits((0, 0))\n\n # temporary hack\n plt4 = np.nan_to_num(plt4)\n plt5 = np.nan_to_num(plt5)\n plt6 = np.nan_to_num(plt6)\n plt7 = np.nan_to_num(plt7)\n plt8 = np.nan_to_num(plt8)\n\n # set plot boundaries \n to_plot = [plt4, plt5, plt6, plt7]\n self.set_plt_axis(LAXIS, xbl, xbr, ybu, ybd, to_plot)\n\n # plot DATA \n plt.title('velocities ' + str(self.nsdim) + \"D\")\n # plt.plot(grd1,plt1,color='brown',label = r'$\\overline{u}_r$')\n # plt.plot(grd1,plt2,color='red',label = r'$\\widetilde{u}_r$')\n # plt.plot(grd1,plt3,color='green',linestyle='--',label = r'$\\overline{v}_{exp} = -\\dot{M}/(4 \\pi r^2 \\rho)$')\n #plt.plot(grd1, plt4, color='blue', label=r\"$u_{turb} = +\\widetilde{u''_x u''_x}^{1/2}$\")\n plt.plot(grd1, plt8, color='blue', label=r\"$u_{rms}$\")\n\n plt.plot(grd1,plt5,color='red',label = r'$u_{mlt}$')\n # plt.plot(grd1,plt6,color='g',label = r'$u_{MLT} 2$')\n # plt.plot(self.rr,plt7,color='brown',label = r'$u_{MLT} 3 inimod$')\n\n # convective boundary markers\n plt.axvline(bconv, linestyle='--', linewidth=0.7, color='k')\n plt.axvline(tconv, linestyle='--', linewidth=0.7, color='k')\n\n # convective boundary markers - only super-adiatic regions\n plt.axvline(super_ad_i, linestyle=':', linewidth=0.7, color='k')\n plt.axvline(super_ad_o, linestyle=':', linewidth=0.7, color='k')\n\n if self.ig == 1:\n setxlabel = r\"x (cm)\"\n setylabel = r\"velocity (cm s$^{-1}$)\"\n plt.xlabel(setxlabel)\n plt.ylabel(setylabel)\n elif self.ig == 2:\n setxlabel = r\"r (cm)\"\n setylabel = r\"velocity (cm s$^{-1}$)\"\n plt.xlabel(setxlabel)\n plt.ylabel(setylabel)\n\n # show LEGEND\n plt.legend(loc=ilg, prop={'size': 18})\n\n # display PLOT\n plt.show(block=False)\n\n # save PLOT\n if self.fext == \"png\":\n plt.savefig('RESULTS/' + self.data_prefix + 'mean_velocities_turb.png')\n if self.fext == \"eps\":\n plt.savefig('RESULTS/' + self.data_prefix + 'mean_velocities_turb.eps')", "def TrackVelocity3D(particle, fieldset, time):\n print(\"TIME : %g\" % time)\n (u1, v1) = fieldset.UV[time, particle.depth, particle.lat, particle.lon] #\n particle.u = u1 * 1852. * 60. * math.cos(particle.lat * math.pi/180.) \n particle.v = v1 * 1852. * 60.", "def mk_raw_vel_trace_figures():\n # use the same data as in mk_eyegaze_classification_figures()\n # (no need for file retrieval, should be there)\n datalad_get(op.join('data', 'raw_eyegaze'), get_data=False)\n infiles = [\n op.join(\n 'data',\n 'raw_eyegaze', 'sub-32', 'beh',\n 'sub-32_task-movie_run-5_recording-eyegaze_physio.tsv.gz'),\n op.join(\n 'data',\n 'raw_eyegaze', 'sub-02', 'ses-movie', 'func',\n 'sub-02_ses-movie_task-movie_run-5_recording-eyegaze_physio.tsv.gz'\n ),\n ]\n # we need the sampling rate for plotting in seconds and velocity calculation\n sr = 1000\n # load data\n for i, f in enumerate(infiles):\n # read data\n datalad_get(f)\n data = np.recfromcsv(f,\n delimiter='\\t',\n names=['x', 'y', 'pupil', 'frame'])\n\n # subset data. Hessels et al., 2017 display different noise levels on 4\n # second time series (ref. Fig 10). 
That still looks a bit dense, so we\n # go with 2 seconds, from start of 10sec excerpt to make it easier to\n # associate the 2 sec excerpt in to its place in the 10 sec excerpt\n # above\n data_subset = data[15000:17000]\n px2deg, ext = (0.0266711972026, 'lab') if '32' in f \\\n else (0.0185581232561, 'mri')\n # take raw data and convert it to velocity: euclidean distance between\n # successive coordinate samples. Note: no entry for first datapoint!\n # Will plot all but first data point in other time series\n velocities = cal_velocities(data_subset, sr, px2deg)\n vel_color = 'xkcd:gunmetal'\n # prepare plotting - much manual setup, quite ugly - sorry\n fig, ax1 = plt.subplots()\n fig.set_figheight(2)\n fig.set_figwidth(7)\n fig.set_dpi(120)\n time_idx = np.linspace(0, len(data_subset) / sr, len(data_subset))[1:]\n max_x = float(len(data_subset) / sr)\n ax1.set_xlim(0, max_x)\n ax1.set_xlabel('time (seconds)')\n ax1.set_ylabel('coordinates')\n # left y axis set to max screensize in px\n ax1.set_ylim(0, 1280)\n # plot gaze trajectories (not preprocessed)\n ax1.plot(time_idx,\n data_subset['x'][1:],\n color='black', lw=1)\n ax1.plot(\n time_idx,\n data_subset['y'][1:],\n color='black', lw=1)\n # right y axis shows velocity \"as is\" (not preprocessed)\n ax2 = ax1.twinx()\n ax2.set_ylabel('velocity (deg/sec)', color=vel_color)\n ax2.tick_params(axis='y', labelcolor=vel_color)\n #ax2.set_yscale('log') ## TODO: Log scale or not?\n ax2.set_ylim(1, 2000)\n ax2.plot(time_idx,\n velocities,\n color=vel_color, lw=1)\n plt.savefig(\n op.join('img', 'rawtrace_{}.svg'.format(ext)),\n transparent=True,\n bbox_inches=\"tight\",\n metadata={'Date': None})\n plt.close()", "def ptolrvm(pd):\n\n norbit, nephem = norbeph(pd) \n\n # tref is the time at which the orbital elements are assumed to apply\n mass0 = pd['mass0']\n tref = pd['tstart']\n coord = pd['coord']\n \n r0, v0 = Vec3(), Vec3()\n msum = mass0\n if coord == 'Astro':\n for i in range(1,norbit+1):\n stri = str(i)\n msum += pd['mass'+stri]\n lrvm = []\n pdc = pd.copy()\n\n # 'n' in what follows is the conventional symbol for the angular frequency\n # of an orbit.\n ks = []\n for i in range(1,norbit+1):\n stri = str(i)\n\n # compute angular frequency\n a = pd['a'+stri]\n mass = pd['mass' + stri]\n if coord == 'Jacobi':\n msum += mass\n k = mass/msum\n mu = mass0/(1-k)\n elif coord == 'Marsh':\n msum += mass\n k = mass/msum\n mu = msum\n elif coord == 'Astro':\n mu = mass0+mass\n k = mass/msum\n else:\n raise Exception('Unrecognised coordinates in ptolrvm')\n\n n = comp_n(mu,a)\n pdc['mu'+stri] = mu\n\n orb = Orbit(pdc,i,pdc['eomega'+stri])\n r,v = orb.torv(tref)\n\n # accumulate reflex sums (automatically barycentric)\n r0 -= k*r\n v0 -= k*v\n\n # store in Rvm list, store k values\n lrvm.append(Rvm(r, v, mass, a, n*a, pdc['rint'+stri]))\n ks.append(k)\n\n if coord == 'Jacobi' or coord == 'Marsh':\n # Need to convert the Jacobi coordinates to barycentric ones\n # for N-body work. 
Work through rvm list in reverse order:\n rsum, vsum = Vec3(), Vec3()\n for i in range(len(ks)-1,-1,-1):\n rsum += ks[i]*lrvm[i].r\n vsum += ks[i]*lrvm[i].v\n lrvm[i].r -= rsum\n lrvm[i].v -= vsum\n\n elif coord == 'Astro':\n # to get from astro to barycentric simply add r0, v0\n for i in range(len(ks)):\n lrvm[i].r += r0\n lrvm[i].v += v0\n\n # Create and insert the zeroth object Rvm and return\n rvm0 = Rvm(r0, v0, mass0, r0.norm(), v0.norm(), pdc['rint0'])\n lrvm.insert(0,rvm0)\n return lrvm", "def plotSpikes(self):\n self.getCompleteSpikeTimes()\n b=np.ones_like(self.completeSpikeTimes)\n matplotlib.pyplot.plot(b)\n matplotlib.pyplot.eventplot(self.spikeTimes)\n matplotlib.pyplot.xlabel(\"time\") \n matplotlib.pyplot.title(\"single neuron raster plot of Neuron \"+self.name)\n matplotlib.pyplot.show()", "def easy_pv_2():\n main_pv_fn = \"carma/M16.ALL.hcop.sdi.cm.subpv.fits\"\n cube = cps2.cutout_subcube(data_filename=main_pv_fn, length_scale_mult=5)\n reg_filename_short = \"catalogs/p1_IDgradients_thru_head.reg\"\n reg_filename = catalog.utils.search_for_file(reg_filename_short)\n selected_path = 2 # 0,1 are threads, 2 is blue cap\n vel_lims = (22.5*kms, 24.0*kms) # to highlight the cap\n mom0 = cube.spectral_slab(*vel_lims).moment0()\n pv_path = pvdiagrams.path_from_ds9(reg_filename, selected_path, width=None)\n pv_path_length = pv_path._coords[0].separation(pv_path._coords[1]).to(u.arcsec)\n sl = pvextractor.extract_pv_slice(cube.spectral_slab(20*kms, 28*kms), pv_path)\n\n fig = plt.figure(figsize=(15, 6))\n ax_img = plt.subplot2grid((1, 2), (0, 1), projection=mom0.wcs)\n im = ax_img.imshow(mom0.to_value(), origin='lower', cmap='Greys_r')\n fig.colorbar(im, ax=ax_img, label='K km/s')\n ax_img.plot([c.ra.deg for c in pv_path._coords], [c.dec.deg for c in pv_path._coords], color='red', linestyle='-', lw=3, transform=ax_img.get_transform('world'))\n ax_img.text(pv_path._coords[0].ra.deg, pv_path._coords[0].dec.deg + 4*u.arcsec.to(u.deg), 'Offset = 0\\\"', color='red', fontsize=10, va='bottom', ha='center', transform=ax_img.get_transform('world'))\n ax_img.text(pv_path._coords[1].ra.deg, pv_path._coords[1].dec.deg - 4*u.arcsec.to(u.deg), f'Offset = {pv_path_length.to_value():.1f}\\\"', color='red', fontsize=12, va='top', ha='center', transform=ax_img.get_transform('world'))\n ax_sl = plt.subplot2grid((1, 2), (0, 0), projection=WCS(sl.header))\n im = ax_sl.imshow(sl.data, origin='lower', aspect=1.4)\n fig.colorbar(im, ax=ax_sl, label='K')\n ax_sl.coords[1].set_format_unit(u.km/u.s)\n ax_sl.coords[1].set_major_formatter('x.xx')\n ax_sl.coords[0].set_format_unit(u.arcsec)\n ax_sl.coords[0].set_major_formatter('x.xx')\n ax_sl.set_xlabel(\"Offset (arcseconds)\")\n ax_sl.set_ylabel(\"Velocity (km/s)\")\n ax_sl.set_title(\"HCO+ PV diagram\")\n for coord in ax_img.coords:\n coord.set_ticks_visible(False)\n coord.set_ticklabel_visible(False)\n coord.set_axislabel('')\n ax_img.set_title(f\"Integrated HCO+ line intensity {make_vel_stub(vel_lims)}\")\n hcop_noise = 0.3 # I did this one by hand in DS9 on 2022-01-24, I think the sample that yielded 0.546 was in the corner where noise gets worse\n ax_sl.contour(sl.data, colors='k', levels=np.arange(hcop_noise*5, np.max(sl.data), hcop_noise*5), lw=2, alpha=0.7)\n plt.tight_layout()\n plt.subplots_adjust(left=0.07, bottom=0.1, wspace=0.05)\n fig.savefig(\"/home/ramsey/Pictures/2022-01-24-work/blue-cap_hcop_pv.png\",\n metadata=catalog.utils.create_png_metadata(title=f\"reg i={selected_path} from {reg_filename_short}\",\n file=__file__, 
func=\"easy_pv_2\"))", "def _subfn_add_replay_velocities(df, ax):\n df['center'] = (df['stop'] + df['start'])/2.0\n for index, row in df.iterrows():\n start = row['start']\n stop = row['stop']\n center = row['center']\n \n # Single Version:\n # velocity = row['velocity']\n # ax.plot([start, stop], [velocity, velocity], label=row['label'], marker='s', markersize=4.5, color='k') # , linewidth=2.5\n\n # LONG/SHORT Version:\n velocity_L = row['velocity_LONG']\n ax.plot([start, stop], [velocity_L, velocity_L], label=f\"{row['label']}_Long\", marker='s', markersize=3.5, color='g') # , linewidth=2.5\n velocity_S = row['velocity_SHORT']\n ax.plot([start, stop], [velocity_S, velocity_S], label=f\"{row['label']}_Short\", marker='s', markersize=3.5, color='r') # , linewidth=2.5\n # Draw directed line\n head_length = 40.0\n # arrow_start = (start, velocity_L)\n # arrow_end = (stop, velocity_S)\n arrow_start = (center, velocity_L)\n arrow_end = (center, velocity_S) # - (head_length * 0.5) subtract off half the head-length so the arrow ends at the point\n arrow_dx = arrow_end[0] - arrow_start[0]\n arrow_dy = arrow_end[1] - arrow_start[1]\n ax.arrow(*arrow_start, arrow_dx, arrow_dy, head_width=20.0, head_length=head_length, fc='k', ec='k')\n \n # Set labels and title\n ax.set_xlabel('time')\n ax.set_ylabel('Velocity')\n ax.set_title('Replay Velocities over Time')\n\n # Display legend\n # ax.legend()\n\n return plt.gcf(), ax", "def RV_model(t, p):\n\n\t(period, ttran, ecosomega, esinomega, K, gamma, gamma_offset, sigma_jitter1_sqrd, sigma_jitter2_sqrd) = p\n\te = np.sqrt(ecosomega**2. + esinomega**2.)\n\tomega = np.arctan2(esinomega, ecosomega)\n\n\t#mean motion: n = 2pi/period\n\tn = 2. * np.pi / period\n\n\t# Sudarsky 2005 Eq. 9 to convert between center of transit\n\t# and pericenter passage (tau)\n\n\n\n\tedif = 1. - e**2.\n\tfcen = np.pi/2. - omega\n\ttau = (ttran + np.sqrt(edif) * period / (2 * np.pi) * \n\t\t (e * np.sin(fcen) / (1. + e * np.cos(fcen)) - 2. / np.sqrt(edif) * \n\t\t np.arctan(np.sqrt(edif) * np.tan(fcen / 2.) / (1. + e))))\n\n\n\t#Define mean anomaly: M\n\tM = (n * (t - tau)) % (2. * np.pi)\n\n\t#Determine the Energy: E\n\tE = kepler(M, e)\n\n\t#Solve for fanom (measure of location on orbit)\n\ttanf2 = np.sqrt((1. + e) / (1. - e)) * np.tan(E / 2.)\n\tfanom = (np.arctan(tanf2) * 2.) % (2. 
* np.pi)\n\n\t#Calculate RV at given location on orbit\n\tRV = K * (e * np.cos(omega) + np.cos(fanom + omega)) + gamma\n\n\treturn RV", "def plot_GPS_and_TV(GPS_file, VO_poses):\n\n vo_poses = OrderedDict()\n # Reading in the textfile\n with open(VO_poses, \"r\") as textfile:\n for line in textfile:\n parse_line = line.strip().split()\n vo_poses[parse_line[0]] = (float(parse_line[1]), float(parse_line[2]))\n\n coordinates_dict = OrderedDict()\n # Reading in the textfile\n with open(GPS_file, \"r\") as textfile:\n for line in textfile:\n parse_line = line.strip().split()\n coordinates_dict[parse_line[0]] = from_latlon(float(parse_line[1]), float(parse_line[2]), 17)[:2]\n\n k = vo_poses.keys() + coordinates_dict.keys()\n k = [i for i in unique_everseen([i for i in k if k.count(i) > 1])]\n\n T_v, gps_orig = [], []\n for key in k:\n T_v.append(vo_poses[key])\n gps_orig.append(coordinates_dict[key])\n\n # Retrieving the GPS coordinates into a list\n # Shifting the trajectory to the origin\n utm_dx = gps_orig[0][0]\n utm_dy = gps_orig[0][1]\n\n gps = [(u[0] - utm_dx, u[1] - utm_dy) for u in gps_orig]\n\n last_gps = gps[len(gps) - 1]\n last_vo = T_v[len(T_v) - 1]\n d_gps = math.sqrt((last_gps[0] ** 2) + (last_gps[1] ** 2))\n d_VO = math.sqrt((last_vo[0] ** 2) + (last_vo[1] ** 2))\n\n\n scale = d_gps / d_VO\n\n # print 'The scale factor', scale\n # Apply scale factor to the translation vectors\n T_v = [np.array(t) * scale for t in T_v]\n\n # Obtaining the angle between the first points of each list: VO list and GPS list\n rotate_idx = min(len(T_v) - 1, len(gps) - 1)\n VO_v = np.array(T_v[rotate_idx])\n GPS_v = np.array(gps[rotate_idx])\n\n # Distance between points.\n d1 = math.sqrt((VO_v[0] - GPS_v[0]) ** 2 + (VO_v[1] - GPS_v[1]) ** 2)\n # Obtain the angle assuming the two points are vectors\n angle = math.acos((VO_v.dot(GPS_v)) / (np.linalg.norm(VO_v) * np.linalg.norm(GPS_v)))\n # Rotates the GPS point only for verification\n VO_v = TT.rotateFunct([VO_v], angle)\n\n # Distance between points after rotation.\n d2 = math.sqrt((VO_v[0][0] - GPS_v[0]) ** 2 + (VO_v[0][1] - GPS_v[1]) ** 2)\n # Verify if points are closer after rotation if not rotate the other way\n if d2 < d1:\n sign = 1\n else:\n sign = -1\n\n # Rotating the GPS function so it aligns with the VO function\n T_v = TT.rotateFunct(T_v, sign * angle)\n gps_factor = np.array(gps_orig[670]) - np.array(T_v[670])\n print(len(T_v))\n print(len(gps_orig))\n\n # --------------------------------------------------\n\n # Plotting the VO and GPS trajectories\n font = {'family': 'normal',\n 'weight': 'bold',\n 'size': 22}\n\n plt.rc('font', **font)\n plt.figure(1)\n GPS, = plt.plot(*zip(*gps), color='red', marker='o', label='GPS')\n pyMVO, = plt.plot(*zip(*T_v), marker='o', color='b', label='py-MVO')\n plt.legend(handles=[pyMVO, GPS])\n # Set plot parameters and show it\n plt.axis('equal')\n plt.grid()\n plt.show()", "def velocity(self, t):\n pass", "def old_run_plots(self,params):\n lw = 2\n \n \n # Plot voltage at soma and dendrites (apical proximal and distal)\n pylab.figure(1)\n pylab.plot(h.tvec,h.vsoma,lw=lw,c='k',label='v_soma')\n #pylab.plot(h.tvec,h.vdend,lw=lw,c='r',label='v_dend')\n #pylab.plot(h.tvec,h.vdend2,lw=lw,c='b',label='v_dend2')\n pylab.xlim(h.tstart-20,h.tstop+20)\n pylab.ylim(-120,40)\n # If optogenetics were included, draw blocks for times that illumination occurred in appropriate colours \n if params.has_key('opdict'):\n for (opsin,opexpressions) in params['opdict'].iteritems():\n for opexp in opexpressions:\n if opexp[0] is 
None or opexp[0].lower() == 'none':\n continue\n for pulsenum in range(opexp[1][6]): \n pulse_start = opexp[1][2]+pulsenum*(opexp[1][3]+opexp[1][4])\n self.plot_optogenetic(opsin,pulse_start,pulse_start+opexp[1][3],yoffset=40)\n # once we've plotted an activation for one area, that should be sufficient i.e. we don't need to plot apical *and* soma, only the first \n # TODO: think how to extend this to allow for different areas to be indicated i.e. ChR in soma vs ChR in apical dendritic arbor\n break\n pylab.title('V')\n ax = pylab.gca()\n for loc, spine in ax.spines.iteritems():\n if loc in ['left','bottom']:\n spine.set_position(('outward',5))\n ax.tick_params(direction='out')\n elif loc in ['right','top']:\n spine.set_color('none') \n pylab.legend()\n pylab.xlabel('time (ms)')\n pylab.ylabel('V (mV)')\n \n \"\"\"\n # Plot currents at soma and i_syn\n pylab.figure(2)\n pylab.plot(h.tvec,h.isyn,lw=lw,c='g',label='i_syn')\n pylab.plot(h.tvec,h.isoma,lw=lw,c='k',label='i_soma')\n if params.has_key('opdict'):\n for (opsin,opexpressions) in params['opdict'].iteritems():\n for opexp in opexpressions:\n if opexp[0] is None or opexp[0].lower() == 'none':\n continue\n h('objref list_i_opsin')\n h('list_i_opsin = new List()')\n h('list_i_opsin.append(i_%s)'%opsin)\n pylab.plot(h.tvec,h.list_i_opsin.object(0),color=opsin_dict[opsin]['color'],label='i_%s'%opsin)\n break\n pylab.xlim(h.tstart-20,h.tstop+20)\n #pylab.ylim(-3,6)\n pylab.title('I')\n ax = pylab.gca()\n for loc, spine in ax.spines.iteritems():\n if loc in ['left','bottom']:\n spine.set_position(('outward',5))\n ax.tick_params(direction='out')\n elif loc in ['right','top']:\n spine.set_color('none') \n pylab.legend()\n pylab.xlabel('time (ms)')\n pylab.ylabel('I (nA)')\n \"\"\"\n \n if params['expname'] is not None:\n savename = params['expname']\n pylab.figure(1)\n pylab.savefig(savename+'_voltage.png')\n #pylab.figure(2)\n #pylab.savefig(savename+'_current.png')\n print \"Saved figures under %s*.png\"%savename\n pylab.close('all')\n else:\n pylab.show()", "def demo_lsr_and_sun_cal():\n perimeter = 2 * np.pi * 8 * u.kpc\n velocity = 220 * u.km/ u.s\n # for reference, LSR (at 8 kpc, with V = 220 km/s) should take this long\n # to complete one orbit\n orbit_time = (perimeter / velocity).to(\"Gyr\")\n\n max_age = 100 * orbit_time / bovy_conversion.time_in_Gyr(220., 8.) 
# Gyr\n ntimes = 10000\n ts = np.linspace(0,max_age,ntimes)\n\n # INITIALISING SUN COORDINATES AND ORBIT\n #deg, deg, kpc, mas/yr, mas/yr, km/s\n ra, dec, dist, mu_ra, mu_dec, vlos = 0., 0., 0., 0., 0., 0.\n solar_coords = [ra, dec, dist, mu_ra, mu_dec, vlos]\n sun = Orbit(vxvv=solar_coords, radec=True, solarmotion='schoenrich') # should just be the sun's orbit\n sun.integrate(ts,mp,method='odeint')\n\n # get the orbit [R, vR, vT, z, vz, phi] (pos scaled by ro, vel scaled by vo)\n sun_data = sun.getOrbit()\n\n # plots the sun's motion with respect to Galactic Centre\n sunR = 8 * sun_data[:,0]\n sunphi = sun_data[:,5]\n sunX = sunR * np.cos(sunphi)\n sunY = sunR * np.sin(sunphi)\n sunZ = 8 * sun_data[:,3]\n plt.clf()\n plt.plot(sunX, sunY)\n plt.savefig('temp_plots/sunXY.png')\n\n plt.clf()\n plt.plot(sunX, sunZ)\n plt.savefig('temp_plots/sunXZ.png')\n\n # plot the XY of the sun's motion using galpy's plot function (w.r.t GC)\n plt.clf()\n sun.plot(d1='x', d2='y')\n plt.savefig('temp_plots/galpy_sunXY.png')\n\n sun.plot(d1='x', d2='z')\n plt.savefig('temp_plots/galpy_sunXZ.png')\n\n plt.clf()\n sun.plot(d1='R', d2='z')\n plt.savefig('temp_plots/galpy_sunRZ.png')\n\n # kpc, km/s\n # INITIALISING THE LSR (at XYZUVW (w.r.t sun) of [0,0,-0.025,0,220,0]\n R, vR, vT, z, vz, phi = 1., 0., 1., 0., 0., 0. # <--- Galpy units\n LSR_coords = [R, vR, vT, z, vz, phi]\n lsr = Orbit(vxvv=LSR_coords, solarmotion='schoenrich', vo=220, ro=8)\n lsr.integrate(ts, mp, method='odeint')\n\n # plots a perfect circle\n plt.clf()\n lsr.plot(d1='x', d2='y')\n plt.savefig('temp_plots/galpy_lsrXY.png')\n\n plt.clf()\n lsr.plot(d1='x', d2='z')\n plt.savefig('temp_plots/galpy_lsrXZ.png')\n\n # Manually reconstructing orbit\n lsr_data = lsr.getOrbit()\n lsrR = 8 * lsr_data[:,0]\n lsrphi = lsr_data[:,5]\n\n lsrX = lsrR * np.cos(lsrphi)\n lsrY = lsrR * np.sin(lsrphi)\n lsrZ = 8 * lsr_data[:,3]\n\n plt.clf()\n plt.plot(lsrX, lsrY)\n plt.savefig('temp_plots/lsrXY.png')\n plt.clf()\n plt.plot(lsrX, lsrZ)\n plt.savefig('temp_plots/lsrXZ.png')\n\n # plotting both sun and lsr\n plt.clf()\n plt.plot(lsrX, lsrY)\n plt.plot(sunX, sunY)\n plt.savefig('temp_plots/combXY.png')\n plt.clf()\n plt.plot(lsrX, lsrZ)\n plt.plot(sunX, sunZ)\n plt.savefig('temp_plots/combXZ.png')\n\n # Finding sun's path w.r.t the LSR in non-corotating frame\n relsunX = sunX - lsrX\n relsunY = sunY - lsrY\n relsunZ = sunZ - lsrZ\n\n plt.clf()\n plt.plot(relsunX, relsunY)\n plt.savefig('temp_plots/relsunXY.png')\n plt.clf()\n plt.plot(relsunX, relsunZ)\n plt.savefig('temp_plots/relsunXZ.png')\n\n # Getting sun's path w.r.t the LSR in cortating frame\n sun_rel_data = sun_data - lsr_data\n sun_relR = 8 * sun_rel_data[:,0]\n sun_relphi = sun_rel_data[:,5]\n\n sun_relX = sun_relR * np.cos(sun_relphi)\n sun_relY = sun_relR * np.sin(sun_relphi)\n sun_relZ = 8 * sun_rel_data[:,3]\n\n plt.clf()\n plt.plot(sun_relX, sun_relY)\n plt.savefig('temp_plots/sun_relXY.png')\n plt.clf()\n plt.plot(sun_relX, sun_relZ)\n plt.savefig('temp_plots/sun_relXZ.png')\n\n # Try and plot LSR and sun in 3D for comparison with\n # relative plot\n plt.clf()\n fig = plt.figure()\n ax = fig.gca(projection='3d')\n #theta = np.linspace(-4 * np.pi, 4 * np.pi, 100)\n #z = np.linspace(-2, 2, 100)\n #r = z ** 2 + 1\n #x = r * np.sin(theta)\n #y = r * np.cos(theta)\n ax.plot(sunX, sunY, sunZ, label='sun')\n ax.plot(lsrX, lsrY, lsrZ, label='lsr')\n ax.legend()\n plt.savefig('temp_plots/3D_sun_lsr.png')\n plt.show()\n #galpy_coords_to_xyzuvw(lsr_data)\n print(\"Max age is {} and max phi is {}... 
does this make sense?\".\\\n format(max_age, np.max(lsr_data[:,5])))\n print(\"Max age is {} and max phi is {}... does this make sense?\". \\\n format(max_age, np.max(sun_data[:,5])))", "def manual_pv_slice_series():\n\n \"\"\"\n PV cut orientation, vertical or horizontal\n Vertical means slice at a single RA and plot velocity vs Dec\n Horizontal means slice at a single Dec and plot velocity vs RA\n \"\"\"\n orientation = 'horizontal'\n start_idx, step_idx = 25, 50\n\n # Load cube\n line_stub = 'cii'\n if line_stub in large_map_filenames:\n # Use the custom filename rather than the default\n filename = large_map_filenames[line_stub]\n else:\n # Use default filename from cube_utils (many of these are centered around Pillars)\n filename = line_stub\n cube_obj = cube_utils.CubeData(filename).convert_to_K().convert_to_kms()\n dimension_size = (cube_obj.data.shape[2] if orientation=='vertical' else cube_obj.data.shape[1])\n\n # Make image\n ref_vel_lims = (10*kms, 35*kms)\n ref_mom0 = cube_obj.data.spectral_slab(*ref_vel_lims).moment0()\n ref_img = ref_mom0.to_value()\n\n # Set colors\n pv_cmap = 'plasma'\n img_cmap = 'Greys_r'\n line_color = marcs_colors[1]\n\n # Loop thru slice index\n for slice_idx in range(start_idx, dimension_size, step_idx):\n\n if orientation == 'vertical':\n # Cube index order is V,Y,X = Velocity,Dec,RA = V,I,J\n cube_slices = (slice(None), slice(None), slice_idx)\n else:\n cube_slices = (slice(None), slice_idx, slice(None))\n\n pv_slice = cube_obj.data[cube_slices]\n\n # First try to remake fig/axes each time. Try persistent if slow\n fig = plt.figure(figsize=(8, 10))\n gs = fig.add_gridspec(2, 1)\n ax_img = fig.add_subplot(gs[0,0], projection=cube_obj.wcs_flat)\n ax_pv = fig.add_subplot(gs[1,0], projection=pv_slice.wcs)\n\n im = ax_img.imshow(ref_img, origin='lower', vmin=0, cmap=img_cmap)\n fig.colorbar(im, ax=ax_img, label=ref_mom0.unit.to_string('latex_inline'))\n\n im = ax_pv.imshow(pv_slice.to_value(), origin='lower', vmin=0, cmap=pv_cmap)\n fig.colorbar(im, ax=ax_pv, label=pv_slice.unit.to_string('latex_inline'), orientation='horizontal')\n\n # Plot line\n if orientation == 'vertical':\n plot_line = ax_img.axvline\n else:\n plot_line = ax_img.axhline\n plot_line(slice_idx, color=line_color, linewidth=2)\n # Reference image velocity interval stamp\n ax_img.text(0.1, 0.9, make_vel_stub(ref_vel_lims), color=line_color, ha='left', va='bottom')\n\n # Clean up axes labels\n # ax_img.set_xlabel(\"RA\")\n # ax_img.set_ylabel(\"Dec\")\n ax_pv.coords[1].set_format_unit(kms)\n # 2023-04-26, 06-07\n savename = f\"/home/ramsey/Pictures/2023-04-26/m16_pv_{orientation}_{slice_idx:03d}.png\"\n fig.savefig(savename, metadata=catalog.utils.create_png_metadata(title=f'{line_stub}, using stub/file {filename}', file=__file__, func='manual_pv_slice_series'))" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Sets the timepoint of this ScheduleResourceAttributes.
def timepoint(self, timepoint): self._timepoint = timepoint
[ "def setTimePoints(self, timepoints):\n\t\tself.timePoints = timepoints", "def setStartTime(self, startTime):\n self.startTime = startTime", "def set_schedule(self, schedule):\r\n arg_str = p2e._base._util._convert_args_to_string(\"set.object.schedule\", self._object._eco_id, schedule)\r\n p2e._app.Exec(arg_str)", "def set_schedule(self, schedule):\n data = {'propagateToInstances':True, 'schedule':schedule}\n return self.client.put_asg_scaling_schedule(environment=self.env, asgname=self.name, data=data)", "def setTime(self, abstime: 'SbTime') -> \"void\":\n return _coin.SoAlarmSensor_setTime(self, abstime)", "def set_time(self, enable=True):\r\n if enable:\r\n self.time = datetime.now\r\n else:\r\n self.time = None", "def setCurrentTimepoint(self, timepoint):\n\t\tif not self.dataUnit:\n\t\t\treturn\n\t\t\n\t\tself.currentTimePoint = timepoint\n\t\tif self.dataUnit.isProcessed():\n\t\t\tself.currentData = self.dataUnit.doPreview(scripting.WHOLE_DATASET, True, timepoint)\n\t\telse:\n\t\t\tself.currentData = self.dataUnit.getTimepoint(timepoint)\n\t\tself.dimensions = self.currentData.GetDimensions()", "def setTimeStamp(self, timeStamp):\n\n self.__validateTimeStamp(timeStamp)", "def set_time_step(self, time_step):\n\n self._time_step = time_step", "def time_t(self, time_t: int):\n\n self._time_t = time_t", "def at_time(self, at_time):\n\n self._at_time = at_time", "def timeslot_id(self, timeslot_id):\n\n self._timeslot_id = timeslot_id", "def setPlanningTime(self, time):\n self.planning_time = time", "def set_alarm(self, target_time: datetime.time):\n self.time = target_time.replace(second=0, microsecond=0)\n # print the time\n print(\"Alarm set for {}:{}\".format(self.time.hour, self.time.minute))", "def setTime(self, t: 'SbTime') -> \"void\":\n return _coin.SoEvent_setTime(self, t)", "def set_min_time(self, time):\n raise NotImplementedError", "def setMinimumTime(self, account, acl, equipment, mintime):\n\n acl.assertIsAdministrator(account)\n\n mintime = to_int(mintime)\n\n if not mintime:\n mintime = None\n\n if mintime != self.min_booking_time:\n item = equipment._getFromDB()\n item.constraints.min_booking_time = mintime\n self.min_booking_time = mintime\n item.put()", "def set_sample_time(self, sample_time):\n self.validate_sample_time(sample_time)\n self.sample_time = sample_time", "def set_generatedAtTime(self, generatedAtTime):\n self._attributes[VOPROV['generatedAtTime']] = {generatedAtTime}" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Sets the stop_sequence of this ScheduleResourceAttributes.
def stop_sequence(self, stop_sequence): self._stop_sequence = stop_sequence
[ "def stop(self, stop: SourceLocation):\n if stop is None:\n raise ValueError(\"Invalid value for `stop`, must not be `None`\") # noqa: E501\n\n self._stop = stop", "def setStopCondition(self, val):\r\n self.__scl.acquire()\r\n self.__stopCondition = val\r\n self.__scl.release()", "def stop_sign(self, stop_sign):\n self._stop_sign = stop_sign", "def stop_reference(self, stop_reference: List[StopReference]):\n\n self._stop_reference = stop_reference", "def setStartStopId(self, startStopId) -> None:\n self.startStopId = startStopId", "def set_next_stop(self, stop):\n self.next_stop = stop", "def set_previous_stop(self, stop):\n self.previous_stop = stop", "def setEndStopId(self, endStopId: int) -> None:\n self.endStopId = endStopId", "def stops(self, stops):\n\n self._stops = stops", "def stop_is_on(self, stop_is_on):\n if self.local_vars_configuration.client_side_validation and stop_is_on is None: # noqa: E501\n raise ValueError(\"Invalid value for `stop_is_on`, must not be `None`\") # noqa: E501\n\n self._stop_is_on = stop_is_on", "def setStopSweep(self,stop):\r\n self.isSIUnit(stop)\r\n self.stopFreqSweep = stop", "def set_stop_wavelength(self,val): #documented\n if self.__is_int_or_float(val) and self.__is_between(val,600,1800):\n if val < self.get_start_wavelength():\n self.__verbose_output( \"error: stop wavelength can not be set to < start wavelength\",1)\n else:\n self.send_message(\"STO %.1f\"%(val)) \n else:\n self.__verbose_output( \"error: set_stop_wavelength() - invalid argument\",1)", "def set_seq(self, s): # new\r\n self.sequence = s", "def getStartStopId(self) -> int:\n return self.startStopId", "def stop_scheduled(self):\n now = datetime.utcnow()\n consideration_threshold = now - self.stop_attempts_after\n recently_scheduled = and_(Job._stop_at.isnot(None), Job._stop_at > consideration_threshold)\n after_start = or_(Job._start_at < Job._stop_at, Job._start_at.isnot(None))\n can_stop_now = Job._stop_at < now\n jobs_to_stop = Job.query.filter(recently_scheduled, after_start, can_stop_now).all()\n\n log.debug('{} jobs should be stopped.'.format(len(jobs_to_stop)))\n for job in jobs_to_stop:\n log.info(self._log_msg(now=now, action='Stopping scheduled', id=job.id, scheduled=job._stop_at))\n content, status = self.stop_with_grace(job.id)\n\n if status == 200:\n log.debug(content['job']['status'])\n else:\n log.warning(content['msg'])", "def set_sequence(self, sequence):\n\t\tself.sequence = sequence\n\t\tsystem = self.system\n\t\tsystem.set_sequence(sequence, mode = 'stand alone')", "def set_schedule(self, schedule):\n data = {'propagateToInstances':True, 'schedule':schedule}\n return self.client.put_asg_scaling_schedule(environment=self.env, asgname=self.name, data=data)", "def write_t_stop(self):\n t_stop = SpynnakerDataView.get_current_run_time_ms()\n with self.transaction() as cursor:\n cursor.execute(\n \"\"\"\n UPDATE segment\n SET t_stop = ?\n \"\"\", (t_stop, ))", "def _set_sequence(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name=\"sequence\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='leafref', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"sequence must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=six.text_type, 
is_leaf=True, yang_name=\"sequence\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='leafref', is_config=False)\"\"\",\n })\n\n self.__sequence = t\n if hasattr(self, '_set'):\n self._set()", "def constrain_stop_time(self, constrain_stop_time):\n \n self._constrain_stop_time = constrain_stop_time" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Sets the pickup_type of this ScheduleResourceAttributes.
def pickup_type(self, pickup_type): self._pickup_type = pickup_type
[ "def pickup_date(self, pickup_date):\n\n self._pickup_date = pickup_date", "def setTripType(self, tripType: TripType) -> None:\n self.tripType = tripType", "def proprietor_type(self, proprietor_type: str):\n\n self._proprietor_type = proprietor_type", "def set_pick_up_time(self, pick_up_time):\n self.pick_up_time = pick_up_time", "def priority_type(self, priority_type):\n\n self._priority_type = priority_type", "def set_task_type(self, task_type):\n self._task_type = task_type", "def type_of_grain(self, type_of_grain):\n\n self._type_of_grain = type_of_grain", "def transport_type(self, transport_type: str):\n allowed_values = [\"aircraft\", \"train\", \"bus\"] # noqa: E501\n if transport_type not in allowed_values:\n raise ValueError(\n \"Invalid value for `transport_type` ({0}), must be one of {1}\"\n .format(transport_type, allowed_values)\n )\n\n self._transport_type = transport_type", "def pickup_dts(self, pickup_dts):\n\n self._pickup_dts = pickup_dts", "def set_type(self, rr_type):\n _ldns.ldns_rr_set_type(self, rr_type)\n #parameters: ldns_rr *, ldns_rr_type,\n #retvals:", "def resilience_type(self, resilience_type):\n\n self._resilience_type = resilience_type", "def item_type(self, item_type):\n allowed_values = [\"simple\", \"complex\", \"collection\"]\n if item_type not in allowed_values:\n raise ValueError(\n \"Invalid value for `item_type` ({0}), must be one of {1}\"\n .format(item_type, allowed_values)\n )\n\n self._item_type = item_type", "def _set_route_type(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=route_type.route_type, is_container='container', presence=False, yang_name=\"route-type\", rest_name=\"route-type\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Route type.', u'cli-full-no': None}}, namespace='urn:brocade.com:mgmt:brocade-ip-policy', defining_module='brocade-ip-policy', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"route_type must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=route_type.route_type, is_container='container', presence=False, yang_name=\"route-type\", rest_name=\"route-type\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Route type.', u'cli-full-no': None}}, namespace='urn:brocade.com:mgmt:brocade-ip-policy', defining_module='brocade-ip-policy', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__route_type = t\n if hasattr(self, '_set'):\n self._set()", "def price_type(self, price_type: str):\n\n self._price_type = price_type", "def email_template_type(self, email_template_type):\n self._email_template_type = email_template_type", "def SetBusType(self):\n self.cal_type = \"BUS\"", "def frame_resolution_type(self, frame_resolution_type):\n self._frame_resolution_type = frame_resolution_type", "def set_type(self,atype):\n return _ldns.ldns_dnssec_rrsets_set_type(self,atype)\n #parameters: ldns_dnssec_rrsets *,ldns_rr_type,\n #retvals: ldns_status", "def pickup_details(self, pickup_details):\n\n self._pickup_details = pickup_details", "def rental_event_type(self, rental_event_type):\n\n self._rental_event_type = rental_event_type" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Sets the drop_off_type of this ScheduleResourceAttributes.
def drop_off_type(self, drop_off_type): self._drop_off_type = drop_off_type
[ "def disruption_type(self, disruption_type):\n\n self._disruption_type = disruption_type", "def _set_drop(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=YANGBool, default=YANGBool(\"false\"), is_leaf=True, yang_name=\"drop\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='boolean', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"drop must be of a type compatible with boolean\"\"\",\n 'defined-type': \"boolean\",\n 'generated-type': \"\"\"YANGDynClass(base=YANGBool, default=YANGBool(\"false\"), is_leaf=True, yang_name=\"drop\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='boolean', is_config=False)\"\"\",\n })\n\n self.__drop = t\n if hasattr(self, '_set'):\n self._set()", "def end_type(self, end_type):\n\n self.container['end_type'] = end_type", "def resilience_type(self, resilience_type):\n\n self._resilience_type = resilience_type", "def symbol_type(self, symbol_type):\n if symbol_type is None:\n raise ValueError(\"Invalid value for `symbol_type`, must not be `None`\") # noqa: E501\n allowed_values = [undefined, undefined, undefined, undefined, ] # noqa: E501\n\n self._symbol_type = symbol_type", "def dash_type(self, dash_type):\n\n self.container['dash_type'] = dash_type", "def overdraft_type(self, overdraft_type):\n allowed_values = [\"Committed\", \"OnDemand\", \"Other\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and overdraft_type not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `overdraft_type` ({0}), must be one of {1}\" # noqa: E501\n .format(overdraft_type, allowed_values)\n )\n\n self._overdraft_type = overdraft_type", "def rental_event_type(self, rental_event_type):\n\n self._rental_event_type = rental_event_type", "def right_operand_type(self, right_operand_type):\n allowed_values = [\"Absolute\", \"Property\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and right_operand_type not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `right_operand_type` ({0}), must be one of {1}\" # noqa: E501\n .format(right_operand_type, allowed_values)\n )\n\n self._right_operand_type = right_operand_type", "def description_type(self, description_type):\n\n self._description_type = description_type", "def aws_dynamodb_attr_type(self, aws_dynamodb_attr_type):\n\n self._aws_dynamodb_attr_type = aws_dynamodb_attr_type", "def set_type(self, the_type: [bool, int, float, str]):\n if self._value:\n raise CloudioModificationException('The Attribute has already a type (Changing the type is not allowed)!')\n\n if the_type in (bool, int, float, bytes, str):\n self._value = the_type()\n\n # Init to invalid\n self._type = AttributeType(AttributeType.Invalid)\n\n # Set cloudio attribute type accordingly\n if the_type in (bool,):\n self._type = AttributeType(AttributeType.Boolean)\n elif the_type in (int,):\n self._type = AttributeType(AttributeType.Integer)\n elif the_type in (float,):\n self._type = AttributeType(AttributeType.Number)\n else:\n assert the_type in (bytes, str), 'Seems we got a new type!'\n self._type = AttributeType(AttributeType.String)\n else:\n raise InvalidCloudioAttributeException(the_type)", "def 
price_type(self, price_type: str):\n\n self._price_type = price_type", "def setTripType(self, tripType: TripType) -> None:\n self.tripType = tripType", "def set_task_type(self, task_type):\n self._task_type = task_type", "def _set_blacklist_time(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), is_leaf=True, yang_name=\"blacklist-time\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='uint16', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"blacklist_time must be of a type compatible with uint16\"\"\",\n 'defined-type': \"uint16\",\n 'generated-type': \"\"\"YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), is_leaf=True, yang_name=\"blacklist-time\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='uint16', is_config=True)\"\"\",\n })\n\n self.__blacklist_time = t\n if hasattr(self, '_set'):\n self._set()", "def type(self, type):\n allowed_values = [\"Vulnerability\", \"Package\"]\n if type not in allowed_values:\n raise ValueError(\n \"Invalid value for `type` ({0}), must be one of {1}\"\n .format(type, allowed_values)\n )\n\n self._type = type", "def item_type(self, item_type):\n allowed_values = [\"simple\", \"complex\", \"collection\"]\n if item_type not in allowed_values:\n raise ValueError(\n \"Invalid value for `item_type` ({0}), must be one of {1}\"\n .format(item_type, allowed_values)\n )\n\n self._item_type = item_type", "def relaxation_type(self, relaxation_type):\n\n self._relaxation_type = relaxation_type" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Sets the direction_id of this ScheduleResourceAttributes.
def direction_id(self, direction_id): self._direction_id = direction_id
[ "def route_direction(self, route_direction):\n\n self._route_direction = route_direction", "def set_direction(self, direction):", "def requested_route_direction(self, requested_route_direction):\n\n self._requested_route_direction = requested_route_direction", "def set_direction(self, direction):\n # type: (QtCore.Qt.Orientation) -> None\n self.direction = direction\n anim_property = ('maximumWidth'\n if self.direction == QtCore.Qt.Horizontal else\n 'maximumHeight')\n self._animation.setPropertyName(anim_property)", "def __change_direction(self):\n\n self.current_direction = self.next_direction", "def scheduled_dir(self, scheduled_dir):\n \n self._scheduled_dir = scheduled_dir", "def SetDirection(self, direction: 'itkMatrixD44') -> \"void\":\n return _itkImagePython.itkImageBase4_SetDirection(self, direction)", "def set_side(self, direction, mapsite_object):\n self._sides[direction] = mapsite_object", "def SetDirection(self, direction: 'itkMatrixD22') -> \"void\":\n return _itkImagePython.itkImageBase2_SetDirection(self, direction)", "def direction(self, direction):\n allowed_values = [\"EGRESS\", \"INGRESS\"]\n if not value_allowed_none_or_none_sentinel(direction, allowed_values):\n direction = 'UNKNOWN_ENUM_VALUE'\n self._direction = direction", "def set_theta_direction(self, direction):\n mtx = self._direction.get_matrix()\n if direction in ('clockwise', -1):\n mtx[0, 0] = -1\n elif direction in ('counterclockwise', 'anticlockwise', 1):\n mtx[0, 0] = 1\n else:\n _api.check_in_list(\n [-1, 1, 'clockwise', 'counterclockwise', 'anticlockwise'],\n direction=direction)\n self._direction.invalidate()", "def forecast_wind_direction(self, forecast_wind_direction):\n\n self._forecast_wind_direction = forecast_wind_direction", "def SetDirection(self, *args) -> \"void\":\n return _ITKIOImageBaseBasePython.itkImageIOBase_SetDirection(self, *args)", "def setStartStopId(self, startStopId) -> None:\n self.startStopId = startStopId", "def _rotate_agent(self, agent_id: str, direction: int):\n if direction not in (-1, 1):\n raise ValueError(\"direction must be in -1, 1\")\n agent = self.agents[agent_id]\n # -1: rotate left\n # 1: rotate right\n agent.rot = (agent.rot + direction) % 4", "def route_direction_status(self, route_direction_status):\n allowed_values = [0, 1] # noqa: E501\n if route_direction_status not in allowed_values:\n raise ValueError(\n \"Invalid value for `route_direction_status` ({0}), must be one of {1}\" # noqa: E501\n .format(route_direction_status, allowed_values)\n )\n\n self._route_direction_status = route_direction_status", "def SetDirection(self, direction: 'itkMatrixD33') -> \"void\":\n return _itkImagePython.itkImageBase3_SetDirection(self, direction)", "def setReverseDirection(self, reverseDirection):\n hal.setCounterReverseDirection(self.counter, reverseDirection)", "def set_pin_direction(self, pin, direction):\n if type(pin) is list:\n for p in pin:\n self.set_pin_direction(p, direction)\n return\n\n pin_id = self._pin_mapping.get(pin, None)\n if pin_id and type(direction) is ahio.Direction:\n self._set_pin_direction(pin_id, direction)\n else:\n raise KeyError(\"Requested pin is not mapped: %s\" % pin)", "def set_rotation_modes(self, mode, direction):\n mode = self.convert_to_enum(mode, MOT_MovementModes)\n direction = self.convert_to_enum(direction, MOT_MovementDirections)\n self.sdk.SCC_SetRotationModes(self._serial, mode, direction)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Sets the departure_time of this ScheduleResourceAttributes.
def departure_time(self, departure_time): self._departure_time = departure_time
[ "def departure_time(self, departure_time: int):\n\n self._departure_time = departure_time", "def departure(self, departureTime):\n self._departure = departureTime", "def add_departure(self, departure_date, departure_time):\r\n self.departure_date = departure_date\r\n self.departure_time = departure_time", "def deactivation_time(self, deactivation_time):\n\n self._deactivation_time = deactivation_time", "def set_finishing_time(self, time_value):\n self._finishing_time = time_value", "def duration(self,new_duration_dict):\n if isinstance(new_duration_dict,dict):\n dur = TemporalConstraint(start=self.start,end=self.end,**new_duration_dict)\n #Should the episode have different guards for the start and end events,\n #makes sure the duration is consistent with the guard for the end event\n #(always a subset of the guard of the start event.)\n dur.support = self.properties['end'].copy_support()\n self.properties['duration'] = dur\n else:\n raise InvalidTypeError('A duration dictionary should be provided when setting the duration of an RMPyL Episode.')", "def set_deadline(self, val):\n self.__deadline = val", "def setTravelTime(vehId: str, edgeId: str, time: float = 999.0, begTime: float = None, endTime: float = None):\n traci.vehicle.setAdaptedTraveltime(\n vehId, edgeId, time, begTime=begTime, endTime=endTime)", "def constrain_stop_time(self, constrain_stop_time):\n \n self._constrain_stop_time = constrain_stop_time", "def set_deadline(self, deadline):\n self.deadline = deadline", "def setEndTime(self, endTime):\n self.endTime = endTime", "def departures(self, departures):\n\n self._departures = departures", "def setPlanningTime(self, time):\n self.planning_time = time", "def set_invalidatedAtTime(self, invalidatedAtTime):\n self._attributes[VOPROV['invalidatedAtTime']] = {invalidatedAtTime}", "def set_exposure_time(self, exposure_time):\n self.exposure_time = utils.get_quantity_value(exposure_time, u.second)", "def set_duration(self, hours, minutes, seconds):\n self.duration = (hours, minutes, seconds)", "def reverse_required_time(self):\n for d in self.departments:\n d.reverse_required_time()", "def arrival(self, arrivalTime):\n self._arrival = arrivalTime", "def time_restriction(self, time_restriction):\n\n self._time_restriction = time_restriction", "def set_time_step(self, time_step):\n\n self._time_step = time_step" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Sets the arrival_time of this ScheduleResourceAttributes.
def arrival_time(self, arrival_time): self._arrival_time = arrival_time
[ "def arrival(self, arrivalTime):\n self._arrival = arrivalTime", "def arrive_time(self, arrive_time: int):\n\n self._arrive_time = arrive_time", "def scheduled_arrival_time_ms(self, scheduled_arrival_time_ms):\n if scheduled_arrival_time_ms is None:\n raise ValueError(\"Invalid value for `scheduled_arrival_time_ms`, must not be `None`\") # noqa: E501\n\n self._scheduled_arrival_time_ms = scheduled_arrival_time_ms", "def acquire_time(self, acquire_time):\n\n self._acquire_time = acquire_time", "def estimated_arrival_ms(self, estimated_arrival_ms):\n\n self._estimated_arrival_ms = estimated_arrival_ms", "def setTimeFromNow(self, reltime: 'SbTime') -> \"void\":\n return _coin.SoAlarmSensor_setTimeFromNow(self, reltime)", "def add_arrival(self, employee_id, first_name, last_name, arrival_date, arrival_time):\r\n # If the system is new, we initiate ids list.\r\n if len(self.ids) == 0:\r\n self.ids.append(1)\r\n else: # Otherwise we continue to count from the last number in id's list.\r\n index = self.ids[-1]\r\n new_index = index + 1\r\n self.ids.append(new_index)\r\n self.attendance_id = self.ids[-1]\r\n\r\n # And add all necessary data to the instance.\r\n self.employee_id = employee_id\r\n self.first_name = first_name\r\n self.last_name = last_name\r\n self.arrival_date = arrival_date\r\n self.arrival_time = arrival_time", "def setPlanningTime(self, time):\n self.planning_time = time", "def setMinimumTime(self, account, acl, equipment, mintime):\n\n acl.assertIsAdministrator(account)\n\n mintime = to_int(mintime)\n\n if not mintime:\n mintime = None\n\n if mintime != self.min_booking_time:\n item = equipment._getFromDB()\n item.constraints.min_booking_time = mintime\n self.min_booking_time = mintime\n item.put()", "def setStartTime(self, startTime):\n self.startTime = startTime", "def setTime(self, abstime: 'SbTime') -> \"void\":\n return _coin.SoAlarmSensor_setTime(self, abstime)", "def time_restriction(self, time_restriction):\n\n self._time_restriction = time_restriction", "def add_arrival_to_system(employee_id, first_name, last_name, arrival_date, arrival_time):\r\n # First we construct new attendance instance and add data to it.\r\n # The method add arrival id to the instance automatically.\r\n attendance.add_arrival(employee_id, first_name, last_name, arrival_date, arrival_time)\r\n\r\n # Then we receive data back with the attendance instance we've just create.\r\n data = attendance.get_attendance()\r\n # and call the function to write received data to the attendance.csv file.\r\n if os.path.isfile('attendance.csv'):\r\n write_to_file('attendance.csv', data)\r\n else:\r\n write_to_file('attendance.csv', data, header=1)", "def set_generatedAtTime(self, generatedAtTime):\n self._attributes[VOPROV['generatedAtTime']] = {generatedAtTime}", "def set_schedule(self, schedule):\n data = {'propagateToInstances':True, 'schedule':schedule}\n return self.client.put_asg_scaling_schedule(environment=self.env, asgname=self.name, data=data)", "def setTravelTime(vehId: str, edgeId: str, time: float = 999.0, begTime: float = None, endTime: float = None):\n traci.vehicle.setAdaptedTraveltime(\n vehId, edgeId, time, begTime=begTime, endTime=endTime)", "def constrain_earliest_stop_time(self, constrain_earliest_stop_time):\n \n self._constrain_earliest_stop_time = constrain_earliest_stop_time", "def reroute_time(self, reroute_time):\n if reroute_time is not None and reroute_time > 120: # noqa: E501\n raise ValueError(\"Invalid value for `reroute_time`, must be a value less than or equal to `120`\") # 
noqa: E501\n if reroute_time is not None and reroute_time < 10: # noqa: E501\n raise ValueError(\"Invalid value for `reroute_time`, must be a value greater than or equal to `10`\") # noqa: E501\n\n self._reroute_time = reroute_time", "def set_alarm(self, target_time: datetime.time):\n self.time = target_time.replace(second=0, microsecond=0)\n # print the time\n print(\"Alarm set for {}:{}\".format(self.time.hour, self.time.minute))", "def set_timeAvailable(self, newTimeAvailable):\r\n\r\n self._timeAvailable = newTimeAvailable" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Returns the dynamic enlargement factor.
def dynamic_enlargement_factor(self): return self._dynamic_enlargement_factor
[ "def maximum_stretch_ratio(self):\n return 1.0 + self.tensile_strength / self.elasticity", "def PaperScale(self) -> float:", "def scale(self):\n return self._moyal_bijector.scale", "def SpreadFactor(self): \n return 4.5", "def getReductionRatio(self) -> retval:\n ...", "def _get_factor(min_: float, max_: float, px_size: int):\n range_ = abs(max_ - min_)\n return px_size / range_ if range_ != 0 else 1 # if we only need to represent 1 pixel, we can use 1 as density", "def energyMultiplier(self) -> float:\n return self._getMultiplier('energy')", "def height_reduction(self):\n ret = self._get_attr(\"heightReduction\")\n return ret", "def enlarge(factor, state):\n state.zone.w *= factor\n state.zone.h *= factor", "def effect_size(self):\n return 1 - self.rates_ratio", "def enlarge(n):\n return n*5", "def getCapacityFactor(self): \n return self.capFact", "def cost_multiplier(self):\n return 1.0", "def enlarge(n):\r\n return n*100", "def DrawingScale(self) -> float:", "def getDamageMultiplier(self):\n return 1.0", "def get_scaled_majorant(self) -> Union[int, float]:\n\n # Determine the largest permeability in the fracture network\n scaling_factors = []\n for g, d in self.gb:\n if g.dim != 0:\n perm = d[pp.PARAMETERS][\"flow\"][\"second_order_tensor\"].values\n perm = perm[0][0]\n scaling_factors.append(np.max(perm))\n for _, d in self.gb.edges():\n k_norm = d[pp.PARAMETERS][\"flow\"][\"normal_diffusivity\"]\n scaling_factors.append(np.max(k_norm))\n scale_factor = np.max(scaling_factors)\n\n # Perform scaling\n scaled_majorant = scale_factor ** (-0.5) * self.get_majorant()\n\n return scaled_majorant", "def get_obj_factor(self):\n pass", "def calculate_watermark_scale_factor():\n\n memtotal = get_total_ram()\n normal_managed_pages = get_normal_managed_pages()\n\n try:\n wmark = min([watermark_scale_factor(memtotal, managed_pages)\n for managed_pages in normal_managed_pages])\n except ValueError as e:\n log(\"Failed to calculate watermark_scale_factor from normal managed pages: {}\".format(normal_managed_pages), ERROR)\n raise e\n\n log(\"vm.watermark_scale_factor: {}\".format(wmark), DEBUG)\n return wmark" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Returns the ellipsoid update gap used in the algorithm (see
def ellipsoid_update_gap(self): return self._ellipsoid_update_gap
[ "def gap(self) -> float:\n pass", "def set_ellipsoid_update_gap(self, ellipsoid_update_gap=100):\n ellipsoid_update_gap = int(ellipsoid_update_gap)\n if ellipsoid_update_gap <= 1:\n raise ValueError('Ellipsoid update gap must exceed 1.')\n self._ellipsoid_update_gap = ellipsoid_update_gap", "def _xdist(self):\n\t\treturn self.geom.x - self.last.x", "def boundary():\r\n return 250", "def get_growth_delta(list_of_bounding_boxes):\n result = [0.0]\n for i in range(0, len(list_of_bounding_boxes)):\n box_i = list_of_bounding_boxes[i]\n try:\n box_j = list_of_bounding_boxes[i + 1]\n result.append(length_from_coords(box_i[0][0].stop, box_j[0][0].stop, box_i[0][1].stop, box_j[0][1].stop))\n except IndexError:\n pass\n return result", "def getDistanceDelta(self):\n return (((self.last_left_encoder_distance-self.drive.getDistanceInchesLeft()) + (self.last_right_encoder_distance-self.drive.getDistanceInchesRight())) / 2.0)", "def _ydist(self):\n\t\treturn self.geom.y - self.last.y", "def find_base_size(self):\n\n# Find longitudinal locations of first two points\n first_UTM = self.shapes[0].points[0][0]\n second_UTM = self.shapes[1].points[0][0]\n\n# Find the difference. This difference in meters is the size of the grid\n grid_size = second_UTM - first_UTM\n\n return grid_size", "def _get_node_spacing_(self):\n min_coord = np.array( [ self.x_coord[0], self.z_coord[0] ] )\n max_coord = np.array( [ self.x_coord[1], self.z_coord[1] ] )\n return (max_coord - min_coord)/self.sze", "def delta_s(self):\n \n min_delta_s = 2.0 * self.grid_spacing / self.H_BAR * np.sqrt(4.85*2.0*self.ELECTRON_MASS)\n return self.DELTA_S_FACTOR * min_delta_s", "def approx_gap_after(p):\n return floor(log(p) ** 2 * 0.85)", "def delta_x(self):\n return self.bin_end - self.bin_start", "def g_vector(self):\n from sage.functions.other import floor\n d = self.dimension()\n h = self.h_vector()\n g = [1]\n for i in range(1, (d + 1) // 2 + 1):\n g.append(h[i] - h[i-1])\n return g", "def get_global_extent(self) -> Tuple[float, float, float, float]:\n pass", "def fourPtCenteredDiff(x,y):\n #calculate dydx by center differencing using array slices\n dydx = np.zeros(y.shape,float) #we know it will be this size\n dydx[2:-2] = (y[0:-4] -8*y[1:-3] + 8*y[3:-1] - y[4:])/(12*(x[2:-2] - x[1:-3])) #center difference\n dydx[0] = (y[1]-y[0])/(x[1]-x[0]) #forward difference\n dydx[1] = (y[2]-y[1])/(x[2]-x[1])\n dydx[-1] = (y[-1] - y[-2])/(x[-1] - x[-2]) #backward difference\n dydx[-2] = (y[-2] - y[-3])/(x[-2] - x[-3])\n return dydx", "def __calculate_middle_frame(self):\n\n return round((self.first_frame + self.last_frame) / 2)", "def _get_length(self) -> \"double\" :\n return _core.OrientedBoundingBox3D__get_length(self)", "def get_longest_altitude(self):\n return (2 * self.get_area()) / min(self.left_side, self.right_side, self.bottom_side)", "def altitude_range(rpc, x, y, w, h, margin_top, margin_bottom):\n # TODO: iterate the procedure used here to get a finer estimation of the\n # TODO: bounding box on the ellipsoid and thus of the altitude range. 
For flat\n # TODO: regions it will not improve much, but for mountainous regions there is a\n # TODO: lot to improve.\n\n # find bounding box on the ellipsoid (in geodesic coordinates)\n lon_m, lon_M, lat_m, lat_M = geodesic_bounding_box(rpc, x, y, w, h)\n\n # if bounding box is out of srtm domain, return coarse altitude estimation\n if (lat_m < -60 or lat_M > 60):\n print \"Out of SRTM domain, returning coarse range from rpc\"\n return altitude_range_coarse(rpc)\n\n # sample the bounding box with regular step of 3 arcseconds (srtm\n # resolution)\n ellipsoid_points = sample_bounding_box(lon_m, lon_M, lat_m, lat_M)\n\n # compute srtm height on all these points\n # these altitudes are computed with respect to the WGS84 ellipsoid\n import os\n srtm = common.run_binary_on_list_of_points(ellipsoid_points, 'srtm4',\n option=None, binary_workdir=os.path.dirname(__file__))\n srtm = np.ravel(srtm)\n\n # srtm data may contain 'nan' values (meaning no data is available there).\n # These points are most likely water (sea) and thus their height with\n # respect to geoid is 0. Thus we replace the nans with 0.\n srtm[np.isnan(srtm)] = 0\n\n # extract extrema (and add a +-100m security margin)\n h_m = np.round(srtm.min()) + margin_bottom\n h_M = np.round(srtm.max()) + margin_top\n\n return h_m, h_M", "def spectral_gap(mx):\r\n # eigendecompose\r\n vals = np.linalg.eigvals(mx)\r\n\r\n # get sepctral gap\r\n vals = np.sort(np.fabs(np.real(vals)))\r\n gap = vals[-1] - vals[-2]\r\n return gap" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Returns the enlargement factor used in the algorithm (see
def enlargement_factor(self): return self._enlargement_factor
[ "def dynamic_enlargement_factor(self):\n return self._dynamic_enlargement_factor", "def maximum_stretch_ratio(self):\n return 1.0 + self.tensile_strength / self.elasticity", "def enlarge(n):\r\n return n*100", "def SpreadFactor(self): \n return 4.5", "def threshold_factor_compensated(self):\n alpha = self.threshold_factor\n return alpha * (self.Nr - 1) / (self.Nr - alpha)", "def StepsPerInch(self) -> float:", "def enlarge(n):\n return n*5", "def quality_factor(L):\r\n from numpy import sqrt\r\n L /= (keV/um)\r\n if L < 10:\r\n return 1\r\n elif L < 100:\r\n return 0.32*L-2.2\r\n else:\r\n return 300./sqrt(L)", "def diffusion():\n return 5.1412512431", "def _get_factor(min_: float, max_: float, px_size: int):\n range_ = abs(max_ - min_)\n return px_size / range_ if range_ != 0 else 1 # if we only need to represent 1 pixel, we can use 1 as density", "def asn(self):\n winner_prop = self.contest.winner_prop\n loser_prop = 1.0 - winner_prop\n margin = (2 * winner_prop) - 1\n z_w = math.log(margin + 1)\n z_l = math.log(1 - margin)\n top = (math.log(1.0 / self.alpha) + (z_w / 2.0))\n bottom = (winner_prop * z_w) + (loser_prop * z_l)\n return math.ceil(top / bottom)", "def cost_multiplier(self):\n return 1.0", "def factor(self):\n\t\tif (self.isprime()): return n\n\t\tfor fact in [GaussInt(1,1), GaussInt(2,1), GaussInt(1,2), \n\t\t\t GaussInt(3,0), GaussInt(3,2), GaussInt(2,3)]:\n\t\t\tif self%fact == 0: return fact\n\t\treturn self.factorPR() # Needs work - no guarantee that a prime factor will be returned", "def decimation_factor(self):\n return self._decimation_factor", "def normalize(factor):\n factor = factor * (1/factor.sum())\n return factor", "def productivityMultiplier(self) -> float:\n return self._getMultiplier('productivity')", "def to_multiplier(difficulty):\n return float((1 << 64) - int(work_difficulty, 16)) / float(\n (1 << 64) - int(difficulty, 16)\n )", "def getReductionRatio(self) -> retval:\n ...", "def energyMultiplier(self) -> float:\n return self._getMultiplier('energy')", "def PaperScale(self) -> float:" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Returns the number of rejection samples used in the algorithm (see
def n_rejection_samples(self): return self._n_rejection_samples
[ "def rejection_sampling(self, num_samples):\n accepted_count = 0\n trial_count = 0\n accepted_samples = []\n distances = []\n fixed_dataset = DataSet('Fixed Data')\n sim_dataset = DataSet('Simulated Data')\n fixed_dataset.add_points(targets=self.data, summary_stats=self.summaries_function.compute(self.data))\n\n while accepted_count < num_samples:\n # Rejection sampling\n # Draw from the prior\n trial_param = self.prior_function.draw()\n\n # Perform the trial\n sim_result = self.sim(trial_param)\n\n # Get the statistic(s)\n # In case of multiple summaries, a numpy array of k summaries should be returned\n # ToDo: add exception handling to enforce it\n sim_stats = self.summaries_function.compute(sim_result)\n\n # Set/Update simulated dataset\n sim_dataset.add_points(targets=sim_result, summary_stats=sim_stats)\n\n # Calculate the distance between the dataset and the simulated result\n # In case of multiple summaries, a numpy array of k distances should be returned\n sim_dist = self.distance_function.compute(fixed_dataset.s, sim_stats)\n\n # Normalize distances between [0,1]\n sim_dist_scaled = self.scale_distance(sim_dist)\n\n # Use MAB arm selection to identify the best 'k' arms or summary statistics\n num_arms = len(sim_dist_scaled)\n arms = range(num_arms)\n top_k_arms_idx = self.mab_variant.select(arms, self.k)\n top_k_distances = np.asarray([sim_dist_scaled[i] for i in top_k_arms_idx])\n\n # Take the norm to combine the top k distances\n combined_distance = np.linalg.norm(top_k_distances)\n logger.debug(\"Rejection Sampling: trial parameter = [{0}], distance = [{1}]\".format(trial_param,\n combined_distance))\n\n # Accept/Reject\n if combined_distance <= self.epsilon:\n accepted_samples.append(trial_param)\n distances.append(sim_dist)\n accepted_count += 1\n logger.info(\"Rejection Sampling: accepted a new sample, total accepted samples = {0}\".\n format(len(accepted_samples)))\n\n trial_count += 1\n\n self.results = {'accepted_samples': accepted_samples, 'distances': distances, 'accepted_count': accepted_count,\n 'trial_count': trial_count, 'inferred_parameters': np.mean(accepted_samples, axis=0)}\n return self.results", "def set_n_rejection_samples(self, rejection_samples=200):\n if rejection_samples < 0:\n raise ValueError('Must have non-negative rejection samples.')\n self._n_rejection_samples = rejection_samples", "def getNumFailedSamples(self):\n #---+----|----+----|----+----|----+----|----+----|----+----|----+----|\n return SliceSamplerBase.getNumFailedSamples(self)", "def rejection_ratio(min_offer, predicted):\n accepted = (min_offer <= predicted)\n return 1 - np.mean(accepted)", "def n_divergences(self):\n assert self.trace != None, \"Must run sample() first!\"\n return self.trace[\"diverging\"].nonzero()[0].size", "def num_sequences_sampled(self) -> int:\n return self._num_sequences_sampled", "def n_sessions_until_conclusion(self):\n return max(0, self.seg_prob.details.min_results_per_assignment -\\\n self.n_sessions_w_result)", "def getNumberOfHeuristics(self) -> None:", "def n_profile_samples(self):\n return self.__n_profile_samples", "def calculate_num_ransac_iterations(prob_success, sample_size, ind_prob_correct):\n num_samples = None\n\n ##############################\n # TODO: Student code goes here\n num_samples = math.log( (1 - prob_success), (1 - ind_prob_correct ** sample_size))\n # raise NotImplementedError\n ##############################\n\n return num_samples", "def n_samples(self):\n return len(self.sampler)", "def rejection(target_rv, helper_sim, 
ratio_bound):\n while True:\n sample = helper_sim.algorithm()\n if np.random.uniform() <= target_rv.pdf(sample)/(ratio_bound*helper_sim.rv.pdf(sample)):\n return sample", "def prob_estimation(n):\n truecount = 0\n for i in range(n):\n test = gen_rand_23()\n if has_duplicates(test):\n truecount += 1\n return truecount", "def wirtinger_rank(self):\n length = 0\n if not self.arcs:\n return len(self.gauss_code)\n while True:\n length = length + 1\n for combination in itertools.combinations(self.arcs, length):\n potential_generating_set = []\n for arc in combination:\n potential_generating_set.append(arc)\n\n if self.is_obvious_visible_generating_set(potential_generating_set):\n return length", "def test_sample_size(base_clumper, n):\n assert len(base_clumper.sample(n, replace=False)) == n", "def num_samplets(self):\n if self._data is not None:\n return len(self._data)\n else:\n return 0", "def count(self) -> int:\n return self.__solution_count", "def expected_disagreement(self) -> float:\n return self.chance_disorders.mean()", "def solve_critical(n, s, N, q=0.99):\n return 1 - (critical_sample_size(n, s, N, q) / n)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Sets the number of rejection samples to take, which will be assigned weights and ultimately produce a set of posterior samples.
def set_n_rejection_samples(self, rejection_samples=200): if rejection_samples < 0: raise ValueError('Must have non-negative rejection samples.') self._n_rejection_samples = rejection_samples
[ "def rejection_sampling(self, num_samples):\n accepted_count = 0\n trial_count = 0\n accepted_samples = []\n distances = []\n fixed_dataset = DataSet('Fixed Data')\n sim_dataset = DataSet('Simulated Data')\n fixed_dataset.add_points(targets=self.data, summary_stats=self.summaries_function.compute(self.data))\n\n while accepted_count < num_samples:\n # Rejection sampling\n # Draw from the prior\n trial_param = self.prior_function.draw()\n\n # Perform the trial\n sim_result = self.sim(trial_param)\n\n # Get the statistic(s)\n # In case of multiple summaries, a numpy array of k summaries should be returned\n # ToDo: add exception handling to enforce it\n sim_stats = self.summaries_function.compute(sim_result)\n\n # Set/Update simulated dataset\n sim_dataset.add_points(targets=sim_result, summary_stats=sim_stats)\n\n # Calculate the distance between the dataset and the simulated result\n # In case of multiple summaries, a numpy array of k distances should be returned\n sim_dist = self.distance_function.compute(fixed_dataset.s, sim_stats)\n\n # Normalize distances between [0,1]\n sim_dist_scaled = self.scale_distance(sim_dist)\n\n # Use MAB arm selection to identify the best 'k' arms or summary statistics\n num_arms = len(sim_dist_scaled)\n arms = range(num_arms)\n top_k_arms_idx = self.mab_variant.select(arms, self.k)\n top_k_distances = np.asarray([sim_dist_scaled[i] for i in top_k_arms_idx])\n\n # Take the norm to combine the top k distances\n combined_distance = np.linalg.norm(top_k_distances)\n logger.debug(\"Rejection Sampling: trial parameter = [{0}], distance = [{1}]\".format(trial_param,\n combined_distance))\n\n # Accept/Reject\n if combined_distance <= self.epsilon:\n accepted_samples.append(trial_param)\n distances.append(sim_dist)\n accepted_count += 1\n logger.info(\"Rejection Sampling: accepted a new sample, total accepted samples = {0}\".\n format(len(accepted_samples)))\n\n trial_count += 1\n\n self.results = {'accepted_samples': accepted_samples, 'distances': distances, 'accepted_count': accepted_count,\n 'trial_count': trial_count, 'inferred_parameters': np.mean(accepted_samples, axis=0)}\n return self.results", "def n_rejection_samples(self):\n return self._n_rejection_samples", "def downsample(self, number):\n self.samples = resample_posterior_distribution(self.samples, number)\n self.make_dictionary()\n return self", "def set_weighted_sampling(self):\n def get_class_distribution(obj, max_num_class):\n count_dict = {}\n for i in range(max_num_class+1):\n count_dict[i] = 0\n \n for i in obj:\n count_dict[i] += 1\n \n return count_dict\n\n target_list = []\n for _, t in self.train_dataset:\n target_list.append(t)\n \n target_list = torch.tensor(target_list)\n target_list = target_list[torch.randperm(len(target_list))]\n\n class_count = [i for i in get_class_distribution(self.y_train, int(max(target_list))).values()]\n class_weights = 1./torch.tensor(class_count, dtype=torch.float) \n\n self.class_weights_all = class_weights[target_list]\n self.weighted_sampler = WeightedRandomSampler(\n weights=self.class_weights_all,\n num_samples=len(self.class_weights_all),\n replacement=True\n )", "def _resample_prior_params(self):\n weight = .01 * self._get_burn_in_ratio(.5)\n if weight == 0:\n return\n\n # noise\n if self.resample_noise_precision:\n precision = self._sample_noise_precision()\n self._noise_precision_value = weight * precision + (1 - weight) * self._noise_precision_value\n\n # weights\n if self.resample_weights_precision:\n precision = self._sample_weights_precision()\n 
self._weights_precision_value = weight * precision + (1 - weight) * self._weights_precision_value", "def set_sample_number(self):\r\n self.n_samples = self.exprs.shape[0]", "def _initialize_weights(self):\n self.weights = np.random.randn(self.number_of_classes,self.input_dimensions+1)", "def _build_noise_distribution():\n probs = np.zeros(len(self.vocab))\n for ix, token in enumerate(self.vocab):\n count = token.count\n probs[ix] = count ** self.noise_dist_power\n\n probs = probs/np.sum(probs)\n self._noise_sampler = DiscreteSampler(probs, log=False, with_replacement=False)", "def posterior_predictive_resample(self, samples, return_weights=False):\n if isinstance(samples, pd.DataFrame):\n samples = [dict(samples.iloc[ii]) for ii in range(len(samples))]\n elif isinstance(samples, dict):\n samples = [samples]\n weights = xp.zeros((self.n_posteriors, self.samples_per_posterior))\n event_weights = xp.zeros(self.n_posteriors)\n for sample in tqdm(samples):\n self.parameters.update(sample.copy())\n self.parameters, added_keys = self.conversion_function(self.parameters)\n new_weights = self.hyper_prior.prob(self.data) / self.sampling_prior\n event_weights += xp.mean(new_weights, axis=-1)\n new_weights = (new_weights.T / xp.sum(new_weights, axis=-1)).T\n weights += new_weights\n if added_keys is not None:\n for key in added_keys:\n self.parameters.pop(key)\n weights = (weights.T / xp.sum(weights, axis=-1)).T\n new_idxs = xp.empty_like(weights, dtype=int)\n for ii in range(self.n_posteriors):\n new_idxs[ii] = xp.asarray(\n np.random.choice(\n range(self.samples_per_posterior),\n size=self.samples_per_posterior,\n replace=True,\n p=to_numpy(weights[ii]),\n )\n )\n new_samples = {\n key: xp.vstack(\n [self.data[key][ii, new_idxs[ii]] for ii in range(self.n_posteriors)]\n )\n for key in self.data\n }\n event_weights = list(event_weights)\n weight_string = \" \".join([f\"{float(weight):.1f}\" for weight in event_weights])\n logger.info(f\"Resampling done, sum of weights for events are {weight_string}\")\n if return_weights:\n return new_samples, weights\n else:\n return new_samples", "def reinit_weights(self):\n self.w = 0.01 * np.random.randn(self.prev_layer.get_shape()[0], self.nodes)", "def _make_rec_weights(self):\n self.weights_rec = self._compute_symmetric_weights()\n self.weights_rec += self._rand_generator.normal(0, self._noisy_weights_std, (self._num_rec, self._num_rec))", "def rejection_sampling(target_pdf_fn, proposal_pdf_fn, proposal_draw_fn, N=1):\n\n samples = []\n\n while len(samples) < N:\n # draw point along X-axis from proposal distribution\n x = proposal_draw_fn()\n\n # calculate proposal pdf at x\n y = proposal_pdf_fn(x)\n\n # calculate pdf at x\n fx = target_pdf_fn(x)\n\n # draw point randomly between 0 and y\n u = random.random()*y\n\n # the proposal should contain the target for all x \n assert fx <= y\n\n # if u is less than the target distribution pdf at x, then accept x\n if u < fx:\n samples.append(x)\n\n if N == 1:\n return samples[0]\n else:\n return samples", "def init_params_random(self) -> None:\n self.probs = Dirichlet(self.prior).sample()", "def reset_rewards(self):\n self.rewards = np.array(\n [\n self.h(self.features[t, k]) + self.noise_std * np.random.randn()\n for t, k in itertools.product(range(self.T), range(self.n_arms))\n ]\n ).reshape(self.T, self.n_arms)\n\n # to be used only to compute regret, NOT by the algorithm itself\n self.best_rewards_oracle = np.max(self.rewards, axis=1)\n self.best_actions_oracle = np.argmax(self.rewards, axis=1)", "def 
_initialize_weights(self, size: int) -> 'None':\n self.w_ = self.random_generator.normal(loc=0.0, scale=0.01,\n size=1 + size)\n self.w_initialized = True", "def reset_weights(self):\n # TODO: Maybe use xavier initialization instead.\n self.delete_torch_layers()\n weights = np.random.randn(len(self.connections)) * self.weight_init_std\n self.weights = weights.tolist()", "def initialize_weights(self,seed=None):\r\n if seed != None:\r\n np.random.seed(seed)\r\n self.weights = np.random.randn(self.weights.shape[0],self.weights.shape[1])", "def default_weight_initializer(self):\n self.biases = [np.random.randn(y, 1) for y in self.sizes[1:]]\n self.weights = [np.random.randn(y, x)/np.sqrt(x)\n for x, y in zip(self.sizes[:-1], self.sizes[1:])]", "def reset_parameters(self):\n logger.info('===== Initialize %s with Xavier uniform distribution =====' % self.__class__.__name__)\n # see https://github.com/pytorch/fairseq/blob/master/fairseq/models/transformer.py\n # embedding\n nn.init.normal_(self.embed.weight, mean=0., std=self.d_model**-0.5)\n nn.init.constant_(self.embed.weight[self.pad], 0.)\n # output layer\n nn.init.xavier_uniform_(self.output.weight)\n nn.init.constant_(self.output.bias, 0.)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Sets the frequency with which the minimum volume ellipsoid is re-estimated as part of the nested rejection sampling algorithm. Re-estimating more often means each sample is produced more efficiently, yet the cost of recomputing the ellipsoid may make it better not to update it every iteration, but instead to leave gaps of ``ellipsoid_update_gap`` iterations between updates. By default, the ellipsoid is updated every 100 iterations.
def set_ellipsoid_update_gap(self, ellipsoid_update_gap=100): ellipsoid_update_gap = int(ellipsoid_update_gap) if ellipsoid_update_gap <= 1: raise ValueError('Ellipsoid update gap must exceed 1.') self._ellipsoid_update_gap = ellipsoid_update_gap
[ "def set_ellipsoid(self, ellipsoid):\n if not isinstance(ellipsoid, (list, tuple)):\n try:\n self.ELLIPSOID = ELLIPSOIDS[ellipsoid]\n self.ellipsoid_key = ellipsoid\n except KeyError:\n raise Exception(\n \"Invalid ellipsoid. See geopy.distance.ELIPSOIDS\"\n )\n else:\n self.ELLIPSOID = ellipsoid\n self.ellipsoid_key = None\n return", "def set_ellipsoid(self, ellipsoid):\n if not isinstance(ellipsoid, (list, tuple)):\n try:\n self.ELLIPSOID = ELLIPSOIDS[ellipsoid]\n self.ellipsoid_key = ellipsoid\n except KeyError:\n raise Exception(\n \"Invalid ellipsoid. See geopy.distance.ELLIPSOIDS\"\n )\n else:\n self.ELLIPSOID = ellipsoid\n self.ellipsoid_key = None\n return", "def set_E(self,E):\n from scipy.interpolate import splrep,splev;\n # Reps\n tckp = splrep(self.E,self.eps.real,k=3);\n eps = np.array(splev(E,tckp),dtype='complex');\n # Ieps\n tckp = splrep(self.E,self.eps.imag,k=3);\n eps += 1j*np.array(splev(E,tckp),dtype='complex');\n self.E = E;\n self.eps = eps;\n self.param['comment'].append(\"# -performed interpolation along energy axis\\n\");", "def auto_resample(self):\n if self.effective_particles() < 2.0 / 3.0 * self.num_points:\n self.resample()", "def set_frequency(self, frequency):\r\n self.obs.centerFreqHz = float(frequency)\r\n self.ref.centerFreqHz = float(frequency)\r\n self.ave.centerFreqHz = float(frequency)\r\n self.hot.centerFreqHz = float(frequency)\r\n self.cold.centerFreqHz = float(frequency)\r\n deltaNu = self.obs.bandwidthHz/float(self.vlen)\r\n n0 = self.obs.centerFreqHz - (self.obs.bandwidthHz/2.)\r\n nu = n0\r\n print(\"Setting Frequency: %10.0f Hz\" % (self.obs.centerFreqHz))\r\n nx = len( self.obs.xdata)\r\n if nx != self.vlen:\r\n self.update_len(self.obs)\r\n for iii in range(self.vlen):\r\n self.obs.xdata[iii] = nu\r\n nu = nu + deltaNu", "def _set_global_ellipse(self):\n self._set_east_ellipse()._set_west_ellipse()\n e_verts = self.east_ellipse.get_verts()\n e_size = e_verts[:, 0].size\n e_size *= 0.5\n w_verts = self.west_ellipse.get_verts()\n w_size = w_verts[:, 0].size\n w_size *= 0.5\n ew_x = np.hstack((e_verts[e_size:, 0], w_verts[:w_size, 0]))\n ew_y = np.hstack((e_verts[e_size:, 1], w_verts[:w_size, 1]))\n #print self.xpt, ew_x.min(), ew_x.max()\n if ew_x.max() - self.xpt > self.xpt - ew_x.min():\n ew_x = np.hstack((w_verts[w_size:, 0], e_verts[:e_size, 0]))\n ew_y = np.hstack((w_verts[w_size:, 1], e_verts[:e_size, 1]))\n self.ellipse_path = path.Path(np.array([ew_x, ew_y]).T)", "def set_to_momentum_eigenstate(self):\r\n F = np.fft.fft(self.x)\r\n prob = np.abs(F)**2\r\n if np.max(prob) != 0.0:\r\n prob = prob/np.sum(prob)\r\n freq = np.fft.fftfreq(self.N, d=self.dx)\r\n choice = np.random.choice(a=freq, size=1,\r\n p=prob, replace=False)\r\n k = choice[0]\r\n if k == 0.0:\r\n self.x = np.ones([self.N])\r\n self.normalize()\r\n p = 2*np.pi*k*self.hbar/self.L\r\n return p\r\n freq = np.array(\r\n [(0. if f != k else f) for f in freq])\r\n F = freq*F\r\n self.x = np.fft.ifft(F)\r\n self.normalize()\r\n p = 2*np.pi*k*self.hbar/self.L\r\n return p", "def setLoopFreq(self, freq):\n # causes an AttributeError for non-real-time timers\n self._timer.setLoopFreq(freq)", "def add_large_deformations(self, l_c, max_amp, n, wavelength):\n #Cut the Gaussians off at 3 sigma\n for i in range(0,n):\n #Select random coordinates in the grid. 
We choose ones that are\n #in the rectangle that the aperture is inscribed in\n array_l_c = int(math.floor(self.points_per_m * l_c))\n x, y = np.floor(np.random.random(2) * 2 * self.L + \n (self.pad_factor - 1) * self.L)\n amplitude = np.random.random() * max_amp\n gauss_array = amplitude * np.fromfunction(lambda i, j:\n self.taper(i, j,\n (array_l_c),\n 6 * array_l_c),\n (math.floor(6 * array_l_c),\n math.floor(6 * array_l_c)),\n dtype=complex)\n \n self.aperture[x - 3 * array_l_c: x + 3 * array_l_c,\n y - 3 * array_l_c: y + 3 * array_l_c] = \\\n (np.exp(4 * 1j * math.pi * gauss_array / wavelength) *\n self.aperture[x - 3 * array_l_c: x + 3 * array_l_c,\n y - 3 * array_l_c: y + 3 * array_l_c])", "def correct_freq_axes(self):\n for i in range(0, self.dimensions):\n if self.r_domain[i] == \"f\":\n for j in range(0, len(self.r_axis[i])):\n self.r_axis[i][j] += self.r_correction[i]\n self.r_correction_applied[i] += self.r_correction[i]\n self.r_correction[i] = 0", "def set_search_ellipse(self, xpt, ypt):\n self.xpt = xpt\n self.ypt = ypt\n self.rw_c_mod[:] = 1.75\n\n if self.THE_DOMAIN in ('Global', 'Regional', 'ROMS'):\n self.rw_c[:] = self.rwv.get_rwdistance(xpt, ypt,\n self.DAYS_BTWN_RECORDS)\n self.rw_c_mod *= self.rw_c\n self.rw_c_mod[:] = np.array([self.rw_c_mod,\n self.semi_n_s_minor]).max()\n #self.rw_c_mod *= 2. #Ben: I don't understand why this is multiplied by 2.0\n self._set_global_ellipse()\n\n elif self.THE_DOMAIN in ('BlackSea', 'MedSea'):\n self.rw_c[:] = self.rwv.get_rwdistance(xpt, ypt,\n self.DAYS_BTWN_RECORDS)\n self.rw_c_mod *= self.rw_c\n self._set_black_sea_ellipse()\n\n else:\n Exception\n\n return self", "def _ellipsoid_sample(self, enlargement_factor, A, centroid, n_points):\n if n_points > 1:\n return self._draw_from_ellipsoid(\n np.linalg.inv((1 / enlargement_factor) * A),\n centroid, n_points)\n else:\n return self._draw_from_ellipsoid(\n np.linalg.inv((1 / enlargement_factor) * A), centroid, 1)[0]", "def initialize():\n global effLim, effMax, xEff, detEff, dsExpo, detExp\n\n # load efficiency correction\n f1 = np.load('%s/data/lat-expo-efficiency-all-e95.npz' % dsi.latSWDir)\n xEff = f1['arr_0']\n totEnrEff, totNatEff = f1['arr_1'].item(), f1['arr_2'].item()\n detEff = np.zeros(len(xEff))\n for ds in dsList:\n if enr: detEff += totEnrEff[ds]\n else: detEff += totNatEff[ds]\n\n # load exposure\n f2 = np.load(\"%s/data/expo-totals-e95.npz\" % dsi.latSWDir)\n dsExpo, detExpo = f2['arr_0'].item(), f2['arr_1'].item()\n detExp = 0\n for d in dsExpo:\n if d in dsList:\n if enr: detExp += dsExpo[d][0]\n else: detExp += dsExpo[d][1]\n\n # normalize the efficiency\n detEff = np.divide(detEff, detExp)\n effLim, effMax = xEff[-1], detEff[-1]\n\n # diagnostic plot\n # plt.axhline(np.amax(detEff), c='orange', label=\"%.2f\" % np.amax(detEff))\n # plt.plot(xEff, detEff)\n # plt.legend(loc=4)\n # plt.xlabel(\"Energy (keV)\", ha='right', x=1)\n # plt.tight_layout()\n # plt.show()\n # exit()", "def calculate_ensquared_energy(PSF_model, wave, N_trials, N_rms,\n rms_amplitude, nominal_scale, spaxel_scales, rescale_coeff=None):\n\n N_zern = PSF_model.N_coef\n ensquared_results = []\n\n a_min = 1.0 if rescale_coeff is None else rescale_coeff\n\n print(\"Calculating Ensquared Energy\")\n for scale in spaxel_scales: # Loop over each Spaxel Scale [mas]\n print(\"%d mas spaxel scale\" % scale)\n\n data = np.zeros((2, N_trials * N_rms))\n amplitudes = np.linspace(0.0, rms_amplitude, N_rms)\n i = 0\n p0, s0 = PSF_model.compute_PSF(np.zeros(N_zern)) # Nominal PSF\n # Calculate the EE for the 
nominal PSF so that you can compare\n EE0 = ensquared_one_pix(p0, pix_scale=nominal_scale, new_scale=2*scale, plot=False)\n\n # 60x120\n # EE0 = ensquared_rectangle(p0, pix_scale=nominal_scale, new_scale=60, plot=False)\n\n for amp in amplitudes: # Loop over coefficient strength\n\n for k in range(N_trials): # For each case, repeat N_trials\n\n c_act = np.random.uniform(-amp, amp, size=N_zern)\n rescale = np.linspace(1, a_min, N_zern)\n c_act *= rescale\n phase_flat = np.dot(PSF_model.model_matrix_flat, c_act)\n rms = wave * 1e3 * np.std(phase_flat)\n p, s = PSF_model.compute_PSF(c_act)\n EE = ensquared_one_pix(p, pix_scale=nominal_scale, new_scale=2*scale, plot=False)\n # EE = ensquared_rectangle(p, pix_scale=nominal_scale, new_scale=60, plot=False)\n dEE = EE / EE0 * 100\n data[:, i] = [rms, dEE]\n i += 1\n\n ensquared_results.append(data)\n\n return ensquared_results", "def update_qnoise_factor(self, freq):\n # Update the qnoise_factor at the frequency of self.update_freq.\n if freq % self.update_freq != 0:\n self.num_iters += 1\n return\n\n new_qnoise_factor = self.calculate_qnoise_factor(freq)\n for quantizer in self.quantizers:\n # Updates the qnoise factors of the quantizers in the model.\n self.set_qnoise_factor(quantizer, new_qnoise_factor)\n self.num_iters += 1", "def set_eke_diffusivities_kernel(state):\n vs = state.variables\n settings = state.settings\n\n if settings.enable_eke:\n \"\"\"\n calculate Rossby radius as minimum of mid-latitude and equatorial R. rad.\n \"\"\"\n C_rossby = npx.sum(\n npx.sqrt(npx.maximum(0.0, vs.Nsqr[:, :, :, vs.tau]))\n * vs.dzw[npx.newaxis, npx.newaxis, :]\n * vs.maskW[:, :, :]\n / settings.pi,\n axis=2,\n )\n vs.L_rossby = npx.minimum(\n C_rossby / npx.maximum(npx.abs(vs.coriolis_t), 1e-16), npx.sqrt(C_rossby / npx.maximum(2 * vs.beta, 1e-16))\n )\n\n \"\"\"\n calculate vertical viscosity and skew diffusivity\n \"\"\"\n vs.sqrteke = npx.sqrt(npx.maximum(0.0, vs.eke[:, :, :, vs.tau]))\n vs.L_rhines = npx.sqrt(vs.sqrteke / npx.maximum(vs.beta[..., npx.newaxis], 1e-16))\n vs.eke_len = npx.maximum(\n settings.eke_lmin,\n npx.minimum(settings.eke_cross * vs.L_rossby[..., npx.newaxis], settings.eke_crhin * vs.L_rhines),\n )\n vs.K_gm = npx.minimum(settings.eke_k_max, settings.eke_c_k * vs.eke_len * vs.sqrteke)\n else:\n \"\"\"\n use fixed GM diffusivity\n \"\"\"\n vs.K_gm = update(vs.K_gm, at[...], settings.K_gm_0)\n\n if settings.enable_eke and settings.enable_eke_isopycnal_diffusion:\n vs.K_iso = update(vs.K_iso, at[...], vs.K_gm)\n else:\n vs.K_iso = update(vs.K_iso, at[...], settings.K_iso_0) # always constant\n\n if not settings.enable_eke:\n return KernelOutput(K_gm=vs.K_gm, K_iso=vs.K_iso)\n\n return KernelOutput(\n L_rossby=vs.L_rossby, L_rhines=vs.L_rhines, eke_len=vs.eke_len, sqrteke=vs.sqrteke, K_gm=vs.K_gm, K_iso=vs.K_iso\n )", "def set_frequency(self):\n if self.laser_status:\n self._fiber_shooting_logic.set_frequency(self._mw.frequency_spinBox.value())\n else:\n pass\n return", "def _establish_ellipse(self):\n self._y = self._sample_prior()", "def eccentric_gwave_freq(a,m,e):\n\n m = add_default_units(m,U.solMass)\n a = add_default_units(a,U.AU)\n\n period = 1 / (rn.au_to_period(a,m))\n return (2*period*pow(1+e,1.1954)/pow(1-e*e,1.5)).to(1/U.s)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Draws from the enlarged bounding ellipsoid.
def _ellipsoid_sample(self, enlargement_factor, A, centroid, n_points): if n_points > 1: return self._draw_from_ellipsoid( np.linalg.inv((1 / enlargement_factor) * A), centroid, n_points) else: return self._draw_from_ellipsoid( np.linalg.inv((1 / enlargement_factor) * A), centroid, 1)[0]
[ "def draw(self,pic):\n # By solving the boundary equation, we have x=a**2/sqrt(a**2+b**2)\n # print \"Drawing an ellipse\" \n self.points=[] \n if self.a>self.b:\n # first go from x axis\n points=self._standardDraw(pic,actuallyDraw=True)\n else:\n # change x and y axis to enable standard drawing process\n self.a, self.b=(self.b,self.a)\n points=self._standardDraw(pic,actuallyDraw=False)\n points=[(self.centerX+p[1]-self.centerY,self.centerY+p[0]-self.centerX) for p in points]\n for p in points:\n x=int(p[0])\n y=int(p[1])\n pic[x][y]=self.color\n self.a, self.b=(self.b,self.a)\n self.points=[p for p in points]\n self._duplicate(pic,points)", "def _set_global_ellipse(self):\n self._set_east_ellipse()._set_west_ellipse()\n e_verts = self.east_ellipse.get_verts()\n e_size = e_verts[:, 0].size\n e_size *= 0.5\n w_verts = self.west_ellipse.get_verts()\n w_size = w_verts[:, 0].size\n w_size *= 0.5\n ew_x = np.hstack((e_verts[e_size:, 0], w_verts[:w_size, 0]))\n ew_y = np.hstack((e_verts[e_size:, 1], w_verts[:w_size, 1]))\n #print self.xpt, ew_x.min(), ew_x.max()\n if ew_x.max() - self.xpt > self.xpt - ew_x.min():\n ew_x = np.hstack((w_verts[w_size:, 0], e_verts[:e_size, 0]))\n ew_y = np.hstack((w_verts[w_size:, 1], e_verts[:e_size, 1]))\n self.ellipse_path = path.Path(np.array([ew_x, ew_y]).T)", "def render(self, draw_mesh_bbox=True):\n raise NotImplementedError", "def paint_enter(self):\n GL.glMatrixMode(GL.GL_PROJECTION)\n GL.glLoadIdentity()\n GL.glMatrixMode(GL.GL_MODELVIEW)\n GL.glLoadIdentity()\n GL.glClear(GL.GL_COLOR_BUFFER_BIT | GL.GL_DEPTH_BUFFER_BIT)", "def show_grids(img, bounding_boxes, facial_landmarks=[], step=1):\n img_copy = img.copy()\n draw = ImageDraw.Draw(img_copy)\n\n for b in bounding_boxes:\n draw.rectangle([(b[0], b[1]), (b[2], b[3])],\n outline = 'white')\n\n inx = 0\n for pp in facial_landmarks:\n p = pp.reshape(2,5).T\n p = p.tolist()\n mouth_center = [(p[3][0] + p[4][0]) / 2, (p[3][1] + p[4][1]) / 2]\n eye_center = [(p[0][0] + p[1][0]) / 2, (p[0][1] + p[1][1]) / 2]\n p6 = [(p[2][0] - mouth_center[0])/4 + mouth_center[0],\n (p[2][1] - mouth_center[1])/4 + mouth_center[1]]\n p9 = [p[3][0] - (p[4][0]-p[3][0])/3, p[3][1] - (p[4][1]-p[3][1])/3]\n p10 = [p[4][0] + (p[4][0]-p[3][0])/3, p[4][1] + (p[4][1]-p[3][1])/3]\n p11 = [mouth_center[0] - (eye_center[0] - mouth_center[0]) / 2,\n mouth_center[1] - (eye_center[1] - mouth_center[1]) / 2]\n p12 = [(eye_center[0] -mouth_center[0])/4 + eye_center[0],\n (eye_center[1] - mouth_center[1])/4 + eye_center[1]]\n p13 = [(p[0][0] + p[3][0])/2, (p[0][1] + p[3][1])/2]\n p14 = [(p[1][0] + p[4][0])/2, (p[1][1] + p[4][1])/2]\n\n\n p.append(p6)\n p.append([p[0][0]-3/8*(p[1][0]-p[0][0]), 3/2*p[0][1]-1/2*p[1][1]]) \n p.append([p[1][0]+3/8*(p[1][0]-p[0][0]), 3/2*p[1][1]-1/2*p[0][1]])\n p.append(p9)\n p.append(p10)\n p.append(p11) \n p.append(p12)\n p.append(p13)\n p.append(p14)\n\n\n #for i in range(12):\n # draw.ellipse([\n # (p[i][0]-2.0,p[i][1]-2.0),\n # (p[i][0]+2.0,p[i][1]+2.0)\n # ],outline='white',fill='white')\n\n #draw.ellipse(\n # [(p[1][0]-30.0, p[1][1]-30.0),\n # (p[1][0]+30.0, p[1][1]+30.0)],\n # outline=(136,232,232),\n # width=5\n #)\n\n draw.line(\n ((p[6][0], p[6][1]),\n (p[0][0], p[0][1]),\n (p[12][0], p[12][1]),\n (p[5][0], p[5][1]),\n (p[13][0],p[13][1]),\n (p[1][0], p[1][1]),\n (p[7][0], p[7][1])),\n fill=(136,232,232),\n width=1\n )\n\n draw.line(\n ((p[11][0], p[11][1]),\n (p[7][0], p[7][1]),\n (p[9][0], p[9][1]),\n (p[10][0], p[10][1]),\n (p[8][0], p[8][1]),\n (p[6][0], p[6][1]),\n (p[11][0], p[11][1])),\n 
fill=(136,232,232),\n width=1\n )\n\n draw.line(\n ((p[11][0], p[11][1]),\n (p[1][0], p[1][1]),\n (p[2][0], p[2][1]),\n (p[5][0], p[5][1]),\n (p[4][0], p[4][1]),\n (p[10][0], p[10][1]),\n (p[3][0], p[3][1]),\n (p[5][0], p[5][1]),\n (p[2][0], p[2][1]),\n (p[0][0], p[0][1]),\n (p[11][0], p[11][1])),\n fill=(136,232,232),\n width=1\n )\n\n return img_copy", "def ellipsoid2d(ellipsoid, orbitinc):\n errtext = 'Invalid excentricity value in ellipsoid model.'\n inrange(ellipsoid[1], 0, 1, exclude='upper', text=errtext)\n\n inrange(orbitinc, 0, 180,\n exclude='both',\n text='Invalid orbit inclination.')\n\n rp = ellipsoid_r_geocentric(ellipsoid, orbitinc)\n\n return ellipsoid[0], np.sqrt(1 - (rp / ellipsoid[0])**2)", "def draw_bounding_box(self, label_pos: int):\n self.pdf = Canvas(str(self.full_path), pagesize=letter)\n self.width, self.height = letter\n\n box_info = self.label_locations[label_pos]\n\n self.pdf.line(box_info.upper_left_offset.x,\n box_info.upper_left_offset.y,\n box_info.upper_right_offset.x,\n box_info.upper_right_offset.y)\n\n self.pdf.line(box_info.upper_right_offset.x,\n box_info.upper_right_offset.y,\n box_info.lower_right_offset.x,\n box_info.lower_right_offset.y)\n\n self.pdf.line(box_info.lower_right_offset.x,\n box_info.lower_right_offset.y,\n box_info.lower_left_offset.x,\n box_info.lower_left_offset.y)\n\n self.pdf.line(box_info.lower_left_offset.x,\n box_info.lower_left_offset.y,\n box_info.upper_left_offset.x,\n box_info.upper_left_offset.y)", "def draw(self, image, px, py, angle, color, map_resolution, alpha=1.0, draw_steering_details=True):", "def project_ellipsoid_to_line(A, k):\n # Make Cholesky decomposition of A:\n L = cholesky(A)\n L_inv = inv(L)\n w = L_inv.dot(k) / (k.T.dot(k))\n\n return norm(w)", "def get_outer_border():\n\n\touter_border_coords = [] # stores (long, lat pairs) - e.g. 
(-83, 42)\n\n\t# Append vertices.\n\touter_border_coords.append((-83.098183, 42.286897))\n\touter_border_coords.append((-83.118074, 42.289572))\n\touter_border_coords.append((-83.119683, 42.287215))\n\touter_border_coords.append((-83.117280, 42.279023))\n\touter_border_coords.append((-83.129253, 42.280262))\n\touter_border_coords.append((-83.137515, 42.282786))\n\touter_border_coords.append((-83.161139, 42.254697))\n\touter_border_coords.append((-83.163049, 42.256904))\n\touter_border_coords.append((-83.164101, 42.257682))\n\touter_border_coords.append((-83.166997, 42.259525))\n\touter_border_coords.append((-83.167341, 42.261875))\n\touter_border_coords.append((-83.168414, 42.263971))\n\touter_border_coords.append((-83.173349, 42.265051))\n\touter_border_coords.append((-83.167641, 42.267862))\n\touter_border_coords.append((-83.158425, 42.278682))\n\touter_border_coords.append((-83.162041, 42.281945))\n\touter_border_coords.append((-83.164465, 42.286580))\n\touter_border_coords.append((-83.167255, 42.288913))\n\touter_border_coords.append((-83.167856, 42.290596))\n\touter_border_coords.append((-83.165474, 42.290548))\n\touter_border_coords.append((-83.158865, 42.292247))\n\touter_border_coords.append((-83.157320, 42.293739))\n\touter_border_coords.append((-83.156569, 42.295580))\n\touter_border_coords.append((-83.151569, 42.296564))\n\touter_border_coords.append((-83.143823, 42.293390))\n\touter_border_coords.append((-83.143866, 42.294469))\n\touter_border_coords.append((-83.142707, 42.294469))\n\touter_border_coords.append((-83.141613, 42.295167))\n\touter_border_coords.append((-83.141055, 42.296008))\n\touter_border_coords.append((-83.140604, 42.296881))\n\touter_border_coords.append((-83.140283, 42.298199))\n\touter_border_coords.append((-83.140154, 42.299072))\n\touter_border_coords.append((-83.140304, 42.299818))\n\touter_border_coords.append((-83.141313, 42.302055))\n\touter_border_coords.append((-83.141656, 42.303833))\n\touter_border_coords.append((-83.141913, 42.304928))\n\touter_border_coords.append((-83.142707, 42.305801))\n\touter_border_coords.append((-83.140583, 42.306880))\n\touter_border_coords.append((-83.140841, 42.307768))\n\touter_border_coords.append((-83.139617, 42.308768))\n\touter_border_coords.append((-83.140433, 42.310529))\n\touter_border_coords.append((-83.153651, 42.327728))\n\touter_border_coords.append((-83.156826, 42.326824))\n\touter_border_coords.append((-83.157256, 42.330139))\n\touter_border_coords.append((-83.157620, 42.337262))\n\touter_border_coords.append((-83.153372, 42.337833))\n\touter_border_coords.append((-83.151119, 42.339117))\n\touter_border_coords.append((-83.150175, 42.340029))\n\touter_border_coords.append((-83.149488, 42.341100))\n\touter_border_coords.append((-83.147857, 42.349624))\n\touter_border_coords.append((-83.148029, 42.351297))\n\touter_border_coords.append((-83.195429, 42.349664))\n\touter_border_coords.append((-83.194828, 42.335882))\n\touter_border_coords.append((-83.211930, 42.335691))\n\touter_border_coords.append((-83.213561, 42.335025))\n\touter_border_coords.append((-83.214977, 42.335580))\n\touter_border_coords.append((-83.213239, 42.327427))\n\touter_border_coords.append((-83.225706, 42.328331))\n\touter_border_coords.append((-83.227744, 42.331519))\n\touter_border_coords.append((-83.235576, 42.328664))\n\touter_border_coords.append((-83.236392, 42.335104))\n\touter_border_coords.append((-83.238065, 42.335200))\n\touter_border_coords.append((-83.238602, 42.342496))\n\touter_border_coords.append((-83.242314, 
42.342511))\n\touter_border_coords.append((-83.253644, 42.341163))\n\touter_border_coords.append((-83.264716, 42.340925))\n\touter_border_coords.append((-83.267591, 42.357053))\n\touter_border_coords.append((-83.268256, 42.378329))\n\touter_border_coords.append((-83.276324, 42.378012))\n\touter_border_coords.append((-83.279500, 42.405999))\n\touter_border_coords.append((-83.288426, 42.405967))\n\touter_border_coords.append((-83.289735, 42.443538))\n\touter_border_coords.append((-83.259287, 42.446071))\n\touter_border_coords.append((-83.219891, 42.447528))\n\touter_border_coords.append((-83.165860, 42.447718))\n\touter_border_coords.append((-83.126335, 42.448478))\n\touter_border_coords.append((-83.095179, 42.449903))\n\touter_border_coords.append((-83.044667, 42.450853))\n\touter_border_coords.append((-83.000293, 42.452151))\n\touter_border_coords.append((-82.966304, 42.452215))\n\touter_border_coords.append((-82.936392, 42.452563))\n\touter_border_coords.append((-82.948623, 42.436602))\n\touter_border_coords.append((-82.926435, 42.427606))\n\touter_border_coords.append((-82.908454, 42.420700))\n\touter_border_coords.append((-82.908926, 42.415283))\n\touter_border_coords.append((-82.912445, 42.407298))\n\touter_border_coords.append((-82.916822, 42.398678))\n\touter_border_coords.append((-82.921329, 42.393354))\n\touter_border_coords.append((-82.934246, 42.388917))\n\touter_border_coords.append((-82.942615, 42.385684))\n\touter_border_coords.append((-82.923775, 42.357656))\n\touter_border_coords.append((-82.947979, 42.344970))\n\touter_border_coords.append((-82.957850, 42.336786))\n\touter_border_coords.append((-82.986689, 42.331012))\n\touter_border_coords.append((-83.017588, 42.329552))\n\touter_border_coords.append((-83.063164, 42.317939))\n\touter_border_coords.append((-83.078699, 42.308482))\n\touter_border_coords.append((-83.096638, 42.289628))\n\n\touter_border = Polygon(outer_border_coords)\n\treturn outer_border", "def _drawYZentities(self):\n pass", "def ellipse(\n img,\n center,\n axes,\n angle,\n startAngle,\n endAngle,\n color,\n thickness=...,\n lineType=...,\n shift=...,\n) -> img:\n ...", "def plotGlobe3D():", "def set_ellipsoid(self, ellipsoid):\n if not isinstance(ellipsoid, (list, tuple)):\n try:\n self.ELLIPSOID = ELLIPSOIDS[ellipsoid]\n self.ellipsoid_key = ellipsoid\n except KeyError:\n raise Exception(\n \"Invalid ellipsoid. See geopy.distance.ELLIPSOIDS\"\n )\n else:\n self.ELLIPSOID = ellipsoid\n self.ellipsoid_key = None\n return", "def set_ellipsoid(self, ellipsoid):\n if not isinstance(ellipsoid, (list, tuple)):\n try:\n self.ELLIPSOID = ELLIPSOIDS[ellipsoid]\n self.ellipsoid_key = ellipsoid\n except KeyError:\n raise Exception(\n \"Invalid ellipsoid. 
See geopy.distance.ELIPSOIDS\"\n )\n else:\n self.ELLIPSOID = ellipsoid\n self.ellipsoid_key = None\n return", "def extent(self):\n cos = np.cos(self.position_angle)\n sin = np.sin(self.position_angle)\n x = np.hypot(cos * self.x_fwhm, sin * self.y_fwhm)\n y = np.hypot(cos * self.y_fwhm, sin * self.x_fwhm)\n return Coordinate2D(coordinates=[x, y])", "def distance_ellipsoid(x, y, z, v, center, v1, v2, v3, dmax, label, normal):\n shape = (x.size, y.size, z.size) # shape of the outputs\n # build the equation of the ellipsoid\n # then write the second order equation in d\n # a d**2 + b d + c = 0\n # delta = b**2-4ac\n X = x - center[0]\n Y = y - center[1]\n Z = z - center[2]\n v12 = np.cross(v1, v2)\n v23 = np.cross(v2, v3)\n v31 = np.cross(v3, v1)\n d = np.inner(v1, v23) ** 2\n # equation of the ellipsoid\n # cxx XX + cyy YY + czz ZZ + cxy XY + cyz YZ + czx ZX = d\n cxx = v12[0] ** 2 + v23[0] ** 2 + v31[0] ** 2\n cyy = v12[1] ** 2 + v23[1] ** 2 + v31[1] ** 2\n czz = v12[2] ** 2 + v23[2] ** 2 + v31[2] ** 2\n cxy = 2 * (v12[0] * v12[1] + v23[0] * v23[1] + v31[0] * v31[1])\n cyz = 2 * (v12[1] * v12[2] + v23[1] * v23[2] + v31[1] * v31[2])\n czx = 2 * (v12[2] * v12[0] + v23[2] * v23[0] + v31[2] * v31[0])\n a = (\n cxx * v[0] ** 2\n + cyy * v[1] ** 2\n + czz * v[2] ** 2\n + cxy * v[0] * v[1]\n + cyz * v[1] * v[2]\n + czx * v[2] * v[0]\n )\n b = (\n (2 * cxx * v[0] + cxy * v[1] + czx * v[2]) * X\n + (2 * cyy * v[1] + cyz * v[2] + cxy * v[0]) * Y\n + (2 * czz * v[2] + czx * v[0] + cyz * v[1]) * Z\n )\n c = (\n cxx * X**2\n + cyy * Y**2\n + czz * Z**2\n + cxy * X * Y\n + cyz * Y * Z\n + czx * Z * X\n - d\n )\n delta = b**2 - 4 * a * c\n ind = delta >= 0 # wird but it works\n delta[ind] = np.sqrt(delta[ind])\n\n d1 = 1e16 * np.ones(shape)\n d2 = 1e16 * np.ones(shape)\n d1[ind] = (-b[ind] - delta[ind]) / (2 * a)\n d2[ind] = (-b[ind] + delta[ind]) / (2 * a)\n d1[d1 < 0] = 1e16\n d2[d2 < 0] = 1e16\n d = -np.ones(shape)\n d[ind] = np.minimum(d1[ind], d2[ind])\n d[d == 1e16] = -1\n\n alpha = -np.ones(shape)\n border = -np.ones(shape)\n if dmax is None:\n ind = d > 0\n else:\n ind = np.logical_and(d > 0, d <= dmax)\n alpha[ind] = d[ind]\n border[ind] = label[0]\n\n if normal: # compute the normal vector\n # initialization\n normal_vect = np.zeros(tuple(list(shape) + [3]))\n normal_x = normal_vect[:, :, :, 0]\n normal_y = normal_vect[:, :, :, 1]\n normal_z = normal_vect[:, :, :, 2]\n norm_normal = np.ones(shape)\n # coordinates of the point on the ellipsoid\n Xe = X + alpha * v[0]\n Ye = Y + alpha * v[1]\n Ze = Z + alpha * v[2]\n # compute the direction\n normal_x[ind] = 2 * cxx * Xe[ind] + cxy * Ye[ind] + czx * Ze[ind]\n normal_y[ind] = 2 * cyy * Ye[ind] + cxy * Xe[ind] + cyz * Ze[ind]\n normal_z[ind] = 2 * czz * Ze[ind] + czx * Xe[ind] + cyz * Ye[ind]\n # compute the sense (opposite to v)\n sense = np.asarray(\n normal_x * v[0] + normal_y * v[1] + normal_z * v[2] > 0\n ).nonzero()\n normal_x[sense] *= -1\n normal_y[sense] *= -1\n normal_z[sense] *= -1\n # normalization\n norm_normal[ind] = np.sqrt(\n normal_x[ind] ** 2 + normal_y[ind] ** 2 + normal_z[ind] ** 2\n )\n # copy in the output vector\n normal_x /= norm_normal\n normal_y /= norm_normal\n normal_z /= norm_normal\n else:\n normal_vect = None\n return alpha, border, normal_vect", "def compute_ellipse_kernel( self ):\n \n error_code = 0 \n \n if( int(self._curr_width) %2 == 0 ):\n f_width=int(self._curr_width) + 1 \n keyx=1\n else:\n f_width=int(self._curr_width) \n keyx=0\n \n if( int(self._curr_height) %2 == 0 ):\n f_height=int(self._curr_height) + 1 
\n keyy=1\n else:\n f_height=int(self._curr_height) \n keyy=0 \n \n half_width = int(( f_width -1 ) * 0.5)\n half_height =int( ( f_height -1 ) * 0.5) \n \n \n x_limit = int( np.floor( ( f_width - 1) * 0.5 ) )\n \n y_limit = int( np.floor( ( f_height -1 ) * 0.5 ) )\n \n x_range = np.array( [ range( -x_limit , x_limit -keyx+ 1 )])\n y_range = np.array( [ range( -y_limit , y_limit -keyy+ 1 ) ])\n y_range = np.transpose( y_range)\n x_matrix = np.repeat( x_range , y_limit * 2 -keyy+ 1 , axis = 0 )\n y_matrix = np.repeat( y_range , x_limit*2 -keyx + 1 , axis = 1 )\n \n x_square = np.multiply( x_matrix , x_matrix )\n y_square = np.multiply( y_matrix ,y_matrix ) \n #print \"xlimit\" ,x_limit\n #print \"ylimit\", y_limit\n #print \"current width\",self._curr_width\n #print \"current height\",self._curr_height\n \n #print \"xmatrix shape\",x_matrix.shape\n #print \"ymatrix shape\",y_matrix.shape\n \n x_square = np.divide( x_square , float( half_width * half_width ) )\n y_square = np.divide( y_square , float( half_height * half_height ) )\n \n self._kernel_mask = np.ones( [ int(self._curr_height) , int(self._curr_width) ] ) - ( y_square + x_square )\n \n self._kernel_mask [ self._kernel_mask < 0 ] = 0\n \n # print( 'kernel computation complete ')\n \n return error_code", "def drawCoordinates(self):\n \n adjust = 30\n \n self.libtft.DrawRect(adjust, adjust, self._MAX_COLS - adjust, self._MAX_ROWS - adjust, self.libtft.GREEN)\n \n x = adjust - (adjust / 2)\n y = adjust\n st = \"%d, %d\" % (x, y)\n self.libtft.PutSt(st, x, y)\n self.libtft.DrawPixel(x, y, self.libtft.GREEN)\n \n x = self._MAX_COLS - adjust - (adjust / 2)\n y = self._MAX_ROWS - adjust\n st = \"%d, %d\" % (x, y)\n self.libtft.PutSt(st, x, y)\n self.libtft.DrawPixel(x, y, self.libtft.GREEN)\n \n x = adjust - (adjust / 2)\n y = self._MAX_ROWS - adjust\n st = \"%d, %d\" % (x, y) \n self.libtft.PutSt(st, x, y)\n self.libtft.DrawPixel(x, y, self.libtft.GREEN)\n\n x = self._MAX_COLS - adjust - (adjust / 2)\n y = adjust\n st = \"%d, %d\" % (x, y) \n self.libtft.PutSt(st, x, y)\n self.libtft.DrawPixel(x, y, self.libtft.GREEN)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
The hyperparameter vector is ``[ active points, rejection samples, enlargement factor, ellipsoid update gap, dynamic enlargement factor, alpha]``.
def set_hyper_parameters(self, x): self.set_n_active_points(x[0]) self.set_n_rejection_samples(x[1]) self.set_enlargement_factor(x[2]) self.set_ellipsoid_update_gap(x[3]) self.set_dynamic_enlargement_factor(x[4]) self.set_alpha(x[5])
[ "def extractHyperParameters(self):\n return(np.array(self.hypers))", "def setKernelParam(self, alpha, scale) -> None:\n ...", "def params(self):\n\t\treturn {\"k\": self.__k, \"alpha\": self.__alpha}", "def sample_hyperparameters(self):\n\n # This stepsize works for Eve corpus (not much stepping out). Tiny\n # test corpora get stuck with overly flat posteriors/likelihoods.\n stepsize = .2\n\n alpha_samples = slice_sampler.log_slice_sampler(\n self.log_prob_trans_alpha_gamma,\n self.alpha,\n stepsize, self.rng,\n num_samples = 10,\n x_min=VERY_SMALL)\n self.alpha = alpha_samples[self.rng.rng_random_uniform_int(len(alpha_samples))]\n\n for i in range(1,self.K): # do not resample boundary beta!\n beta_samples = slice_sampler.log_slice_sampler(\n lambda x: self.log_prob_ems_beta_gamma(i, x),\n self.beta[i],\n stepsize, self.rng,\n num_samples = 10,\n x_min= VERY_SMALL)\n self.beta[i] = beta_samples[self.rng.rng_random_uniform_int(len(beta_samples))]", "def internal_cv(self, method, alphas):\n\n se = np.zeros(alphas.size)\n\n for i, (train, test) in enumerate(self.cv_generator.split(range(self.n_samples))):\n for j, v in enumerate(alphas):\n weights = method(x = self.x[train], alpha = v)\n se[j] += sum(np.sum(weights * self.x[test], axis=1)**2)\n\n best_idx = np.argmin(se)\n #print(best_idx, alphas.size)\n if best_idx == 0 and alphas[0] > 1e-9:\n print(\"Warning: Consider lowering the minimum bound for alpha for method %s\" % str(method))\n elif best_idx == alphas.size-1:\n print(\"Warning: Consider raising the minimum bound for alpha for method %s\" % str(method))\n return method(alpha = alphas[best_idx])", "def alpha(self, alpha):\n return RealRange(((self.kernel[0] - self.support[0]) * alpha \\\n + self.support[0], self.support[1] - \\\n (self.support[1] - self.kernel[1]) * alpha))", "def __calculate_alpha(self):\n\n self.alpha = np.exp(np.dot(self.x, self.lam.T))", "def parameters_scaled(self) -> OptimizationVariableList:\n return self._nlp.parameters.scaled", "def compute_hyperparameter_ranges(self): \n exponent = np.floor(\n np.log10(np.abs(1 / self.trainX.shape[0]))).astype(int)\n self.gamma = np.logspace(exponent - 1, exponent + 4, self.param_space)\n self.c = np.logspace(exponent, 1, self.param_space)\n self.alpha = np.logspace(exponent, 1, self.param_space)\n self.l1_ratio = np.logspace(exponent, 0, self.param_space)", "def params(self):\n\t\treturn {\"beta\": self.__beta, \"c\": self.__c, \"d\": self.__d}", "def sklearn_elastic_net_cv(X, y, alpha):\n l1ratio_sklearn = alpha / (2 - alpha)\n elastic_net = ElasticNetCV(l1_ratio=l1ratio_sklearn, fit_intercept=False, max_iter=10000, tol=0.000001).fit(X, y)\n lambda_opt = elastic_net.alpha_ / (1 - 1 / 2 * alpha)\n return lambda_opt", "def CalculateAlpha(self):\n\n # Adaptive alpha = Base Alpha * Mask modifer * Hygiene modifie * Distancing modifier\n self.Alpha = self.BaseAlpha\n self.Alpha *= (1 - self.MASK * 0.3)\n self.Alpha *= (1 - self.HYGIENE * 0.8)\n self.Alpha *= (1 - self.DISTANCING * 0.7)\n\n return", "def addparamendogdict(p):\n p['a'] = 1\n p['k'] = ((p['ALPHA'] * p['a'])/(1/p['BETA'] - 1 + p['DELTA']))**(1/(1-p['ALPHA']))\n p['y'] = p['a'] * p['k'] ** p['ALPHA']\n p['c'] = p['y'] - p['DELTA'] * p['k']\n\n return(p)", "def ec_alpha_blr( arg, args, strain, plotcurve, alphalist, cell_vol, nnid):\n\n etot_list=[]\n ## rmaxh = alatTi*1.0001\n for i in alphalist:\n etot, alpha, xijc, ijns, dil, rmaxh= elastic_constant_shear_info(LMarg, args, i, strain, nnid)\n args += ' -vrmaxh=' + str(rmaxh) + ' '\n etot_list.append(etot)\n \n ## 
1 bohr = x angstrom\n bohr_to_angstrom = 0.529177208 \n ## Rydberg to electron volts\n ryd = 13.606 \n etot_list = np.asarray(etot_list)/cell_vol\n ## Converting to eV/AA**3\n etot_list = etot_list * (ryd / bohr_to_angstrom**3)\n\n w = blr_poly_fit(alphalist, etot_list, 5, 0.01)\n\n \n ## Curvature at alpha = 0\n curvature = 2 * w[2] # w[0] + w[1] * x + w[2]\n\n if plotcurve == True:\n afunclist=np.linspace(alphalist[0], alphalist[-1], 100)\n fitcurveE=[]\n\n for i in afunclist:\n fitcurveE.append( fifth_ord_poly( i, w[0], w[1], w[2], w[3], w[4], w[5] ) )\n x = [alphalist, afunclist]\n y = [etot_list, fitcurveE]\n colour = ['r*', 'g--']\n g.plot_function(2, x, y, colour, 'Total Energy vs Alpha', \n 'Alpha (Deformation parameter)', 'Energy (ev)')\n \n print(' curvature = %s' %(curvature))\n\n return curvature, []", "def edowham(alpha_p, e_eff):\n param_elasticity = alpha_p * e_eff\n return param_elasticity", "def get_alpha(self,x):\n res = x*self.p1_grid[-1] + self.p2_grid[-1]\n return res # a vector of length time", "def compute_hyperparameter_ranges(self):\n exponent = np.floor(\n np.log10(np.abs(1 / self.trainX.shape[0]))).astype(int)\n self.gamma = np.logspace(exponent - 1, exponent + 4, self.param_space)\n self.c = np.logspace(exponent, 1, self.param_space)\n self.alpha = np.logspace(exponent, 1, self.param_space)\n self.l1_ratio = np.logspace(exponent, 0, self.param_space)", "def get_hyperparameters(self):\n theta0 = self.GPkernel.get_params()['k1__k1__constant_value']\n theta1 = self.GPkernel.get_params()['k1__k2__length_scale']\n theta2 = self.GPkernel.get_params()['k2__noise_level']\n return np.array([theta0,theta1,theta2])", "def check_alpha_equations(svm):\n eq4 = 0\n eq5 = []\n for point in svm.training_points:\n eq4 += classify(svm, point) * point.alpha\n\n new_list = []\n for num in point.coords:\n new_list.append(classify(svm, point) * point.alpha * num)\n\n if len(eq5) == 0:\n eq5 = new_list[:]\n else:\n for i in range(len(eq5)):\n eq5[i] += new_list[i]\n\n if eq4 == 0 and eq5 == svm.boundary.w:\n return True\n else:\n return False" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Get a mask of a detected table from its bounding box coordinates.
def get_mask_from_bounding_box(bounding_box_coordinates,shape): #unwrap bouding box coordinates x,y,w,h = bounding_box_coordinates #create blank image with corresponding shape blank_image = np.zeros(shape, np.uint8) #create corrected mask corrected_mask = cv2.rectangle(blank_image,(x,y),(x+w,y+h),(255,255,255),-1) return corrected_mask
[ "def fixMasks(image, table_mask, column_mask):\r\n table_mask = table_mask.reshape(1024,1024).astype(np.uint8)\r\n column_mask = column_mask.reshape(1024,1024).astype(np.uint8)\r\n \r\n #get contours of the mask to get number of tables\r\n contours, table_heirarchy = cv2.findContours(table_mask, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_NONE)\r\n \r\n table_contours = []\r\n #ref: https://www.pyimagesearch.com/2015/02/09/removing-contours-image-using-python-opencv/\r\n #remove bad contours\r\n\r\n #print(contours)\r\n\r\n for c in contours:\r\n # if the contour is bad, draw it on the mask\r\n\r\n\r\n #if not is_contour_bad(c):\r\n if cv2.contourArea(c) > 2000:\r\n table_contours.append(c)\r\n \r\n if len(table_contours) == 0:\r\n return None\r\n\r\n #ref : https://docs.opencv.org/4.5.2/da/d0c/tutorial_bounding_rects_circles.html\r\n #get bounding box for the contour\r\n \r\n table_boundRect = [None]*len(table_contours)\r\n for i, c in enumerate(table_contours):\r\n polygon = cv2.approxPolyDP(c, 3, True)\r\n table_boundRect[i] = cv2.boundingRect(polygon)\r\n \r\n #table bounding Box\r\n table_boundRect.sort()\r\n \r\n col_boundRects = []\r\n for x,y,w,h in table_boundRect:\r\n \r\n col_mask_crop = column_mask[y:y+h,x:x+w]\r\n \r\n #get contours of the mask to get number of tables\r\n contours, col_heirarchy = cv2.findContours(col_mask_crop, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)\r\n #get bounding box for the contour\r\n boundRect = [None]*len(contours)\r\n for i, c in enumerate(contours):\r\n polygon = cv2.approxPolyDP(c, 3, True)\r\n boundRect[i] = cv2.boundingRect(polygon)\r\n \r\n #adjusting columns as per table coordinates\r\n boundRect[i] = (boundRect[i][0] + x ,\r\n boundRect[i][1] + y ,\r\n boundRect[i][2],\r\n boundRect[i][3])\r\n \r\n col_boundRects.append(boundRect)\r\n \r\n image = image[...,0].reshape(1024, 1024).astype(np.uint8)\r\n \r\n #draw bounding boxes\r\n color = (0,255,0)\r\n thickness = 4\r\n \r\n for x,y,w,h in table_boundRect:\r\n image = cv2.rectangle(image, (x,y),(x+w,y+h), color, thickness)\r\n \r\n return image, table_boundRect, col_boundRects", "def test_table_mask():\n query = QualityQuery(\n quality_criteria=[\n (\"foo\", \"(x**2 + y**2) < 1.0\"),\n (\"bar\", \"x < 0.5\"),\n ],\n )\n\n table = Table({\"x\": [1.0, 0.2, -0.5, 0.6, 0.7], \"y\": [0.0, 0.5, 1.0, 0.2, 0.1]})\n\n mask = query.get_table_mask(table)\n assert len(mask) == len(table)\n assert mask.dtype == np.bool_\n np.testing.assert_equal(mask, [False, True, False, False, False])\n stats = query.to_table()\n np.testing.assert_equal(stats[\"counts\"], [5, 3, 2])\n np.testing.assert_equal(stats[\"cumulative_counts\"], [5, 3, 1])", "def extract_bboxes(mask):\n boxes = np.zeros([mask.shape[-1], 4], dtype=np.int32)\n for i in range(mask.shape[-1]):\n m = mask[:, :, i]\n # Bounding box.\n horizontal_indicies = np.where(np.any(m, axis=0))[0]\n vertical_indicies = np.where(np.any(m, axis=1))[0]\n if horizontal_indicies.shape[0]:\n x1, x2 = horizontal_indicies[[0, -1]]\n y1, y2 = vertical_indicies[[0, -1]]\n # x2 is xmax + 1 and y2 is ymax + 1\n x2 += 1\n y2 += 1\n else:\n # No mask for this instance. Might happen due to\n # resizing or cropping. 
Set bbox to zeros\n x1, x2, y1, y2 = 0, 0, 0, 0\n boxes[i] = np.array([y1, x1, y2, x2])\n return boxes.astype(np.int32)", "def _load_mask(self, gt_data):\n img_coco = self.refexp_dataset.loadImgs(ids=gt_data['image_id'])[0]\n mask = Image.new('L', (img_coco['width'], img_coco['height']), 0)\n for seg in gt_data['segmentation']:\n ImageDraw.Draw(mask).polygon(seg, outline='white', fill='white')\n return numpy.asarray(mask)", "def get_mask_bounding_box(self):\r\n try:\r\n self.mask\r\n if self.mask_xy_dim == self.image_xy_dim:\r\n x, y, w, h = self.calculate_bounding_box()\r\n self.mask_bounding_box = {}\r\n self.mask_bounding_box.update({\r\n 'min_x': x,\r\n 'min_y': y,\r\n 'bb_width': w,\r\n 'bb_height': h,\r\n })\r\n else:\r\n print('Error: Mask and image dimensions do not match')\r\n except AttributeError:\r\n print('Error: no mask has been loaded')", "def get_superpixel_borders_mask(self, img_path):\n return find_boundaries(self.images_segmented[img_path])", "def getAsMaskImage(self):\n\t\tif not self.isROI():\n\t\t\treturn None\n\t\tinsideMap = {}\n\t\tinsideMap.update(self.getCoveredPoints())\n\t\tinsMap = {}\n\t\tfor x, y in insideMap.keys():\n\t\t\tinsMap[(x, y)] = 1\n\t\tparent = self.GetCanvas()\n\t\tmx, my, mz = parent.dataUnit.getDimensions()\n\t\treturn lib.ImageOperations.getMaskFromPoints(insMap, mx, my, mz)", "def mask_to_bbox(mask):\n is_3d = len(mask.shape) == 3\n\n reduce_axes = (0, 1) if is_3d else (0,)\n cols_any = np.any(mask, axis=reduce_axes)\n cols_where = np.where(cols_any)[0]\n if cols_where.shape[0] == 0:\n return None\n x1, x2 = cols_where[[0, -1]]\n\n reduce_axes = (0, 2) if is_3d else (1,)\n rows_any = np.any(mask, axis=reduce_axes)\n rows_where = np.where(rows_any)[0]\n if rows_where.shape[0] == 0:\n return None\n y1, y2 = rows_where[[0, -1]]\n\n return x1, y1, x2, y2", "def edge_mask(mask):\n\n # Sagittal profile\n brain = mask.any(axis=0)\n\n # Simple edge detection\n edgemask = 4 * brain - np.roll(brain, 1, 0) - np.roll(brain, -1, 0) - \\\n np.roll(brain, 1, 1) - np.roll(brain, -1, 1) != 0\n return edgemask.astype('uint8')", "def compute_static_mask(box_num: Tensor):\n max_len = documents.MAX_BOXES_NUM\n mask = torch.arange(0, max_len, device=box_num.device).expand((box_num.shape[0], max_len))\n box_num = box_num.expand_as(mask)\n mask = mask < box_num\n row_mask = mask.unsqueeze(1)\n column_mask = mask.unsqueeze(2)\n mask = row_mask & column_mask\n mask = ~mask * -1\n return mask.unsqueeze(-1)", "def mask2bbox(mask) -> List[List[int]]:\n bboxes: List[List[int]] = []\n\n mask = mask_to_border(mask)\n print(np.unique(mask))\n lbl = label(mask)\n props = regionprops(lbl)\n for prop in props:\n x1 = prop.bbox[1]\n y1 = prop.bbox[0]\n\n x2 = prop.bbox[3]\n y2 = prop.bbox[2]\n\n bboxes.append([x1, y1, x2, y2])\n\n return bboxes", "def predict_bounding_box_using_mask(image_tensor: tf.Tensor) -> np.ndarray:\n # resize image to match model input\n input_height = prediction_model.inputs[0].shape[1]\n input_width = prediction_model.inputs[0].shape[2]\n image_tensor = tf.image.resize(\n image_tensor, size=(input_height, input_width), antialias=True\n )\n\n image_tensor = tf.reshape(image_tensor, shape=(1, input_height, input_width, 3))\n\n predicted_mask = prediction_model_mask(image_tensor)[0].numpy()\n\n # visualization_tools.show_image(predicted_mask)\n\n bounding_box = preprocessing.mask_to_bounding_box(predicted_mask)\n\n return preprocessing.bounding_box_in_percent(\n bounding_box, input_height, input_width\n )", "def get_detector_mask(boxes, anchors):\n 
detectors_mask = [0 for i in range(len(boxes))]\n matching_true_boxes = [0 for i in range(len(boxes))]\n for i, box in enumerate(boxes):\n detectors_mask[i], matching_true_boxes[i] = preprocess_true_boxes(box, anchors, [416, 416])\n return np.array(detectors_mask), np.array(matching_true_boxes)", "def mask_to_border(mask):\n h, w = mask.shape\n border = np.zeros((h, w))\n\n contours = find_contours(mask, 0.5) # since the input range is [0, 1], the threshold is 0.5\n for contour in contours:\n for c in contour:\n x = int(c[0])\n y = int(c[1])\n border[x][y] = 1 # since the input is binary, the value is 1\n\n return border", "def bbox_from_binary_mask(binary_mask):\n # Find all columns and rows that contain 1s\n rows = np.any(binary_mask, axis=1)\n cols = np.any(binary_mask, axis=0)\n # Find the min and max col/row index that contain 1s\n rmin, rmax = np.where(rows)[0][[0, -1]]\n cmin, cmax = np.where(cols)[0][[0, -1]]\n # Calc height and width\n h = rmax - rmin + 1\n w = cmax - cmin + 1\n return [int(cmin), int(rmin), int(w), int(h)]", "def draw_mask_only(image, box, mask, label=None, color=None, binarize_threshold=0.5):\n\n from keras_retinanet.utils.colors import label_color\n\n # import miscellaneous modules\n import cv2\n import numpy as np\n\n if label is not None:\n color = label_color(label)\n if color is None:\n color = (255, 255, 255)\n\n # resize to fit the box\n mask = mask.astype(np.float32)\n mask = cv2.resize(mask, (box[2] - box[0], box[3] - box[1]))\n\n # binarize the mask\n mask = (mask > binarize_threshold).astype(np.uint8)\n\n # draw the mask in the image\n mask_image = np.zeros((image.shape[0], image.shape[1]), np.uint8)\n mask_image[box[1]:box[3], box[0]:box[2]] = mask\n mask = mask_image\n\n # compute a nice border around the mask\n border = mask - cv2.erode(mask, np.ones((5, 5), np.uint8), iterations=1)\n\n # apply color to the mask and border\n mask = (np.stack([mask] * 3, axis=2) * color).astype(np.uint8)\n border = (np.stack([border] * 3, axis=2) * (255, 255, 255)).astype(np.uint8)\n # this is how you look into the mask\n # for i in mask:\n # \tfor j in i:\n # \t\tb = False\n # \t\tfor k in i:\n # \t\t\tfor l in k:\n # \t\t\t\tif l != 0:\n # \t\t\t\t\tb = True\n # \t\t\t\tif b:\n # \t\t\t\t\tbreak\n # \t\t\tif b:\n # \t\t\t\tbreak\n # \t\tif b:\n # \t\t\tprint (j)\n\n # draw the mask\n indices = np.where(mask != color)\n image[indices[0], indices[1], :] = 0 * image[indices[0], indices[1], :]\n\n return mask", "def find_mask(image: Image, bounding_rect: TalonRect, config: str=None) -> 'src.types.Mask':\n\n background_detector_setting = config if config is not None else setting_background_detector.get()\n\n if background_detector_setting == \"mouse_fill\":\n mouse_pos = ctrl.mouse_pos()\n # Ints are because OSX gets floats for both mouse pos and the bounding rect\n mouse_norm_y = int(mouse_pos[1] - bounding_rect.y)\n mouse_norm_x = int(mouse_pos[0] - bounding_rect.x)\n mask = calculate_floodfill_mask(\n image,\n (mouse_norm_x, mouse_norm_y)\n )\n elif background_detector_setting.startswith(\"pixel_fill\"):\n bits = background_detector_setting.split(\":\")\n mods = bits[1].split(\" \") if len(bits) > 1 else [\"0\", \"0\"]\n mask = calculate_floodfill_mask(\n image,\n (\n calculate_relative(mods[0], 0, bounding_rect.width),\n calculate_relative(mods[1], 0, bounding_rect.height),\n )\n )\n elif background_detector_setting.startswith(\"explicit_colors\"):\n _, colors_str = background_detector_setting.split(\":\")\n colors = colors_str.split(\" \")\n mask = 
calculate_explicit_mask(\n image,\n colors\n )\n\n return mask", "def boundary_mask(self, tri):\n\n\t\tbmask = np.zeros(len(self.x), dtype=bool)\n\t\tvertices = zip(self.x, self.y) # Convert back to tuples\n\n\t\tif self.is_concave:\n\t\t\t# Haven't worked this out yet!\n\t\t\tpass\n\t\telse:\n\t\t\t# triangle has a convex hull routine\n\t\t\thull = triangle.convex_hull(tri['vertices'])\n\t\t\tconvex_hull = zip( hull[:,0], hull[:,1] )\n\t\t\tfor i, vert in enumerate(vertices):\n\t\t\t\tif vert in convex_hull:\n\t\t\t\t\tbmask[i] = True\n\n\t\treturn bmask", "def get_mask(base_image, boundaries, nodata_value=0):\n with rasterio.open(base_image) as base:\n out_raster, out_transform = rasterio.mask.mask(base, [boundaries])\n\n out_raster_bool = out_raster == nodata_value\n\n out_raster_int = out_raster_bool.astype(numpy.uint8)\n out_raster_int = out_raster_int * 255\n\n out_image_array = rasterio.plot.reshape_as_image(out_raster_int)\n\n return out_image_array" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Compute the difference between each element and the next element in a list and return the pair of elements where the difference is maximum. For instance, if we take the list [1,2,3,4,10,12,15], we compute the difference list [1,1,1,6,2,3] and the function returns (4,10), the pair that spans the maximum gap.
def get_biggest_gap_index(elements_list):
    # compute the list of differences between each element and the next element
    steps = np.array([x - y for y, x in zip(elements_list, elements_list[1:])])
    # get the index of the element with the biggest gap to the next element
    index_where_biggest_gap = np.where(steps == steps.max())[0][0]
    # return the element and the next element
    return elements_list[index_where_biggest_gap], elements_list[index_where_biggest_gap + 1]
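A quick check with the example list from the query above, assuming NumPy is imported as np:

import numpy as np

elements = [1, 2, 3, 4, 10, 12, 15]
low, high = get_biggest_gap_index(elements)
print(low, high)  # 4 10 -- the largest jump (6) sits between 4 and 10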
[ "def nextGreaterElements(self, nums):\r\n n = len(nums)\r\n res = [-1] * n\r\n s = []\r\n\r\n for _ in range(2):\r\n for i, num in enumerate(nums):\r\n while s and num > nums[s[-1]]:\r\n res[s.pop()] = num\r\n s.append(i)\r\n return res", "def distance_to_greater_value(arr: list, allow_equal: bool) -> list:\n next_greater_value = [0] * len(arr)\n stack = []\n\n for idx, num in enumerate(arr):\n if len(stack) == 0:\n stack.append(idx)\n elif num < arr[stack[-1]] or (allow_equal and num == arr[stack[-1]]):\n stack.append(idx)\n else:\n while len(stack) > 0 and ((allow_equal and num > arr[stack[-1]]) or\n (not allow_equal and num >= arr[stack[-1]])):\n idx2 = stack.pop()\n next_greater_value[idx2] = idx\n stack.append(idx)\n while len(stack) > 0:\n idx2 = stack.pop()\n next_greater_value[idx2] = len(arr)\n return next_greater_value", "def max_list_iter(int_list): # must use iteration not recursion\n if int_list is None: #If list is None, ValueError is raised\n raise ValueError\n if len(int_list) == 0: #If list is empty, None is returned\n return None\n else:\n mx = int_list[0] #Uses first number in list as first max number (mx)\n for num in int_list:\n if num > mx:\n mx = num #If current num is greater than max, current becomes new max\n return mx #Max num is returned\n pass", "def find_greatest_number(incoming_list):\n\n max_number = max(incoming_list)\n return max_number", "def find_max_min(l):\n l.sort()\n if l[0] != l[-1]:\n return [l[0], l[-1]]\n elif l[0] == l[-1]:\n return [len(l)]", "def largest_element(arr: List[int]) -> int:\n return(max(arr))", "def longestRun(L):\n #set longest running list to only include element of list\n longest_running = [L[0]]\n #create a working list \n working_list = [L[0]]\n for i in range(len(L)-1):\n #check to see if subsequent element is greater than or equal\n if L[i+1] >= L[i]:\n #add to working list if so \n working_list.append(L[i+1])\n #then check if working list is longer than longest running and change if so \n if len(working_list) >= len(longest_running):\n longest_running = working_list\n #starts working list from next element if otherwise\n else:\n working_list = [L[i+1]] \n return len(longest_running)", "def computeP(list):\n P = []\n P.append(0)\n for counter in range(1, len(list)):\n currentPtr = counter - 1\n maxIndex = 0\n while(currentPtr >= 0):\n if list[counter][0] >= list[currentPtr][1]:\n maxIndex = currentPtr + 1\n break\n currentPtr -= 1\n P.append(maxIndex)\n return P", "def range_of_list(l: list):\n return max(l) - min(l)", "def max_value(my_list):\n aux = ordered_values(my_list)\n return aux[len(aux) - 1]", "def nextGreaterPermutation(lst):\n # If there is only 1 digit, return as it is\n if len(lst) == 1:\n return lst\n\n # Search list in reverse, find the index,\n # where the number is less than the one before it\n for i in range(len(lst) - 1, -1, -1):\n if lst[i] > lst[i-1]:\n break\n\n # If the list is in descending order, return a reversed list\n if i == 0:\n lst.reverse()\n return lst\n\n # If not, swap the number found above with the smallest number that is\n # larger than the number found above\n lst[lst.index(min(filter(lambda x: x > lst[i-1], lst[i:])))], lst[i-1] =\\\n lst[i-1], lst[lst.index(min(filter(lambda x: x > lst[i-1], lst[i:])))]\n # Then reverse the rest of the list from the index\n lst[i:] = lst[i:][::-1]\n return lst", "def del_max(self):\n extracted_max = self.heaplist[0]\n self.heaplist[0] = self.heaplist[-1]\n self.heaplist.pop()\n i = 0\n length = len(self.heaplist)\n while i < length//2:\n l_idx = 
2*i + 1\n r_idx = 2*i + 2\n if r_idx > length-1:\n if self.heaplist[i] < self.heaplist[l_idx]:\n temp = self.heaplist[l_idx]\n self.heaplist[l_idx] = self.heaplist[i]\n self.heaplist[i] = temp\n i = l_idx\n else:\n break\n else:\n if (self.heaplist[i] >= self.heaplist[l_idx]) and (self.heaplist[i]>= self.heaplist[r_idx]):\n break\n \n else:\n if self.heaplist[l_idx] == self.heaplist[r_idx]:\n max_idx = r_idx\n val = self.heaplist[r_idx]\n else: \n to_swap = {l_idx: self.heaplist[l_idx], r_idx:self.heaplist[r_idx]} \n max_idx, val = max(to_swap.items(), key = lambda x:x[1])\n self.heaplist[max_idx] = self.heaplist[i]\n self.heaplist[i] = val\n i = max_idx\n \n return extracted_max", "def max_continuous(l:list):\n max_count = 1\n count = 1\n for i in range(1, len(l)):\n if l[i] == l[i-1] + 1:\n count += 1\n else:\n max_count = max(count, max_count)\n count = 1\n return max_count", "def get_max_delta(values):\n mind = {'index': None, 'value': None}\n maxd = {'index': None, 'value': None}\n \n for (index, n) in enumerate(values):\n if mind['value'] is None or n < mind['value']:\n mind['index'] = index\n mind['value'] = n\n\n if (maxd['value'] is None or n > maxd['value']) and index > mind['index']:\n maxd['index'] = index\n maxd['value'] = n\n \n return maxd['value'] - mind['value']", "def find_max_gap(self, free_space_ranges):\n start_i,end_i, best_start, best_end = 0,0,0,0\n for i in range(len(free_space_ranges)):\n if free_space_ranges[i] > 0:\n end_i += 1\n else:\n if end_i != start_i and end_i - start_i + 1 > best_end-best_start+1:\n best_start = start_i\n best_end = end_i\n\n start_i = i\n end_i = i\n\n if end_i != start_i-1 and end_i - start_i + 1 > best_end-best_start+1:\n best_start = start_i\n best_end = end_i\n return best_start, best_end", "def get_max(lst):\n maximum = lst[0]\n index = 0\n for i in range(len(lst)):\n if lst[i] > maximum:\n maximum = lst[i]\n index = i\n return (maximum, index)", "def generat_sequence(list):\n try:\n return max(list) + 1\n except:\n return 1", "def find_max_min(list_of_nums):\n \n if max(list_of_nums) == min(list_of_nums): #for a list with identical numbers\n return [len(list_of_nums)]\n \n else:\n return [min(list_of_nums), max(list_of_nums)]", "def find_max_gap(self, ranges):\n ranges[ranges!=0] = 1.0\n ranges1 = np.hstack((np.copy(ranges),0))\n ranges2 = np.hstack((0,np.copy(ranges)))\n check = ranges1 - ranges2\n #start and end indices of gaps\n start = np.where(check==1)[0]\n end = np.where(check==-1)[0]-1\n #check which gap is larger\n big_gap_idx = np.argmax(end-start)\n return start[big_gap_idx], end[big_gap_idx]" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Compute the sum of a matrix (mask) along an axis, get the indexes where the sum is higher than 80% of the maximum, then keep the biggest submatrix between the detected borders. If axis=1, rows (lines) outside that region are zeroed out; otherwise, columns are zeroed out.
def get_sub_mask_by_removing_overfilled_borders(mask, axis, limit_ratio=0.8):
    # compute the sum over the given axis
    summed_on_axis = mask.sum(axis=axis)
    # get the maximum value
    maximum_value = summed_on_axis.max()
    # find the lines or columns where the sum is over 80% of the maximum sum
    indexes = np.where(summed_on_axis >= maximum_value * limit_ratio)[0]
    # use get_biggest_gap_index to keep the biggest submatrix by setting excluded elements to 0
    #
    #      ______________ ________
    #     _______ ____ __________
    #     _______________________
    # -->      ____ _____ ___
    # Detected |   __ _ ________
    # Submatrix|        ______
    # -->   ______ ______ _________
    #     __ _______________ ____
    #
    start, end = get_biggest_gap_index(indexes)
    if axis == 1:
        mask[:start] = 0
        mask[end:] = 0
    elif axis == 0:
        mask[:, :start] = 0
        mask[:, end:] = 0
    return mask
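A small sketch of how the two helpers above fit together, assuming NumPy is imported as np and both functions are defined; the toy mask is made up for illustration:

import numpy as np

# toy binary mask: over-filled border rows at the top and bottom, content in the middle
mask = np.zeros((10, 10), dtype=np.uint8)
mask[0:2, :] = 1      # top border rows
mask[8:10, :] = 1     # bottom border rows
mask[4:6, 3:7] = 1    # actual content

# pass a copy because the function modifies the mask in place
trimmed = get_sub_mask_by_removing_overfilled_borders(mask.copy(), axis=1)
print(trimmed.sum(axis=1))  # [ 0 10  0  0  4  4  0  0  0  0] -- rows outside the largest gap are zeroed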
[ "def DownsampleBoundsMatrix(bm, indices, maxThresh=4.0):\n nPts = bm.shape[0]\n k = numpy.zeros(nPts, numpy.int0)\n for idx in indices:\n k[idx] = 1\n for i in indices:\n row = bm[i]\n for j in range(i + 1, nPts):\n if not k[j] and row[j] < maxThresh:\n k[j] = 1\n keep = numpy.nonzero(k)[0]\n bm2 = numpy.zeros((len(keep), len(keep)), numpy.float)\n for i, idx in enumerate(keep):\n row = bm[idx]\n bm2[i] = numpy.take(row, keep)\n return bm2", "def peak_finder_2d(matrix):\n\n global num_steps\n num_steps += 1\n\n n = len(matrix) # number of rows\n m = len(matrix[0]) # number of columns\n\n j = m//2\n\n mid_col = list(matrix[:,j])\n j_max = max(mid_col)\n i = mid_col.index(j_max)\n start = matrix[i,j]\n\n # base case:\n if start >= matrix[i,j+1] and start >= matrix[i,j-1]:\n return (start,i,j)\n\n # both sides are larger, choose the largest \n if start < matrix[i,j+1] and start < matrix[i,j-1]:\n if matrix[i,j+1] > matrix[i,j-1]:\n # go right\n for i in range(j,0,-1):\n matrix = np.delete(matrix,0,1)\n return peak_finder_2d(matrix)\n else:\n # go left\n for i in range(j+1,len(matrix[0])):\n matrix = np.delete(matrix,j+1,1)\n return peak_finder_2d(matrix)\n\n if start < matrix[i,j+1]:\n # go right\n for i in range(j,0,-1):\n matrix = np.delete(matrix,0,1)\n return peak_finder_2d(matrix)\n else:\n # go left\n for i in range(j+1,len(matrix[0])):\n matrix = np.delete(matrix,j+1,1)\n return peak_finder_2d(matrix)", "def max_width(mask):\r\n # mask_img = cv2.imread(mask, cv2.IMREAD_GRAYSCALE)\r\n mask_img = mask\r\n # cv2.imwrite(\"mask_img.jpg\", mask_img)\r\n # print(\"pixel:\", mask[0, 0])\r\n ret, mask_img = cv2.threshold(mask_img, 30, 255, cv2.THRESH_BINARY)\r\n # print(\"shape\", mask_img.shape)\r\n height, width = mask_img.shape\r\n\r\n # count max width\r\n max_wid = 0\r\n for i in range(height):\r\n # initialize leftend and rightend of mask area as -1\r\n leftend = -1\r\n rightend = -1\r\n for j in range(width-1):\r\n if mask_img[i, j] > 127 and leftend == -1:\r\n leftend = j\r\n if mask_img[i, j] == 0 and mask_img[i, j-1] > 0 and j > 0:\r\n rightend = j\r\n cv2.imwrite(\"mask_img.png\", branding(mask_img, (i, j), 1))\r\n print(\"leftend:({}, {}); rightedn:({}, {})\\n\".format(i, leftend, i, rightend))\r\n break\r\n max_wid = max(max_wid, rightend-leftend)\r\n # for col in range(width):\r\n # # initialize leftend and rightend of mask area as -1\r\n # leftend = -1\r\n # rightend = -1\r\n # for row in range(height-1):\r\n # if mask_img[row, col] > 30 and leftend == -1:\r\n # leftend = row\r\n # if mask_img[row, col] == 0 and mask_img[row-1, col] > 0 and row > 0:\r\n # rightend = row\r\n # # cv2.imwrite(\"mask_img.png\", branding(mask_img, (i, j), 2))\r\n # # print(\"leftend:({}, {}); rightedn:({}, {})\\n\".format(i, leftend, i, rightend))\r\n # break\r\n # max_wid = max(max_wid, rightend-leftend)\r\n \r\n # print(\"max width: {}\".format(max_wid))\r\n return max_wid", "def find_sudoku_edges(im,axis=0):\r\n# threshold and sum rows and columns\r\n trim = 1*(im<128)\r\n s = trim.sum(axis=axis)\r\n \r\n# find center of strongest lines\r\n s_labels,s_nbr = measurements.label(s>(0.5*max(s)))\r\n m = measurements.center_of_mass(s,s_labels,range(1,s_nbr+1))\r\n x = [int(x[0]) for x in m]\r\n \r\n# if only the strong lines are detected add lines in between\r\n if len(x)==4:\r\n dx = diff(x)\r\n x = [x[0],x[0]+dx[0]/3,x[0]+2*dx[0]/3,\r\n x[1],x[1]+dx[1]/3,x[1]+2*dx[1]/3,\r\n x[2],x[2]+dx[2]/3,x[2]+2*dx[2]/3,x[3]]\r\n \r\n if len(x)==10:\r\n return x\r\n else:\r\n raise RuntimeError('Edges not 
detected.')", "def maximal_square(*args):\n array = [arg for arg in args] # parse string to array\n print(f\"{'array':-^9}\")\n for item in array:\n print(\" \".join(item))\n print()\n # generate the search matrix\n\n # TODO not sure about this size variable.\n # need to start by searching for a submatrix = size of matrix\n # then to reduce the submatrix search size each iteration until\n # max submatrix size is found\n # need to deal with the case of non-square matrix/array\n size = min(len(array), len(array[0]))\n\n while True:\n area = 0\n search = []\n\n if area > 0:\n return area\n else:\n for i in range(size):\n for j in range(size):\n search.append((i, j))\n print(f\"search = {search}\") # coordinates of search area/cells\n print()\n print(\"*\" * 60)\n print(f\"size = {size}\")\n # start searching from each cell starting at top left [0][0]\n for row in range(len(array) - size + 1):\n # print(f\"row {row} = {array[row]}\") # TODO delete line\n for cell in range(len(array[row]) - size + 1):\n local = []\n for neighbour in search:\n print(f\"search for row {row} cell {cell} is {row + neighbour[0], cell + neighbour[1]} \"\n f\"returns {array[row + neighbour[0]][cell + neighbour[1]]}\")\n local.append(array[row + neighbour[0]][cell + neighbour[1]])\n print()\n print(f\"local = {local}\") # this is the contents of the searched cells TODO delete line\n print()\n if all(x == \"1\" for x in local):\n area = size * size\n return f\"Area = {area}\"\n size -= 1", "def find_local_maximum(y, x, img, last_max, max_y, max_x):\n\n if x == 147 and y == 156:\n cv.imshow(img)\n cv.waitKey(0)\n\n last_max = img[y][x]\n max_y = y\n max_x = x\n\n # * * *\n # * x *\n # * * *\n for spaceing in range(1, 100, 1):\n\n treshhold_area = True\n max_has_changed = True\n while max_has_changed:\n max_has_changed = False\n for tmp_y in range(max_y-spaceing, max_y + 2*spaceing + 1, 1):\n # check vertical lines of pixels\n # out of bounds\n if tmp_y < 0 or tmp_y >= img.shape[0] or max_x-spaceing < 0 or max_x+spaceing >= img.shape[1]:\n continue\n\n if img[tmp_y][max_x-spaceing] != 0:\n treshhold_area = False\n\n if img[tmp_y][max_x-spaceing] > last_max:\n last_max = img[tmp_y][max_x-spaceing]\n max_y = tmp_y\n max_x = max_x-spaceing\n max_has_changed = True\n break\n else:\n img[tmp_y][max_x-spaceing] = 0\n\n if img[tmp_y][max_x+spaceing] != 0:\n treshhold_area = False\n\n if img[tmp_y][max_x+spaceing] > last_max:\n last_max = img[tmp_y][max_x+spaceing]\n max_y = tmp_y\n max_x = max_x+spaceing\n max_has_changed = True\n break\n else:\n img[tmp_y][max_x+spaceing] = 0\n\n for tmp_x in range(max_x-spaceing, max_x+2*spaceing + 1, 1):\n # check horizontal lines of pixels\n if tmp_x < 0 or tmp_x >= img.shape[1] or max_y-spaceing < 0 or max_y+spaceing >= img.shape[0]:\n continue\n\n if img[max_y-spaceing][tmp_x] != 0:\n treshhold_area = False\n\n if img[max_y-spaceing][tmp_x] > last_max:\n last_max = img[max_y-spaceing][tmp_x]\n max_y = max_y-spaceing\n max_x = tmp_x\n max_has_changed = True\n break\n else:\n img[max_y-spaceing][tmp_x] = 0\n\n if img[max_y+spaceing][tmp_x] != 0:\n treshhold_area = False\n\n if img[max_y+spaceing][tmp_x] > last_max:\n last_max = img[max_y+spaceing][tmp_x]\n max_y = max_y+spaceing\n max_x = tmp_x\n max_has_changed = True\n break\n else:\n img[max_y+spaceing][tmp_x] = 0\n\n if treshhold_area:\n break\n\n return max_y, max_x, last_max", "def get_max_cells(y):\n max_cells = 0\n for frame in range(y.shape[0]):\n cells = np.unique(y[frame])\n n_cells = cells[cells != 0].shape[0]\n if n_cells > 
max_cells:\n max_cells = n_cells\n return max_cells", "def cut_matrix(mat, no_edge=0):\n def cutter(mat, righter, downer):\n \"\"\" given the matrix and the two outer surrounding coordinates of the top left corner of a submatrix\n\n righter and downer traces the outer contour of a submatrix and verifies where it ends.\n righter goes right (increment in column index) when the value to its right is not an edge, else goes downwards.\n downer goes downside (increment in row index) when the value beneath it is not an edge, else goes right.\n righter and downer cuts a sub-matrix if they are in a diagonal position, corner touch corner.\n\n Args:\n mat (list of list of equal length): the cost matrix\n righter (tuple of two int): coordinate of righter\n downer (tuple of two int): coordinate of downer\n\n Returns:\n cut (tuple of two int): the row and column index the cuts (outer border) the sub-matrix beginning from the point\n surrounded by the input righter and downer\n \"\"\"\n righter_copy = righter # save the initial righter\n righter_set = set() # the righter position ever visited\n cut = (len(mat), len(mat[0])) # the default return value, if not cut, righter downer matches there, outside matrix\n # trace the righter first, to the very end\n # the stop condition is when the column index reaches the column number of the matrix + 1\n while righter[1] <= len(mat[0]):\n righter_set.add(righter)\n # righter can move right if it is already out side the matrix or next value is not an edge\n if righter[0] == len(mat) or (righter[1]+1 < len(mat[0]) and mat[righter[0]][righter[1]+1] == no_edge):\n righter = (righter[0], righter[1]+1)\n else:\n # otherwise move downwards\n righter = (righter[0]+1, righter[1])\n righter_set.remove(righter_copy) # remove the initial righter so that it won't match up with first downer\n # then move the downer, the stop condition its row index is matrix row number + 1\n while downer[0] <= len(mat):\n # in general initial downer always matches with the initial righter, why removing the initial righter\n if (downer[0]+1, downer[1]-1) in righter_set: # test diagonal position\n cut = (downer[0]+1, downer[1])\n break\n # can move down if already outside matrix or next value is not an edge\n if downer[1] == len(mat[0]) or (downer[0] + 1 < len(mat) and mat[downer[0]+1][downer[1]] == no_edge):\n downer = (downer[0]+1, downer[1])\n else: # other wise move right\n downer = (downer[0], downer[1]+1)\n # if not cut righter surely contain (len(mat), len(mat[0]-1))\n # then downer matches righter at (len(mat)-1, len(mat[0]))\n # which makes the default cut that is (len(mat), len(mat[0]))\n return cut\n\n # the crossing point (inclusive) of line_start and column start is the top left corner of sub-matrix\n line_start = 0\n column_start = 0\n res = [] # row and column index range for each submatrix\n while line_start < len(mat) and column_start < len(mat[0]):\n\n righter = None\n downer = None\n for i in range(line_start, len(mat)):\n # the righter is the position left to the top element in the first non-empty column\n row = mat[i]\n if any([v != no_edge for v in row]):\n downer = (i-1, [j for j, v in enumerate(row) if v != no_edge][0])\n break\n\n for i in range(column_start, len(mat[0])):\n # the downer is the position upper to the first element in the first non-empty row\n column = [row[i] for row in mat]\n if any([v != no_edge for v in column]):\n\n righter = ([j for j, v in enumerate(column) if v != no_edge][0], i-1)\n break\n # if can not be found means from line_start, column_start, 
there is no edge left\n if righter is None or downer is None:\n break\n\n line_start, column_start = cutter(mat, righter, downer) # update starting point with the last cut\n # ((row index range), (column index range))\n res.append(((min(righter[0], downer[0]+1), line_start), (min(righter[1]+1, downer[1]), column_start)))\n\n return res", "def smart_argmax(vector,w=5):\n mx = vector.max()\n mxmask = vector == mx\n n_mx = mxmask.sum()\n sargmax = np.nan # A bad invalid is returned if somehow it's not assigned.\n center_index = int(np.round(vector.shape[0] / 2))\n if n_mx == 1:\n #print('No need for smart_argmax()')\n return vector.argmax()\n hw = int(w/2)\n \n oords = np.arange(0,vector.shape[0]) \n locs = oords[mxmask]\n # 1. If maxes are touching (adjacent),\n # - If only 2, pick the one closest to center\n # - If > 2, pick the one in the middle\n d = np.diff(locs)\n if (d == 1).sum() != 0: # A difference of 1 implies consecutive integers\n # --- Save non-consecutive peaks for analysis\n singular_peak_locs = []\n if d[0] != 1: singular_peak_locs.append(locs[0])\n for s in range(1,d.shape[0]):\n if ((d[s] > 1) and (d[s-1] != 1)): singular_peak_locs.append(locs[s])\n if d[-1] != 1: singular_peak_locs.append(locs[-1])\n # --- \n if np.unique(d).shape[0] > 1: # Check to make sure all d's != 1.\n ic,nc,bn = delineate_consecutive_binned_vector_values(d,[0,1.1,2]) # bin 1 will be diff of 1\n else: # Entering this block means that this is only 1 consecutive span\n ic = np.array([0])\n nc = np.array([locs.shape[0]-1]) # minus 1 because +1 will be added back [5/22/23]\n bn = np.array([1])\n diff_bin_mask = bn == 1\n if ic is None: pdb.set_trace()\n ic = ic[diff_bin_mask]\n nc = nc[diff_bin_mask] + 1\n middle_points = []\n for j in range(0,ic.shape[0]):\n print(j)\n start_index = locs[ic[j]]\n end_index = start_index + nc[j]\n print(end_index,start_index,end_index-start_index)\n print(vector[start_index:end_index])\n print(oords[start_index:end_index])\n x = oords[start_index:end_index]\n print(x.shape)\n # Now find the middle or suitable point\n if x.shape[0] == 2:\n middle_index = np.abs(x - center_index).argmin()\n middle_index = x[middle_index]\n print('middle index: ',middle_index)\n print('Shape of 2 block')\n else:\n middle_index = int(np.median(x))\n print(middle_index)\n print('---------------------')\n middle_points.append(middle_index)\n locs = np.array( middle_points + singular_peak_locs, dtype=int ) # pared consecutives and singulars back into modified locs variable\n n_mx = locs.shape[0] # New # of maxes, with consecutives cut out\n \n reliefs = np.full(n_mx,999)\n for i,argmax in enumerate(locs):\n reliefs[i] = (mx - vector[argmax-hw:argmax+hw+1]).sum()\n \n # 1. Choose max with least topographic relief\n mn_relief = reliefs.min()\n relief_mask = reliefs == mn_relief\n n_mnrf = relief_mask.sum()\n if n_mnrf == 1:\n i = locs[relief_mask]\n sargmax_val = vector[i]\n sargmax = i\n else: # Should have to be > 1\n # 2. 
Maxes are tied for topographic relief,\n # choose the one closer to the center.\n imid = np.abs(locs[relief_mask] - center_index).argmin()\n i = locs[relief_mask][imid]\n sargmax_val = vector[i]\n sargmax = i\n #print('Standard argmax',vector.argmax(),vector[vector.argmax()])\n #print('Chosen argmax: ',sargmax,sargmax_val)\n if sargmax_val != vector.max(): pdb.set_trace()\n if sargmax_val != vector[sargmax]: pdb.set_trace()\n return int(sargmax)", "def medianClip(self,thr=3.0,medfiltersize=5,minaxislength=5,minSegment=50):\n sg = self.sg/np.max(self.sg)\n\n # This next line gives an exact match to Lasseck, but screws up bitterns!\n #sg = sg[4:232, :]\n\n rowmedians = np.median(sg, axis=1)\n colmedians = np.median(sg, axis=0)\n\n clipped = np.zeros(np.shape(sg),dtype=int)\n for i in range(np.shape(sg)[0]):\n for j in range(np.shape(sg)[1]):\n if (sg[i, j] > thr * rowmedians[i]) and (sg[i, j] > thr * colmedians[j]):\n clipped[i, j] = 1\n\n # This is the stencil for the closing and dilation. It's a 5x5 diamond. Can also use a 3x3 diamond\n diamond = np.zeros((5,5),dtype=int)\n diamond[2,:] = 1\n diamond[:,2] = 1\n diamond[1,1] = diamond[1,3] = diamond[3,1] = diamond[3,3] = 1\n #diamond[2, 1:4] = 1\n #diamond[1:4, 2] = 1\n\n import scipy.ndimage as spi\n clipped = spi.binary_closing(clipped,structure=diamond).astype(int)\n clipped = spi.binary_dilation(clipped,structure=diamond).astype(int)\n clipped = spi.median_filter(clipped,size=medfiltersize)\n clipped = spi.binary_fill_holes(clipped)\n\n import skimage.measure as skm\n blobs = skm.regionprops(skm.label(clipped.astype(int)))\n\n # Delete blobs that are too small\n todelete = []\n for i in blobs:\n if i.filled_area < minSegment or i.minor_axis_length < minaxislength:\n todelete.append(i)\n\n for i in todelete:\n blobs.remove(i)\n\n list = []\n\n # convert bounding box pixels to milliseconds:\n for l in blobs:\n list.append([float(l.bbox[0] * self.incr / self.fs),\n float(l.bbox[2] * self.incr / self.fs)])\n return list", "def non_max_suppression(image, direction):\n\n # get the height and width of the image\n height, width = image.shape[:2]\n\n # generate the output matrix of zeros\n output = np.zeros((height, width))\n\n # iterate through the rows and cols of the edge matrix and\n # compare to all neighboring pixels to determine if the value\n # will be preserved or suppressed, if not set in loop, will \n # be 0\n for row in xrange(1,height-1):\n for col in xrange(1,width-1):\n # get the direction value at the edge position\n theta = angle_buckets(direction[row, col])\n\n # check if 0 degree bucket\n if theta == 0:\n # for 0 degrees the point will be considered to be on the edge \n # if its gradient magnitude is greater than the magnitudes at pixels \n # in the east and west directions\n if (image[row,col] >= image[row, col-1]):\n if (image[row,col] >= image[row, col+1]):\n output[row,col] = image[row,col]\n \n # check if 90 degree bucket\n elif theta == 90:\n # for 90 degrees the point will be considered to be on the edge if its \n # gradient magnitude is greater than the magnitudes at pixels in the \n # north and south directions\n if (image[row,col] >= image[row-1, col]):\n if (image[row,col] >= image[row+1, col]):\n output[row,col] = image[row,col]\n\n # check if 135 degree bucket \n elif theta == 135:\n # for 135 degrees the point will be considered to be on the edge if its \n # gradient magnitude is greater than the magnitudes at pixels in the \n # north west and south-east directions\n if (image[row,col] >= image[row-1, col-1]):\n 
if (image[row,col] >= image[row+1, col+1]):\n output[row,col] = image[row,col]\n\n # check if 45 degree bucket \n elif theta == 45:\n # for 45 degrees the point will be considered to be on the edge if its \n # gradient magnitude is greater than the magnitudes at pixels in the \n # north east and south west directions\n if (image[row,col] >= image[row-1, col+1]):\n if (image[row,col] >= image[row+1, col-1]):\n output[row,col] = image[row,col]\n \n # write the output to file\n out = OUT_FOLDER+\"/suppressed.jpg\"\n cv2.imwrite(out, output)\n\n # return the edge matrix\n return output", "def listOfRowIndexesContainingAOneInTheLeftmostColumnWithTheFewestOnes(self, matrix):\n toReturn = []\n sumOfEachColumnList = []\n minSum = 9999999999 #ugh shitty max sum...\n indexOfColumnWithLeftmostMinSum = -1\n for i in range(0, len(matrix[0])):\n eachColumn = self.getColumn(matrix, i)\n sumOfEachColumn = sum(eachColumn[1:])\n if sumOfEachColumn < minSum:\n minSum = sumOfEachColumn\n sumOfEachColumnList.append(sumOfEachColumn)\n for index, eachSum in enumerate(sumOfEachColumnList):\n if eachSum == minSum:\n indexOfColumnWithLeftmostMinSum = index\n break\n for rowIndex, eachRow in enumerate(matrix):\n if rowIndex > 0:\n if eachRow[indexOfColumnWithLeftmostMinSum] == 1:\n toReturn.append(rowIndex)\n return toReturn", "def find_max_ins_rect(data):\n\n # Resize the image mask(This is to speed up the calculations of largest inscribed rectangle)\n data, r_scale = resize_image(data, 125)\n\n # Extract a channel from the input image(RGB will be the same since image is binarized)\n nrows, ncols = data.shape[0], data.shape[1]\n w = np.zeros(dtype=int, shape=data.shape)\n h = np.zeros(dtype=int, shape=data.shape)\n skip = 0\n area_max = (0, [])\n\n # Iterate through each pixel and mark on w and h\n for r in range(nrows):\n for c in range(ncols):\n if data[r][c] == skip:\n continue\n if r == 255:\n h[r][c] = 1\n else:\n h[r][c] = h[r - 1][c] + 1\n if c == 255:\n w[r][c] = 1\n else:\n w[r][c] = w[r][c - 1] + 1\n minw = w[r][c]\n # Calculate the largest area and compare with the largest area stored\n for dh in range(h[r][c]):\n minw = min(minw, w[r - dh][c])\n area = (dh + 1) * minw\n if area > area_max[0]:\n area_max = (area, [c - minw + 1, r - dh, c, r])\n\n # Coordinates of largest inscribed rectangle. List has the following order [x, y, w, h]\n rect_coor = area_max[1]\n\n del data, h, w\n\n # With the calculated scale, rescale the coordinates to obtain coordinates of the original image.\n return rescale_coordinates(rect_coor, r_scale)", "def get_mask_M3(image):\n\n # Tunning parameters. 
We can put this as input to the function as well\n CANNY_THRESH_1 = 30\n CANNY_THRESH_2 = 130\n\n # load the input image\n image = cv.cvtColor(image, cv.COLOR_BGR2GRAY)\n blurred = cv.GaussianBlur(image, (5, 5), 0)\n\n # obtain the edges of the image\n edges = cv.Canny(blurred, CANNY_THRESH_1, CANNY_THRESH_2)\n edges = cv.dilate(edges, None)\n edges = cv.erode(edges, None)\n\n # find contours in the edged image\n _,cnts,_ = cv.findContours(edges.copy(), cv.RETR_LIST,\n cv.CHAIN_APPROX_NONE)\n cnts = imutils.grab_contours(cnts)\n\n # sort from biggest area to smallest and take the top5\n cnts = sorted(cnts, key = cv.contourArea, reverse = True)[:5]\n\n\n mask = np.zeros(edges.shape)\n cmax, max_extent=[],0\n # loop over the contours from bigger to smaller, and find the biggest one with the right orientation\n for c in cnts:\n # # approximate to the hull.\n hull = cv.convexHull(c)\n\n # find the contour with the highest extent compared to the bounding rectangle\n area = cv.contourArea(hull)\n x,y,w,h = cv.boundingRect(c)\n rect_area = w*h\n extent = float(area)/rect_area\n\n # get the contour with max extent (area covered, approximation area)\n if max_extent<extent:\n max_extent=extent\n cmax=hull\n\n cv.fillConvexPoly(mask, cmax, (255)) # fill the mask\n\n return mask", "def balanced_hist_thresholding(b):#source: https://theailearner.com/tag/image-thresholding/\n i_s = np.min(np.where(b[0]>0))\n i_e = np.max(np.where(b[0]>0))\n i_m = (i_s + i_e)//2\n w_l = np.sum(b[0][0:i_m+1])\n w_r = np.sum(b[0][i_m+1:i_e+1])\n while (i_s != i_e):\n if (w_r > w_l):\n w_r -= b[0][i_e]\n i_e -= 1\n if ((i_s+i_e)//2) < i_m:\n w_l -= b[0][i_m]\n w_r += b[0][i_m]\n i_m -= 1\n else:\n w_l -= b[0][i_s]\n i_s += 1\n if ((i_s+i_e)//2) >= i_m:\n w_l += b[0][i_m+1]\n w_r -= b[0][i_m+1]\n i_m += 1\n return i_m", "def nonmax_suppress_1d(arr, winsize=5):\n _arr = arr.copy()\n\n for i in range(_arr.size):\n if i == 0:\n left_neighborhood = 0\n else:\n left_neighborhood = arr[max(0,i-winsize):i]\n if i >= _arr.size-2:\n right_neighborhood = 0\n else:\n right_neighborhood = arr[i+1:min(arr.size-1,i+winsize)]\n\n if arr[i] < np.max(left_neighborhood) or arr[i] <= np.max(right_neighborhood):\n _arr[i] = 0\n return _arr", "def max_connected_cells(matrix):\n max_region = 0\n for row in range(len(matrix)):\n for col in range(len(matrix[0])):\n if matrix[row][col]:\n count = count_cells_in_region(matrix, row, col)\n if count > max_region:\n max_region = count\n return max_region", "def linear_feature_filter(a):\n\n edge_length = int(len(a) ** 0.5)\n\n b = a.reshape((edge_length, edge_length))\n\n center = int((edge_length - 1) / 2)\n center_val = b[center, center]\n\n if center_val <= 0: # if the value is 0 we can just move on. If it's less than 0 (should not happen in a dhm) then repair it\n return 0\n\n b = b > 0 # cast to Bools. 
If DHM is over 0, True\n\n # data = np.array([[x, y, b[x, y]] for x, y in coords if not np.isnan(b[x, y])])\n\n # measure.profile_line\n # coords = itertools.product(range(edge_length), range(edge_length)) # all coordinates in the matrix\n start_coords = list(itertools.product([0], range(edge_length)))\n start_coords.extend(list(itertools.product(range(1, edge_length - 1), [edge_length - 1])))\n end_coords = [(edge_length - 1 - a, edge_length - 1 - b) for a, b in start_coords]\n\n n_filled = b.sum() # total number of nonzero DHM cells\n\n i = 0\n for start, end in zip(start_coords, end_coords):\n i += 1\n intercepted = measure.profile_line(b, start, end, linewidth=1)\n n_intercepted = intercepted.sum()\n\n\n frac_filled_on_line = (n_intercepted / len(intercepted))\n frac_filled_but_not_on_line = (n_filled - n_intercepted) / edge_length ** 2\n\n # second part of conditional: are there a lot of points aligned linearly?\n # first part of conditional: are there not a lot of surrounding pixels?\n # if both are true, the feature is probably a powerline or building edge\n if frac_filled_but_not_on_line < 40/81 and frac_filled_on_line >= (3.5 / 9):\n print(f'FILTERING PT. N on line: {n_intercepted} out of {len(intercepted)}. {start}, {end}')\n print(f'Checked {i} lines, value squashed')\n return 0\n\n #print(f'Checked {i} lines, value passed')\n return center_val", "def fixMasks(image, table_mask, column_mask):\r\n table_mask = table_mask.reshape(1024,1024).astype(np.uint8)\r\n column_mask = column_mask.reshape(1024,1024).astype(np.uint8)\r\n \r\n #get contours of the mask to get number of tables\r\n contours, table_heirarchy = cv2.findContours(table_mask, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_NONE)\r\n \r\n table_contours = []\r\n #ref: https://www.pyimagesearch.com/2015/02/09/removing-contours-image-using-python-opencv/\r\n #remove bad contours\r\n\r\n #print(contours)\r\n\r\n for c in contours:\r\n # if the contour is bad, draw it on the mask\r\n\r\n\r\n #if not is_contour_bad(c):\r\n if cv2.contourArea(c) > 2000:\r\n table_contours.append(c)\r\n \r\n if len(table_contours) == 0:\r\n return None\r\n\r\n #ref : https://docs.opencv.org/4.5.2/da/d0c/tutorial_bounding_rects_circles.html\r\n #get bounding box for the contour\r\n \r\n table_boundRect = [None]*len(table_contours)\r\n for i, c in enumerate(table_contours):\r\n polygon = cv2.approxPolyDP(c, 3, True)\r\n table_boundRect[i] = cv2.boundingRect(polygon)\r\n \r\n #table bounding Box\r\n table_boundRect.sort()\r\n \r\n col_boundRects = []\r\n for x,y,w,h in table_boundRect:\r\n \r\n col_mask_crop = column_mask[y:y+h,x:x+w]\r\n \r\n #get contours of the mask to get number of tables\r\n contours, col_heirarchy = cv2.findContours(col_mask_crop, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)\r\n #get bounding box for the contour\r\n boundRect = [None]*len(contours)\r\n for i, c in enumerate(contours):\r\n polygon = cv2.approxPolyDP(c, 3, True)\r\n boundRect[i] = cv2.boundingRect(polygon)\r\n \r\n #adjusting columns as per table coordinates\r\n boundRect[i] = (boundRect[i][0] + x ,\r\n boundRect[i][1] + y ,\r\n boundRect[i][2],\r\n boundRect[i][3])\r\n \r\n col_boundRects.append(boundRect)\r\n \r\n image = image[...,0].reshape(1024, 1024).astype(np.uint8)\r\n \r\n #draw bounding boxes\r\n color = (0,255,0)\r\n thickness = 4\r\n \r\n for x,y,w,h in table_boundRect:\r\n image = cv2.rectangle(image, (x,y),(x+w,y+h), color, thickness)\r\n \r\n return image, table_boundRect, col_boundRects" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
This method fetches the genome sequence for a given genomeId from PATRIC.
def getGenomeSequence(genomeId):
    r = urllib.urlopen(PatricURL + genomeId + '/' + genomeId + '.fna').read()
    soup = BeautifulSoup(r)
    # print type(soup)
    genomeSequence = soup.prettify().split('| ' + genomeId + ']')[1]
    return genomeSequence.replace('\n', '')
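A usage sketch; it assumes the PatricURL global referenced above is already defined to point at the PATRIC genome directory, and the genome ID below is only a placeholder for a real PATRIC identifier (taxon_id.version format):

genome_id = '83332.12'  # placeholder PATRIC genome identifier
sequence = getGenomeSequence(genome_id)
print('Genome length: %d' % len(sequence))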
[ "def get_sequence(seq_id, anno_db, genome, seq_type='CDS', exon_split='', flank_len=0):\n def get_sequence_from_genome_by_anno_db(df, genome):\n tmp_seq = genome[df['seq_name']]\n tmp_seq_start = df['seq_start']-1\n tmp_seq_end = df['seq_end']\n df['seq'] = tmp_seq[tmp_seq_start:tmp_seq_end]\n return df\n gene_db = anno_db.query(f'seq_type == \"{seq_type}\" and transcript_id == \"{seq_id}\"')\n gene_db = gene_db.sort_values(by='seq_start')\n gene_db = gene_db.apply(get_sequence_from_genome_by_anno_db, axis=1, genome=genome)\n seq_name = gene_db['seq_name'].unique()\n seq_strand = gene_db['seq_strand'].unique()\n if len(seq_name) > 1 or len(seq_strand) > 1:\n raise TypeError('Different seq_name or seq_strand for the same gene!')\n else:\n seq_name = seq_name[0]\n seq_strand = seq_strand[0]\n if isinstance(exon_split, str):\n cds_seq = exon_split.join(gene_db['seq'])\n else:\n raise TypeError('Input exon_split should be a string but %s' % type(exon_split))\n if flank_len:\n gene_start = gene_db.iloc[0]['seq_start']\n gene_end = gene_db.iloc[-1]['seq_end']\n tmp_seq = genome[seq_name]\n flank5 = tmp_seq[(gene_start-flank_len-1):(gene_start-1)].lower()\n flank3 = tmp_seq[gene_end:(gene_end+flank_len)].lower()\n else:\n flank5 = ''\n flank3 = ''\n cds_seq = flank5 + cds_seq + flank3\n if np.all(gene_db['seq_strand'] == '-'):\n cds_seq = reverse_seq(cds_seq)\n elif np.all(gene_db['seq_strand'] == '+'):\n pass\n else:\n raise ValueError('Different strand in the elements of %s' % seq_id)\n return cds_seq", "def getseqbyid(conn, seqid):\n return strquery(conn, \"select sequence from sequences where \" \\\n + \"idsequences=%s\", (seqid,))", "def GetSequence(self):\n return self.genome", "def target_seq(genome, query, filetype='fasta'):\n\n print('finding target sequences')\n\n if arguments().sp == 'cg':\n features_file = cg_features_file\n elif arguments().sp == 'sc':\n features_file = sc_features_file\n elif arguments().sp == 'ca':\n features_file = ca_features_file\n elif arguments().sp == 'sp':\n features_file = sp_features_file\n else:\n raise ValueError('Unknown species flag specified')\n\n chromes = list(pd.read_csv(features_file)['chrom'].unique())\n if arguments().sp == 'ca':\n chromes = [str(chrom)[:9] for chrom in chromes if str(chrom)[8] == 'A']\n if arguments().sp == 'sp':\n chromes = ['I', 'II', 'III']\n\n chroms_locs = pd.DataFrame(np.nan, index=range(250000), columns=chromes)\n count = 0\n total_length = 0\n i = 0\n for record in SeqIO.parse(genome, filetype):\n if arguments().sp == 'sp':\n if record.id in chromes:\n x = nt_search(str(record.seq), query)\n count += len(x[1:])\n total_length += len(record.seq)\n chroms_locs[chromes[i]] = pd.Series(x[1:])\n i += 1\n else:\n pass\n else:\n x = nt_search(str(record.seq), query)\n count += len(x[1:])\n total_length += len(record.seq)\n chroms_locs[chromes[i]] = pd.Series(x[1:])\n i += 1\n\n chroms_locs = chroms_locs.dropna(how='all')\n\n if arguments().sp == 'cg':\n chroms_locs.to_csv(dependencies_dir + cg_hermes_on_chr)\n elif arguments().sp == 'sc':\n chroms_locs.to_csv(dependencies_dir + sc_hermes_on_chr)\n elif arguments().sp == 'ca':\n chroms_locs.to_csv(dependencies_dir + ca_hermes_on_chr)\n elif arguments().sp == 'sp':\n chroms_locs.to_csv(dependencies_dir + sp_hermes_on_chr)\n else:\n raise ValueError('Unknown species flag specified')\n\n return chroms_locs", "def getannotationseqs(db,annotationid):\n\tDebug(1,'get annotationseqs for annotationid %d' % 
annotationid)\n\trdata={}\n\trdata['annotationid']=annotationid\n\tres=requests.get(db.dburl+'/annotations/get_sequences',json=rdata)\n\tif res.status_code!=200:\n\t\tDebug(6,'error getting sequences for annotation %d' % annotationid)\n\t\tDebug(6,res.content)\n\t\treturn []\n\tseqids=res.json()['seqids']\n\tDebug(2,'Found %d sequences for annotationid %d' % (len(seqids),annotationid))\n\treturn seqids", "def getseq_forid(self,seqfile,id):\t\n\t\tseq_dict={}\n\t\t\n\t\tfasta_sequences = SeqIO.parse(open(seqfile),'fasta')\n\t\t\n\t\tfor fasta in fasta_sequences:\n\t\t\tif fasta.id==id:\n\t\t\t\tseq_dict[fasta.id]=fasta.seq\n\t\t\t\tbreak\n\t\t\t\t\n\t\treturn(seq_dict)", "def test_get_sequence_seqrecord(self):\n my_seq = \"TCTAGA\" + 50 * \"A\" + \"CCGCGG\" + 30 * \"T\"\n genome = {\"chrA\": SeqRecord(Seq(my_seq))}\n\n my_revcomp = str(genome[\"chrA\"].reverse_complement().seq)\n\n iv1p = GenomicSegment(\"chrA\", 0, 6, \"+\")\n iv2p = GenomicSegment(\"chrA\", 56, 62, \"+\")\n iv3p = GenomicSegment(\"chrA\", 0, 92, \"+\")\n\n iv1m = GenomicSegment(\"chrA\", 0, 6, \"-\")\n iv2m = GenomicSegment(\"chrA\", 56, 62, \"-\")\n iv3m = GenomicSegment(\"chrA\", 0, 92, \"-\")\n\n ivc1p = self.test_class(iv1p, iv2p, ID=\"ivc1p\")\n ivc1m = self.test_class(iv1m, iv2m, ID=\"ivc1m\")\n\n self.assertEquals(ivc1p.get_sequence(genome), \"TCTAGACCGCGG\")\n self.assertEquals(ivc1p.get_fasta(genome), \">ivc1p\\nTCTAGACCGCGG\\n\")\n\n self.assertEquals(ivc1m.get_sequence(genome), \"CCGCGGTCTAGA\")\n self.assertEquals(ivc1m.get_fasta(genome), \">ivc1m\\nCCGCGGTCTAGA\\n\")\n\n ivc2p = self.test_class(iv3p, ID=\"ivc2p\")\n ivc2m = self.test_class(iv3m, ID=\"ivc2m\")\n\n self.assertEquals(ivc2p.get_sequence(genome), my_seq)\n self.assertEquals(ivc2p.get_fasta(genome), \">ivc2p\\n%s\\n\" % my_seq)\n\n self.assertEquals(ivc2m.get_sequence(genome), my_revcomp)\n self.assertEquals(ivc2m.get_fasta(genome), \">ivc2m\\n%s\\n\" % my_revcomp)", "def test_get_sequence_str(self):\n my_seq = \"TCTAGA\" + 50 * \"A\" + \"CCGCGG\" + 30 * \"T\"\n genome = {\"chrA\": my_seq}\n\n my_revcomp = str(SeqRecord(Seq(genome[\"chrA\"])).reverse_complement().seq)\n\n iv1p = GenomicSegment(\"chrA\", 0, 6, \"+\")\n iv2p = GenomicSegment(\"chrA\", 56, 62, \"+\")\n iv3p = GenomicSegment(\"chrA\", 0, 92, \"+\")\n\n iv1m = GenomicSegment(\"chrA\", 0, 6, \"-\")\n iv2m = GenomicSegment(\"chrA\", 56, 62, \"-\")\n iv3m = GenomicSegment(\"chrA\", 0, 92, \"-\")\n\n ivc1p = self.test_class(iv1p, iv2p, ID=\"ivc1p\")\n ivc1m = self.test_class(iv1m, iv2m, ID=\"ivc1m\")\n\n self.assertEquals(ivc1p.get_sequence(genome), \"TCTAGACCGCGG\")\n self.assertEquals(ivc1p.get_fasta(genome), \">ivc1p\\nTCTAGACCGCGG\\n\")\n\n self.assertEquals(ivc1m.get_sequence(genome), \"CCGCGGTCTAGA\")\n self.assertEquals(ivc1m.get_fasta(genome), \">ivc1m\\nCCGCGGTCTAGA\\n\")\n\n ivc2p = self.test_class(iv3p, ID=\"ivc2p\")\n ivc2m = self.test_class(iv3m, ID=\"ivc2m\")\n\n self.assertEquals(ivc2p.get_sequence(genome), my_seq)\n self.assertEquals(ivc2p.get_fasta(genome), \">ivc2p\\n%s\\n\" % my_seq)\n\n self.assertEquals(ivc2m.get_sequence(genome), my_revcomp)\n self.assertEquals(ivc2m.get_fasta(genome), \">ivc2m\\n%s\\n\" % my_revcomp)", "def getsequences(conn, fingerprint):\n return dictlistquery(conn, \"select idsequences,sequence from sequences \" \\\n + \"where fingerprint=%s\", (fingerprint,))", "def retrieve_refseq_identifiers_for_transcript(gencode_id):\n result = {}\n result['NP'] = []\n result['NM'] = []\n result['NR'] = []\n with open(GENCODE_REFSEQ_FILE) as gencode_refseq:\n 
# read the lines in the file\n lines = gencode_refseq.readlines()\n for line in lines:\n # check if the unique identifier is on the current line\n if gencode_id in line:\n #Add the result to hits\n tokens = line.split('\\t')\n \n # Only add the translation to the translation list if the gene_name exactly matches the one we are looking for\n if gencode_id == tokens[0]:\n # add the results\n for token in tokens[1:]:\n token = token.strip()\n if token.startswith('NP'):\n result['NP'].append(token)\n elif token.startswith('NM'):\n result['NM'].append(token)\n elif token.startswith('NR'):\n result['NR'].append(token)\n elif len(token) == 0:\n continue\n else:\n _log.warning(\"When retrieving matching RefSeq ids for \"+gencode_id+\" unexpected token: \"+token) \n return result", "def fetch_genome_data(genome):\n with open(genome_file_path(genome), \"rb\") as f:\n return Entrez.read(f)", "async def fetch(\n session: aiohttp.ClientSession, accession: Union[int, str]\n) -> Optional[dict]:\n params = {\n \"db\": \"nuccore\",\n \"email\": EMAIL,\n \"id\": accession,\n \"retmode\": \"text\",\n \"rettype\": \"gb\",\n \"tool\": TOOL,\n }\n\n async with session.get(FETCH_URL, params=params) as resp:\n body = await resp.text()\n\n if resp.status != 200:\n if \"Failed to retrieve sequence\" not in body:\n logger.warning(\"Unexpected Genbank error: %s\", body)\n\n return None\n\n gb = Bio.SeqIO.read(io.StringIO(body), \"gb\")\n\n data = {\n \"accession\": gb.id,\n \"definition\": gb.description,\n \"sequence\": str(gb.seq),\n \"host\": \"\",\n }\n\n for feature in gb.features:\n if feature.type == \"source\":\n try:\n data[\"host\"] = feature.qualifiers[\"host\"][0]\n except (IndexError, KeyError):\n data[\"host\"] = \"\"\n\n return data", "def _get_protein_codon_DNA_seq(self,protein_name,codon_number):\n\t\tif (protein_name is None) or (protein_name not in self.reference_protein_locations):\n\t\t\treturn None\n\t\tprot_start_pos = self.reference_protein_locations[protein_name][0]\n\t\tcodon_start_pos = prot_start_pos + 3*(codon_number - 1)\n\t\tcodon_end_pos = codon_start_pos + 2\n\t\treturn self.reference.seq[codon_start_pos - 1 : codon_end_pos].upper()", "def get_chrom(data, tx_id):\n # get transcript obj\n tx = data.transcript_by_id(tx_id)\n #return chrom\n return tx.contig", "async def fetch(settings, session, accession):\n params = {\n \"db\": \"nuccore\",\n \"email\": EMAIL,\n \"id\": accession,\n \"retmode\": \"text\",\n \"rettype\": \"gb\",\n \"tool\": TOOL\n }\n\n async with virtool.http.proxy.ProxyRequest(settings, session.get, FETCH_URL, params=params) as resp:\n\n body = await resp.text()\n\n if resp.status != 200:\n if \"Failed to retrieve sequence\" not in body:\n logger.warning(f\"Unexpected Genbank error: {body}\")\n\n return None\n\n gb = Bio.SeqIO.read(io.StringIO(body), \"gb\")\n\n data = {\n \"accession\": gb.id,\n \"definition\": gb.description,\n \"sequence\": str(gb.seq),\n \"host\": \"\"\n }\n\n for feature in gb.features:\n if feature.type == \"source\":\n try:\n data[\"host\"] = feature.qualifiers[\"host\"][0]\n except (IndexError, KeyError):\n data[\"host\"] = \"\"\n\n return data", "def get_seq(filepath):\n seqrecord = SeqIO.read(filepath, \"genbank\")\n return seqrecord.seq", "def getSequence(self, loc=None, **kargs):\n\n # This is old and ugly code.\n # But it works and has been pretty extensively tested and is 'fast enough'.\n # So don't go messing with it unless you have a very good reason.\n\n valid_args = [\"coords\", \"strand\", \"mask\"]\n for key in kargs:\n assert key 
in valid_args, \"getSequence() - Argument '%s' is not recognised\" % key\n\n assert loc or \"coords\" in kargs, \"No valid coords or loc specified\"\n assert self.bHasBoundSequence, \"No Available genome FASTA files\"\n\n if \"coords\" in kargs:\n loc = kargs[\"coords\"]\n\n try:\n loc = location(loc=loc)\n except Exception:\n pass\n\n assert isinstance(loc, location), \"'loc' must be a proper genome location\"\n\n left = loc[\"left\"]\n right = loc[\"right\"]\n chrom = loc[\"chr\"]\n\n if chrom not in self.seq:\n config.log.warning(\"'%s' not found\" % chrom)\n return None\n\n seekloc = (left + (left // self.seq_data[chrom][\"linelength\"]))-1 # the division by 50 is due to the presence of newlines every 50 characters.\n #print chrom, self.seq[chrom], seekloc, self.seq_data[chrom][\"offset\"], loc\n self.seq[chrom].seek(seekloc+self.seq_data[chrom][\"offset\"]) # move to the start location.\n\n delta = (right - left)+1\n\n # count the number of line endings.\n # get a niave reading.\n bonus = 0\n ret = \"\"\n while len(ret) < delta:\n self.seq[chrom].seek(seekloc+self.seq_data[chrom][\"offset\"])\n ret = self.seq[chrom].read(delta + (delta // self.seq_data[chrom][\"linelength\"]) + bonus).replace(\"\\n\", \"\").replace(\"\\r\", \"\")\n bonus += 1\n if bonus > delta: # breaks in case you send a loc that is beyond the end of the file.\n break\n\n if \"strand\" in kargs and kargs[\"strand\"] in negative_strand_labels:\n ret = utils.rc(ret)\n\n if \"mask\" in kargs and kargs[\"mask\"]:\n ret = utils.repeat_mask(ret)\n\n return ret", "def getSequences(self, genelist=None, loc_key='loc', replace_loc_key=True, strand_key=False,\n mask=False, pointify=False, delta=False, **kargs):\n raise NotImplementedError", "def test_ncbi_genome():\n tmp = mkdtemp()\n genomepy.install_genome(\"Release 6 plus ISO1 MT\", \"NCBI\", genome_dir=tmp)\n g = genomepy.Genome(\"Release_6_plus_ISO1_MT\", genome_dir=tmp)\n seq = g[\"3L\"][10637840:10637875]\n assert str(seq).upper() == \"TTTGCAACAGCTGCCGCAGTGTGACCGTTGTACTG\"\n shutil.rmtree(tmp)", "def get_sequence(chrom, start, end, range):\n # print(start)\n # print(end)\n # start = int(start) - range \n # end = int(end) + range\n # print(start)\n # print(end)\n\n # command to get the region from the two bit file from fasta\n cmd = [\"/ye/zaitlenlabstore/christacaggiano/twoBit/twoBitToFa\", \"/ye/zaitlenlabstore/christacaggiano/twoBit/hg38.2bit\",\n \"stdout\", \"-seq=\" + chrom, \"-start=\" + str(start), \"-end=\" + str(end)]\n\n # call command and get output\n result = subprocess.check_output(cmd)\n\n return result.decode().upper()" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
This method gets the features for a particular genomeId from PATRIC.
def getFeaturesForGenome(genomeId, CDS_ONLY):
    data_table = pd.read_table(PatricURL + genomeId + '/' + genomeId + '.PATRIC.features.tab')
    print data_table.shape
    if CDS_ONLY:
        return data_table[(data_table.feature_type == 'CDS')]
    else:
        return data_table
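A usage sketch under the same assumptions as above (PatricURL defined, placeholder genome ID); it requests the CDS-only feature table and inspects its size:

genome_id = '83332.12'  # placeholder PATRIC genome identifier
cds_features = getFeaturesForGenome(genome_id, CDS_ONLY=True)
print('Number of CDS features: %d' % len(cds_features))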
[ "def get_features(self, ctx, ref, feature_id_list):\n # ctx is the context object\n # return variables are: returnVal\n #BEGIN get_features\n ga = GenomeAnnotationAPI_local(self.services, ctx['token'], ref)\n returnVal = ga.get_features(feature_id_list)\n #END get_features\n\n # At some point might do deeper type checking...\n if not isinstance(returnVal, dict):\n raise ValueError('Method get_features return value ' +\n 'returnVal is not type dict as required.')\n # return the results\n return [returnVal]", "def get_features2(self, ctx, params):\n # ctx is the context object\n # return variables are: returnVal\n #BEGIN get_features2\n\n if 'ref' not in params:\n raise ValueError('ref field in parameters object is required')\n\n feature_id_list = None\n if 'feature_id_list' in params:\n feature_id_list = params['feature_id_list']\n\n exclude_sequence = False\n if 'exclude_sequence' in params:\n if params['exclude_sequence'] == 1:\n exclude_sequence = True\n elif params['exclude_sequence'] != 0:\n raise ValueError('exclude_sequence field in parameters object must be set to either 1 or 0')\n\n ga = GenomeAnnotationAPI_local(self.services, ctx['token'], params['ref'])\n returnVal = ga.get_features(\n feature_id_list=feature_id_list,\n exclude_sequence=exclude_sequence)\n\n #END get_features2\n\n # At some point might do deeper type checking...\n if not isinstance(returnVal, dict):\n raise ValueError('Method get_features2 return value ' +\n 'returnVal is not type dict as required.')\n # return the results\n return [returnVal]", "def getfeatures(gdf):\n return [json.loads(gdf.to_json())['features'][0]['geometry']]", "def cli(ctx, feature_id):\n return ctx.gi.feature.get_feature_cvterms(feature_id)", "def get_feature_functions(self, ctx, ref, feature_id_list):\n # ctx is the context object\n # return variables are: returnVal\n #BEGIN get_feature_functions\n ga = GenomeAnnotationAPI_local(self.services, ctx['token'], ref)\n returnVal = ga.get_feature_functions(feature_id_list)\n #END get_feature_functions\n\n # At some point might do deeper type checking...\n if not isinstance(returnVal, dict):\n raise ValueError('Method get_feature_functions return value ' +\n 'returnVal is not type dict as required.')\n # return the results\n return [returnVal]", "def get_features(gdf):\n import json\n return [json.loads(gdf.to_json())['features'][b]['geometry'] for b in range(len(gdf))]", "def get_region_feature(self, species, region, features):\n self.endpoint = '/overlap/region/'\n url = self.server + self.endpoint + species + '/' + region + '?'\n\n for f in features:\n url = url + 'feature=' + f + ';'\n\n r = requests.get(url, headers={ \"Content-Type\" : \"text/x-gff3\"})\n\n if not r.ok:\n r.raise_for_status()\n sys.exit()\n return r.text", "def get(request,hash,db_name):\n db = models.Feature_Database.objects.get(name=db_name)\n sequence = models.Sequence.objects.get(db=db,hash=hash)\n\n if db.db_version != sequence.db_version:\n print 'feature list and database out of sync!'\n # feature out of date with database, re gather features\n hash = models.Giraffe_Mappable_Model.detect_features(sequence.sequence,db_name)\n\n res = []\n\n # get automated features\n\n if 'sc' in request.GET:\n features = []\n cutters = {}\n for f in sequence.sequence_feature_set.order_by(\"start\").select_related(\n 'feature_db_index',\n 'feature_db_index__feature',\n 'feature_db_index__feature__type',\n ):\n features.append(f)\n if f.feature.type_id == models.Feature_Type.ENZYME:\n if f.feature.name in cutters:\n 
cutters[f.feature.name] = cutters[f.feature.name]+1\n else:\n cutters[f.feature.name] = 1\n\n for f in features:\n if f.feature.type_id == models.Feature_Type.ENZYME:\n if cutters[f.feature.name] == 1:\n res.append(f.to_dict())\n else:\n res.append(f.to_dict())\n\n else:\n for f in sequence.sequence_feature_set.order_by(\"start\").select_related(\n 'feature_db_index',\n 'feature_db_index__feature',\n 'feature_db_index__feature__type',\n ):\n res.append(f.to_dict())\n\n # get annotated features\n\n for f in sequence.sequence_feature_annotated_set.order_by(\n \"start\"\n ).select_related('feature_type'):\n res.append(f.to_dict())\n\n # now sort everything by start\n\n res.sort(cmp=lambda x,y:cmp(int(x['start']),int(y['start'])))\n\n res = [len(sequence.sequence),res]\n\n if 'sequence' in request.GET:\n # also asked for sequence\n res.append(sequence.sequence)\n\n j = json.JSONEncoder().encode(res)\n\n if 'jsonp' in request.GET:\n j = request.GET['jsonp']+'('+j+')'\n http_res = HttpResponse(j,mimetype=\"text/javascript\",status=httplib.OK)\n\n else:\n # technically we should be returning \"application/json\", but\n # in that case browsers force user to download into a file,\n # and for debugging we want to be able to see the JSON list in\n # browser. looks like most browsers will handle JSON sent back\n # as text/html anyways.\n if request.is_ajax():\n http_res = HttpResponse(j,mimetype=\"application/json\",status=httplib.OK)\n else:\n http_res = HttpResponse(j,status=httplib.OK)\n\n # we tell browser to cache this; if the sequence change, the hash would\n # change. the only danger is if we re-blat the sequence, in that case the\n # features list cached by browser will be out of date. so client\n # should attach some kind of CGI string to invalidate cache.\n http_res['Cache-Control'] = 'max-age=2592000'\n return http_res", "def feature_list(self):\n return self.features.features()", "def test_get_features_by_project_id(self):\n project1 = self.project # owned by user running test\n project2 = ProjectFactory()\n\n feature1 = FeatureFactory(project=project1, feature_id='US101')\n FeatureFactory(project=project2, feature_id='US202')\n\n url = reverse('core:features:feature-list')\n response = self.client.get(\n '{}?project={}'.format(url, project1.id)\n )\n self.assertEquals(response.status_code, 200)\n features = json.loads(response.content)\n self.assertEquals(len(features), 1)\n self.assertEquals(features[0]['feature_id'], feature1.feature_id)", "def get_FS_cols(list_patients_objects, max_features=10, technique='correlation'):\n\n if technique == 'correlation':\n features = get_patients_correlated_score(\n list_patients_objects).index.to_series().tolist()\n\n if technique == 'top_features':\n features = get_top_features(list_patients_objects)\n\n return features[:max_features]", "def get_feature(self, f: Feature):\n try:\n return self.features[f.feature_id]\n except ValueError:\n print('Feature does not exist in the dataset.')", "def get_features(self, tool_id):\n # Validate inputs\n #tool_id = _get_id_or_err(tool_id, 'tool_id')\n\n # Get Objects\n tool = a_m.get_tool_by_id(self.ssn, tool_id)\n if tool is None:\n raise ex.NotExistsError('Tool', 'id', tool_id)\n\n return a_m.get_features_by_tool(self.ssn, tool)", "def get_features(track_id: str, sp: ...) 
-> ...: # TODO ***************\n features = sp.audio_features('spotify:track:' + track_id)\n return([features[0]['acousticness'], features[0]['danceability'], features[0]['energy'],\n features[0]['duration_ms'], features[0]['instrumentalness'], features[\n 0]['valence'], features[0]['tempo'], features[0]['liveness'],\n features[0]['loudness'], features[0]['speechiness'], features[0]['key']])", "def get_track_features(track_id, sp):\n\n feature_filter = ['danceability', 'energy', 'instrumentalness', 'loudness', 'speechiness', 'tempo', 'valence']\n return_features = []\n\n # Get features from this track.\n features = sp.audio_features([track_id])\n\n if None in features:\n return []\n\n # Add desired features of track.\n for feature in features[0]:\n if feature in feature_filter:\n return_features.append(features[0][feature])\n\n return return_features", "def gather_data(path_list, id_list):\n\n features = np.vstack([np.genfromtxt(path_list[sid]) for sid in id_list])\n\n return features", "def getGenesFromMaf(maffile):\n\n maf_head = pd.read_csv(gzip.open(maffile),sep='\\t',comment='#')\n ##get hugo_symbol, and case_id\n return maf_head[['Hugo_Symbol', 'case_id', 'HGVSc', 'One_Consequence', 'SIFT', 'PolyPhen']]", "def _get_dataset_features(did_cache_dir, dataset_id):\n features_file = os.path.join(did_cache_dir, \"features.xml\")\n\n # Dataset features aren't subject to change...\n try:\n with io.open(features_file, encoding='utf8') as fh:\n features_xml = fh.read()\n except (OSError, IOError):\n features_xml = _perform_api_call(\"data/features/%d\" % dataset_id)\n\n with io.open(features_file, \"w\", encoding='utf8') as fh:\n fh.write(features_xml)\n\n features = xmltodict.parse(features_xml, force_list=('oml:feature',))[\"oml:data_features\"]\n\n return features", "def proc_csv(f):\n df = pd.read_csv(f, index_col = 'PassengerId')\n features = pd.DataFrame(index=df.index)\n for col in df.columns:\n mapping = _MAPPINGS[col]\n features = features.join(mapping(df[col]))\n return features", "def highlevelfeaturesRetriever(self):" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Split scope and key. The first scope will be split from key.
def split_scope_key(key): split_index = key.find('.') if split_index != -1: return key[:split_index], key[split_index + 1:] else: return None, key
[ "def split_scoped_hparams(scopes, merged_hparams):\n split_values = dict([(scope, dict()) for scope in scopes])\n merged_values = merged_hparams.values()\n for scoped_key, value in six.iteritems(merged_values):\n scope = scoped_key.split(\".\")[0]\n key = scoped_key[len(scope) + 1:]\n split_values[scope][key] = value\n\n return [\n tf.contrib.training.HParams(**split_values[scope]) for scope in scopes\n ]", "def _split_key(key):\n split = key.split(\"/\")\n head = split[0]\n tail = split[1:]\n return head, \"/\".join(tail)", "def __split_key(key: str) -> Tuple[str, str]:\n try:\n section, sub_key = key.split('.', 1)\n except ValueError:\n # error message inspired by git config\n raise InvalidConfigKey('key does not contain '\n 'a section: {}'.format(key))\n return section, sub_key", "def split_cppscope(cppscope):\n\n scopes = []\n\n while cppscope:\n scopes.append(cppscope)\n cppscope, _, _ = cppscope.rpartition(\"::\")\n\n return scopes", "def split_key(key) -> Tuple[str, Optional[str]]:\n\n key_base = key\n key_index = None\n\n m = Diagnostics.reKeyIndex.search(key)\n\n if m:\n key_base = key[:m.start()]\n key_index = m.group(1)\n\n return key_base, key_index", "def normalize_key(key):\n\n if isinstance(key, str):\n group, _, key = key.partition(\".\")\n elif isinstance(key, tuple):\n group, key = key\n else:\n raise TypeError(f\"invalid key type: {type(key).__class__}\")\n return group, key or None", "def prepare_diff_key(key):\n return key.split('[')[-1].strip(\"]'\")", "def split_nested_class_from_key(key: str) -> Tuple[str, Optional[str]]:\n first_dollar_sign = key.find('$')\n if first_dollar_sign == -1:\n return key, None\n else:\n return key[:first_dollar_sign], key[first_dollar_sign + 1:]", "def splitkey(userkey):\n mylist = userkey.split('~', 1)\n user_id, key = mylist[0], mylist[1]\n return (user_id, key)", "def parse_key(cls, key):\n\n if isinstance(key, list) or isinstance(key, tuple):\n parts = list(key)\n elif isinstance(key, str):\n parts = key.split('.')\n else:\n raise TypeError(\"Only str keys or tuples/lists are allowed.\")\n\n var_set = None\n if parts[0] in cls.VAR_SETS:\n var_set = parts[0]\n\n parts = parts[1:]\n\n if parts:\n var = parts.pop(0)\n if var == '':\n raise KeyError(\"Empty variable name for key '{}'\".format(key))\n\n else:\n raise KeyError(\"No variable name given for key '{}'\".format(key))\n\n # Grab the index and sub_var parts, if present.\n index = None\n if parts:\n if parts[0] is None:\n # We were given an explicit None in a variable tuple.\n parts.pop(0)\n elif parts[0] == '':\n # Note: The index is optional. 
This is for when it's given as\n # an empty string.\n raise KeyError(\"Invalid, empty index in key: '{}'\".format(key))\n else:\n try:\n index = int(parts[0])\n parts.pop(0)\n except ValueError:\n # If it's not an integer, assume it's a sub_key.\n pass\n\n sub_var = None\n if parts:\n sub_var = parts.pop(0)\n\n if sub_var == '':\n raise KeyError(\n \"Invalid, empty sub_var in key: '{}'\".format(key))\n\n if parts:\n raise KeyError(\n \"Variable reference ({}) has too many parts, or an invalid \"\n \"variable set (should be one of {})\".format(key, cls.VAR_SETS))\n\n return var_set, var, index, sub_var", "def split_kv(line):\n kvdelim = '=' # key and value deliminator\n logline = {} # dictionary for logline\n # split line in key and value pairs\n # regex matches internal sub strings such as key = \"word1 word2\"\n for field in re.findall(r'(?:[^\\s,\"\"]|\"(?:\\\\.|[^\"\"])*\")+', line):\n if kvdelim in field:\n key, value = field.split(kvdelim)\n logline[key] = value\n return logline", "def create_splitword_scope(word_part):\n\n split_word = sentence.find('splitword', {'idref' : t_id})\n wordpart_idref = split_word.find('part', {'word' : word_part})\n\n last_frame = sentence.sem.frames.find_all('frame', {'name' : NEGATION_FRAME_NAME})[-1]\n\n # Create scope\n scope = chapter_input.new_tag('fe')\n scope['name'] = SCOPE_TAG_NAME\n scope['id'] = last_frame.get('id')+'_e3'\n last_frame.insert(3, scope)\n\n # Create scope <fenode>\n scope_fenode = chapter_input.new_tag('fenode')\n scope_fenode['idref'] = wordpart_idref.get('id')\n scope_fenode['is_split'] = 'yes'\n scope.insert(0, scope_fenode)", "def split_dataset(s):\r\n key = s.strip('_')\r\n group = key.split('_')[0]\r\n dset = '_'.join(key.split('_')[1:])\r\n return (group, dset)", "def _key_transform(key: CachePlayerKey) -> CacheKey:\n return key[0].name, key[1].name", "def separate_keys(s: str) -> (List[str], List[str]):\n\n keys = []\n types = []\n dict_elements = split_high_level(\n s,\n ',',\n [\"(\", \"[\", \"{\"],\n [\")\", \"]\", \"}\"]\n )\n for dict_elem in dict_elements:\n key_type = split_high_level(\n dict_elem,\n ':',\n [\"(\", \"[\", \"{\"],\n [\")\", \"]\", \"}\"]\n )\n keys.append(str2key(key_type[0]))\n types.append(key_type[1])\n\n return keys, types", "def _split_keyvals(keyval_str):\n quals = collections.defaultdict(list)\n if keyval_str is None:\n return quals\n # ensembl GTF has a stray semi-colon at the end\n if keyval_str[-1] == ';':\n keyval_str = keyval_str[:-1]\n # GFF2/GTF has a semi-colon with at least one space after it.\n # It can have spaces on both sides; wormbase does this.\n # GFF3 works with no spaces.\n # Split at the first one we can recognize as working\n parts = keyval_str.split(\" ; \")\n if len(parts) == 1:\n parts = keyval_str.split(\"; \")\n if len(parts) == 1:\n parts = keyval_str.split(\";\")\n # check if we have GFF3 style key-vals (with =)\n is_gff2 = True\n if gff3_kw_pat.match(parts[0]):\n is_gff2 = False\n key_vals = [p.split('=') for p in parts]\n # otherwise, we are separated by a space with a key as the first item\n else:\n pieces = []\n for p in parts:\n # fix misplaced semi-colons in keys in some GFF2 files\n if p and p[0] == ';':\n p = p[1:]\n pieces.append(p.strip().split(\" \"))\n key_vals = [(p[0], \" \".join(p[1:])) for p in pieces]\n for key, val in key_vals:\n # remove quotes in GFF2 files\n if (len(val) > 0 and val[0] == '\"' and val[-1] == '\"'):\n val = val[1:-1] \n if val:\n quals[key].extend(val.split(','))\n # if we don't have a value, make this a key=True/False style\n 
# attribute\n else:\n quals[key].append('true')\n for key, vals in quals.items():\n quals[key] = [urllib.unquote(v) for v in vals]\n return quals, is_gff2", "def _group_checkpoint_keys(keys: List[str]) -> Dict[str, List[str]]:\n groups = defaultdict(list)\n for key in keys:\n pos = key.rfind(\".\")\n if pos >= 0:\n head, tail = key[:pos], [key[pos + 1:]]\n else:\n head, tail = key, []\n groups[head].extend(tail)\n return groups", "def split_conf_parameter_key(rlk):\n try:\n p = rlk.split(\":\")\n return p[1], p[2]\n except BaseException:\n LOG.error(\"Couldn't parse configuration parameter key.\")\n return None, None", "def split_gcs_path(gcs_path: str) -> Tuple[str, str]:\n path_parts = gcs_path.replace('gs://', '').split('/')\n bucket = path_parts.pop(0)\n key = '/'.join(path_parts)\n return bucket, key" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Add children for a registry. The ``registry`` will be added as children based on its scope. The parent registry could build objects from children registry.
def _add_children(self, registry): assert isinstance(registry, Registry) assert registry.scope is not None assert registry.scope not in self.children, \ f'scope {registry.scope} exists in {self.name} registry' self.children[registry.scope] = registry
[ "def add_children(self, *children):\n for child in children:\n self.children.append(child)", "def add_children(self, new_children):\n self.children = self.get_children() + new_children", "def append_children(parent, *children):\n for c in children:\n parent.add_child(c)", "def add_children(self, subcontext_class, subcontext_key, *subcontext_args):\n cr = crawler.Crawler(self.node_path(*subcontext_args))\n self.add({subcontext_key: [subcontext_class(*subcontext_args, child).data for child in cr.children()]})", "def addAllChildrenToParent(self, widget=None, value=None):\r\n for child in self.children:\r\n self.parent.addChild(child)", "def spawn_children(self):\n for i in range(self.num_children - len(self._CHILDREN.keys())):\n self.spawn_child()", "def add_child(self, *sprites):\n for sprite in sprites:\n self.child_sprites.append(sprite)\n sprite.parent = self", "def __init__(self, new_children=None):\n super().__init__()\n if not new_children:\n new_children = []\n self.__children = []\n for c in new_children:\n self.add_child(c)", "def populate_children(self) -> None:\n for n in self.nodes:\n children = filter(lambda x: x.parent_id == n.id, self.nodes)\n for child in children:\n n.add_child(child)", "def add(self, *groups):\n if len(groups) == 1 and isinstance(groups, list):\n self.add(*groups)\n if not self._added_child_groups:\n self._added_child_groups = []\n self._added_child_groups.extend(groups)\n return self", "def register_for_new_hierarchies(self):\n pass", "def manage_children(self):\n if len(self._CHILDREN.keys()) < self.num_children:\n self.spawn_children()", "def add_child_encounter(self, encounter):\n encounter.patient = self.patient\n self.children.append(encounter)", "def getChildren(self, pid):\n childProcs = findChildProcesses(pid) \n for child in childProcs:\n print \"Child Process found: %s\" % child\n self.append(child)\n self.getChildren(child)\n return", "def set_children(self, children) :\n self.__children = children", "def add_children(self, child_nodes: List['RouteNode'], version: Optional[int]=None) -> 'RouteNode':\n self.children += child_nodes\n for node in child_nodes:\n node.parent = self\n if version is not None:\n node.assign_version_to_all_children(version)\n return self", "def setup_children(self):\n\n # Get the number of blocks of the encoder gene\n # (Note that the decoder part of the network will have an extra block)\n encoder = self.root.children[0]\n n_encoder_blocks = encoder.hyperparam('n_blocks')\n\n # In a BlockSetGene, children are blocks\n n_children = n_encoder_blocks + 1\n # How many children does this gene have already?\n n_children_now = len(self.children)\n # What change is needed to have n_children children?\n d_n_children = n_children - n_children_now\n\n if d_n_children > 0:\n # Add children\n for i in range(d_n_children):\n self.add_child()\n\n elif d_n_children < 0:\n # Remove children\n for i in range(-d_n_children):\n self.children.pop()\n\n # Deal with potential changes in spatial scales caused by the\n # addition or removal of blocks\n self._rescale_children()\n\n pass", "def _connect_all(self) -> None:\n for term in self._map.values():\n for parent_id in term.parent_ids():\n parent = self[parent_id]\n term.parents.add(parent)\n parent.children.add(term)\n\n # Build caches of hierarchy to speed up performance\n for term in self._map.values():\n term.all_parents", "def _add_child_data(self, experiment_data: ExperimentData):\n component_index = experiment_data.metadata.get(\"component_child_index\", [])\n if component_index:\n # 
Child components are already initialized\n return\n\n # Initialize the component experiment data containers and add them\n # as child data to the current experiment data\n child_components = self._initialize_component_experiment_data(experiment_data)\n start_index = len(experiment_data.child_data())\n for i, subdata in enumerate(child_components):\n experiment_data.add_child_data(subdata)\n component_index.append(start_index + i)\n\n # Store the indices of the added child data in metadata\n experiment_data.metadata[\"component_child_index\"] = component_index" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
The method to get the public_views
def get_public_views(self): return self.__public_views
[ "def get_views(name):", "def get_views():\n views = []\n rnetworkview = requests.get(PAYLOAD['url'] + \"networkview?\",\n auth=(PAYLOAD['username'],\n PAYLOAD['password']),\n verify=False)\n rutfnetworkview = rnetworkview.content.decode('utf-8')\n rjsonnetworkview = json.loads(rutfnetworkview)\n for raw_view in rjsonnetworkview:\n for key in raw_view.keys():\n if key == 'name':\n views.append(raw_view[key])\n # views = ['UNO'] # Instead of pulling all views.\n return views", "def test_core_get_views_v1(self):\n pass", "def get_views_list():\n views = Views(j)\n return list(views.iterkeys())", "def views(self):\n return self._internal_read(\n lambda date, x, _: {\n 'date': date,\n 'pageViews': x['pageViews']\n }\n )", "def all_views(self):\n temp = self.views\n\n retval = []\n for cur_view in temp:\n if cur_view.type == self.type:\n retval.extend(cur_view.all_views)\n\n retval.extend(temp)\n return retval", "def get_views():\n rv = []\n for _, m, _ in pkgutil.iter_modules(path=__path__):\n i = importlib.import_module(name=\"{0}.{1}\".format(__name__, m))\n for name, obj in inspect.getmembers(i, predicate=inspect.isclass):\n if u'View' in name and obj.__module__ == i.__name__:\n rv.append(obj)\n\n return rv", "def get_views(self):\n return sorted([view for (_, _, _, view) in self.plugins.values()])", "def is_public_get_view(self, view_data: dict):\n\n if not self.is_get_requestable(view_data):\n return False\n\n if not self.is_anonymous(view_data):\n return False\n\n return True", "def views( self ):\n return self.findChildren(XView)", "def publics(self):\n return self.__publics", "def get_view_names():\n return get_names(\"views\")", "def list_views(app, appbuilder):\n _appbuilder = import_application(app, appbuilder)\n echo_header(\"List of registered views\")\n for view in _appbuilder.baseviews:\n click.echo(\n \"View:{0} | Route:{1} | Perms:{2}\".format(\n view.__class__.__name__, view.route_base, view.base_permissions\n )\n )", "def public_view3(request):\n return HttpResponse(\"PublicView\")", "def __GetLocalViews(self):\n\n import socket\n \n myClearCaseCommand = 'cleartool lsview'\n myHostName = string.lower(socket.gethostname())\n \n myListLocalView = []\n\n (mystdIn, myStdOut) = popen2.popen2(myClearCaseCommand)\n for myLine in mystdIn:\n myLowerLine = string.lower(myLine)\n myStartHostName = string.find(myLowerLine, myHostName)\n \n if myStartHostName != -1:\n myLocalView = myLine[2:myStartHostName-2]\n myListLocalView.append(string.strip(myLocalView))\n \n self.__StartViews(myListLocalView)\n \n return myListLocalView\n \n return", "def subviews(self):\n # type: () -> List[View]\n\n return []", "def has_public_view_acl(self, context: Resource):\n policy = self.request.registry.queryUtility(IAuthorizationPolicy)\n\n # view permission is set on Root object or overridden in resource hierarchy __ACL__\n principals = policy.principals_allowed_by_permission(context, \"view\")\n return Everyone in principals", "def crawl_non_default_views(self):\n return self.properties.get(\"CrawlNonDefaultViews\", None)", "def get_views(self, table_schema: str='public') -> List[Table]:\n rows = self.db.where(\n \"information_schema.views\",\n table_schema=table_schema)\n\n views = []\n for row in rows:\n row['table_type'] = 'VIEW'\n views.append(Table(self.db, row))\n return views" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
The method to set the value to public_views
def set_public_views(self, public_views): if public_views is not None and not isinstance(public_views, str): raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: public_views EXPECTED TYPE: str', None, None) self.__public_views = public_views self.__key_modified['public_views'] = 1
[ "def get_public_views(self):\n\n\t\treturn self.__public_views", "def setView(self, v):\n self.view = v", "def set_Public(self, value):\n super(UpdateTicketInputSet, self)._set_input('Public', value)", "def views(self, value=None):\n if value is not None:\n if value in config.choicelist_views:\n self.dims.views = int(value)\n logging.info(\"Views set to %i\" % self.dims.views)\n self.on_change()\n return 0\n else:\n logging.warn(\"Attempt to set Views failed - Value (%s) outside of acceptable range\" % str(value))\n return 1\n return self.dims.views", "def set_view_read_only(self):\n if self.reh is not None:\n self.reh.set_read_only()", "def set_public_id(self):\r\n\r\n self.public_id = get_public_id(f\"{self.id}_user\")\r\n self.save()", "def viewip(self, viewip) :\n\t\ttry :\n\t\t\tself._viewip = viewip\n\t\texcept Exception as e:\n\t\t\traise e", "def set_publicity(obj, public):\n if public: obj.Acl().put(ACL='public-read')\n else: obj.Acl().put(ACL='private')", "def set_public_id(self):\r\n\r\n self.public_id = get_public_id(f\"{self.id}_link\")\r\n self.save()", "def custom_view(self, value: discord.ui.View | None):\n self._custom_view = value", "def set_remote_view_settings(self):\r\n self.remote_view_settings = read_packet(self.i_request)\r\n self.enable_refresh_after_eval()", "def is_viewable(self, is_viewable):\n\n self._is_viewable = is_viewable", "def view_path(self, view_path):\n\n self._view_path = view_path", "def SetViewFilter(self, vf):\n self._view_filter = vf", "def public_view3(request):\n return HttpResponse(\"PublicView\")", "def is_public_get_view(self, view_data: dict):\n\n if not self.is_get_requestable(view_data):\n return False\n\n if not self.is_anonymous(view_data):\n return False\n\n return True", "def define_views(self) -> None:\n\n # Define view paths\n #self.views(['uvicore.auth.http.views'])\n\n # Define public paths\n self.public(['uvicore.auth.http.public'])\n\n # Define asset paths\n self.assets(['uvicore.auth.http.public.assets'])", "def addViews(self, document, views):\n document.setdefault(\"views\", {})\n for name, data in views.items():\n document[\"views\"][name] = data", "def update_view(self, kernel_state):\n if \"namespace_view\" in kernel_state:\n self.process_remote_view(kernel_state.pop(\"namespace_view\"))\n if \"var_properties\" in kernel_state:\n self.set_var_properties(kernel_state.pop(\"var_properties\"))" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
The method to get the other_users_views
def get_other_users_views(self): return self.__other_users_views
[ "def set_other_users_views(self, other_users_views):\n\n\t\tif other_users_views is not None and not isinstance(other_users_views, str):\n\t\t\traise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: other_users_views EXPECTED TYPE: str', None, None)\n\t\t\n\t\tself.__other_users_views = other_users_views\n\t\tself.__key_modified['other_users_views'] = 1", "def get_views(name):", "def get_views():\n views = []\n rnetworkview = requests.get(PAYLOAD['url'] + \"networkview?\",\n auth=(PAYLOAD['username'],\n PAYLOAD['password']),\n verify=False)\n rutfnetworkview = rnetworkview.content.decode('utf-8')\n rjsonnetworkview = json.loads(rutfnetworkview)\n for raw_view in rjsonnetworkview:\n for key in raw_view.keys():\n if key == 'name':\n views.append(raw_view[key])\n # views = ['UNO'] # Instead of pulling all views.\n return views", "def restricted_to_viewing_user(self):\n if self.userobject is None:\n return None\n if self.userobject.may_view_other_users_records:\n return None\n if self.userobject.superuser:\n return None\n return self.userobject.user", "def _get_viewers(self):\n total_viewers = 0\n for stream in self.result_set:\n total_viewers += stream.viewers\n return total_viewers", "def test_core_get_views_v1(self):\n pass", "def get_view_names():\n return get_names(\"views\")", "def view_user(request):\n user = request.user\n latest_user_rides = Ride.objects.filter(\n participants__email=user.email).order_by('ride_date')\n latest_rides_tup = [(ride, True, ride.is_owner(user)) \n for ride in latest_user_rides if not ride.ride_in_past()]\n context = {\n 'latest_rides': latest_rides_tup,\n 'user': request.user,\n 'user_rides': True\n }\n return render(request, 'cabrides/index.html', context)", "def get_public_views(self):\n\n\t\treturn self.__public_views", "def num_voters(self):\n return User.objects.filter(votes__option__topic=self).distinct().count()", "def mentors_all(self):\n return self.all()\\\n .select_related('user').\\\n order_by('display_nr')", "def views( self ):\n return self.findChildren(XView)", "def test_logged_in_user_view_others_likes(self):\n\n with self.client as c: \n with c.session_transaction() as sess:\n sess[CURR_USER_KEY] = self.testuser.id\n \n response = c.get(f\"/users/{self.testuser2.id}/likes\")\n\n html = response.get_data(as_text=True)\n\n self.assertEqual(response.status_code, 200)\n self.assertIn(f\"@{self.testuser2.username}\", html)", "def get_overviews(self):\n overviews = []\n overviews.extend(self.alarm.get())\n overviews.extend(self.climate.get())\n overviews.extend(self.ethernet.get())\n overviews.extend(self.temperaturecontrol.get())\n overviews.extend(self.lock.get())\n overviews.extend(self.mousedetection.get())\n overviews.extend(self.nest.get())\n overviews.extend(self.smartcam.get())\n overviews.extend(self.smartplug.get())\n overviews.extend(self.vacationmode.get())\n return overviews", "def get_views(self):\n return sorted([view for (_, _, _, view) in self.plugins.values()])", "def all_views(self):\n temp = self.views\n\n retval = []\n for cur_view in temp:\n if cur_view.type == self.type:\n retval.extend(cur_view.all_views)\n\n retval.extend(temp)\n return retval", "def get_views_list():\n views = Views(j)\n return list(views.iterkeys())", "def allteammember_view(request):\n\tatm = User.objects.filter(profile__is_team_member = True, is_active=True)\n\treturn render(request, 'core_app/team.html', {'teammember': atm} )", "def get_total_users(self):\n return self.users" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
The method to set the value to other_users_views
def set_other_users_views(self, other_users_views): if other_users_views is not None and not isinstance(other_users_views, str): raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: other_users_views EXPECTED TYPE: str', None, None) self.__other_users_views = other_users_views self.__key_modified['other_users_views'] = 1
[ "def get_other_users_views(self):\n\n\t\treturn self.__other_users_views", "def custom_view(self, value: discord.ui.View | None):\n self._custom_view = value", "def set_view_rights(user, live_calendar, department_view, employee_view):\n\n # Delete old view rights for this live calendar\n oldDepartmentViewRights = LiveCalendarDepartmentViewRights.objects.filter(user=user, live_calendar=live_calendar).delete()\n oldEmployeeViewRights = LiveCalendarEmployeeViewRights.objects.filter(user=user, live_calendar=live_calendar).delete()\n\n # Create new view rights for this live calendar\n departments = Department.objects.filter(user=user, id__in=department_view)\n employees = Employee.objects.filter(user=user, id__in=employee_view)\n\n newDepartmentViewRights = []\n newEmployeeViewRights = []\n\n for department in departments:\n depViewRight = LiveCalendarDepartmentViewRights(user=user,\n live_calendar=live_calendar,\n department_view_rights=department)\n newDepartmentViewRights.append(depViewRight)\n for employee in employees:\n empViewRight = LiveCalendarEmployeeViewRights(user=user,\n live_calendar=live_calendar,\n employee_view_rights=employee)\n newEmployeeViewRights.append(empViewRight)\n\n LiveCalendarDepartmentViewRights.objects.bulk_create(newDepartmentViewRights)\n LiveCalendarEmployeeViewRights.objects.bulk_create(newEmployeeViewRights)", "def views_count(self, views_count):\n\n self._views_count = views_count", "def setView(self, v):\n self.view = v", "def set_UserID(self, value):\n super(SearchPhotosInputSet, self)._set_input('UserID', value)", "def test_user_cant_edit_other_users(self):\n other_user = mommy.make_recipe('backend.core.user', username='leonardo2')\n other_user.first_name = 'Trying to change other user data'\n data = model_to_dict(other_user)\n resp = self.client.put(reverse('user-update', kwargs={'pk': other_user.pk}), data,\n HTTP_AUTHORIZATION=self.jwt_authorization)\n self.assertEqual(403, resp.status_code)", "def views(self, value=None):\n if value is not None:\n if value in config.choicelist_views:\n self.dims.views = int(value)\n logging.info(\"Views set to %i\" % self.dims.views)\n self.on_change()\n return 0\n else:\n logging.warn(\"Attempt to set Views failed - Value (%s) outside of acceptable range\" % str(value))\n return 1\n return self.dims.views", "def viewed_count(self, viewed_count):\n\n self._viewed_count = viewed_count", "def set_new_user(self):\n self.current_user = random.choice(self.hosts)", "def _set_other(self, other):\n\n self._other = other\n self._get_other_attributes()", "async def set_user_admin(self, userid, value):\n raise NotImplementedError()", "def set_user_id(self,user_id):\n self.user_id = user_id", "def set_user(self, user):\n self.user = user", "def viewip(self, viewip) :\n\t\ttry :\n\t\t\tself._viewip = viewip\n\t\texcept Exception as e:\n\t\t\traise e", "def at_set(self, new_value):\r\n pass", "def set_voters(self, voters: List['Voter'], voter_idx_in_group: int):\n self.voters = voters[voter_idx_in_group + 1:]", "def hidden_for_user(self, value):\n self.logger.warn(\n \"Setting values on hidden_for_user will NOT update the remote Canvas instance.\"\n )\n self._hidden_for_user = value", "def view_path(self, view_path):\n\n self._view_path = view_path" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
The method to get the shared_with_me
def get_shared_with_me(self): return self.__shared_with_me
[ "def shared(self):\n if \"shared\" in self._prop_dict:\n if isinstance(self._prop_dict[\"shared\"], OneDriveObjectBase):\n return self._prop_dict[\"shared\"]\n else :\n self._prop_dict[\"shared\"] = Shared(self._prop_dict[\"shared\"])\n return self._prop_dict[\"shared\"]\n\n return None", "def getSharedData(self):\n return {}", "def shared_attributes(self):\n pass", "def shared_muse_instance():\n global _shared_muse_instance\n if not _shared_muse_instance:\n _shared_muse_instance = dct.muse()\n return _shared_muse_instance", "def _get_global_(self):\n return self.__global_", "def get_shared_item(self, shared_url):\n ret = None\n \n if self.id_share_pattern.match(shared_url):\n id_search = self.id_share_pattern.search(shared_url)\n if id_search:\n file_id = id_search.group(5)\n #print('Match:', shared_url, ', fileid:', file_id)\n ret = self.get_shared_item_by_id(file_id)\n else:\n ret = self.client.get_shared_item(shared_url).__dict__\n \n if self.DEBUG:\n print(ret)\n #print('ret:', ret)\n return ret", "def share_info(self):\n global global_path\n global global_known_p\n global_path = global_path + (list(set(self.path) - set(global_path))) # removes duplicates\n global_known_p = self.known_p", "def helper(self):\n return self", "def _get_shared_metrics():\n return LocalIterator.get_metrics()", "def effectiveShareMode(): # @NoSelf", "def get_shared_item_ori(self, shared_url):\n ret = self.client.get_shared_item(shared_url)\n if self.DEBUG:\n pprint(vars(ret))\n print('ret:', ret)\n return ret", "def me(self):\n return self.properties.get(\"Me\", SocialActor())", "def get_sharing_information(self):\n return self.list_item_all_fields.get_sharing_information()", "def set_shared_muse_instance(muse_instance):\n global _shared_muse_instance\n _shared_muse_instance = muse_instance", "def mock(self):\r\n return self._mock", "def _get_cached_ref ( self ):\n return self._cached_selfref", "def getSharedVariables(self):\n return AbstractArrayList._arrayListTypes[self.getType()][0].copy()", "def get():\n global context\n return context", "def Wrapper(self) -> object:" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
The method to set the value to shared_with_me
def set_shared_with_me(self, shared_with_me): if shared_with_me is not None and not isinstance(shared_with_me, str): raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: shared_with_me EXPECTED TYPE: str', None, None) self.__shared_with_me = shared_with_me self.__key_modified['shared_with_me'] = 1
[ "def __set__(self, instance, value):\n instance.__dict__[self.name] = value", "def set_shared_muse_instance(muse_instance):\n global _shared_muse_instance\n _shared_muse_instance = muse_instance", "def at_set(self, new_value):\r\n pass", "def shared(self, shared):\n if shared is None:\n raise ValueError(\"Invalid value for `shared`, must not be `None`\") # noqa: E501\n\n self._shared = shared", "def set(value,force=False):", "def set(self, instance, value):\n self.descriptor.__set__(instance, value)", "def shared_key(self, shared_key: ConfigNodePropertyString):\n\n self._shared_key = shared_key", "def _set_mutable(self, mutable):\n # pylint: disable=protected-access\n object.__setattr__(self, \"_mutable\", mutable)\n self.autotune._set_mutable(mutable)\n self.experimental_distribute._set_mutable(mutable)\n self.experimental_optimization._set_mutable(mutable)\n self.threading._set_mutable(mutable)", "def set_instance(self, thing: type, value, overwrite=False):\n if thing in self.instances and not overwrite:\n raise DiayException(\"instance for %r already exists\" % thing)\n self.instances[thing] = value", "def get_shared_with_me(self):\n\n\t\treturn self.__shared_with_me", "def set_shared_instance(cls, db):\n cls._instance = db", "def __set__(self, instance, value):\n value = self.validate(value)\n super().__set__(instance, value)", "def set_shared_objects(self, shared_objects: Any = None) -> None:\n self.shared_objects = shared_objects", "def for_submissions(self, value):\n self.logger.warn(\n \"Setting values on for_submissions will NOT update the remote Canvas instance.\"\n )\n self._for_submissions = value", "def share_price(self, share_price):\n\n self._share_price = share_price", "def shared_to(self, shared_to):\n if shared_to is None:\n raise ValueError(\n \"Invalid value for `shared_to`, must not be `None`\"\n ) # noqa: E501\n\n self._shared_to = shared_to", "def shared_attributes(self):\n pass", "def SetMasterInstance(self, master_instance):\n self.master = master_instance.key\n self.put()", "def setValue(self, *args):\n return _coin.SoMFEngine_setValue(self, *args)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
The method to get the created_by_me
def get_created_by_me(self): return self.__created_by_me
[ "def _get_createdBy(self) -> \"adsk::core::Ptr< adsk::core::User >\" :\n return _core.DataFile__get_createdBy(self)", "def created_by_user_id(self) -> int:\n return pulumi.get(self, \"created_by_user_id\")", "def get_creator_id(self):\n\n\t\treturn self.__creator_id", "def set_created_by(self, creator):\n self['created_by'] = creator", "def GetCreatedBy(video: dict) -> str:\n creator = 'Unknown'\n if video:\n created_by = video.get('created_by')\n if created_by:\n ctype = created_by.get('type')\n if ctype=='api_key':\n creator = 'API'\n elif ctype=='user':\n creator = created_by.get('email')\n return creator", "def author_info(self):\n return User.objects.get(pk=self.user_id)", "def author_info(self):\n return User.objects.get(pk=self.author)", "def creators_message(self):\n created = self.get_edges() \\\n .get(API_EDGE_TYPE.CREATED_BY, {}) \\\n .values()[0]\n return created.message", "def get_commented_by(self):\n\n\t\treturn self.__commented_by", "def get_author(self):\n return self.user.first_name +\" \"+ self.user.last_name", "def me(self):\n return self.properties.get(\"Me\", SocialActor())", "def edited_by(self):\n return self._edited_by", "def get_single_user():", "def creator_of_dish(self):\n return self._creator_of_dish", "def _get_lastUpdatedBy(self) -> \"adsk::core::Ptr< adsk::core::User >\" :\n return _core.DataFile__get_lastUpdatedBy(self)", "def cli(ctx, user):\n return ctx.gi.users.get_user_creator(user)", "def _extract_creator_name(self):\n return self._get_child('creator').text", "def get_author(self):\n\t\treturn self._author", "def last_modified_by(self) -> \"str\":\n return self._attrs.get(\"last_modified_by\")" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
This function obfuscates the text passed into the text argument.
def obfuscate(text: str) -> str: if type(text) != str: # Here error checking is handled print('Please enter a string as an argument') return None lowertext = text.lower() text_array = list(lowertext.split()) cypher = {'a': 'b', 'b': 'c', 'c': 'd', 'd': 'e', 'e': 'f', 'f': 'g', 'g': 'h', 'h': 'i', 'i': 'j', 'j': 'k', 'k': 'l', 'l': 'm', 'm': 'n', 'n': 'o', 'o': 'p', 'p': 'q', 'q': 'r', 'r': 's', 's': 't', 't': 'u', 'u': 'v', 'v': 'w', 'w': 'x', 'x': 'y', 'y': 'z', 'z': 'a', 'A': 'B', 'B': 'C', 'C': 'D', 'D': 'E', 'E': 'F', 'F': 'G', 'G': 'H', 'H': 'I', 'I': 'J', 'J': 'K', 'K': 'L', 'L': 'M', 'M': 'N', 'N': 'O', 'O': 'P', 'P': 'Q', 'Q': 'R', 'R': 'S', 'S': 'T', 'T': 'U', 'U': 'V', 'V': 'W', 'W': 'X', 'X': 'Y', 'Y': 'Z', 'Z': 'A'} alphabet = 'abcdefghijklmnopqrstuvwxyz' upperalphabet = alphabet.capitalize() # Here the word 'and' is replaced with 'the' and 'the' with 'and' for index in range(len(text_array)): if text_array[index] == 'the': text_array[index] = 'and' elif text_array[index] == 'and': text_array[index] = 'the' # Here a string to handle the upper method is created newstring1 = str(''.join(text_array)) newstring2 = '' # Here every third value is turned into an uppercase value for i in range(1, len(newstring1)+1): if i % 3 == 0: newstring2 += newstring1[i-1].upper() else: newstring2 += newstring1[i-1].lower() newlist = newstring2.split() for i in range(1, len(newlist)): # Here every fifth word is reversed if i % 5 == 0 and i != 0: newlist[i-1] = newlist[i-1][::-1] for word in range(0, len(newlist)): if word % 2 == 0 and word != 0: charlist = list(newlist[word-1]) wordhold = '' for char in charlist: wordhold += cypher[char] newlist[word-1] = wordhold finalstring = ' '.join(newlist) return finalstring
[ "def preprocess(text):\n text = normalize_unicode(text)\n text = remove_newline(text)\n text = text.lower()\n text = decontracted(text)\n text = replace_negative(text)\n text = removePunctuations(text)\n text = remove_number(text)\n text = remove_space(text)\n text = removeArticlesAndPronouns(text)\n text = removeNLTKStopWords(text)\n #text = performStemming(text)\n return text", "def _fasttext_preprocess(text: str) -> str:\n return escape_line_delimited_text(text).lower()", "def applyCoder(text, coder):\n newtext=\"\"\n for i in range(len(text)):\n if text[i].isalpha():\n newtext+=coder[text[i]]\n else:\n newtext+=text[i]\n return newtext", "def preprocessing_text(self):\n print(' >>> Cleaning text...', end='', flush=True)\n self.text = regexp.sub(\" \", \"__\", self.text.lower(), flags=regexp.MULTILINE)\n self.text = \"_\" + regexp.sub(\"[^_a-z]\", \"\", self.text, flags=regexp.MULTILINE) + \"_\"\n print(ANSI.ok_green, 'OK !', ANSI.endc)", "def convert_to_hacker(text):\n\tnew_text = []\n\n\t#initial pass subsitutes 8 for ate.\n\tpattern = re.compile(r'ate')\n\ttext = pattern.sub('8', text) \n\n\t# regex that searches through the text to find instances of the letters \n # to be converted.\n\tpattern = re.compile(r'[eiols]|\\.')\n\n\n\t# converts all the letters\n\tfor w in text:\n\t\tif re.search(pattern, w):\n\t\t\tif w == 'e':\n\t\t\t\tw = '3'\n\t\t\telif w == 'i':\n\t\t\t\tw = '1'\n\t\t\telif w == 'o':\n\t\t\t\tw = '0'\n\t\t\telif w == 's':\n\t\t\t\tw = '5'\n\t\t\telif w == 'l':\n\t\t\t\tw = '|'\n\t\t\telif w == '.':\n\t\t\t\tw = '5w33t!'\n\t\tnew_text.extend(w)\n\tnew_text = ''.join(new_text)\n\n\t# regex searching for word initial s.\n\tpattern = re.compile(r'\\b5')\n\tnew_text = pattern.sub('$', new_text)\n\n\treturn new_text", "def text_transformations(self, text):\n txt = super(TextModel, self).text_transformations(text)\n return re.sub('~+', '~', txt)", "def pre_process(text):\n\n # Remove Markup\n soup = BeautifulSoup(text, features=\"html.parser\")\n\n for script in soup([\"script\", \"style\"]):\n script.extract()\n\n text = soup.get_text()\n\n # Remove reg. ex. 
/ punctuation, this also removes the hyphen in hyphenated words i.e freeze-dry -> freeze dry\n text = re.sub(r'[^\\w]', ' ', text)\n\n # Tokenize and transform into lower case\n text = word_tokenize(text)\n text = [w.lower() for w in text]\n\n # Remove stop words\n english_words = set(words.words())\n stop_words = set(stopwords.words('english'))\n newstopwords = ['tho', 'mr', 'tbe', '000']\n stop_words.update(newstopwords)\n filtered_text = [w for w in text if\n w.lower() in english_words and w.lower() not in stop_words and w not in string.punctuation and len(\n w) > 2]\n # Lemmatisation\n lemmatizer = WordNetLemmatizer()\n lemmatized_text = ' '.join(lemmatizer.lemmatize(token) for token in filtered_text)\n lemmatized_tokenized_text = word_tokenize(lemmatized_text)\n\n return lemmatized_tokenized_text", "def desmilify(text):\n emoticons = emot.emoticons(text)\n if type(emoticons) == dict:\n for mean,value in zip(emoticons.get('mean'),emoticons.get('value')):\n text = text.replace(value,':%s:'%'_'.join(mean.split()))\n return text", "def clean_text(text):\n text = text.lower()\n text = re.sub('\\[.*?\\]', '', text)\n text = re.sub('https?://\\S+|www\\.\\S+', '', text)\n text = re.sub('<.*?>+', '', text)\n text = re.sub('[%s]' % re.escape(string.punctuation), '', text)\n text = re.sub('\\n', '', text)\n text = re.sub('\\w*\\d\\w*', '', text)\n return text", "def prepare_for_tokenization(self, text, **kwargs):\n return text", "def anonymize(text, analyze_results):\n\n res = anonymizer_engine().anonymize(text, analyze_results)\n return res.text", "def _ignore_somecode(text):\n text = re.sub('\\r', '', text)\n text = re.sub('\\f', '', text)\n text = re.sub('\\0', '', text)\n return text", "def clean_text(text):\n text = re.sub(r\"http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\\(\\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+\", \"URL\", text) # Replace urls with special token\n text = text.replace(\"\\'s\", \"\")\n text = text.replace(\"\\'\", \"\")\n text = text.replace(\"n\\'t\", \" n\\'t\")\n text = text.replace(\"@\", \"\")\n text = text.replace(\"#\", \"\")\n text = text.replace(\"_\", \" \")\n text = text.replace(\"-\", \" \")\n text = text.replace(\"&amp;\", \"\")\n text = text.replace(\"&gt;\", \"\")\n text = text.replace(\"\\\"\", \"\")\n text = text.replace(\".\", \"\")\n text = text.replace(\",\", \"\")\n text = text.replace(\"(\", \"\")\n text = text.replace(\")\", \"\")\n text = ' '.join(text.split())\n return text.strip()", "def preprocess_text(text):\n text = re.sub(r'[^A-Za-z0-9\\.]+', ' ', text).strip()\n text = re.sub(r'\\.\\s', ' ', text)\n return text.lower()", "def cleanup_zwnj(self, text):\r\n\t\treturn self._cleanup_zwnj_pattern.sub(' ', text)", "def clean_text(text):\n \n text = remove_html(text)\n text = remove_hyperlinks(text)\n text = map_punctuation_to_space(text)\n text = regularize_spacing(text)\n # TODO: text.strip\n # TODO: stopwords\n \n return text.lower()", "def transform_text(raw_text, operation, tool):\n if(operation == 'E'):\n if (tool == 'C'):\n return encrypt_caesar(raw_text)\n elif(tool == 'V'):\n return encrypt_vigenere(raw_text, get_keyword())\n elif(tool == 'R'):\n return encrypt_railfence(raw_text, get_nrails())\n elif(operation == 'D'):\n if (tool == 'C'):\n return decrypt_caesar(raw_text)\n elif(tool == 'V'):\n return decrypt_vigenere(raw_text, get_keyword())\n elif(tool == 'R'):\n return decrypt_railfence(raw_text, get_nrails())\n else:\n print(\"We're sorry, an error has occurred\")", "def FilterInput(self, text):\n return text", "def clean_text(text):\n\n # 
Lowercase\n text = text.lower()\n # Remove punctuation\n translator = str.maketrans('', '', string.punctuation)\n text = text.translate(translator)\n # Tokenize\n text = word_tokenize(text)\n return text" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Returns all tasks from the db in a list
def get_all_tasks(): task_list = Task.objects.all().values("name") tasks = list(task_list) task_list = [task["name"] for task in tasks] return task_list
[ "def get_tasks(db, user_id = None):\n\tc = db.cursor()\n\tif user_id:\n\t\tquery = \"\"\"SELECT * FROM Tasks WHERE user_id = ?\"\"\"\n\t\tc.execute(query, (user_id, ))\t\n\telse:\n\t\tquery = \"\"\"SELECT * FROM Tasks\"\"\"\n\t\tc.execute(query)\n\ttasks = []\n\trows = c.fetchall()\n\tfor row in rows:\n\t\tfields = [\"id\", \"user_id\", \"title\", \"created_at\", \"completed_at\"]\n\t\tvalue = dict(zip(fields, row))\n\t\ttasks.append(value)\n\treturn tasks", "def select_all_tasks(connection):\n cur = connection.cursor()\n cur.execute(\"SELECT * FROM tasks\")\n\n rows = cur.fetchall()\n\n for row in rows:\n print(row)", "def task_list(self):\n return self._task_list", "def load_tasks_from_database(self):\n database = self.get_database()\n\n tasks = database.get_task_list()\n task_objs = []\n queued = []\n for row in tasks:\n #print(dict(zip(row.keys(), list(row))))#; sys.exit()\n try:\n atask = self.new_task(url=row[\"origin\"])\n self.attach_task(atask)\n for key in row.keys():\n if hasattr(atask, key):\n setattr(atask, key, row[key])\n\n #for k in row.keys(): print(row[k])\n if atask.status in [TaskStatus.Queue, TaskStatus.Start]:\n queued.append(atask)\n\n task_objs.append(atask)\n except TaskError as e:\n log.w(str(e))\n if queued:\n self.queue_tasks(queued)\n return task_objs", "def get_tasks(self, query: domain.Query) -> list:\n tasks = {}\n\n sql, values = self._filter( # fetch task data\n query,\n self._T_TSK,\n job_id=True,\n layer_id=True,\n task_id=True,\n state=True,\n key=True,\n user_id=True,\n )\n self._cur.execute(sql, values)\n for row in self._cur.fetchall():\n data = {k[k.index(\"_\") + 1:]: v for k, v in row.items()}\n tasks[data[\"task_id\"]] = data\n\n records = collections.defaultdict(list)\n sql, values = self._filter( # fetch record data\n query,\n self._T_REC,\n task_id=True,\n )\n self._cur.execute(sql, values)\n for row in self._cur.fetchall():\n data = {k[k.index(\"_\") + 1:]: v for k, v in row.items()}\n records[data[\"task_id\"]].append(data)\n\n results = []\n for task_id, task_data in tasks.items():\n t = domain.Task.decode(task_data)\n t.result = str(bytes(t.result), encoding=\"utf8\") if t.result else None\n t.records = records.get(task_id, [])\n results.append(t)\n\n return results", "def get_tasks():\n return render_template(\"tasks.html\", tasks=mongo.db.tasks.find())", "def list(config):\n store = api_todo.Todo()\n #tasks = api_sort(store.ls())\n tasks = store.ls()\n headers = ['id', 'Priority', 'done', 'description']\n data = []\n for el in tasks:\n identifier, content, _, _, active, priority = el\n data.append([identifier, priority, \"\" if active else \"X\", content])\n console.show_table(data, headers, 'tasks')", "def tasks(self, **opts):\n opts['channel_id'] = self.id\n qopts = {'order': 'priority,create_time'}\n return self.connection.listTasks(opts, qopts)", "def list_(\n ctx: typer.Context, project: Annotated[Optional[str], typer.Option(\"--list\")] = None\n) -> None:\n project = project_set(project, ctx.obj[\"project\"])\n\n try:\n task_list = TaskList(project)\n tasks = [t.title for t in task_list.tasks()] # type: ignore\n except ListNotFoundException:\n print(f\":x: List '{project}' not found\")\n raise typer.Exit(code=1)\n\n if not tasks:\n print(\":yawning_face: List empty.\")\n else:\n table = Table(title=\"Tasks\", show_header=False)\n\n for index, task in enumerate(tasks):\n table.add_row(str(index), task)\n\n Console().print(table)", "def get_task_tests(task_id):\n test_list = Test.objects.filter(task_id=task_id)\n return test_list", 
"def load_tasks(self, *args):\n self.tasks_list = []\n self.tasks_list = Task.select().order_by(Task.date)", "def get_tasks(collection: Collection) -> List[Task]:\n tasks: List[Task] = []\n for notion_task in collection.get_rows():\n properties: Dict[str, str] = notion_task.get_all_properties()\n\n id = notion_task.id\n title = notion_task.title\n task: Task = decode_dsl(id, title, properties)\n\n tasks.append(task)\n\n return tasks", "def tasks(**_):\n for task in filter(bool, get_all_tasks()):\n print(task)", "def get_all(cls):\r\n\r\n from . import server\r\n\r\n tasks = []\r\n servers = server.Server.find(enabled = True)\r\n for _server in servers:\r\n timeout = _server.val(\"timeout\", DEFAULT_TIMEOUT)\r\n task = Task(_server, timeout)\r\n tasks.append(task)\r\n\r\n return tasks + list(TASKS)", "def get_tasks(self):\n ret = self.tasks.values()\n return ret", "def get_defined_tasks():\n return _task_list_cache.data", "def list_tasks():\n REGISTERED_TASKS, AVAILABLE_QUEUES = update_tasks()\n #print REGISTERED_TASKS, AVAILABLE_QUEUES\n REGISTERED_TASKS = [task for task in list(REGISTERED_TASKS) if task[0:6] != \"celery\"]\n AVAILABLE_QUEUES = list(AVAILABLE_QUEUES)\n REGISTERED_TASKS.sort()\n AVAILABLE_QUEUES.sort()\n return {\"available_tasks\": REGISTERED_TASKS, \"available_queues\": AVAILABLE_QUEUES}", "def get_all_todos():\n return Todo.query.all()", "def mongo_get_tasks(job_id):\n key = dict(job_id=job_id)\n response = list(mongo.db.tasks.find(key))\n return response" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Returns all file ids from the db in a list
def get_all_file_ids(): id_list = Score.objects.all().values("file_id") return id_list
[ "def extract_file_ids(file_path):\n with open(file_path, \"r\") as f:\n content = f.read()\n\n ids = ID_RE.finditer(content)\n return [id.group(1) for id in ids]", "def Retrieve_NP_IDs(sqlite_file,table_name,id_column = \"structure_id\"):\n # Connecting to the database file\n conn, c = Sql.Connect(sqlite_file)\n\n # Adding the NP_IDs to a list\n NPDB_ID_List = []\n for ID in c.execute(f'SELECT {id_column} from {table_name}'):\n NPDB_ID_List.append(ID)\n\n # Committing changes and closing the connection to the database file\n Sql.Close(conn)\n return NPDB_ID_List", "def file_ids(self):\n return self._file_ids", "def read_id_list(self, filename):\n with open(filename, encoding=\"utf8\") as infile:\n ids = []\n for line in infile:\n ids.append(line.strip())\n return ids", "def get_id_from_file_list(self, file_list):\n return [get_dir_delimiter_by_os().\n join(f.split(get_dir_delimiter_by_os())[-2:]).split('.')[0] for f in file_list]", "def get_by_ids(cls, id_list):\n id_list = id_list[:20] # shorten the list to 20\n request_items = {\n 'Files': {\n 'Keys': []\n }\n }\n for id_item in id_list:\n request_items['Files']['Keys'].append({\n 'id': {\n 'S': id_item\n }\n })\n response = DDB_CLIENT.batch_get_item(RequestItems=request_items)\n get_data = []\n for file in response['Responses']['Files']:\n get_data.append(cls.deserialize(file))\n return get_data", "def get_ids():", "def loadIDs() -> List[str]:\n # Get information about batch actions\n id_file = requestUntilSuccess(\n 'Path to file to MongoIDs: ',\n 'Invalid file',\n lambda x: x is not None and os.path.isfile(x) and os.access(x, os.R_OK)\n )\n\n ids = []\n with open(id_file, 'r') as f:\n rows = f.readlines()\n for index, r in enumerate(rows):\n index = index + 1\n r = r.strip()\n if ObjectId.is_valid(r):\n ids.append(r)\n else:\n _print('{2} is not a valid ObjectID'.format(\n r), 1, index, len(rows))\n # remove duplicates\n ids = list(set(ids))\n return ids", "def getAllDropboxIDs(): # @NoSelf", "def get_track_IDs():\n ids = []\n for _, dirnames, files in os.walk('../AI/fma_small'):\n if dirnames == []:\n ids.extend(int(file[:-4]) for file in files)\n return ids", "def getAllIDs(directory_path_of_csv_files):\n directory = os.fsencode(directory_path_of_csv_files)\n big_set = set()\n for csv_file in os.listdir(directory):\n csv_filename = os.fsdecode(csv_file)\n file_path = directory_path_of_csv_files + \"/\" + csv_filename\n big_set.update(existingIDs(file_path))\n return big_set", "def get_ids(keyword: str, limit: int,\n dbfile: str) -> Iterable[Dict[str, str]]:\n query = _FETCH_ID_QUERY.replace(\"{{keyword}}\", keyword).replace(\n \"{{limit}}\", str(limit))\n return sqlite.query(dbfile, query)", "def get_image_uuids(ibs, gid_list):\n image_uuid_list = ibs.db.get(IMAGE_TABLE, ('image_uuid',), gid_list)\n return image_uuid_list", "def get_all_ids(coll):\n mpid_list = []\n all_docs = coll.find()\n for d in all_docs:\n mpid_list.append(d[\"material_id\"])\n return mpid_list", "def get_recids_from_recid_paths(paths: List[str], db=None) -> set():\n return get_recids_from_paths(paths, get_recids_from_recid_file)", "def fileids(self, documents: Optional[Any] = ...):\n ...", "def get_image_eids(ibs, gid_list):\n # FIXME: MAKE SQL-METHOD FOR NON-ROWID GETTERS\n colnames = ('encounter_rowid',)\n eids_list = ibs.db.get(EG_RELATION_TABLE, colnames, gid_list,\n id_colname='image_rowid', unpack_scalars=False)\n return eids_list", "def all_associated_filepath_ids(self):\n with TRN:\n sql = \"\"\"SELECT filepath_id\n FROM qiita.filepath\n JOIN 
qiita.analysis_filepath USING (filepath_id)\n WHERE analysis_id = %s\"\"\"\n TRN.add(sql, [self._id])\n filepaths = set(TRN.execute_fetchflatten())\n\n sql = \"\"\"SELECT filepath_id\n FROM qiita.analysis_job\n JOIN qiita.job USING (job_id)\n JOIN qiita.job_results_filepath USING (job_id)\n JOIN qiita.filepath USING (filepath_id)\n WHERE analysis_id = %s\"\"\"\n TRN.add(sql, [self._id])\n return filepaths.union(TRN.execute_fetchflatten())", "def get_image_aids(ibs, gid_list):\n # print('gid_list = %r' % (gid_list,))\n # FIXME: MAKE SQL-METHOD FOR NON-ROWID GETTERS\n colnames = ('annot_rowid',)\n aids_list = ibs.db.get(ANNOTATION_TABLE, colnames, gid_list,\n id_colname='image_rowid', unpack_scalars=False)\n #print('aids_list = %r' % (aids_list,))\n return aids_list" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Takes task_id and returns all tests for this task
def get_task_tests(task_id): test_list = Test.objects.filter(task_id=task_id) return test_list
[ "def test_get_tasks_for_project(self):\n pass", "def test_get_subtasks_for_task(self):\n pass", "def test_get_task_instances(self):\n pass", "def test_get_tasks_for_section(self):\n pass", "def test_get_tasks_for_user_task_list(self):\n pass", "def test_get_tasks_for_tag(self):\n pass", "def test_get_task_status(self):\n pass", "def test_get_time_tracking_entries_for_task(self):\n pass", "def get_results(self):\n for t in self.task:\n print t.get()", "def fixture_retrieval_task_id() -> int:\n return 4321", "def test_get_task_instances_batch(self):\n pass", "def get_task(self, task_name):", "def test_create_subtask_for_task(self):\n pass", "def tasks(**_):\n for task in filter(bool, get_all_tasks()):\n print(task)", "def test_terminal_v1_tasks_list(self):\n pass", "def testTaskOutput(self):\n class DummyTask(Task):\n pass\n Task.register(\"dummy\", DummyTask)\n\n dummyTask = Task.create('dummy')\n crawlers = FsCrawler.createFromPath(BaseTestCase.dataTestsDirectory()).glob(['mov'])\n targetPaths = []\n for crawler in crawlers:\n target = '{}_target.mov'.format(crawler.var('name'))\n targetPath = os.path.join(BaseTestCase.dataTestsDirectory(), target)\n targetPaths.append(targetPath)\n crawler.setVar('contextVarTest', 1, True)\n dummyTask.add(crawler, targetPath)\n result = dummyTask.output()\n self.assertEqual(len(result), len(crawlers))\n self.assertCountEqual(\n map(lambda x: x.var('filePath'), result),\n targetPaths\n )\n self.assertEqual(\n list(map(lambda x: x.var('contextVarTest'), result)),\n [1]*len(crawlers)\n )\n for crawler in result:\n self.assertIn('contextVarTest', crawler.contextVarNames())", "def execute(self, tests_by_task: Dict[str, TaskInfo]) -> None:\n raise NotImplementedError()", "def test_api_v3_stories_story_public_id_tasks_task_public_id_get(self):\n pass", "def test_get_tasks(self):\n headers = { \n 'Accept': 'application/json',\n }\n response = self.client.open(\n '/api/v1/dags/{dag_id}/tasks'.format(dag_id='dag_id_example'),\n method='GET',\n headers=headers)\n self.assert200(response,\n 'Response body is : ' + response.data.decode('utf-8'))" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Create a subframe with outline, using QGridLayout.
def create_sub_frame(self):
    frame = QtWidgets.QFrame(parent=self)
    frame.setLineWidth(1)
    frame.setMidLineWidth(1)
    frame.setFrameStyle(QtWidgets.QFrame.Box | QtWidgets.QFrame.Raised)
    lay = QtWidgets.QGridLayout(frame)
    lay.setContentsMargins(0, 0, 0, 0)
    frame.setSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Minimum)
    return frame, lay
[ "def _configureFrame(self, layout):\n\n layout.addWidget(self._leftFrame, 0, 0, 1, 1)\n layout.addWidget(self._rightFrame, 0, 1, 1, 1)\n layout.setColumnStretch(0, 3)\n layout.setColumnStretch(1, 2)\n self.setLayout(layout)\n self.adjustSize()", "def create_fig(self):\n figure_panel = Tk.Frame(self, width='6i', height='5.5i')\n figure_panel.grid(row=0, column=0, rowspan=3)\n self.fig = Figure()\n self.canvas = FigureCanvasTkAgg(self.fig, master=figure_panel)\n self.canvas.get_tk_widget().pack(side=Tk.TOP, fill=Tk.BOTH, expand=1)\n NavigationToolbar2TkAgg(self.canvas, figure_panel)\n self.canvas.show()", "def _make_grid_in_frame(self, label):\n frame = Gtk.Frame()\n frame.set_label(label)\n self.get_content_area().add(frame)\n\n pairgrid = _PairGrid()\n frame.add(pairgrid)\n return pairgrid", "def create_background(self):\n # title and resize the window\n self.root.title(WINDOW_TITLE)\n self.root.geometry('{0}x{1}'.format(WINDOW_WIDTH, WINDOW_HEIGHT))\n\n # create buttons\n self.create_buttons()\n\n # add the initial clothes onto the screen\n self.tops_frame.pack(fill=tk.BOTH, expand=tk.YES)\n self.bottoms_frame.pack(fill=tk.BOTH, expand=tk.YES)\n self.shoe_frame.pack(fill=tk.BOTH, expand=tk.YES)", "def _doFrame(self, width=600, height=600):\n\n if 'uses_sidepane' in self.capabilities:\n self.table = self.parent.getCurrentTable()\n self.mainwin = Frame(self.table.parentframe)\n self.mainwin.grid(row=6,column=0,columnspan=4,sticky='news')\n else:\n self.mainwin = Toplevel()\n self.mainwin.title('Plugin')\n self.mainwin.geometry('%dx%d+200+200' %(width, height))\n\n self.mainwin.bind(\"<Destroy>\", self.quit)\n self.ID=self.menuentry\n return", "def _create_board(self):\n size = 50\n margin = 1\n\n for row in range(9):\n cell_row = []\n for column in range(9):\n cell = QPushButton(\"\", self)\n\n tmp_x = 0\n tmp_y = 0\n\n if column >= 3:\n tmp_x += 2\n\n if column >= 6:\n tmp_x += 2\n\n if row >= 3:\n tmp_y += 2\n\n if row >= 6:\n tmp_y += 2\n\n cell.setFont(QFont(\"Callibri\", 28))\n cell.resize(size, size)\n cell.move((size + margin) * column + tmp_x, (size + margin) * row + tmp_y)\n cell.setStyleSheet(\"background-color: white; border: 1px solid black;\")\n cell_row.append(cell)\n\n self.cells.append(cell_row)", "def create_grid(self):\n\n self.__buttons_matrix = []\n self.__label_matrix = []\n\n for y in range(self.__y):\n row = Frame(master=self.__grid_frame)\n\n buttons_row = []\n label_row = []\n for x in range(self.__x):\n self.__bg = Frame(master=row, width=5, height=5)\n self.__bg.pack(pady=0, padx=0, side=LEFT)\n\n self.__num = Label(master=self.__bg,\n text=\"\")\n\n if self.__data[y][x] is not 0:\n self.__num.config(text=self.__data[y][x])\n\n self.b = Button(master=self.__num,\n width=2,\n height=1,\n command=lambda row_i=x, column_i=y:\n self.select_button(y=column_i, x=row_i))\n\n self.b.pack(fill=BOTH)\n self.__num.pack()\n\n label_row.append(self.__num)\n buttons_row.append(self.b)\n\n row.pack()\n self.__label_matrix.append(label_row)\n self.__buttons_matrix.append(buttons_row)", "def createLayout(self):\n\t\t# If there are no divies then add an endCol (Creates a vertical layout)\n\t\tif not self.divies:\n\t\t\tself.endCol()\n\n\t\t# Create new widget to hold grid layout\n\t\tself.gridLayout = QtWidgets.QGridLayout()\n\n\t\t# Establish variables for grid layout manager\n\t\twidgetPtr\t= 0\t# Pointer to widget in list you are at\n\t\trow \t\t= 0 # Pointer to the row in the grid you are currently at\n\t\tcol \t\t= 0 # Pointer to the col in the grid you are currently 
at\n\t\tnextRow\t\t= 0 # Row where a endCol() call would reference you to\n\t\tnextCol \t= 0 # Col where a endRow() call would reference you to\n\t\tnewRow \t\t= 0 # Row where a newCol() call would reference you to\n\t\tnewCol \t\t= 0 # Col where a newRow() call would reference you to\n\t\trowSize\t\t= 0\n\t\tcolSize\t\t= 0\n\n\t\t# DEBUG MESSAGES\n\t\t# print('R:{} C:{}'.format(str(row),str(col)))\n\t\t# print('nextRow: {} nextCol: {}'.format(str(nextRow),str(nextCol)))\n\t\t# print('newRow: {} newCol: {}'.format(str(newRow),str(newCol)))\n\n\t\t# Begin loop for layout manager\n\t\tfor divie in self.divies:\n\n\t\t\t# assign row/col indices\n\t\t\trow = nextRow\n\t\t\tcol = nextCol\n\n\t\t\t# End Row will start grid layout on next available Row\n\t\t\tif divie.type == 'endRow':\n\n\t\t\t\tif divie.size == 0:\n\t\t\t\t\tpass\n\n\t\t\t\telse:\n\t\t\t\t\t# DEBUG MESSAGES\n\t\t\t\t\t# print('\\nendRow has size: ' + str(divie.size))\n\n\t\t\t\t\t# Will loop over all widgets in range from first available widget to dim of divie\n\t\t\t\t\tfor index,widget in enumerate(self.widgets[widgetPtr:widgetPtr+divie.size]):\n\t\t\t\t\t\t# label exists then \n\t\t\t\t\t\tif self.labels[index+widgetPtr]:\n\t\t\t\t\t\t\t# Construct mini Vertical Layout to put label and widget in same grid\n\t\t\t\t\t\t\tminiVLayout = QtWidgets.QVBoxLayout()\n\t\t\t\t\t\t\tminiVLayout.addWidget(self.labels[index+widgetPtr],alignment=QtCore.Qt.AlignCenter)\n\t\t\t\t\t\t\tminiVLayout.addWidget(widget,alignment=QtCore.Qt.AlignCenter)\n\t\t\t\t\t\t\t# Add newly created vertical layout to grid layout\n\t\t\t\t\t\t\tself.gridLayout.addLayout(miniVLayout,row,col,widget.dim[0],widget.dim[1],alignment=QtCore.Qt.AlignCenter)\n\n\t\t\t\t\t\telse:\n\t\t\t\t\t\t\t# Widgets that don't have any labels\n\t\t\t\t\t\t\tself.gridLayout.addWidget(widget,row,col,widget.dim[0],widget.dim[1],alignment=QtCore.Qt.AlignCenter)\n\n\t\t\t\t\t\t# DEBUG MESSAGES\n\t\t\t\t\t\t# print('R:{} C:{}'.format(str(row),str(col)))\n\n\t\t\t\t\t\t# Update row Row and row Col (should just increment col)\n\t\t\t\t\t\tcol += widget.dim[1]\n\t\t\t\t\t\trowSize = widget.dim[0] if widget.dim[0] > rowSize else rowSize\n\n\t\t\t\t\t# Update row Row and row Col (should just increment row and set col to abs col)\n\t\t\t\t\twidgetPtr += divie.size\n\t\t\t\t\tnextRow += rowSize\n\t\t\t\t\trow += rowSize\n\t\t\t\t\tnewCol = col if col > newCol else newCol\n\t\t\t\t\tnewRow = row if row > newRow else newRow\n\n\t\t\t\t\t# DEBUG MESSAGES\n\t\t\t\t\t# print('nextRow: {} nextCol: {}'.format(str(nextRow),str(nextCol)))\n\t\t\t\t\t# print('newRow: {} newCol: {}'.format(str(newRow),str(newCol)))\n\n\n\t\t\t# End Col will start grid layout on next available col\n\t\t\telif divie.type == 'endCol':\n\n\t\t\t\tif divie.size == 0:\n\t\t\t\t\tpass\n\n\t\t\t\telse:\n\t\t\t\t\t# DEBUG MESSAGES\n\t\t\t\t\t# print('\\nendCol has size: ' + str(divie.size))\n\n\t\t\t\t\t# Will loop over all widgets in range from first available widget to dim of divie\n\t\t\t\t\tfor index,widget in enumerate(self.widgets[widgetPtr:widgetPtr+divie.size]):\n\t\t\t\t\t\t# label exists then \n\t\t\t\t\t\tif self.labels[index+widgetPtr]:\n\t\t\t\t\t\t\t# Construct mini Vertical Layout to put label and widget in same grid\n\t\t\t\t\t\t\tminiVLayout = QtWidgets.QVBoxLayout()\n\t\t\t\t\t\t\tminiVLayout.addWidget(self.labels[index+widgetPtr],alignment=QtCore.Qt.AlignCenter)\n\t\t\t\t\t\t\tminiVLayout.addWidget(widget,alignment=QtCore.Qt.AlignCenter)\n\t\t\t\t\t\t\t# Add newly created vertical layout to grid 
layout\n\t\t\t\t\t\t\tself.gridLayout.addLayout(miniVLayout,row,col,widget.dim[0],widget.dim[1],alignment=QtCore.Qt.AlignCenter)\n\n\t\t\t\t\t\telse:\n\t\t\t\t\t\t\t# Widgets that don't have any labels\n\t\t\t\t\t\t\tself.gridLayout.addWidget(widget,row,col,widget.dim[0],widget.dim[1],alignment=QtCore.Qt.AlignCenter)\n\n\t\t\t\t\t\t# DEBUG MESSAGES\n\t\t\t\t\t\t# print('R:{} C:{}'.format(str(row),str(col)))\n\n\t\t\t\t\t\t# Update row Row and row Col (should just increment col)\n\t\t\t\t\t\trow += widget.dim[0]\n\t\t\t\t\t\tcolSize = widget.dim[1] if widget.dim[1] > colSize else colSize\n\n\t\t\t\t\t# Update row Row and row Col (should just increment row and set col to abs col)\n\t\t\t\t\twidgetPtr += divie.size\n\t\t\t\t\tnextCol += colSize\n\t\t\t\t\tcol += colSize\n\t\t\t\t\tnewRow = row if row > newRow else newRow\n\t\t\t\t\tnewCol = col if col > newCol else newCol\n\n\t\t\t\t\t# DEBUG MESSAGES\n\t\t\t\t\t# print('nextRow: {} nextCol: {}'.format(str(nextRow),str(nextCol)))\n\t\t\t\t\t# print('newRow: {} newCol: {}'.format(str(newRow),str(newCol)))\n\n\n\t\t\t# Adjust grid layout to start at new row\n\t\t\telif divie.type == 'newRow':\n\t\t\t\tnextCol = 0\n\t\t\t\tnextRow = newRow\n\n\t\t\t\t# DEBUG MESSAGES\n\t\t\t\t# print('\\nR:{} C:{}'.format(str(row),str(col)))\n\t\t\t\t# print('\\tnextRow: {} nextCol: {}'.format(str(nextRow),str(nextCol)))\n\t\t\t\t# print('\\tnewRow: {} newCol: {}'.format(str(newRow),str(newCol)))\n\n\n\t\t\t# Adjust grid layout to start at new col\n\t\t\telif divie.type == 'newCol':\n\t\t\t\tnextRow = 0\n\t\t\t\tnextCol = newCol\n\n\t\t\t\t# DEBUG MESSAGES\n\t\t\t\t# print('\\nR:{} C:{}'.format(str(row),str(col)))\n\t\t\t\t# print('\\tnextRow: {} nextCol: {}'.format(str(nextRow),str(nextCol)))\n\t\t\t\t# print('\\tnewRow: {} newCol: {}'.format(str(newRow),str(newCol)))\n\n\n\t\t\t# Puts current grid layout onto tab and starts on new tab\n\t\t\telif divie.type == 'makeTab':\n\t\t\t\tif not self.mini:\n\t\t\t\t\t# Create a new widget and current layout\n\t\t\t\t\ttab = QtWidgets.QWidget()\n\t\t\t\t\ttab.setLayout(self.gridLayout)\n\t\t\t\t\t# Add widget as a tab\n\t\t\t\t\tself.tabs.addTab(tab,divie.name)\n\n\t\t\t\t\t# Grid Layout must be reset\n\t\t\t\t\tself.gridLayout = QtWidgets.QGridLayout()\n\t\t\t\t\trow=col=nextRow=nextCol=newRow=newCol=rowSize=colSize = 0\n\t\t\t\t\n\t\t\t\telse:\n\t\t\t\t\t# Create a new mini widget and current layout\n\t\t\t\t\tminiTab = QtWidgets.QWidget()\n\t\t\t\t\tminiTab.setLayout(self.gridLayout)\n\t\t\t\t\t# Add widget as a tab\n\t\t\t\t\tself.miniTabs.addTab(miniTab,divie.name)\n\n\t\t\t\t\t# Grid Layout must be reset\n\t\t\t\t\tself.gridLayout = QtWidgets.QGridLayout()\n\t\t\t\t\trow=col=nextRow=nextCol=newRow=newCol=rowSize=colSize = 0\n\n\n\t\t\t# Creates a new group box. 
The widgets added after this call will be put into box\n\t\t\telif divie.type == 'startGroup':\n\t\t\t\t# Set group name\n\t\t\t\tself.groupBox.setTitle(divie.name)\n\t\t\t\t# Store all index variables for later use\n\t\t\t\tself.masterGridLayout\t= self.gridLayout\n\t\t\t\tself.masterNextRow \t\t= nextRow\n\t\t\t\tself.masterNextCol \t\t= nextCol\n\t\t\t\tself.masterNewRow \t\t= newRow\n\t\t\t\tself.masterNewCol \t\t= newCol\n\t\t\t\tself.mini \t\t\t\t= True\n\n\t\t\t\t# Reset grid layout for mini grid layout\n\t\t\t\trow=col=nextRow=nextCol=newRow=newCol=rowSize=colSize = 0\n\t\t\t\tself.gridLayout = QtWidgets.QGridLayout() \n\t\t\t\tself.groupBox.setObjectName('groupBox')\n\t\t\t\tself.groupBox.setStyleSheet(\n \t\t\t\t'QGroupBox#groupBox {border: 2px solid gray;' + \n \t\t\t\t'border-radius: 3px; padding: 10px;}' \n\t\t\t\t)\n\n\n\t\t\t# Ends the current group box.\n\t\t\telif divie.type == 'endGroup':\n\t\t\t\t# DEBUG MESSAGES\n\t\t\t\t# print('\\tnextRow: {} nextCol: {}'.format(str(self.masterNextRow),str(self.masterNextCol)))\n\t\t\t\t# print('\\tnewRow: {} newCol: {}'.format(str(self.masterNewRow),str(self.masterNewCol)))\n\n\t\t\t\t# Sets mini grid layout to the groupBox\n\t\t\t\tif self.miniTabs.count():\n\t\t\t\t\tself.gridLayout.addWidget(self.miniTabs)\n\t\t\t\t\tself.miniTabs\t= QtWidgets.QTabWidget()\n\n\t\t\t\tself.groupBox.setLayout(self.gridLayout)\n\n\t\t\t\t# Now place groupbox in the grid\n\t\t\t\tif not divie.size:\n\t\t\t\t\tself.masterGridLayout.addWidget(self.groupBox,self.masterNextRow,self.masterNextCol,self.gridLayout.rowCount(),self.gridLayout.columnCount())\n\t\t\t\t\tnextCol = self.masterNextCol + self.gridLayout.columnCount()\n\t\t\t\t\tnewRow = self.gridLayout.rowCount() + self.masterNextRow if self.gridLayout.rowCount() + self.masterNextRow > self.masterNewRow else self.masterNewRow\n\n\t\t\t\telse:\n\t\t\t\t\tself.masterGridLayout.addWidget(self.groupBox,self.masterNextRow,self.masterNextCol,divie.size[0],divie.size[1])\n\t\t\t\t\tnextCol = self.masterNextCol + divie.size[1]\n\t\t\t\t\tnewRow = divie.size[0] + self.masterNextRow if divie.size[0] + self.masterNextRow > self.masterNewRow else self.masterNewRow\n\n\t\t\t\t# Update necessary index values\n\t\t\t\tnewCol = nextCol if nextCol > self.masterNewCol else self.masterNewCol\n\n\t\t\t\tif not nextCol >= newCol:\n\t\t\t\t\tnextRow = self.masterNextRow\n\n\t\t\t\t# Resets grid layout to what it was before group box\n\t\t\t\tself.gridLayout = self.masterGridLayout\n\t\t\t\tself.groupBox \t= QtWidgets.QGroupBox()\n\t\t\t\tself.mini \t\t= False\n\n\t\t\t\t# DEBUG MESSAGES\n\t\t\t\t# print('\\tnextRow: {} nextCol: {}'.format(str(nextRow),str(nextCol)))\n\t\t\t\t# print('\\tnewRow: {} newCol: {}'.format(str(newRow),str(newCol)))\n\n\n\t\t\t# Corner Case Fix\n\t\t\tif nextCol >= newCol:\n\t\t\t\tnextRow = 0\n\t\t\t\tnewCol = nextCol\n\n\t\t\tif nextRow >= newRow:\n\t\t\t\tnextCol = 0\n\t\t\t\tnewRow = nextRow\n\n\n\t\t# Set the layout that you just created\n\t\tif self.tabs.count():\n\t\t\tself.setCentralWidget(self.tabs)\n\t\telse:\n\t\t\tself.centralWidget.setLayout(self.gridLayout)", "def __make_helper_frame__( self, parent, ):\r\n a_frame = Frame( parent, width=600, height=20, bg =\"gray\", relief=RAISED, borderwidth=1 )\r\n\r\n lrow = 0\r\n lcol = 0\r\n a_label = Label( a_frame, text = \"auto\", relief = RAISED, width = 100, ) # wraplength = 90 )\r\n a_label.grid( row = lrow, column = lcol, columnspan=10, sticky=E + W + N + S ) # sticky=W+E+N+S ) # relief = RAISED)\r\n\r\n self.show_dict[ \"helper_info\" 
] = a_label # if not there then houston we have a prblem\r\n #self.helper_label = a_label # only helper writes to it\r\n\r\n return a_frame", "def construct_notebook(self):\n panel = Panel(self)\n self.notebook = Notebook(panel, style=NB_LEFT)\n self.construct_tabs()\n sizer = BoxSizer(HORIZONTAL)\n sizer.Add(self.notebook, 1, EXPAND)\n panel.SetSizer(sizer)", "def subfigure(self, pos, size, **extra):\n gs = gridspec.GridSpec(1, 1)\n gs.update(left=self.w(pos[0]), right=self.w(pos[0] + size[0]),\n\t top=self.h(pos[1] + (-1)**(self.yinv) * size[1]), bottom=self.h(pos[1]))\n ax = self.fig.add_subplot(gs[0], **extra)\n self.current_figure = ax\n self.subfigures[ax] = { 'pos' : pos, 'width' : float(size[0]), 'height' : float(size[1]) } \n self.current_figure_pos = pos\n self.current_figure_width = float(size[0])\n self.current_figure_height = float(size[1])\n return ax", "def _set_frame(self):\n rectangle(self._canvas, self._plot_x0, self._plot_y0, self._plot_width, self._plot_height, outline=OUTLINE_COLOR)", "def init_layout(self):\n super(WxSplitter, self).init_layout()\n widget = self.widget()\n for child in self.children():\n if isinstance(child, WxSplitItem):\n widget.AppendWindow(child.widget())\n widget.SizeWindows()", "def setup_ui(self, must_scroll=True):\n # Vertical layout to place the parameter widgets\n self.vertical_layout.setContentsMargins(0, 0, 0, 0)\n self.vertical_layout.setSpacing(0)\n\n # Widget to hold the vertical layout\n self.widget = QWidget()\n self.widget.setLayout(self.vertical_layout)\n\n # Label for description\n self.description_label.setText(self.description_text)\n\n self.group_frame.setLineWidth(0)\n self.group_frame.setFrameStyle(QFrame.NoFrame)\n vlayout = QVBoxLayout()\n vlayout.setContentsMargins(0, 0, 0, 0)\n vlayout.setSpacing(0)\n self.group_frame.setLayout(vlayout)\n\n if must_scroll:\n vlayout.addWidget(self.scroll_area)\n self.scroll_area.setWidgetResizable(True)\n self.scroll_area.setWidget(self.widget)\n else:\n vlayout.addWidget(self.widget)\n\n # Main layout of the container\n if self.description_text:\n self.main_layout.addWidget(self.description_label)\n self.main_layout.setContentsMargins(0, 0, 0, 0)\n self.setLayout(self.main_layout)\n\n self.setSizePolicy(QSizePolicy.Minimum, QSizePolicy.MinimumExpanding)\n\n if not isinstance(self.parameters, list):\n parameters = [self.parameters]\n else:\n parameters = self.parameters\n\n if len(parameters) == 0:\n self.set_empty_parameters()\n return\n\n self.main_layout.addWidget(self.group_frame)\n\n self.qt5_parameter_factory = Qt5ParameterFactory()\n if self.extra_parameters is not None:\n for extra_parameter in self.extra_parameters:\n if (type(extra_parameter) == tuple and\n len(extra_parameter) == 2):\n self.qt5_parameter_factory.register_widget(\n extra_parameter[0], extra_parameter[1])\n\n for parameter in parameters:\n parameter_widget = self.qt5_parameter_factory.get_widget(parameter)\n parameter_widget.setAutoFillBackground(True)\n self.vertical_layout.addWidget(parameter_widget)\n\n self.setSizePolicy(QSizePolicy.Expanding, QSizePolicy.Expanding)", "def configure_parent(self):\n self.geometry('400x400')\n self.config(bg='SlateGray3')\n self.resizable(0, 0)\n self.title('labesoft address book')", "def title_frame(self):\n title_frame = tk.Frame(self.frame, bg = \"light blue\")\n title_frame.pack(anchor=\"n\", fill=\"x\", expand=False)\n text = tk.Text(title_frame, height = 12, bg = \"light blue\", font=(\"Helvetica\", 13), bd = 0, relief = tk.FLAT, wrap = tk.WORD)\n text.grid(column = 0, row 
= 0)\n intro = \"Here you will see a collection of information returned for each state you wished you consider.\"\n note1 = \"The score represents the percentage at which your given state's educational wellbeing is at relative to the highest in America.\"\n note2 = \"You will also see which policies were most and least effective in your state's overall educational outcomes.\"\n note3 = \"Below will be a series of scatterplots with regression lines, visualizing how that state's best policy correlates with each outcome variable available.\"\n text.insert(tk.END, intro + \"\\n\"*2 + note1 + \"\\n\"*2 + note2 + \"\\n\"*2 + note3)\n text.configure(state='disabled')\n # Quit button widget\n quitButton = tk.Button(title_frame, text = 'Quit', width = 25, command = self.close_windows)\n quitButton.grid(column = 1, row = 0, sticky=\"e\")\n quit_label = ttk.Label(title_frame, text = \"Note: quitting may take awhile if you have many plots\").grid(column = 1, \n row = 1, padx = 35, pady = 25, sticky=\"ne\")", "def create_widget(self):\n self.widget = wxGroupBox(self.parent_widget())", "def add_frame(self, background, background2, foreground, width, height, relief, bd): \n \n #========================Sqlite3 Database Connection=========================\n try: \n conn = sqlite3.connect(r\"data.db\")\n cursor = conn.cursor()\n except FileNotFoundError:\n tkinter.messagebox.showerror(\"Database Error\",\"Try Again !\") \n #=============================Semester=================================\n i = len(self.container_list)\n #========widget names============\n course_frames = {}\n course_codes = {}\n credits = {} \n scores = {}\n grades = {} \n remove_course_buttons = {}\n \n self.courses[\"course_list\"+str(i)] = [] #A list that is going to be saved in the courses dictionary\n self.course_codes[\"course_code_list\"+str(i)] = [] #A list to append each course_code widget created \n self.credit_units[\"credit_unit_list\"+str(i)] = [] #A list to append each credit widget created \n self.grades[\"grade_list\"+str(i)] = [] #A list to append each grade widget created \n #=========================================================================================================================================================================\n self.count_label = Label(self.master,text=\"Semester count: \"+str(len(self.semester_names)+1),font=('normal',10,'bold'),fg=foreground,background=background)\n self.count_label.place(x=670,y=0)\n #=========================================================================================================================================================================\n self.containers[\"container\"+str(i)] = Frame(self.innerFrame,background=background,width=575,height=350,relief='flat',bd=0)\n self.containers[\"container\"+str(i)].grid(row=0+i, column=0, padx=120, pady=30)\n \n self.id_frames[\"id_frame\"+str(i)] = Frame(self.containers[\"container\"+str(i)],background=background,width=555,height=30,relief=relief,bd=3)\n self.id_frames[\"id_frame\"+str(i)].place(x=0,y=0)\n \n self.semester_frames[\"semester\"+str(i)] = scrollingFrame(self.containers[\"container\"+str(i)],background=background2,width=width,height=height,relief=relief,bd=bd)\n self.semester_frames[\"semester\"+str(i)].place(x=0,y=30)\n \n self.result_frames[\"result_frame\"+str(i)] = Frame(self.containers[\"container\"+str(i)],background=background,width=555,height=50,relief='flat',bd=0)\n self.result_frames[\"result_frame\"+str(i)].place(x=0,y=295)\n \n #==================ID FRAME======================\n if 
len(self.semester_names) == 0: #if there is 0 semester on the frame\n semester = 'Semester 1'\n if len(self.semester_names) == 1: #if there is 1 semester on the frame\n semester = 'Semester 2'\n if len(self.semester_names) == 2: #if there is 2 semester on the frame\n semester = 'Semester 3'\n if len(self.semester_names) == 3: #if there is 3 semester on the frame\n semester = 'Semester 4'\n if len(self.semester_names) == 4: #if there is 4 semester on the frame\n semester = 'Semester 5'\n if len(self.semester_names) == 5: #if there is 5 semester on the frame\n semester = 'Semester 6'\n if len(self.semester_names) == 6: #if there is 6 semester on the frame\n semester = 'Semester 7'\n if len(self.semester_names) == 7: #if there is 7 semester on the frame\n semester = 'Semester 8'\n if len(self.semester_names) == 8: #if there is 8 semester on the frame\n semester = 'Semester 9'\n if len(self.semester_names) == 9: #if there is 9 semester on the frame\n semester = 'Semester 10'\n if len(self.semester_names) == 10: #if there is 10 semester on the frame\n semester = 'Semester 11'\n if len(self.semester_names) == 11: #if there is 11 semester on the frame\n semester = 'Semester 12'\n if len(self.semester_names) == 12: #if there is 12 semester on the frame\n semester = 'Semester 13'\n if len(self.semester_names) == 13: #if there is 13 semester on the frame\n semester = 'Semester 14'\n if len(self.semester_names) == 14: #if there is 14 semester on the frame\n semester = 'Semester 15'\n if len(self.semester_names) == 15: #if there is 15 semester on the frame #\"Semester \"+str(i+1)\n semester = 'Semester 16' \n \n \n self.id_semesters[\"id_semester\"+str(i)] = Label(self.id_frames[\"id_frame\"+str(i)],text=semester,font=('normal',11,'bold'),fg=foreground,background=background)\n self.id_semesters[\"id_semester\"+str(i)].place(x=5,y=0)\n \n def add_course():\n \"\"\"\n 1. create Labels to help identify entries to be filled inside each semester frame\n 2. create a variable 'n' to hold the number of courses being added to each semester frame each time this function is being called\n 3. create a dictionary 'course_frames' to hold all label_frame objects\n 4. create a dictionary 'course_codes' to hold all course_code entry objects which are placed in the label_frame\n 5. create a dictionary 'credits' to hold all credit spinbox objects which are placed in the label_frame\n 6. create a dictionary 'scores' to hold all scores entry objects which are placed in the label_frame\n 7. 
create a dictionary 'grades' to hold all grades combobox objects which are placed in the label_frame\"\"\"\n course_code_lbl = Label(self.semester_frames[\"semester\"+str(i)].frame,text='Course code',font=('normal',10),fg=foreground,bg=background2).place(x=60,y=0)\n course_code_opt_lbl = Label(self.semester_frames[\"semester\"+str(i)].frame,text='(optional)',font=('normal',8,'italic'),fg=foreground,bg=background2).place(x=135,y=1) \n credit_unit_lbl = Label(self.semester_frames[\"semester\"+str(i)].frame,text='Credit unit',font=('normal',10),fg=foreground,bg=background2).place(x=230,y=0)\n credit_unit_imp_lbl = Label(self.semester_frames[\"semester\"+str(i)].frame,text='*',font=('normal',10,'bold'),fg='red',bg=background2).place(x=293,y=2)\n score_grade_lbl = Label(self.semester_frames[\"semester\"+str(i)].frame,text='Score/100 or Grade',font=('normal',10),fg=foreground,bg=background2).place(x=355,y=0)\n score_grade_imp_lbl = Label(self.semester_frames[\"semester\"+str(i)].frame,text='*',font=('normal',10,'bold'),fg='red',bg=background2).place(x=488,y=2)\n #============================Courses============================= \n n = len(self.courses[\"course_list\"+str(i)])\n \n if n == 0: # meaning the first course is being placed\n y_index = 18\n else: \n y_index = 0\n \n course_frames[\"course_frame\"+str(n)] = Frame(self.semester_frames[\"semester\"+str(i)].frame,width=530,height=45,bg=background2,relief='sunken',bd=1)\n course_frames[\"course_frame\"+str(n)].grid(row=0+n, column=0, padx=10, pady=y_index)\n \n course_codes[\"course_code\"+str(n)] = Entry(course_frames[\"course_frame\"+str(n)],font=('normal',10),width=15,relief='groove',bd=2)\n course_codes[\"course_code\"+str(n)].place(x=60,y=10)\n \n credits[\"credit\"+str(n)] = Spinbox(course_frames[\"course_frame\"+str(n)],font=('normal',9),width=6,from_= 0, to= 15,bd=1,justify='right')\n credits[\"credit\"+str(n)].place(x=225,y=10)\n \n #============================Database============================\n cursor.execute(\"SELECT * FROM grades\")\n all_grades = cursor.fetchall()\n for grade in all_grades:\n A = grade[0]\n B = grade[1]\n C = grade[2]\n D = grade[3]\n E = grade[4]\n F = grade[5]\n \n def change(*args,**kwargs):\n _score = score.get()\n if _score == \"\":\n grades[\"grade\"+str(n)].delete(0,END)\n \n elif int(_score) >= int(A):\n grades[\"grade\"+str(n)].insert(0,\"A\")\n \n elif int(_score) >=int(B) and int(_score)<=int(A)-1:\n grades[\"grade\"+str(n)].insert(0,\"B\")\n \n elif int(_score) >=int(C) and int(_score)<=int(B)-1:\n grades[\"grade\"+str(n)].insert(0,\"C\") \n \n elif int(_score) >=int(D) and int(_score)<=int(C)-1:\n grades[\"grade\"+str(n)].insert(0,\"D\") \n \n elif int(_score) >=int(E) and int(_score)<=int(D)-1:\n grades[\"grade\"+str(n)].insert(0,\"E\")\n \n elif int(_score) >= 10 and int(_score)<=int(E)-1:\n grades[\"grade\"+str(n)].insert(0,\"F\") \n \n \n score = StringVar()\n score.trace(\"w\", lambda l, idx, mode: change()) \n \n scores[\"score\"+str(n)] = Entry(course_frames[\"course_frame\"+str(n)],textvariable=score,font=('normal',9),width=7,relief='groove',bd=2,justify='right')\n scores[\"score\"+str(n)].place(x=350,y=10) \n\n grades[\"grade\"+str(n)] = ttk.Combobox(course_frames[\"course_frame\"+str(n)],font=('normal',9),width=4,justify='right')\n grades[\"grade\"+str(n)]['values'] = ['A','B','C','D','E','F']\n grades[\"grade\"+str(n)].place(x=435,y=10)\n \n def remove_course():\n \"\"\" Remove the last course_frame and it's children irrespective of the remove button pressed\n 1. 
Create a variable 'temp_frame' to store the last course_frame in the self.courses list\n 2. Remove that frame from the semester frame by 'grid_forget()'\n 3. Remove the frame from the list too\n 4. Configure the course count to be equated to the current length of courses list\n 5. Configure the semester frame to adjust after the course frame has been removed\"\"\"\n temp_frame = self.courses[\"course_list\"+str(i)][-1] \n temp_frame.grid_forget()\n \n self.courses[\"course_list\"+str(i)].remove(self.courses[\"course_list\"+str(i)][-1]) # Remove the last item from this list\n self.course_codes[\"course_code_list\"+str(i)].remove(self.course_codes[\"course_code_list\"+str(i)][-1]) # Remove the last item from this list\n self.credit_units[\"credit_unit_list\"+str(i)].remove(self.credit_units[\"credit_unit_list\"+str(i)][-1]) # Remove the last item from this list\n self.grades[\"grade_list\"+str(i)].remove(self.grades[\"grade_list\"+str(i)][-1]) # Remove the last item from this list\n \n course_codes.popitem()\n credits.popitem()\n scores.popitem()\n grades.popitem()\n \n self.course_counts[\"course_count\"+str(i)].configure(text = str(len(self.courses[\"course_list\"+str(i)])))\n \n self.semester_frames[\"semester\"+str(i)].frame.update_idletasks() \n self.semester_frames[\"semester\"+str(i)].onCanvasConfigure(None) \n \n if n != 0:\n remove_course_buttons[\"remove_course\"+str(n)] = Button(course_frames[\"course_frame\"+str(n)],text='X',font=('arial',9,'bold'),fg='gray60',background=background2,activebackground=background2,bd=0,command=remove_course)\n remove_course_buttons[\"remove_course\"+str(n)].place(x=510,y=10) \n else:\n pass\n \n self.courses[\"course_list\"+str(i)].append(course_frames[\"course_frame\"+str(n)]) # Append the course_frame widget created when this function is called to the course_list\n self.course_codes[\"course_code_list\"+str(i)].append(course_codes[\"course_code\"+str(n)]) # Append the course_code widget created when this function is called to the course_codes_list\n self.credit_units[\"credit_unit_list\"+str(i)].append(credits[\"credit\"+str(n)]) # Append the credit widget created when this function is called to the credits_list\n self.grades[\"grade_list\"+str(i)].append(grades[\"grade\"+str(n)]) # Append the grade widget created when this function is called to the grades_list\n \n self.semester_frames[\"semester\"+str(i)].frame.update_idletasks()\n self.semester_frames[\"semester\"+str(i)].onCanvasConfigure(None) \n\n #=================Disable Remove frame button once this function is called=================\n def do_nothing(event):\n pass\n self.remove_frame_buttons[\"remove_frame\"+str(i)].place_forget()\n self.remove_frame_buttons[\"remove_frame\"+str(i)].bind('<Button-1>',do_nothing)\n \n #=====================Course Count=======================\n self.course_counts[\"course_count\"+str(i)] = Label(self.id_frames[\"id_frame\"+str(i)],text=str(len(self.courses[\"course_list\"+str(i)])),font=('normal',10,'bold'),fg=foreground,bg=background)\n self.course_counts[\"course_count\"+str(i)].place(x=530,y=0)\n \n #===================Calculate GPA========================\"Semester\"+str(i+1)\n def gpa(event): \n \n course_list = [] # A list to hold actual course code values entered by the user; will range in one\n credit_list = [] # A list to hold actual credit unit values entered by the user; will range in two \n grade_list = [] # A list to hold actual grade values entered by the user; will range in three \n \n #--------------------------------Handle Course 
Code---------------------------------------\n course_entries = list(course_codes.values()) # Create a list to append entry variales of course codes for a particular semester\n for one in range(0,len(course_entries)):\n course_list.append(course_entries[one].get())\n \n #--------------------------------Handle Credit Unit---------------------------------------\n credit_entries = list(credits.values()) # Create a list to append entry variales of credit units for a particular semester\n for two in range(0,len(credit_entries)):\n credit_list.append(credit_entries[two].get())\n \n try: #convert all items in this list to integer\n credit_list = [int(a) for a in credit_list]\n except ValueError: \n tkinter.messagebox.showerror(\"Entry error\",\"Please confirm your credit units are numbers\") \n \n #------------------------------------Handle Grades-----------------------------------------\n grade_entries = list(grades.values()) # Create a list to append entry variales of grades for a particular semester\n for three in range(0,len(grade_entries)):\n grade_list.append(grade_entries[three].get()) \n \n grade_list2 = [] # A list to hold integer representations of grades\n for b in range(0,len(grade_list)):\n if grade_list[b] == \"A\":\n grade_list2.append(5)\n elif grade_list[b] == \"B\":\n grade_list2.append(4)\n elif grade_list[b] == \"C\":\n grade_list2.append(3)\n elif grade_list[b] == \"D\":\n grade_list2.append(2) \n elif grade_list[b] == \"E\":\n grade_list2.append(1)\n elif grade_list[b] == \"F\":\n grade_list2.append(0) \n else:\n tkinter.messagebox.showerror(\"Entry error\",\"Missing your grades\\nPlease confirm\")\n #--------------------------------------5.0 GPA calculation----------------------------------\n quality_point = [] \n if len(grade_list2) == len(credit_list):\n for c in range(0,len(grade_list2)):\n quality_point.append(grade_list2[c] * credit_list[c])\n else:\n tkinter.messagebox.showerror(\"Entry error\",\"Confirm credit unit and\\ngrade entries are entered correctly\")\n \n sum1 = sum(credit_list)\n sum2 = sum(quality_point)\n \n my_gpa = sum2/sum1\n \n if float(my_gpa) > 2.50:\n color = 'green' \n elif float(my_gpa) < 2.50:\n color = 'red'\n self.result[\"gpa\"+str(i)] = Label(self.result_frames[\"result_frame\"+str(i)],text=str(round(my_gpa,2)),font=('normal',13,'bold'),justify='left',fg=color,bg=background,relief='groove',bd=2)\n self.result[\"gpa\"+str(i)].place(x=515,y=0) \n \n self.gpa_buttons[\"gpa\"+str(i)] = Button(self.result_frames[\"result_frame\"+str(i)],text=semester+\" GPA\",font=('arial',10,'bold'),fg=background,bg=foreground,activebackground=foreground,activeforeground=background,relief='ridge',bd=2)\n self.gpa_buttons[\"gpa\"+str(i)].bind('<Button-1>', gpa)\n self.gpa_buttons[\"gpa\"+str(i)].place(x=245,y=0) \n \n self.add_course_buttons[\"add_course\"+str(i)] = Button(self.id_frames[\"id_frame\"+str(i)],text='+ Add Course',font=('normal',9,'bold'),fg=background,bg=foreground,activebackground=background,bd=2,relief='ridge',command=add_course)\n self.add_course_buttons[\"add_course\"+str(i)].place(x=250,y=0)\n \n def remove_semester(event):\n \"\"\"1. Pass event as an argument into this function inorder to track what button calls this function\n 2. Each time this function is called delete the frame that contains the semester selected from the 'self.containers' dictionary\n 3. Remove the last item of the list 'self.semester_names' each time this function is called\n 4. 
Create a for loop to iterate through the 'self.id_semester' list getting each label value\n 4.1. Insisde this for loop use the 'event.widget' statement to get the the button value that is pressed, then slice it to get the index \n 4.2. Also slice each value of the 'self.id_semester' list and check if any matches the index result matches 4.1. if it does remove that value from the list\n 5. Each time you call this function i.e each time a semester frame is deleted re-adjust the frames below it to fit to size\n 6. Each time you call this function re-adjust the scrollable frame\n 7. After the semester frame has been deleted, create a for loop to range through the length of semester frames left \n 7.1. Inside this for loop change the text of the labels on all semester frames\n 8. Put all the above code in a try block, except there is an IndexError continue\"\"\" \n \n try:\n self.containers[\"container\"+str(i)].grid_forget()\n \n self.semester_names.remove(self.semester_names[-1]) \n for id_value in self.id_semester:\n if str(event.widget)[-17] == str(id_value)[-15]:\n self.id_semester.remove(id_value)\n \n self.innerFrame.update_idletasks()\n self.scrollFrame.onCanvasConfigure(None) \n \n for c in range(0, len(self.id_semester)):\n self.id_semester[c]['text'] = self.semester_names[c]\n \n self.count_label.configure(text=\"Semester count: \"+str(len(self.semester_names)))\n except IndexError:\n tkinter.messagebox.showerror(\"Index Error\",\"Oops, something went wrong!!!\\nGo back to home page and try again\")\n #------------Go back-------------\n for w in self.master.winfo_children():\n w.destroy()\n Gp_calculator(self.master) \n \n \n self.remove_frame_buttons[\"remove_frame\"+str(i)] = Button(self.id_frames[\"id_frame\"+str(i)],text='X',font=('arial',10,'bold'),fg='gray60',bg=background,activebackground=background,bd=0)\n self.remove_frame_buttons[\"remove_frame\"+str(i)].bind('<Button-1>',remove_semester)\n self.remove_frame_buttons[\"remove_frame\"+str(i)].place(x=530,y=0) \n\n \n self.container_list = list(self.containers.keys())\n \n self.semester_names.append(self.id_semesters[\"id_semester\"+str(i)]['text'])\n self.id_semester = list(self.id_semesters.values())\n self.gpa_button = list(self.gpa_buttons.values())\n \n self.innerFrame.update_idletasks()\n self.scrollFrame.onCanvasConfigure(None)", "def create_widgets(self):\n frame = Frame(self, relief=SUNKEN, height=700)\n frame_buttons = Frame(self)\n self.button_ok = Button(frame_buttons, text='Close', command=self.\n ok, takefocus=False)\n self.scrollbar = Scrollbar(frame, orient=VERTICAL, takefocus=False)\n self.text = Text(frame, wrap=WORD, highlightthickness=0, fg=self.fg,\n bg=self.bg)\n self.scrollbar.config(command=self.text.yview)\n self.text.config(yscrollcommand=self.scrollbar.set)\n self.button_ok.pack()\n self.scrollbar.pack(side=RIGHT, fill=Y)\n self.text.pack(side=LEFT, expand=True, fill=BOTH)\n frame_buttons.pack(side=BOTTOM, fill=X)\n frame.pack(side=TOP, expand=True, fill=BOTH)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Return the training dataset, test dataset and number of labels.
def train_test() -> Tuple[TextClassificationDataset, TextClassificationDataset, int]:
    train_examples, test_examples = datasets.IMDB.splits(
        text_field=data.Field(lower=False, sequential=False),
        label_field=data.Field(sequential=False, is_target=True)
    )

    def dataset(examples: data.dataset.Dataset) -> TextClassificationDataset:
        return TextClassificationDataset(
            texts=[example.text for example in examples],
            labels=[float(example.label == 'pos') for example in examples]
        )

    return dataset(train_examples), dataset(test_examples), 2
[ "def read_dataset():\n\ttrain_data = np.genfromtxt('train_data.txt', dtype=int, delimiter=',')\n\ttrain_labels = np.genfromtxt('train_labels.txt', dtype=int, delimiter=',')\n\ttest_data = np.genfromtxt('test_data.txt', dtype=int, delimiter=',')\n\ttest_labels = np.genfromtxt('test_labels.txt', dtype=int, delimiter=',')\n\treturn train_data, train_labels, test_data, test_labels", "def get_data():\n data, targets = make_classification(\n n_samples=1000,\n n_features=45,\n n_informative=12,\n n_redundant=7,\n random_state=134985745,\n )\n return data, targets", "def output_training_and_test_data(self):\n dataframes = []\n for name in [\"training\", \"testing\"]:\n labels = []\n paths = []\n splits = []\n for i in range(NUM_LABELS):\n label_dir = f\"{name}/{i}\"\n img_dir = os.path.join(self.processed_dataset_dir, label_dir)\n for file in os.listdir(img_dir):\n if file.endswith(\".png\"):\n labels.append(str(i))\n paths.append(os.path.join(img_dir, file))\n splits.append(0 if name == \"training\" else 2)\n dataframes.append(pd.DataFrame({\"image_path\": paths, \"label\": labels, \"split\": splits}))\n return pd.concat(dataframes, ignore_index=True)", "def get_label_stats():\n myData = Dataset(params=params, run_dir=run_dir)\n train_dl, _ = myData.dataloader()\n typer.echo(get_labels_counts(train_dl, myData.num_classes))", "def getLabels():\n import pandas as pd\n\n # Open train-labels.csv\n labelTrain = pd.read_csv(\n \"/Users/kaisoon/Google Drive/Code/Python/COMP90049_KT/SentiAnalysis/data/train-labels.csv\"\n )\n labelTrain = labelTrain['label']\n\n # Open eval-labels.csv\n labelTest = pd.read_csv(\n \"/Users/kaisoon/Google Drive/Code/Python/COMP90049_KT/SentiAnalysis/data/eval-labels.csv\"\n )\n labelTest = labelTest['label']\n\n return (labelTrain, labelTest)", "def get_training_data():\n mndata = MNIST('examples')\n\n X_train = []\n Y_train = []\n\n images, labels = mndata.load_training()\n\n for image in images:\n X_train.append(np.array(image, float))\n\n for label in labels:\n y = np.zeros((10, ), float)\n y[label] = 1.0\n Y_train.append(y)\n\n return X_train, Y_train", "def get_test_data():\n mndata = MNIST('examples')\n\n X = []\n Y = []\n\n images, labels = mndata.load_testing()\n\n for image in images:\n X.append(np.array(image, float))\n\n for label in labels:\n y = np.zeros((10,), float)\n y[label] = 1.0\n Y.append(y)\n\n return X, Y", "def get_train_data_and_label(self) -> typing.Tuple[pd.DataFrame, pd.Series]:\n return self._extract_data_and_label(self.train_filters)", "def get_test_data(self):\r\n\r\n if self._test_data is None:\r\n idx = int(math.ceil(\r\n self._training_split * len(self._labeled_data)))\r\n self._test_data = self._labeled_data[idx:]\r\n\r\n return self._test_data", "def get_labels(self) -> np.ndarray:\n return self.test_labels", "def getTestData():\n\ttestFileList = listdir('../../datasets/testDigits')\n\tn = len(testFileList)\n\tX_te = zeros((n, 1024));\n\tT_te = zeros((n,10));\n\n\t# split the filename to obtain information\n\tfor i in range(n):\n\t\tfileNameStr = testFileList[i]\n\t\tfileStr = fileNameStr.split('.')[0]\n\t\tclassNumStr = int(fileStr.split('_')[0])\n\t\tT_te[i, classNumStr] = 1\n\t\tX_te[i, :] = img2vect('../../datasets/trainingDigits/%s' % fileNameStr)\n\treturn X_te, T_te", "def load_bc_dataset():\n # import ipdb\n # ipdb.set_trace()\n f_train = \"data/Training_Data.txt\"\n f_test = \"data/Testing_Data.txt\"\n h_train, data_train = load_dataset(f_train)\n h_test, data_test = load_dataset(f_test)\n assert h_train == h_test, \"training 
data file header: {}\\\n is not equal to testing file header: {}\".format(h_train, h_test)\n n_col = len(h_train)\n assert data_train.shape[1] == n_col & data_test.shape[1] == n_col,\\\n \"training data feature num: {} should equal testing data feature num:\\\n {}\".format(data_train.shape[1], data_test.shape[1])\n # index_train = data_train[:, 0]\n # index_test = data_test[:, 0]\n X_train = data_train[:, 1:-1]\n X_test = data_test[:, 1:-1]\n y_train = data_train[:, -1]\n y_test = data_test[:, -1]\n\n # index = np.concatenate((index_train, index_test))\n X = np.vstack((X_train, X_test))\n y = np.concatenate((y_train, y_test)).astype(np.int)\n assert y.sum() == 115\n return X, y", "def test():\t\n\tcifar10_root = osp.join(\"dataset\", \"cifar-10-batches-py\")\n\n\ttest_data = unpickle(osp.join(cifar10_root, \"test_batch\"))\n\t\n\tx_test = test_data[b\"data\"]\n\ty_test = np.array(test_data[b\"labels\"])\n\n\treturn x_test, y_test", "def getTrainingData():\n\ttrainingFileList = listdir('../../datasets/trainingDigits')\n\tn = len(trainingFileList)\n\tX_tr = zeros((n, 1024));\n\tT_tr = zeros((n,10));\n\n\t# split the filename to obtain information\n\tfor i in range(n):\n\t\tfileNameStr = trainingFileList[i]\n\t\tfileStr = fileNameStr.split('.')[0]\n\t\tclassNumStr = int(fileStr.split('_')[0])\n\t\tT_tr[i, classNumStr] = 1\n\t\tX_tr[i, :] = img2vect('../../datasets/trainingDigits/%s' % fileNameStr)\n\treturn X_tr, T_tr", "def get_multiclass_training_data():\n fname = \"data/dataset.csv\"\n dataframe = load_data(fname)\n dictionary = extract_dictionary(dataframe)\n X_train = generate_feature_matrix(dataframe, dictionary)\n Y_train = dataframe['label'].values.copy()\n\n return (X_train, Y_train, dictionary)", "def load_data():\n\n # Get the data.\n train_data_filename = maybe_download('train-images-idx3-ubyte.gz')\n train_labels_filename = maybe_download('train-labels-idx1-ubyte.gz')\n test_data_filename = maybe_download('t10k-images-idx3-ubyte.gz')\n test_labels_filename = maybe_download('t10k-labels-idx1-ubyte.gz')\n\n # Extract it into numpy arrays.\n train_data = extract_data(train_data_filename, FLAGS.train_size + FLAGS.validation_size)\n train_labels = extract_labels(train_labels_filename, FLAGS.train_size + FLAGS.validation_size)\n test_data = extract_data(test_data_filename, FLAGS.test_size)\n test_labels = extract_labels(test_labels_filename, FLAGS.test_size)\n\n validation_data = train_data[:FLAGS.validation_size, ...]\n validation_labels = train_labels[:FLAGS.validation_size]\n train_data = train_data[FLAGS.validation_size:, ...]\n train_labels = train_labels[FLAGS.validation_size:]\n\n return train_data, train_labels, validation_data, validation_labels, test_data, test_labels", "def load_data():\n train_file = os.path.join(DATA_DIR, 'knnClassify3cTrain.txt')\n test_file = os.path.join(DATA_DIR, 'knnClassify3cTest.txt')\n train = np.loadtxt(train_file,\n dtype=[('x_train', ('f8', 2)),\n ('y_train', ('f8', 1))])\n test = np.loadtxt(test_file,\n dtype=[('x_test', ('f8', 2)),\n ('y_test', ('f8', 1))])\n return train['x_train'], train['y_train'], test['x_test'], test['y_test']", "def load_data_set():\n # Load the dataset with TensorFlow Datasets.\n dataset, dataset_info = tfds.load('oxford_flowers102', as_supervised=True, shuffle_files=True, with_info=True)\n\n # Create a training set, a validation set and a test set.\n training_set, validation_set, test_set = dataset['train'], dataset['validation'], dataset['test']\n\n # Get the number of classes in the dataset from the dataset info.\n 
num_classes = dataset_info.features['label'].num_classes\n\n # Load mapping from label to category name\n with open('label_map.json', 'r') as f:\n class_names = json.load(f)\n\n return dataset, dataset_info, training_set, validation_set, test_set, num_classes, class_names", "def prepdatasets():\n\n Squad().get_train_data()\n NQ().get_train_data()\n TriviaQA().get_train_data()\n\n return 0" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Returns list of all videos available in the directory
def list_videos():
    videos = [f for f in listdir(HOST_VIDEOS_DIR) if path.isfile(path.join(HOST_VIDEOS_DIR, f))]
    return videos
[ "def list_ucf_videos():\n global _VIDEO_LIST\n if not _VIDEO_LIST:\n #index = request.urlopen(UCF_ROOT, context=unverified_context).read().decode('utf-8')\n index = request.urlopen(UCF_ROOT).read().decode('utf-8')\n videos = re.findall('(v_[\\w_]+\\.avi)', index)\n _VIDEO_LIST = sorted(set(videos))\n return list(_VIDEO_LIST)", "def videos():\n result = []\n for filename in ('talks.yml', 'recommended.yml'):\n result.extend(video_links(load_yaml(filename)))\n return result", "def get_video_list(dir_path):\n image_list = os.listdir(dir_path)\n ext = [\".avi\", \".mp4\"]\n return [im for im in image_list if im.endswith(tuple(ext))]", "def openFolderVideos(self,node):\n listVideos = []\n\n print \"Searching videos in [\" + node.file + \"]\"\n # Search files in folder\n listFic = os.listdir(node.file)\n listFic.sort()\n for fic in listFic:\n suffix=fic[fic.rfind(\".\"):].lower()\n #print \"suffix:\" + suffix\n if suffix in self.LIST_VIDEO_SUFFIX:\n videoFic = os.path.join(node.file, fic)\n print \"Found video: \" + videoFic\n listVideos.append(videoFic)\n \n self.openVideoList(listVideos)\n return False # no visual modif", "def available_videos(self):\n return [x.slug for x in self.videos if x.get_video_status().web_available]", "def gen_ind_video_path(self, videos_path):\n v_path = []\n for (dirpath, dirnames, filenames) in os.walk(videos_path):\n v_path += [os.path.join(dirpath, f) for f in filenames if f.split('.')[-1] == 'avi']\n return v_path", "def scanfolder(root):\n\tmovies = []\n\tfor path, dirs, files in os.walk(root):\n\t\tfor f in files:\n\t\t\tif f.endswith('.mkv') or f.endswith('.m2ts') or f.endswith('.avi'):\n\t\t\t #print os.path.join(path, f)\n\t\t\t movies.append(os.path.join(path,f))\n\t#print movies\t \n\treturn movies", "def videos(self):\n return [x.video for x in self.section_set.exclude(video=None).order_by('order', 'name')]", "def list_dir(root_dir, dir_path, label):\n path = os.path.join(dir_path, os.path.join('videos', label))\n return sorted(list(enumerate_images(path, root_dir)), \n key=lambda f: int(os.path.splitext(os.path.basename(f))[0].replace('frame', '')))", "def list_exported_media(self):\n movies = (['', ''])\n shows = (['', ''])\n movie_path = self.movie_path\n tvshow_path = self.tvshow_path\n if xbmcvfs.exists(self.nx_common.check_folder_path(movie_path)):\n movies = xbmcvfs.listdir(movie_path)\n if xbmcvfs.exists(self.nx_common.check_folder_path(tvshow_path)):\n shows = xbmcvfs.listdir(tvshow_path)\n return movies + shows", "def test_validate_videos(data_dir):\n paths = data_dir.glob(\"*\")\n valid_videos, invalid_videos = zamba.utils.get_valid_videos(paths)\n assert len(invalid_videos) == 0", "def videos(self) -> Dict[str, Video]:\n return self._videos", "def videos(self):\n self.__vi = []\n for etq in raiz[0]:\n # print(depurar1(etq.text))\n self.__vi.append(self.depurar1(etq.text))\n self.__vi.sort()\n return self.__vi", "def getUserVideos(self):\n return self.base.get(\"user_videos\", [])", "def list_videos(cls, ids):\n\n items = []\n\n rs = [\n {\n \"search_query\": '\"' + id + '\"',\n \"sp\": \"EgIQAQ%3D%3D\",\n \"app\": \"desktop\",\n \"persist_app\": 1,\n }\n for id in ids\n ]\n\n for result in [cls.run_search(r)[0] for r in rs]:\n logger.info(\"session.get triggered: list_videos (experimental)\")\n result.update({\"id\": result[\"id\"][\"videoId\"]})\n items.extend([result])\n\n return json.loads(\n json.dumps({\"items\": items}, sort_keys=False, indent=1)\n )", "def get_next_video(self):\n\n if self.args.videos == 'one':\n files = 
[os.path.join(self.args.data_path, self.args.fname)]\n elif self.args.videos == 'list':\n files = [os.path.join(self.args.data_path, fname) for fname in self.args.list]\n elif self.args.videos == 'all':\n files = glob(os.path.join(self.args.data_path, '*' + self.args.file_type))\n else:\n files = []\n\n for file in files:\n yield file", "def get_videos(self):\n matchups = models.Matchup.objects.select_related('home', 'away').all()\n matchup_prefetch = Prefetch('matchups', queryset=matchups)\n return models.Video.objects.prefetch_related(matchup_prefetch)\\\n .filter(is_visible=True)", "def show_video():\n mp4list = glob.glob('video/*.mp4')\n if len(mp4list) > 0:\n for mp4 in sorted(mp4list, key = lambda fn: float(fn.split('video')[3].split('.mp4')[0])):\n video = io.open(mp4, 'r+b').read()\n encoded = base64.b64encode(video)\n ipythondisplay.display(HTML(data='''<video alt=\"test\" autoplay \n loop controls style=\"height: 400px;\">\n <source src=\"data:video/mp4;base64,{0}\" type=\"video/mp4\" />\n </video>'''.format(encoded.decode('ascii'))))\n else: \n print(\"Could not find video\")", "def getFriendsVideos(self):\n return self.base.get(\"friends_videos\", [])" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
This test compares the solution from the MDP interface with the WindyGridWorld's solution.
def test_windy_grid_world_value_iteration(self):
    mdp = WindyGridWorldMDP()
    q_values = mdp.optimal_q_values  # keep arguments due to expected q_star
    expected_q_values = np.array([
        [-88.97864878, -88.97864878, -88.97864878, -88.86732201],
        [-88.86732201, -88.86732201, -88.97864878, -88.75487073],
        [-88.75487073, -88.75487073, -88.86732201, -88.64128358],
        [-88.64128358, -88.64128358, -88.75487073, -88.52654908],
        [-88.52654908, -88.52654908, -88.64128358, -88.41065565],
        [-88.41065565, -88.41065565, -88.52654908, -88.29359158],
        [-88.29359158, -88.29359158, -88.41065565, -88.17534503],
        [-88.17534503, -88.17534503, -88.29359158, -88.05590408],
        [-88.05590408, -88.05590408, -88.17534503, -87.93525666],
        [-87.93525666, -87.81339058, -88.05590408, -87.93525666],
        [-88.97864878, -88.97864878, -88.97864878, -88.86732201],
        [-88.86732201, -88.86732201, -88.97864878, -88.75487073],
        [-88.75487073, -88.75487073, -88.86732201, -88.64128358],
        [-88.64128358, -88.64128358, -88.75487073, -88.52654908],
        [-88.52654908, -88.52654908, -88.64128358, -88.41065565],
        [-88.41065565, -88.41065565, -88.52654908, -88.29359158],
        [-88.29359158, -88.29359158, -88.41065565, -88.17534503],
        [-88.17534503, -88.17534503, -88.29359158, -88.05590408],
        [-88.05590408, -88.05590408, -88.17534503, -87.93525666],
        [-87.93525666, -87.69029353, -88.05590408, -87.81339058],
        [-88.97864878, -88.97864878, -88.97864878, -88.86732201],
        [-88.86732201, -88.86732201, -88.97864878, -88.75487073],
        [-88.75487073, -88.75487073, -88.86732201, -88.64128358],
        [-88.64128358, -88.64128358, -88.75487073, -88.52654908],
        [-88.52654908, -88.52654908, -88.64128358, -88.41065565],
        [-88.41065565, -88.41065565, -88.52654908, -88.29359158],
        [-88.29359158, -88.29359158, -88.41065565, -88.17534503],
        [-88.17534503, -88.17534503, -88.29359158, -88.05590408],
        [-88.05590408, -87.93525666, -88.17534503, -87.81339058],
        [-87.81339058, -87.56595307, -87.93525666, -87.69029353],
        [-88.97864878, -88.97864878, -88.97864878, -88.86732201],
        [-88.86732201, -88.86732201, -88.97864878, -88.75487073],
        [-88.75487073, -88.75487073, -88.86732201, -88.64128358],
        [-88.64128358, -88.64128358, -88.75487073, -88.52654908],
        [-88.52654908, -88.52654908, -88.64128358, -88.41065565],
        [-88.41065565, -88.41065565, -88.52654908, -88.29359158],
        [-88.29359158, -88.29359158, -88.41065565, -88.17534503],
        [-87.17534503, -87.17534503, -87.29359158, -87.05590408],
        [-88.05590408, -87.81339058, -88.17534503, -87.69029353],
        [-87.69029353, -87.44035665, -87.81339058, -87.56595307],
        [-88.97864878, -88.97864878, -88.97864878, -88.86732201],
        [-88.86732201, -88.86732201, -88.97864878, -88.75487073],
        [-88.75487073, -88.75487073, -88.86732201, -88.64128358],
        [-88.64128358, -88.64128358, -88.75487073, -88.52654908],
        [-88.52654908, -88.52654908, -88.64128358, -88.41065565],
        [-88.41065565, -88.41065565, -88.52654908, -88.29359158],
        [-88.29359158, -88.29359158, -88.41065565, -88.17534503],
        [-88.17534503, -87.18534503, -88.29359158, -87.93525666],
        [-87.93525666, -87.31349158, -87.18534503, -87.56595307],
        [-87.56595307, -87.56595307, -87.31349158, -87.44035665],
        [-88.97864878, -88.97864878, -88.97864878, -88.86732201],
        [-88.86732201, -88.86732201, -88.97864878, -88.75487073],
        [-88.75487073, -88.75487073, -88.86732201, -88.64128358],
        [-88.64128358, -88.64128358, -88.75487073, -88.52654908],
        [-88.52654908, -88.52654908, -88.64128358, -88.41065565],
        [-88.41065565, -88.41065565, -88.52654908, -88.29359158],
        [-88.29359158, -88.29359158, -88.41065565, -87.18534503],
        [-88.17534503, -87.31349158, -88.29359158, -87.81339058],
        [-87.81339058, -87.44035665, -87.31349158, -87.44035665],
        [-87.44035665, -87.56595307, -87.44035665, -87.56595307],
        [-88.97864878, -88.97864878, -88.97864878, -88.86732201],
        [-88.86732201, -88.86732201, -88.97864878, -88.75487073],
        [-88.75487073, -88.75487073, -88.86732201, -88.64128358],
        [-88.64128358, -88.64128358, -88.75487073, -88.52654908],
        [-88.52654908, -88.52654908, -88.64128358, -88.41065565],
        [-88.41065565, -87.44035665, -88.52654908, -87.31349158],
        [-88.29359158, -87.31349158, -88.41065565, -87.31349158],
        [-87.18534503, -87.44035665, -88.29359158, -87.31349158],
        [-87.31349158, -87.44035665, -87.44035665, -87.56595307],
        [-87.56595307, -87.56595307, -87.44035665, -87.56595307]
    ])
    assert np.array_equal(q_values.round(3), expected_q_values.round(3))
[ "def test_projection_logic(self):", "def test_solvable_2d(self):\n mazes = [\n parse_2d_maze('A.www\\n' + 'wxwww'),\n parse_2d_maze('.w....\\n' + 'w..wxw\\n' + '.Awwww')\n ]\n solutions = [\n [(0, 0), (0, 1), (1, 1)],\n [(2, 1), (1, 1), (1, 2), (0, 2), (0, 3), (0, 4), (1, 4)]\n ]\n for maze, solution in zip(mazes, solutions):\n self.assertEqual(maze.solve(), solution)", "def test_chsh_game_commuting_measurement_value(self):\n prob_mat, pred_mat = self.chsh_nonlocal_game()\n\n chsh = NonlocalGame(prob_mat, pred_mat)\n res = chsh.commuting_measurement_value_upper_bound(k=1)\n expected_res = 0.8535\n self.assertEqual(np.isclose(res, expected_res, atol=0.5), True)", "def test_eight(self):\n\n # N = 8 Case\n eight_queen_board = BoardAgent()\n eight_queen_board.populate_solutions(eight_queen_board.generate_tree(8))\n\n self.assertTrue(eight_queen_board.boardIsSafe((0, 4, 7, 5, 2, 6, 1, 3)))\n self.assertTrue(eight_queen_board.boardIsSafe((0, 5, 7, 2, 6, 3, 1, 4)))\n\n self.assertFalse(eight_queen_board.boardIsSafe((0, 5, 7, 2, 6, 3, 10, 4))) # Out of bounds\n self.assertFalse(eight_queen_board.boardIsSafe((0, 5, 7, 2, 6, -1, 1, 4))) # Board is not completely filled\n self.assertFalse(eight_queen_board.boardIsSafe((0, 5, 7, 2, 6, 3, 1, 0))) # In the same row\n\n self.assertTrue((7, 3, 0, 2, 5, 1, 6, 4) in eight_queen_board.solution)\n self.assertEqual(len(eight_queen_board.solution),92)\n print('\\nN = 8: ')\n print(eight_queen_board.solution)", "def test_station_track_and_switches_two_trains():\n class Stations_switches_problem():\n \"\"\"\n\n swith - c\n\n tracks - ......\n\n\n .\n 1 -> .\n ..0 -> ................................... c .0-> .. 1->.....\n\n A B\n simplifies swith condition\n \"\"\"\n def __init__(self):\n \"\"\" parmaeters \"\"\"\n\n self.taus = {\"pass\": {\"0_A_B\": 4, \"1_A_B\": 4},\n \"headway\": {\"0_1_A_B\": 2, \"1_0_B_A\": 4},\n \"stop\": {\"0_B\": 1, \"1_B\": 1}, \"res\": 2}\n self.trains_timing = {\"tau\": self.taus,\n \"initial_conditions\": {\"0_A\": 1, \"1_A\": 1},\n \"penalty_weights\": {\"0_A\": 2, \"1_A\": 0.5}}\n\n self.trains_paths = {\n \"Paths\": {0: [\"A\", \"B\"], 1: [\"A\", \"B\"]},\n \"J\": [0, 1],\n \"Jd\": {},\n \"Josingle\": {},\n \"Jround\": {},\n \"Jtrack\": {\"B\": [[0, 1]]},\n \"Jswitch\": {},\n \"add_swithes_at_s\": [\"B\"]\n }\n\n self.p_sum = 2\n self.p_pair = 1.\n self.p_qubic = 2.\n self.d_max = 5\n\n Q = make_Qubo(Stations_switches_problem())\n\n assert np.array_equal(Q, np.load(\"test/files/Qfile_track.npz\")[\"Q\"])\n\n sol = np.load(\"test/files/solution_track.npz\")\n\n assert energy(sol, Q) == -8+0.3", "def test_consistency(self):\r\n import itertools\r\n num_solves = 4\r\n vars_lists = []\r\n ineqs_lists = []\r\n var_ids_order_created = []\r\n for k in range(num_solves):\r\n sum = 0\r\n constraints = []\r\n var_ids = []\r\n for i in range(100):\r\n var = Variable(name=str(i))\r\n var_ids.append(var.id)\r\n sum += var\r\n constraints.append(var >= i)\r\n var_ids_order_created.append(var_ids)\r\n obj = Minimize(sum)\r\n p = Problem(obj, constraints)\r\n objective, constr_map = p.canonicalize()\r\n all_ineq = itertools.chain(constr_map[s.EQ], constr_map[s.LEQ])\r\n var_offsets, var_sizes, x_length = p._get_var_offsets(objective, all_ineq)\r\n # Sort by offset.\r\n vars_ = sorted(var_offsets.items(), key=lambda (var_id, offset): offset)\r\n vars_ = [var_id for (var_id, offset) in vars_]\r\n vars_lists.append(vars_)\r\n ineqs_lists.append(constr_map[s.LEQ])\r\n\r\n # Verify order of variables is consistent.\r\n for i in 
range(num_solves):\r\n self.assertEqual(var_ids_order_created[i],\r\n vars_lists[i])\r\n for i in range(num_solves):\r\n for idx, constr in enumerate(ineqs_lists[i]):\r\n var_id, _ = lu.get_expr_vars(constr.expr)[0]\r\n self.assertEqual(var_ids_order_created[i][idx],\r\n var_id)", "def test_valid_solved_puzzle(self):\n valid_solved_data = get_test_data.get_data_from(self.path + 'solved_10000_grids_startwith_123456789.txt')\n self._run_random_data_against_expected(valid_solved_data, len(valid_solved_data))", "def _test():\n dec_hi = [10, 10, 5, 6, 6, 10]\n dec_lo = [0, 0, 1, 0, 1, 0]\n dummy = Osyczka2(dec_hi, dec_lo)\n obj_hi, obj_lo = dummy.get_objective_extremes()\n\n model = Osyczka2(dec_hi, dec_lo, obj_hi, obj_lo)\n evals, best = max_walk_sat(model)\n print(\"\\n\")\n print(\"Evals : \", evals)\n print(\"Best : \", best)\n f1, f2 = model.get_objectives(best)\n print(\"F1 : \", f1)\n print(\"F2 : \", f2)", "def test_calc_hg(self):\n W = np.array([[1, 0, 0], [-1, 0, 0], [1, 0, 0]])\n gnn.W = W\n result = gnn.calc_hg(graph)\n expected = expected = np.array([18, 0, 18])\n assert_array_equal(result, expected)", "def test_chsh_bcs_game_to_nonlocal_game(self):\n bcs_game = self.chsh_bcs_game()\n chsh = NonlocalGame.from_bcs_game(bcs_game)\n\n # Compute expected prob_mat\n prob_mat = np.array([[1 / 4, 1 / 4], [1 / 4, 1 / 4]])\n np.testing.assert_array_equal(chsh.prob_mat, prob_mat)\n\n # Compute expected pred_mat\n pred_mat = np.zeros((4, 2, 2, 2))\n # Compute first constraint: v1 ^ v2 = 0\n constraint1 = np.array([[1, 0], [0, 0], [0, 0], [0, 1]])\n pred_mat[:, :, 0, 0] = constraint1\n pred_mat[:, :, 0, 1] = constraint1\n # Compute second constraint: v1 ^ v2 = 1\n pred_mat[:, :, 1, 0] = np.array([[0, 0], [1, 0], [0, 1], [0, 0]])\n pred_mat[:, :, 1, 1] = np.array([[0, 0], [0, 1], [1, 0], [0, 0]])\n np.testing.assert_array_equal(chsh.pred_mat, pred_mat)", "def test_AnalyticalBeadSystems(transformation=[[3,4], [4,5], [3,5]], num_iterations=100):\n\n for pair in transformation:\n test = AnalyticalBeadSystems(pair, num_iterations)\n _iid_positions_A = test.create_iid_bead_systems(printer=False)\n _iid_positions_A_stacked = np.stack([_posits.value_in_unit_system(unit.md_unit_system) for _posits in _iid_positions_A])\n _proposed_positions = test.forward_transformation(_iid_positions_A, printer=False)\n _backward_positions = test.backward_transformation(_proposed_positions, printer=False)\n _backward_positions_stacked = np.stack([_posits.value_in_unit_system(unit.md_unit_system) for _posits in _backward_positions])\n\n\n POSITION_THRESHOLD = 1.0e-6\n _position_differences = np.array([simulated_frame - final_frame for simulated_frame, final_frame in zip(_iid_positions_A_stacked,_backward_positions_stacked)])\n assert all(frame.sum() < POSITION_THRESHOLD for frame in _position_differences)\n\n WORK_STDDEV_THRESHOLD = 0.1\n WORK_SUM_THRESHOLD = 0.1\n work_sum, work_forward_stddev, work_reverse_stddev = test.work_comparison(printer = False)\n print(\"work forward stddev: {}\".format(work_forward_stddev))\n print(\"work reverse stddev: {}\".format(work_reverse_stddev))\n assert (work_forward_stddev <= WORK_STDDEV_THRESHOLD), \"forward work stddev {} exceeds threshold {}\".format(work_forward_stddev, WORK_STDDEV_THRESHOLD)\n assert (work_reverse_stddev <= WORK_STDDEV_THRESHOLD), \"reverse work stddev {} exceeds threshold {}\".format(work_reverse_stddev, WORK_STDDEV_THRESHOLD)\n assert np.all(abs(work_sum) <= WORK_SUM_THRESHOLD), \"sum of works {} exceeds threshold {}\".format(work_sum, WORK_SUM_THRESHOLD)", 
"def test_disp100(nq, ne):\n\n uc = UnitCell( )\n at1=Atom(symbol='Fe', mass=57) ; pos1=(0.0,0.0,0.0)\n at2=Atom(symbol='Al') ; pos2=(0.5,0.5,0.5)\n site1 = Site(pos1, at1)\n site2 = Site(pos2, at2)\n uc.addAtom( at1, pos1, \"Fe1\" )\n uc.addAtom( at2, pos2, \"Al1\" )\n print uc\n\n kptlist = uc.getMonkhorstPackGrid((20,20,20)).reshape(8000,3)\n sqecalc = AbInitio.kernelGenerator.SqeCalculator.SqeCalculator(uc, kpoints=kptlist)\n\n sqecalc.readIDFeigenvectors(filename='pols_FeAl222.idf')\n sqecalc.readEigenvaluesFromIDFomega2(filename='omega2_FeAl222.idf')\n\n sqecalc._DebyeWallerCalculator._energies = sqecalc._energies\n sqecalc._DebyeWallerCalculator._polvecs = sqecalc._polvecs\n\n estart = 0.0\n deltae = 50.0 / ne\n sqecalc._etransferTol = deltae\n\n deltaqx = 3.0 / nq\n sqecalc._qtransferTolRadius = deltaqx\n qstart = numpy.array([0.0, 0.0, 0.0])\n deltaq = numpy.array([deltaqx, 0.0, 0.0])\n\n sqe = numpy.zeros((nq,ne), dtype='float')\n\n for iq in range(nq):\n for ie in range(ne):\n qtransfer = qstart + iq * deltaq\n etransfer = estart + ie * deltae\n sqe[iq,ie] = sqecalc.calcSqeCohCreateAllmodes(qtransfer, etransfer)\n print iq, ie, sqe[iq,ie]\n\n pylab.imshow(sqe)\n pylab.show()\n end = raw_input()\n return", "def test_model(self):\n power_ebsilon = -31.769\n power_tespy = round(\n self.nw.busses['total output power'].P.val / 1e6, 3)\n msg = (\n 'The total power calculated (' + str(power_tespy) + ') does not '\n 'match the power calculated with the EBSILON model (' +\n str(power_ebsilon) + ').')\n assert power_tespy == power_ebsilon, msg\n\n T_c79_ebsilon = 296.254\n T_c79_tespy = round(self.nw.get_conn('79').T.val, 3)\n msg = (\n 'The temperature at connection 79 calculated (' +\n str(T_c79_tespy) + ') does not match the temperature calculated '\n 'with the EBSILON model (' + str(T_c79_ebsilon) + ').')\n assert T_c79_tespy == T_c79_ebsilon, msg", "def test_five(self):\n\n # N = 5 Case\n five_queen_board = BoardAgent()\n five_queen_board.populate_solutions(five_queen_board.generate_tree(5))\n\n self.assertTrue(five_queen_board.boardIsSafe((0, 2, 4, 1, 3)))\n self.assertTrue(five_queen_board.boardIsSafe((0, 3, 1, 4, 2)))\n\n self.assertFalse(five_queen_board.boardIsSafe((0, 2, 4, 2, 5))) # Out of bounds\n self.assertFalse(five_queen_board.boardIsSafe((0, 2, 4, 2, -1))) # Board is not completely filled\n self.assertFalse(five_queen_board.boardIsSafe((0, 2, 4, 2, 3))) # In the same row\n\n self.assertEqual(five_queen_board.solution,[(0, 2, 4, 1, 3), (0, 3, 1, 4, 2), (1, 3, 0, 2, 4), (1, 4, 2, 0, 3), (2, 0, 3, 1, 4), (2, 4, 1, 3, 0), (3, 0, 2, 4, 1), (3, 1, 4, 2, 0), (4, 1, 3, 0, 2), (4, 2, 0, 3, 1)])\n self.assertTrue((2,0,3,1,4) in five_queen_board.solution)\n self.assertEqual(len(five_queen_board.solution),10)\n print('\\nN = 5: ')\n print(five_queen_board.solution)", "def test_nodal_global_efficiency():\n\n # Groundtruth\n result = np.load(\"groundtruth/graphs/nodal_global_efficiency.npy\")\n result = result.reshape([1, -1])\n\n # Data\n inv_mtx = np.load(\"sample_data/graphs/inv_mtx.npy\")\n\n # Run\n nodal_ge = nodal_global_efficiency(inv_mtx)\n nodal_ge = nodal_ge.reshape([1, -1])\n\n # Run\n np.testing.assert_array_equal(nodal_ge, result)", "def test_homoscedastic_least_squares_roptimal_design(self):\n poly_degree = 1;\n num_design_pts = 2\n design_samples = np.linspace(-1,1,num_design_pts)\n noise_multiplier = None\n design_factors = univariate_monomial_basis_matrix(\n poly_degree,design_samples)\n num_pred_pts = 3\n pred_samples = np.random.uniform(-1,1,num_pred_pts)\n 
pred_factors=univariate_monomial_basis_matrix(poly_degree,pred_samples)\n\n opts = {'beta':0,'pred_factors':pred_factors,\n 'pred_samples':pred_samples[np.newaxis,:],'nonsmooth':False}\n \n opt_problem = AlphabetOptimalDesign('R',design_factors,opts=opts)\n solver_opts = {'disp':True,'iprint': 0, 'ftol':1e-12,'maxiter':2000}\n #solver_opts = {'solver':'ipopt','print_level':0,\n # 'tol':1e-8,'acceptable_obj_change_tol':1e-8,\n # 'derivative_test':'first-order','maxiter':1000}\n #solver_opts.update({'constraint_jacobianstructure':partial(get_r_oed_jacobian_structure,num_pred_pts,num_design_pts)})\n mu_R ,res= opt_problem.solve(solver_opts,return_full=True)\n homog_outer_prods = compute_homoscedastic_outer_products(design_factors)\n variance = compute_prediction_variance(\n mu_R,pred_factors,homog_outer_prods)\n assert (res.x[0]<=variance.min())\n\n\n del opts['beta']\n if 'constraint_jacobianstructure' in solver_opts:\n del solver_opts['constraint_jacobianstructure']\n opt_problem = AlphabetOptimalDesign('I',design_factors,opts=opts)\n mu_I = opt_problem.solve(solver_opts)\n variance = compute_prediction_variance(\n mu_I,pred_factors,homog_outer_prods)\n assert np.allclose(mu_R,mu_I)", "def test_make_parallel_worlds(self):\n expected = figure_9b.graph\n actual = make_parallel_worlds_graph(figure_9a.graph, {frozenset([-x]), frozenset([-d])})\n expected2 = make_parallel_worlds_graph(figure_9a.graph, {(-x, -z)})\n self.assert_graph_equal(expected, actual)\n self.assert_graph_equal(expected2, make_parallel_worlds_graph(figure_9a.graph, {(-x, -z)}))\n self.assertTrue(Y @ (-z, -x) in expected2.nodes())\n self.assertTrue(Y @ (-x, -z) in expected2.nodes())", "def test_full(run_in_tmpdir, problem, multiproc):\n\n geometry, lower_left, upper_right = problem\n\n # OpenMC-specific settings\n settings = openmc.Settings()\n settings.particles = 100\n settings.batches = 10\n settings.inactive = 0\n space = openmc.stats.Box(lower_left, upper_right)\n settings.source = openmc.Source(space=space)\n settings.seed = 1\n settings.verbosity = 1\n\n # Create operator\n chain_file = Path(__file__).parents[2] / 'chain_simple.xml'\n op = openmc.deplete.Operator(geometry, settings, chain_file)\n op.round_number = True\n\n # Power and timesteps\n dt1 = 15.*24*60*60 # 15 days\n dt2 = 1.5*30*24*60*60 # 1.5 months\n N = floor(dt2/dt1)\n dt = np.full(N, dt1)\n power = 2.337e15*4*JOULE_PER_EV*1e6 # MeV/second cm from CASMO\n\n # Perform simulation using the predictor algorithm\n openmc.deplete.pool.USE_MULTIPROCESSING = multiproc\n openmc.deplete.PredictorIntegrator(op, dt, power).integrate()\n\n # Get path to test and reference results\n path_test = op.output_dir / 'depletion_results.h5'\n path_reference = Path(__file__).with_name('test_reference.h5')\n\n # If updating results, do so and return\n if config['update']:\n shutil.copyfile(str(path_test), str(path_reference))\n return\n\n # Load the reference/test results\n res_test = openmc.deplete.ResultsList.from_hdf5(path_test)\n res_ref = openmc.deplete.ResultsList.from_hdf5(path_reference)\n\n # Assert same mats\n for mat in res_ref[0].mat_to_ind:\n assert mat in res_test[0].mat_to_ind, \\\n \"Material {} not in new results.\".format(mat)\n for nuc in res_ref[0].nuc_to_ind:\n assert nuc in res_test[0].nuc_to_ind, \\\n \"Nuclide {} not in new results.\".format(nuc)\n\n for mat in res_test[0].mat_to_ind:\n assert mat in res_ref[0].mat_to_ind, \\\n \"Material {} not in old results.\".format(mat)\n for nuc in res_test[0].nuc_to_ind:\n assert nuc in res_ref[0].nuc_to_ind, \\\n 
\"Nuclide {} not in old results.\".format(nuc)\n\n tol = 1.0e-6\n for mat in res_test[0].mat_to_ind:\n for nuc in res_test[0].nuc_to_ind:\n _, y_test = res_test.get_atoms(mat, nuc)\n _, y_old = res_ref.get_atoms(mat, nuc)\n\n # Test each point\n correct = True\n for i, ref in enumerate(y_old):\n if ref != y_test[i]:\n if ref != 0.0:\n correct = np.abs(y_test[i] - ref) / ref <= tol\n else:\n correct = False\n\n assert correct, \"Discrepancy in mat {} and nuc {}\\n{}\\n{}\".format(\n mat, nuc, y_old, y_test)\n\n # Compare statepoint files with depletion results\n\n t_test, k_test = res_test.get_eigenvalue()\n t_ref, k_ref = res_ref.get_eigenvalue()\n k_state = np.empty_like(k_ref)\n\n n_tallies = np.empty(N + 1, dtype=int)\n\n # Get statepoint files for all BOS points and EOL\n for n in range(N + 1):\n statepoint = openmc.StatePoint(\"openmc_simulation_n{}.h5\".format(n))\n k_n = statepoint.k_combined\n k_state[n] = [k_n.nominal_value, k_n.std_dev]\n n_tallies[n] = len(statepoint.tallies)\n # Look for exact match pulling from statepoint and depletion_results\n assert np.all(k_state == k_test)\n assert np.allclose(k_test, k_ref)\n\n # Check that no additional tallies are loaded from the files\n assert np.all(n_tallies == 0)", "def test_exam_lsolve2b(self):\n result = 0\n x = symbol('x')\n y = symbol('y')\n eqns = [3*x+y==7, 2*x-5*y==8]\n solution = lsolve(eqns, [x,y])\n solx = solution[0].rhs()\n soly = solution[1].rhs()\n # It should have returned x==43/17 and y==-10/17\n if(solx != numeric(43,17) or soly != numeric(-10,17)):\n result = 1\n print \"solution of the system \", [str(item) for item in eqns], \" for [x,y] \"\n print \"erronously returned \", [str(item) for item in solution]\n self.assertEqual(result,0)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
See if a read is aligned within `wiggle` of its real start base.
def is_correct_aln(read, rname, wiggle=5):
    dwgsim_read = dwgsim_parser(read.qname)
    if rname != dwgsim_read.seqname:
        return False
    return (dwgsim_read.start_1 - read.pos) <= wiggle or (dwgsim_read.start_2 - read.pos) <= wiggle
[ "def has_align(self):\n return self._db_info_cache[\"sequence-aligned\"]", "def is_chunk(xbe, offset: int, section: str) -> bool:\n raw_offset = get_raw_address(offset, section)\n xbe.seek(raw_offset + 4) # skip entry\n geometry_header_pointer = unpack(\"I\", xbe.read(4))[0]\n\n return geometry_header_pointer != 0", "def is_in_region(position, aligns):\r\n\r\n for align in aligns:\r\n if align.start <= position < align.end: return True\r\n return False", "def isAligned(self, geom, sector):\n if not geom:\n raise TypeError, \"missing parted.Geometry parameter\"\n\n if sector is None:\n raise TypeError, \"missing sector parameter\"\n\n return self.__alignment.is_aligned(geom.getPedGeometry(), sector)", "def contains_offset(self, offset):\n\n if self.PointerToRawData is None:\n # bss and other sections containing only uninitialized data must have 0\n # and do not take space in the file\n return False\n PointerToRawData_adj = self.get_PointerToRawData_adj()\n return ( PointerToRawData_adj <= offset < PointerToRawData_adj + self.SizeOfRawData )", "def _find_minimum_alignment(offset: int, base_alignment: int, prev_end: int) -> int:\n # Essentially, we need to find the minimum k such that:\n # 1) offset = m * base_alignment * 2**k, where m > 0 and k >= 0;\n # (by definition of alignment)\n # 2) offset - prev_offset < base_alignment * 2**k\n # (otherwise the compiler can just as well take m' = m - 1).\n if offset % base_alignment != 0:\n raise ValueError(\n f\"Field offset ({offset}) must be a multiple of the base alignment ({base_alignment}).\"\n )\n\n alignment = base_alignment\n while offset % alignment == 0:\n if offset - prev_end < alignment:\n return alignment\n\n alignment *= 2\n\n raise ValueError(\n f\"Could not find a suitable alignment for the field at offset {offset}; \"\n \"consider adding explicit padding.\"\n )", "def read_ok(read):\n if any([ord(c)-33 < _BASE_QUAL_CUTOFF for c in list(read.qual)]):\n return False\n else:\n return True", "def __is_offset_beyond_bounds(current_offset, current_remaining_dim,\n partition_min, partition_max):\n return (current_offset > partition_max or\n current_offset + current_remaining_dim < partition_min)", "def check_4k_alignment(show_alert=False):\n aligned = True\n cmd = ['WMIC', 'partition', 'get', 'StartingOffset']\n offsets = []\n\n # Get offsets\n result = run_program(cmd, encoding='utf-8', errors='ignore', check=False)\n offsets = result.stdout.splitlines()\n\n # Check offsets\n for off in offsets:\n off = off.strip()\n if not off.isnumeric():\n # Skip\n continue\n\n try:\n aligned = aligned and int(off) % 4096 == 0\n except ValueError:\n # Ignore, this check is low priority\n pass\n\n # Show alert\n if show_alert:\n show_alert_box('One or more partitions are not 4K aligned')\n raise Not4KAlignedError", "def isExceedExtend(read, introns):\n if len(introns) == 0:\n return 100\n else:\n exons = np.array(getBlock(read, introns))\n introns = np.array(introns)\n exonLength = exons[:, 1] - exons[:, 0]\n intronLength = introns[:, 1] - introns[:, 0]\n result = 0\n if exonLength[-1] / intronLength[-1] <= 0.01:\n result += 1\n if exonLength[0] / intronLength[0] <= 0.01:\n result += 10\n return result", "def is_synced(self):\n if self.is_empty:\n return False\n if not np.all(self.good_traces(include_box=True)):\n return False\n side = np.clip(self.traceid, -1, 1)\n if len(side) == 0:\n return False\n return side[0] == -1 and side.size % 2 == 0 and np.all(side[1:] + side[:-1] == 0)", "def check_pileupread( pileupread ):\n if 
pileupread.alignment.is_duplicate:\n return( False )\n if pileupread.is_del:\n return( False )\n if pileupread.is_refskip:\n return( False )\n if not pileupread.query_position:\n return( False )\n if pileupread.alignment.mapq < args.mapq:\n return( False )\n if pileupread.alignment.query_qualities[pileupread.query_position] < args.base_phred_quality:\n return( False )\n\n return( True )", "def is_horz_aligned(c):\n return (all([\n c[i].sentence.is_visual()\n and bbox_horz_aligned(bbox_from_span(c[i]), bbox_from_span(c[0]))\n for i in range(len(c))\n ]))", "def in_current(self, offset):\n return (self.current_offset <= offset) \\\n and (offset < self.offsets[self.current_shard_n + 1])", "def check_grid_aligned(meta, img, bounds, mip, throw_error=False):\n shape = Vec(*img.shape)[:3]\n alignment_check = bounds.expand_to_chunk_size(meta.chunk_size(mip), meta.voxel_offset(mip))\n alignment_check = Bbox.clamp(alignment_check, meta.bounds(mip))\n is_aligned = np.all(alignment_check.minpt == bounds.minpt) and np.all(alignment_check.maxpt == bounds.maxpt)\n \n if throw_error and is_aligned == False:\n msg = NON_ALIGNED_WRITE.format(\n mip=mip, chunk_size=meta.chunk_size(mip), \n offset=meta.voxel_offset(mip), \n got=bounds, check=alignment_check\n )\n raise exceptions.AlignmentError(msg)\n\n return is_aligned", "def is_preamble(data) -> bool:\n if len(data) < 16:\n return False\n # set cut-off for 0/1 between minimum and maximum values in data\n thresh = min(data) + ((max(data) - min(data)) / 2)\n normed = [1 if b >= thresh else 0 for b in data]\n # print(f'NORMED PREAMB: {normed}')\n for i, b in enumerate(Radio.PREAMB_KEY):\n if normed[i] != b:\n return False\n return True", "def includes(self, tag):\n return self.begin <= tag.begin and self.end >= tag.end", "def is_valid(self):\n\n return super(Alignment, self).is_valid() and self._validate_lengths()", "def check(self, read_tuple_name):\n\n\t\tparts = read_tuple_name.split(\"__\")\n\n\t\tif len(parts[0]) != self.prefix_width or len(parts[1]) != self.read_tuple_id_width:\n\t\t\treturn False\n\n\t\tsegments = parts[2][1:-1].split(\"),(\")\n\t\tfor segment in segments:\n\t\t\tint_widths = list(map(len, segment.split(\",\")))\n\t\t\tif self.genome_id_width != int_widths[0]:\n\t\t\t\treturn False\n\t\t\tif self.chr_id_width != int_widths[1]:\n\t\t\t\treturn False\n\t\t\tif self.coor_width != int_widths[3] or self.coor_width != int_widths[4]:\n\t\t\t\treturn False\n\n\t\treturn True", "def hasAllocatedAngles(self):\n return self._dxdz is not None and self._dydz is not None" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Computes the path of the ARC data file given the type. Each ARC split has the corresponding data located in one file.
def type_to_data_file(arc_type):
    assert(isinstance(arc_type, ARCType))
    data_dir = os.path.join(ARC_CACHE_DIR, "ARC-V1-Feb2018-2")
    split, category = tuple(arc_type.name.lower().split("_"))
    category = "ARC-{}".format(category.capitalize())
    split = "{}-{}".format(category, split.capitalize())
    basename = split + ".jsonl"
    return os.path.join(data_dir, category, basename)
[ "def get_data_file(*path_segments):\n return os.path.join(getdatapath(), *path_segments)", "def generate_file_uri(self, data_type: (str, URL, Path), file_name):\n if not file_name:\n raise ValueError(\" filename must exist \")\n return self.uri_for(data_type) / file_name", "def rlid_document_path(file_name, document_type):\r\n rlid_path = {\r\n # Most populous card series are divided into four-digit bins.\r\n # Rest are divided into two-digit bins.\r\n \"property-card\": os.path.join(\r\n REPO_PATH[\"property-card\"],\r\n file_name[:4] if file_name[:2] in [\"17\", \"18\"] else file_name[:2],\r\n fixed_file_name(file_name),\r\n ),\r\n \"tax-map\": os.path.join(REPO_PATH[\"tax-map\"], fixed_file_name(file_name)),\r\n # Tax map source repo has a one-deep bin, first four digits.\r\n \"tax-map-staging\": os.path.join(\r\n REPO_PATH[\"tax-map-staging\"], file_name[:4], fixed_file_name(file_name)\r\n ),\r\n }\r\n if document_type not in rlid_path:\r\n raise NotImplementedError(\r\n \"document type {!r} not implemented.\".format(document_type)\r\n )\r\n\r\n return rlid_path[document_type]", "def get_TROPOS_dataset_file_name(start_time: Union[datetime, None] = None, end_time: Union[datetime, None] = None,\n file_type: str = 'profiles') -> str:\n if file_type is None:\n pattern = f\"*[0-9].nc\"\n elif start_time and end_time and ('profiles' in file_type):\n pattern = f\"*[0-9]_{start_time.strftime('%H%M')}_{end_time.strftime('%H%M')}_{file_type}.nc\"\n elif start_time:\n pattern = f\"*{start_time.strftime('%H_%M_%S')}_{file_type}.nc\"\n else:\n pattern = f\"*[0-9]_{file_type}.nc\"\n return pattern", "def GetArchetype(self) -> \"char const *\":\n return _ITKIOImageBaseBasePython.itkArchetypeSeriesFileNames_GetArchetype(self)", "def _getfullpath(self):\n\n time = datetime.now()\n\n # root/2018/11/30/08.csv\n if self._mode == 0:\n subpath = time.strftime(self._dirfmt[self._res] \\\n + self._filefmt[self._res])\n\n # root/2018/11/30/1501231921.csv\n elif self._mode == 1:\n subpath = time.strftime(self._dirfmt[self._res] \\\n + self._fullfmt[self._res] + '/' ) \\\n + str(int(time.replace(tzinfo=timezone.utc).timestamp()))\n\n # root/2018/11/30/20181130.csv\n elif self._mode == 2:\n subpath = time.strftime(self._dirfmt[self._res] \\\n + self._fullfmt[self._res])\n\n else:\n raise ValueError('Unrecognized file naming operation')\n\n return self._root / '{}.{}'.format(subpath, self._ext)", "def concat_data(self, path, types, folder):\n self.types = types\n\n # list of files to concatenate\n file_list = [_ for _ in self.files(path) if types in _ and folder in _]\n\n filepath = path + f'{folder}_{types}_base.txt'\n\n # if file not exists in directory\n if filepath not in os.listdir(path):\n\n with open(filepath, 'w') as f1:\n for i, file in enumerate(file_list):\n # read files in the file_list\n with open(file, 'r') as f2:\n # read lines of the file if exists\n while True:\n line = f2.readline()\n if not line:\n break\n # write line in the file\n f1.write(line)\n\n # close files\n f2.close()\n f1.close()\n\n else: # if file already exists\n print(f'{filepath} already exists')\n\n return filepath", "def _extract_alt_dir(self, path, type):\n\n ps = path.split('/')\n\n assert ps[-1]=='coadd'\n\n ps[-1] = type\n return '/'.join(ps)", "def getSequenceDataPath(filename):\n return getPath([getRootDataDirectory(),\n \"sequence_data\"], filename)", "def load_climate_data(data_type, file_path):\n\n return _load_climate_data(\n _find_loader(data_type),\n file_path\n )", "def get_path(self, file_type, 
create_missing_folders=True):\n if file_type.lower() not in self.FILE_TYPES:\n raise ValueError('Invalid file type: {}'.format(file_type)\n + ', it must be one of', self.FILE_TYPES)\n\n if file_type.lower() == 'impact template':\n return self.get_impact_template_path(create_missing_folders)\n\n if file_type.lower() == 'rotamer library':\n return self.get_rotamer_library_path(create_missing_folders)\n\n if file_type.lower() == 'solvent template':\n return self.get_solvent_template_path(create_missing_folders)", "def get_data_filename(kind):\n # TODO assert that the file exisits\n return get_data_dir() + '/' + kind + '.csv'", "def _get_path_for_type(type_):\n if type_.lower() in CORE_TYPES:\n return Path('index.html#%s' % type_.lower())\n elif '.' in type_:\n namespace, name = type_.split('.')\n return Path('types', namespace, _get_file_name(name))\n else:\n return Path('types', _get_file_name(type_))", "def create_filepath(ds, prefix='filename', root_path='.'):\n start = pd.to_datetime(str(ds.time.data[0])).strftime('%Y-%m-%d')\n end = pd.to_datetime(str(ds.time.data[-1])).strftime('%Y-%m-%d')\n filepath = f'{root_path}/{prefix}_{start}_{end}.nc'\n return filepath", "def _extract_contour_id(self, contour_filename, contour_type):\n\n start = contour_filename.index(parsers.ContourParser.FN_PREFIX)\n start += len(parsers.ContourParser.FN_PREFIX)\n if contour_type == 'i':\n end = contour_filename.index(\n parsers.ContourParser.ICONTOUR_FN_POSTFIX)\n if contour_type == 'o':\n end = contour_filename.index(\n parsers.ContourParser.OCONTOUR_FN_POSTFIX)\n return int(contour_filename[start + 1: end])", "def uri_for(self, data_type : (str, URL, Path)):\n return self.get_base_uri() / data_type", "def _sdc_path(cls) -> None:", "def get_data_path(fn):\n # getouterframes returns a list of tuples: the second tuple\n # contains info about the caller, and the second element is its\n # filename\n callers_filename = inspect.getouterframes(inspect.currentframe())[1][1]\n path = os.path.dirname(os.path.abspath(callers_filename))\n data_path = os.path.join(path, 'data', fn)\n return data_path", "def getDataPath(self, img): \n if self.__singleMode:\n return os.path.join(GG.utils.DATA_PATH, img)\n else:\n pathFile = os.path.join(GG.utils.LOCAL_DATA_PATH, img)\n if not os.path.isfile(pathFile):\n imgData = self.__system.getResource(img) \n if imgData:\n if not os.path.isdir(os.path.dirname(pathFile)):\n GG.utils.createRecursiveDir(os.path.dirname(pathFile))\n imgFile = open(pathFile, \"wb\")\n imgFile.write(imgData)\n imgFile.close()\n else:\n return GG.utils.IMG_ERROR\n return pathFile" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
send the status of the heaps as 3 integers to the client socket
def send_heaps_status(conn_soc, game):
    obj = pack('iii', game.nA, game.nB, game.nC)
    send_data(conn_soc, obj)
[ "def receive_game_status(client_soc):\n (nA, nB, nC) = receive_data(client_soc, 12, 'iii')\n print(\"Heap A: {}\\nHeap B: {}\\nHeap C: {}\".format(nA, nB, nC))", "def get_status(): # {\n statuses = thePlayer.get_status()\n try:\n status = \"\\n\".join(statuses)\n except TypeError:\n status = \"\\n\".join([\"\"]*7)\n bstatus = status.encode()\n self.send_header(\"Content-Length\", str(len(bstatus)))\n self.end_headers()\n self.wfile.write(bstatus)\n self.wfile.flush()", "def handle_getStatus_event():\n global STATUS_VALS\n socketio.emit('updateStatus', str(STATUS_VALS), callback=messageReceived)", "def send_packet():", "def getRigStatus(host, port):\n try:\n s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n s.connect((host, port))\n except socket.error, exc:\n print \"Caught exception socket.error : %s\" % exc\n return\n\n # get rig status\n s.sendall(b'{\"id\":0, \"jsonrpc\": \"2.0\", \"method\": \"miner_getstat1\"}')\n data = s.recv(1024)\n s.close()\n\n try:\n decoded = json.loads(data)['result']\n return decoded\n\n except (ValueError, KeyError, TypeError):\n print \"JSON format error\"\n return None", "def comm_status(self):\r\n # TODO Note this has a lot of repeated code from forward UCM method. consider refactoring\r\n mesdict = {\"commstate\": \"good\"}\r\n\r\n page = self.URLmap.get(\"comm_state\", \"/comm.cgi?\")\r\n requestURL = \"http://\" + self.UCMip + page\r\n UCMrequest = urllib2.Request(requestURL)\r\n \r\n method = self.HTTPmethods.get(\"comm_state\", \"POST\")\r\n messtr = json.dumps(mesdict)\r\n UCMrequest.add_data(messtr)\r\n UCMresponsedict = {\"message_subject\": \"commstate_update\"}\r\n \r\n now = datetime.utcnow().isoformat() + 'Z'\r\n if settings.DEBUGGING_LEVEL >= 2:\r\n print(\"Sending a message to test connection at {time}\".format(time = now))\r\n topic = self.create_topic(\"commstate\")\r\n try:\r\n result = urllib2.urlopen(UCMrequest, timeout = 10)\r\n HTTPcode = result.getcode()\r\n if HTTPcode == 200:\r\n UCMresponsedict[\"commstate\"] = \"good\"\r\n elif HTTPcode == 400:\r\n UCMresponsedict[\"commstate\"] = \"SGD_timeout\"\r\n else:\r\n UCMresponsedict[\"commstate\"] = \"ambiguous\"\r\n\r\n print(\"<{name}> channel status update from {time}: {status}\".format(name =self.UCMname, time = now, status = UCMresponsedict[\"commstate\"]))\r\n notification = json.dumps(UCMresponsedict)\r\n self.vip.pubsub.publish(peer = 'pubsub', topic = topic, headers = {}, message = notification)\r\n except urllib2.URLError, e:\r\n print('an urllib2 error of type {error} occurred while sending comms test message to {ucm}'.format(error = e, ucm = self.UCMname))\r\n _log.error('Comm_state urllib error')\r\n except socket.timeout, e:\r\n _log.error('Comm_state time out')", "def send_supvisors_status(self, status):\n self.supvisors.logger.debug('send SupvisorsStatus {}'.format(status))\n self.socket.send_string(EventHeaders.SUPVISORS, zmq.SNDMORE)\n self.socket.send_json(status.serial())", "def updateDoorStatusInApp(self):\r\n\t\t#Setup the target of the UDP messages and the send the current door state\r\n\t\thost = \"192.168.43.1\"\r\n\t\tport = 8888\r\n\t\ttarget_address = (host,port)\r\n\t\tdata = self.doorLatch.status\r\n\t\tself.socket.sendto(data.encode('utf-8'), target_address)", "def serverSendAuthResp(conn:socket.socket, version:int, status:int):\n conn.send(struct.pack('BB', version, status))", "def protocol_send(self, data, sock):", "def sendInfo(conn, data):\n\ttry:\n\t\tlengthPrefix = str(len(data)).encode('utf-8') + ':#:'\n\t\tpayload = lengthPrefix + 
data\n\t\tconn.sendall(payload)\n\texcept socket.error:\n\t\traise", "def send_process_status(self, status):\n self.supvisors.logger.debug('send ProcessStatus {}'.format(status))\n self.socket.send_string(EventHeaders.PROCESS, zmq.SNDMORE)\n self.socket.send_json(status.serial())", "async def status(self, cont):\n embed = Embed(colour = self.embed_colour, description = \"­\\n\")\n embed.set_author(name = self.bot_name+\" Status\", icon_url = self.bot_url)\n\n name_value = {\n \"Ping\": f\"{round(self.bot.latency * 1000)} ms\",\n \"Server Count\": f\"{len(self.bot.guilds)}\",\n \"Member Count\": f\"{sum([s.member_count for s in self.bot.guilds])}\"\n }\n\n for name, value in zip(name_value.keys(), name_value.values()):\n embed.add_field(name = name, value = value, inline = False)\n\n await cont.channel.send(embed = embed)", "def _update_status(self, reply):\n self.status.frequency = reply.frequency\n self.status.temperature = reply.temperature\n # The pump reports current in 0.1 A.\n self.status.current = reply.current / 10\n self.status.voltage = reply.voltage\n self.status.status_bits = reply.flag_bits", "def rpc_status(self, sender, *args):\n \n if (len(args) != 0):\n raise rpc.RPCFault(604, 'status: no arguments')\n dic = {}\n dic['online'] = self.factory.online\n dic['startup_time'] = time.ctime(self.factory.startuptime)\n dic['startup_at'] = volent.descinterval(\n self.factory.startuptime,\n limit=2)\n dic['last_new_bot'] = volent.descinterval(\n self.factory.activitytime,\n limit=2)\n dic['bots_running'] = len(self.factory.actors)\n dic['bots_started'] = self.factory.actorsstarted\n \n return dic", "def sendStatusKeys(self, cmd): \n \n cmd.inform('text=\"Number of AG cameras = %d\"' % self.numberOfCamera)\n for n in range(nCams):\n if self.cams[n] != None:\n if self.cams[n].isReady():\n tempstr = '%5.1f' % self.cams[n].getTemperature()\n cmd.inform('agc%d_stat=READY' % (n + 1))\n else:\n tempstr = '<%5.1f>' % self.cams[n].temp\n cmd.inform('agc%d_stat=BUSY' % (n + 1))\n cmd.inform('text=\"[%d] %s SN=%s status=%s temp=%s regions=%s bin=(%d,%d) expArea=%s\"'\n % (n + 1, self.cams[n].devname, self.cams[n].devsn,\n self.cams[n].getStatusStr(), tempstr, self.cams[n].regions,\n self.cams[n].hbin, self.cams[n].vbin, self.cams[n].expArea))\n else:\n cmd.inform('agc%d_stat=ABSENT' % (n + 1))", "def handle_client_data(self, data, client_sock):\n prot = data[0].lower()\n if prot == \"n\":\n # Sent by the central server when a new node joins\n address = json.loads(data[1:])\n # print(f\"{self._worker.name} added a node to their network.\")\n self._worker.add_peer(address)\n client_sock.close()\n elif prot == \"h\":\n # Receive new block header\n block_header = json.loads(data[1:])\n client_sock.close()\n self._worker.add_block_header(block_header)\n elif prot == \"t\":\n # Receive new transaction\n tx_json = json.loads(data[1:])[\"tx_json\"]\n client_sock.close()\n self._worker.add_transaction(tx_json)\n elif prot in \"rx\":\n # Receive request for transaction proof or balance\n # Send \"spv\" back so client can exclude this reply\n client_sock.sendall(\"spv\".encode())\n client_sock.close()\n else:\n client_sock.close()", "def _update_counters(self):\n self._bytes_sent = self._connection.bytes_sent\n self._bytes_received = self._connection.bytes_received", "def handle_210(message, address, client):\n handle_200(message, address, client)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
attempt to receive a single char from the server
def receive_char(client_soc):
    c = receive_data(client_soc, 1, 'c')[0]
    return c.decode('ascii')
[ "def receive_byte(self):\n return unpack('B', self.read(1))[0]", "def read_one_line(sock):\r\n newline_received = False\r\n message = \"\"\r\n while not newline_received:\r\n character = sock.recv(1).decode()\r\n if character == '\\n':\r\n newline_received = True\r\n elif character == '\\r':\r\n pass\r\n else:\r\n message += character\r\n return message", "def _get_byte(self):\r\n tmp = self.read(1)\r\n if len(tmp) == 0:\r\n raise UssPortReadError(\"Read timeout\")\r\n return ord(tmp)", "def sendchar(self):\n\n self.transport.write(self.payload[self.index])\n self.index += 1\n if self.index >= len(self.payload):\n # Just stop and wait to get reaped.\n self.loop.stop()", "def _two_byte_cmd(self, cmd):\n # logger.debug ('recv _two_byte_cmd %s', name_option(cmd),)\n if cmd == SB:\n ## Begin capturing a sub-negotiation string\n self.telnet_got_sb = True\n self.telnet_sb_buffer = array.array('c')\n elif cmd == SE:\n ## Stop capturing a sub-negotiation string\n self.telnet_got_sb = False\n self._sb_decoder()\n elif cmd == IP:\n self.deactivate()\n logger.debug('%s Interrupt; closing.', self.addrport())\n elif cmd == AO:\n flushed = len(self.recv_buffer)\n self.recv_buffer = array.array('c')\n logger.debug('Abort Output (AO); %s bytes discarded.', flushed)\n elif cmd == AYT:\n self.send_str(bytes('\\b'))\n logger.debug('Are You There (AYT); \"\\\\b\" sent.')\n elif cmd == EC:\n self.recv_buffer.fromstring('\\b')\n logger.debug('Erase Character (EC); \"\\\\b\" queued.')\n elif cmd == EL:\n logger.warn('Erase Line (EC) received; ignored.')\n elif cmd == GA:\n logger.warn('Go Ahead (GA) received; ignored.')\n elif cmd == NOP:\n logger.debug('NUL ignored.')\n elif cmd == DM:\n logger.warn('Data Mark (DM) received; ignored.')\n elif cmd == BRK:\n logger.warn('Break (BRK) received; ignored.')\n else:\n logger.error('_two_byte_cmd invalid: %r', cmd)\n self.telnet_got_iac = False\n self.telnet_got_cmd = None", "def character_received(self, char):\n CR, LF, NUL = '\\r\\n\\x00'\n char_disp = char\n self.log.debug('character_received: {!r}'.format(char))\n if not self.can_write(char) or not char.isprintable():\n # ASCII representation of unprtintables for display editing\n char_disp = self.standout(teldisp.name_unicode(char))\n if self.is_literal:\n # Within a ^v loop of ``literal_received()``, insert raw\n self._lastline.append(char)\n self.local_echo(char_disp)\n elif (self._last_char == CR and char in (LF, NUL) and self.strip_eol):\n # ``strip_eol`` is True, pass on '\\n' or '\\x00' following CR,\n pass\n elif self._last_char == CR and char in (LF, NUL):\n # ``strip_eol`` is False, preserve '\\n' or '\\x00'\n self._lastline.append(char)\n elif char == CR:\n # callback ``line_received()`` always on CR\n if not self.strip_eol:\n self.lastline._append(CR)\n self.line_received(self.lastline)\n elif self.tab_completion and char == '\\t': # ^I tab auto-completion\n try:\n if not self.tab_received(self.lastline):\n self.bell()\n except ValueError as err:\n self.log.debug(err)\n self.bell()\n except Exception:\n self._display_tb(*sys.exc_info(), level=logging.INFO)\n finally:\n self.display_prompt(redraw=True)\n elif not char.isprintable() and char not in (CR, LF, NUL,):\n self.bell()\n elif char.isprintable() and char not in ('\\r', '\\n'):\n self._lastline.append(char)\n self.local_echo(char_disp)\n self._last_char = char", "def receiveChar(self, timeout = None):\n\n command = 'RECEIVE CHAR'\n if timeout is not None:\n timeout *= 1000\n command += ' {}'.format(timeout)\n\n d = self.sendCommand(command)\n d = 
d.addCallback(self.checkFailure)\n d = d.addCallback(self.resultPlusTimeoutFlag)\n return d", "def receive_input(self, char):\n self._cmd_prompt.receive_input(char)", "def getchar():\n\t\"\"\"Function taken from Github : https://gist.github.com/jasonrdsouza/1901709\"\"\"\n\timport tty, termios, sys\n\tfd = sys.stdin.fileno()\n\told_settings = termios.tcgetattr(fd)\n\ttry:\n\t\ttty.setraw(sys.stdin.fileno())\n\t\tch = sys.stdin.read(1)\n\tfinally:\n\t\ttermios.tcsetattr(fd, termios.TCSADRAIN, old_settings)\n\treturn ch", "def read(self, num_bytes=31):\n\ttry:\n response = self.i2c_reader.read(num_bytes)\n response = filter(lambda x: x != '\\x00', response) # remove null chars\n \n # Get a list of received characters to return\n if ord(response[0]) == 1:\n # On the Raspberry Pi the MSB needs to be changed to 0 for all\n # received chars except the first\n char_list = map(lambda x: chr(ord(x) & ~0x80), list(response[1:]))\n return ''.join(char_list)\n else:\n # The response was an error\n return 'Error'\n except IOError:\n return 'Input/output error'", "def ler_socket(self):\n retorno = None\n retorno = self.socket.recv(32)\n if (retorno is not None) and len(retorno) > 0:\n # print\"Retorno \", retorno\n\n return retorno", "def _receive(self, length):\n data = map(ord, self._receive_internal(length))\n if len(data) < length:\n raise ButtshockError(\"Received unexpected length %d, expected %d!\" % (len(data), length))\n return data", "def op_read_char(self, unused, time, input_routine):\n # According to the spec, the first argument is always one, and\n # exists only for Historical Reasons(tm)\n assert unused == 1\n\n # TODO: shiny timer stuff not implemented yet.\n if time != 0 or input_routine != 0:\n raise ZCpuNotImplemented\n\n char = self._ui.keyboard_input.read_char()\n self._write_result(char)", "def ser_read(self): \n return self.char_read.read()", "def peek(self):\r\n if (self.peek_char):\r\n return self.peek_char\r\n if self.available():\r\n self.peek_char = self.ser_port.read(1)\r\n return self.peek_char\r\n return -1", "def read_string(conn):\n try:\n str_len = ClientThread.read_int32(conn)\n\n str_bytes = conn.recv(str_len)\n decoded_str = str_bytes.decode('utf-8')\n\n return decoded_str\n\n except Exception as e:\n print(\"Unable to read string from connection. {}\".format(e))\n\n return None", "def recieve(sock):\r\n\r\n try:\r\n data = sock.recv(1024)\r\n #print(data) unlock if you want to see encrypted data raw\r\n decrypted_data = DEScrypt(data, 2, session_key)\r\n data_str = decrypted_data.decode(\"utf-8\")\r\n return \"Server: \" + data_str\r\n except:\r\n print(\"Session ended with gary the chatbot\")\r\n sys.exit(0)", "def socket_recv(self):\n recv = 0\n try:\n data = self.sock.recv(self.BLOCKSIZE_RECV)\n recv = len(data)\n if 0 == recv:\n raise Disconnected('Closed by client')\n except socket.error as err:\n raise Disconnected('socket errno %d: %s' % (err[0], err[1],))\n self.bytes_received += recv\n self.last_input_time = time.time()\n\n ## Test for telnet commands, non-telnet bytes\n ## are pushed to self.recv_buffer (side-effect),\n for byte in data:\n self._iac_sniffer(byte)\n return recv", "def dataReceived(data):" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
send a value as a single char to the client socket
def send_char(conn_soc, c):
    send_data(conn_soc, pack('c', c.encode('ascii')))
[ "def sendchar(self):\n\n self.transport.write(self.payload[self.index])\n self.index += 1\n if self.index >= len(self.payload):\n # Just stop and wait to get reaped.\n self.loop.stop()", "def send_byte(self, byte):\n self.write(pack('B', byte))", "def char_write(self, handle, value, wait_for_response=False):\r\n pass", "def receive_char(client_soc):\n c = receive_data(client_soc, 1, 'c')[0]\n return c.decode('ascii')", "def respond(client):\n response = input(\"Enter a value: \")\n client.send(bytes(response, 'utf8'))\n client.close()", "def send(self, buf):", "def add_char( self, value ):\n self.buffer_value.append( bytearray( value ) )", "def send_packet():", "def protocol_send(self, data, sock):", "def send_ascii(chan, line):\n for c in line:\n chan.send_nowait(ord(c))\n if c != '\\n':\n chan.send_nowait(10)", "def send(self, data : bytes):\r\n header=numheader.encode32(len(data))\r\n self.socket.send(header+data)", "def send(self, message): \n self.server_socket.send(bytes(message, FORMAT))", "def write(self, data, pack=struct.pack, eol=struct.pack('!b', 0)):\n send = self.send\n if data == 0:\n send(eol)\n else:\n for char in data:\n if sys.version_info[0] > 2:\n char = char.encode('utf-8')\n send(pack('!c', char))", "def __send_bytes(self, data):\n self.socket.sendall(data)", "def write_char(self, char):\n self._write(ord(char), True)", "def s_send_string(socket: zmq.Socket, message: bytes):\n flags = 0\n socket.send(message, flags)", "def __send_command(self, command):\n self.socket.send(command)", "def send_code(self, code):\n\t\tself.socket.sendall(code.value.encode('utf8'))", "def send(self, value=None):\r\n if self._closed:\r\n raise ChannelClosed()\r\n self._send(value)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
attempt to receive the game status from the server (as 3 integers that represent the heaps status)
def receive_game_status(client_soc):
    (nA, nB, nC) = receive_data(client_soc, 12, 'iii')
    print("Heap A: {}\nHeap B: {}\nHeap C: {}".format(nA, nB, nC))
[ "def send_heaps_status(conn_soc,game):\n obj = pack('iii',game.nA, game.nB, game.nC)\n send_data(conn_soc, obj)", "def get_status(): # {\n statuses = thePlayer.get_status()\n try:\n status = \"\\n\".join(statuses)\n except TypeError:\n status = \"\\n\".join([\"\"]*7)\n bstatus = status.encode()\n self.send_header(\"Content-Length\", str(len(bstatus)))\n self.end_headers()\n self.wfile.write(bstatus)\n self.wfile.flush()", "def fetch_status():\n return json.loads(requests.get('http://omegle.com/status').text)", "def rpc_status(self, sender, *args):\n \n if (len(args) != 0):\n raise rpc.RPCFault(604, 'status: no arguments')\n dic = {}\n dic['online'] = self.factory.online\n dic['startup_time'] = time.ctime(self.factory.startuptime)\n dic['startup_at'] = volent.descinterval(\n self.factory.startuptime,\n limit=2)\n dic['last_new_bot'] = volent.descinterval(\n self.factory.activitytime,\n limit=2)\n dic['bots_running'] = len(self.factory.actors)\n dic['bots_started'] = self.factory.actorsstarted\n \n return dic", "def readStatus(message):", "def handle_getStatus_event():\n global STATUS_VALS\n socketio.emit('updateStatus', str(STATUS_VALS), callback=messageReceived)", "def get_status(game):\n if game.over == True:\n if game.won == True:\n return \"Game over, you win!\"\n else:\n return \"Game over, try again!\"\n else:\n return \"Keep guessing, you have \" + str(game.remaining_attempts) + \" guesses left.\"", "def GetStatus(self):\n self.__SendMsg(\"status\")\n ##TODO: Parse the response into some struct so it can be queried later.\n\n ## \"Status\" is the only command that returns a multi\n ## line response so handle it separately.\n response = \"\"\n while(self.SocketIsReadable()):\n data = self.my_Socket.recv(1)\n if not data:\n break\n else:\n response += data.decode(\"UTF-8\")\n return response", "def getGameStatus(self):\r\n return self.game.gameStatus.pointer", "def getRigStatus(host, port):\n try:\n s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n s.connect((host, port))\n except socket.error, exc:\n print \"Caught exception socket.error : %s\" % exc\n return\n\n # get rig status\n s.sendall(b'{\"id\":0, \"jsonrpc\": \"2.0\", \"method\": \"miner_getstat1\"}')\n data = s.recv(1024)\n s.close()\n\n try:\n decoded = json.loads(data)['result']\n return decoded\n\n except (ValueError, KeyError, TypeError):\n print \"JSON format error\"\n return None", "def request_game_state(self):\n try:\n self.update_state(self.network.send(\"get\"))\n except Exception as e:\n print(e)", "def get_status(self):\n if not self.alive: return 'Dead'\n elif self.cells_left == 0: return 'Win'\n else: return 'Ok'", "def _player_info(self) -> dict:\n return self._json_decode(self._send(\"getPlayerStatus\"))", "def status(self):\n response = requests.get(\"http://%s:%d/v1/status\" % (self.propsd_server, self.propsd_port))\n return json.loads(response.text)", "def get_lol_status_for_shard(self):\n url = self.method_ref + 'shard-data'\n r = requests.get(url, params=self.payload)\n return r.json()", "def get_status():\n # request status from vlc http server\n req = urllib2.Request(VLC_STATUS)\n # build authentication header\n password = keyring.get_password('vlc_status', 'conky')\n base64string = base64.encodestring('%s:%s' % ('', password))[:-1]\n password = None\n authheader = \"Basic %s\" % base64string\n # add encoded header to request\n req.add_header(\"Authorization\", authheader)\n try:\n handle = urllib2.urlopen(req)\n except IOError as e:\n sys.exit(1)\n source = handle.read()\n json_data = 
json.loads(source)\n\n return json_data", "def comm_status(self):\r\n # TODO Note this has a lot of repeated code from forward UCM method. consider refactoring\r\n mesdict = {\"commstate\": \"good\"}\r\n\r\n page = self.URLmap.get(\"comm_state\", \"/comm.cgi?\")\r\n requestURL = \"http://\" + self.UCMip + page\r\n UCMrequest = urllib2.Request(requestURL)\r\n \r\n method = self.HTTPmethods.get(\"comm_state\", \"POST\")\r\n messtr = json.dumps(mesdict)\r\n UCMrequest.add_data(messtr)\r\n UCMresponsedict = {\"message_subject\": \"commstate_update\"}\r\n \r\n now = datetime.utcnow().isoformat() + 'Z'\r\n if settings.DEBUGGING_LEVEL >= 2:\r\n print(\"Sending a message to test connection at {time}\".format(time = now))\r\n topic = self.create_topic(\"commstate\")\r\n try:\r\n result = urllib2.urlopen(UCMrequest, timeout = 10)\r\n HTTPcode = result.getcode()\r\n if HTTPcode == 200:\r\n UCMresponsedict[\"commstate\"] = \"good\"\r\n elif HTTPcode == 400:\r\n UCMresponsedict[\"commstate\"] = \"SGD_timeout\"\r\n else:\r\n UCMresponsedict[\"commstate\"] = \"ambiguous\"\r\n\r\n print(\"<{name}> channel status update from {time}: {status}\".format(name =self.UCMname, time = now, status = UCMresponsedict[\"commstate\"]))\r\n notification = json.dumps(UCMresponsedict)\r\n self.vip.pubsub.publish(peer = 'pubsub', topic = topic, headers = {}, message = notification)\r\n except urllib2.URLError, e:\r\n print('an urllib2 error of type {error} occurred while sending comms test message to {ucm}'.format(error = e, ucm = self.UCMname))\r\n _log.error('Comm_state urllib error')\r\n except socket.timeout, e:\r\n _log.error('Comm_state time out')", "def status_for(self, player) -> dict:\n if player not in (self.player1, self.player2):\n raise PylgrumInternalError(\n \"Can't generate game status for uninvolved player\"\n )\n\n r_val = {\n \"game_id\": self.game_id,\n \"description\": \"game between {} and {}\".format(\n self.player1.contestant_id,\n self.player2.contestant_id,\n ),\n \"current_player\": self._current_player.contestant_id,\n }\n try:\n visible_discard = self.visible_discard\n except CardNotFoundError:\n r_val['visible_discard'] = {\n 'suit': \"\",\n 'card': \"\"\n }\n else:\n r_val['visible_discard'] = {\n 'suit': visible_discard.suit.name,\n 'card': visible_discard.rank.name\n }\n ## only the curent player can see the acquired card\n if self.current_player == player:\n if (self.current_move is not None and\n self.current_move.acquired is not None):\n r_val['new_card'] = {\n 'suit': self.current_move.acquired.suit.name,\n 'card': self.current_move.acquired.rank.name\n }\n r_val['hand'] = [{\"suit\": x.suit.name, \"card\": x.rank.name} for x in player.hand.cards]\n return r_val", "def game_status(self):\n\t\tactive_players = []\n\t\tfor player in self.players.itervalues():\n\t\t\tif player.alive:\n\t\t\t\tactive_players.append(player.name)\n\n\t\treturn {\n\t\t\t'game_active': self.active,\n\t\t\t'turn':self.turn,\n\t\t\t'active_players': active_players\n\t\t}" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
A terrible hash function that can be used for testing. A hash function should produce unpredictable results, but it is useful to see what happens to a hash table when you use the worst possible hash function. The function returned from this factory function will always return the same number, regardless of the key.
def terrible_hash(bin):
    def hashfunc(item):
        return bin
    return hashfunc
[ "def terrible_hash(bin):\r\n def hashfunc(item):\r\n return bin\r\n return hashfunc", "def hash_function_integers(key, table_size):\n return key % table_size", "def hashFunctionTest():\n m = 128\n h = HashFunction(m)\n print(h)\n\n count = [0] * m\n for i in range(m*2):\n count[h.h(random.randint(-10000,10000))] += 1\n print count", "def test_hash(self):\n value = divisors.PrimeFactorization({17: 3, 23: 1, 41: 2})\n self.assertEqual(hash(value), 17 ** 3 * 23 * 41 ** 2)", "def hash(x):\r\n return (randint(1, 5*c)*x + randint(1, 5*c)) % c", "def call_hash_function(self, key):\r\n return self._hash_function(key) % self.capacity", "def hash_number(number: Union[int, float]) -> int:\n state = c_hash.det_hash(number)\n return hash_uniform(state)", "def hash_function_strings(key, table_size):\n small_prime = 31\n hash_value = 0\n for index in range(len(key)):\n hash_value = (small_prime * hash_value + ord(key[index])) % table_size\n return hash_value", "def new(self) -> HashFunction:\n return self.hashfunc(self.algorithm)", "def hash32(value): # -> int:\n ...", "def create_hash(key, size): # O(1)\n if isinstance(key, (int, float)): # O(1)\n key_string = '{0}'.format(key) # O(1)\n char = key_string[0] # O(1)\n return int(char) % size # O(1)\n return ord(key[0]) % size # O(1)", "def hash_function_phone_number(key, table_size):\n area, exchange, extension = map(int, key.split(\"-\"))\n small_prime = 31\n hash_value = (\n ((area * small_prime + exchange) % table_size) * small_prime + extension\n ) % table_size\n return hash_value", "def create_hash(func: Union[str, int]) -> Hash:\n code = multihash.coerce_code(func)\n fun = functions.get(code, None)\n if fun is None:\n raise LookupError(\"multihash function {func} not yet supported\".format(func=func))\n return fun()", "def get_function_hash(f: 'function', ignore_not_implemented=False) -> int:\n bytecode_nested_list = get_bytecode_tree(f, ignore_not_implemented)\n bytecode_flat_list = flat_list(bytecode_nested_list)\n bytecode_flat_list = [\n bytecode.encode()\n if isinstance(bytecode,str)\n else bytecode\n for bytecode in bytecode_flat_list\n ]\n \n from hashlib import sha256\n hashable_bytecode = b''.join(bytecode_flat_list)\n h = sha256(hashable_bytecode).hexdigest()\n return h", "def _get_hash_function(self):\r\n try:\r\n func = getattr(hashlib, self.hash_type)()\r\n except AttributeError:\r\n raise RuntimeError('Invalid or unsupported hash type: %s' %\r\n (self.hash_type))\r\n\r\n return func", "def test_hash_table_hash_key_single():\n hash = HT()\n assert hash._hash_key('b') == 98", "def stable_hash(self, source, digits=9):\r\n\r\n return int(sha1(source.encode()).hexdigest(), 16) % (10 ** digits)", "def deterministic_hash(thing, length=10):\n hashable = hashablize(thing)\n jsonned = json.dumps(hashable, cls=NumpyJSONEncoder)\n # disable bandit\n digest = sha1(jsonned.encode('ascii')).digest()\n return b32encode(digest)[:length].decode('ascii').lower()", "def test_hash_function(self):\n\n # Both values must be the same, if not the hash does\n # not work.\n hashed_value1 = WordFilter.hash(\"256\", \"Hello World!\")\n hashed_value2 = WordFilter.hash(\"256\", \"Hello World!\")\n\n self.assertNotEqual(hashed_value1, \"Hello World!\")\n self.assertEqual(hashed_value1, hashed_value2)\n\n # Test values with 384 hash.\n hashed_value1 = WordFilter.hash(\"384\", \"Hello World!\")\n hashed_value2 = WordFilter.hash(\"384\", \"Hello World!\")\n\n self.assertNotEqual(hashed_value1, \"Hello World!\")\n self.assertEqual(hashed_value1, hashed_value2)\n\n # 
Test values with invalid inputs\n with self.assertRaises(ValueError):\n WordFilter.hash(\"256\", None)\n with self.assertRaises(ValueError):\n WordFilter.hash(\"256\", 12)\n with self.assertRaises(ValueError):\n WordFilter.hash(None, \"hello\")", "def __hash__(self) -> int:\n # Return the Python hash of the cryptographic hash.\n return hash(self.__event_hash__)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Returns NVCC GPU code generation options.
def _nvcc_gencode_options(cuda_version: int) -> List[str]:

    if sys.argv == ['setup.py', 'develop']:
        return []

    envcfg = os.getenv('CUPY_NVCC_GENERATE_CODE', None)
    if envcfg is not None and envcfg != 'current':
        return ['--generate-code={}'.format(arch)
                for arch in envcfg.split(';') if len(arch) > 0]
    if envcfg == 'current' and build.get_compute_capabilities() is not None:
        ccs = build.get_compute_capabilities()
        arch_list = [
            f'compute_{cc}' if cc < 60 else (f'compute_{cc}', f'sm_{cc}')
            for cc in ccs]
    else:
        # The arch_list specifies virtual architectures, such as 'compute_61',
        # and real architectures, such as 'sm_61', for which the CUDA
        # input files are to be compiled.
        #
        # The syntax of an entry of the list is
        #
        #   entry ::= virtual_arch | (virtual_arch, real_arch)
        #
        # where virtual_arch is a string which means a virtual architecture and
        # real_arch is a string which means a real architecture.
        #
        # If a virtual architecture is supplied, NVCC generates a PTX code
        # for the virtual architecture. If a pair of a virtual architecture and
        # a real architecture is supplied, NVCC generates a PTX code for the
        # virtual architecture as well as a cubin code for the real one.
        #
        # For example, making NVCC generate a PTX code for 'compute_60' virtual
        # architecture, the arch_list has an entry of 'compute_60'.
        #
        #   arch_list = ['compute_60']
        #
        # For another, making NVCC generate a PTX code for 'compute_61' virtual
        # architecture and a cubin code for 'sm_61' real architecture, the
        # arch_list has an entry of ('compute_61', 'sm_61').
        #
        #   arch_list = [('compute_61', 'sm_61')]
        #
        # See the documentation of each CUDA version for the list of supported
        # architectures:
        #
        #   https://docs.nvidia.com/cuda/cuda-compiler-driver-nvcc/index.html#options-for-steering-gpu-code-generation

        aarch64 = (platform.machine() == 'aarch64')
        if cuda_version >= 12000:
            arch_list = [('compute_50', 'sm_50'),
                         ('compute_52', 'sm_52'),
                         ('compute_60', 'sm_60'),
                         ('compute_61', 'sm_61'),
                         ('compute_70', 'sm_70'),
                         ('compute_75', 'sm_75'),
                         ('compute_80', 'sm_80'),
                         ('compute_86', 'sm_86'),
                         ('compute_89', 'sm_89'),
                         ('compute_90', 'sm_90'),
                         'compute_90']
            if aarch64:
                # Jetson TX1/TX2 are excluded as they don't support JetPack 5
                # (CUDA 11.4).
                arch_list += [
                    # ('compute_53', 'sm_53'),  # Jetson (TX1 / Nano)
                    # ('compute_62', 'sm_62'),  # Jetson (TX2)
                    ('compute_72', 'sm_72'),  # Jetson (Xavier)
                    ('compute_87', 'sm_87'),  # Jetson (Orin)
                ]
        elif cuda_version >= 11080:
            arch_list = [('compute_35', 'sm_35'),
                         ('compute_37', 'sm_37'),
                         ('compute_50', 'sm_50'),
                         ('compute_52', 'sm_52'),
                         ('compute_60', 'sm_60'),
                         ('compute_61', 'sm_61'),
                         ('compute_70', 'sm_70'),
                         ('compute_75', 'sm_75'),
                         ('compute_80', 'sm_80'),
                         ('compute_86', 'sm_86'),
                         ('compute_89', 'sm_89'),
                         ('compute_90', 'sm_90'),
                         'compute_90']
            if aarch64:
                # Jetson TX1/TX2 are excluded as they don't support JetPack 5
                # (CUDA 11.4).
                arch_list += [
                    # ('compute_53', 'sm_53'),  # Jetson (TX1 / Nano)
                    # ('compute_62', 'sm_62'),  # Jetson (TX2)
                    ('compute_72', 'sm_72'),  # Jetson (Xavier)
                    ('compute_87', 'sm_87'),  # Jetson (Orin)
                ]
        elif cuda_version >= 11040:
            # To utilize CUDA Minor Version Compatibility (`cupy-cuda11x`),
            # CUBIN must be generated for all supported compute capabilities
            # instead of PTX:
            # https://docs.nvidia.com/deploy/cuda-compatibility/index.html#application-considerations
            arch_list = [('compute_35', 'sm_35'),
                         ('compute_37', 'sm_37'),
                         ('compute_50', 'sm_50'),
                         ('compute_52', 'sm_52'),
                         ('compute_60', 'sm_60'),
                         ('compute_61', 'sm_61'),
                         ('compute_70', 'sm_70'),
                         ('compute_75', 'sm_75'),
                         ('compute_80', 'sm_80'),
                         ('compute_86', 'sm_86'),
                         'compute_86']
            if aarch64:
                # Jetson TX1/TX2 are excluded as they don't support JetPack 5
                # (CUDA 11.4).
                arch_list += [
                    # ('compute_53', 'sm_53'),  # Jetson (TX1 / Nano)
                    # ('compute_62', 'sm_62'),  # Jetson (TX2)
                    ('compute_72', 'sm_72'),  # Jetson (Xavier)
                    ('compute_87', 'sm_87'),  # Jetson (Orin)
                ]
        elif cuda_version >= 11010:
            arch_list = ['compute_35',
                         'compute_50',
                         ('compute_60', 'sm_60'),
                         ('compute_61', 'sm_61'),
                         ('compute_70', 'sm_70'),
                         ('compute_75', 'sm_75'),
                         ('compute_80', 'sm_80'),
                         ('compute_86', 'sm_86'),
                         'compute_86']
        elif cuda_version >= 11000:
            arch_list = ['compute_35',
                         'compute_50',
                         ('compute_60', 'sm_60'),
                         ('compute_61', 'sm_61'),
                         ('compute_70', 'sm_70'),
                         ('compute_75', 'sm_75'),
                         ('compute_80', 'sm_80'),
                         'compute_80']
        elif cuda_version >= 10000:
            arch_list = ['compute_30',
                         'compute_50',
                         ('compute_60', 'sm_60'),
                         ('compute_61', 'sm_61'),
                         ('compute_70', 'sm_70'),
                         ('compute_75', 'sm_75'),
                         'compute_70']
        else:
            # This should not happen.
            assert False

    options = []
    for arch in arch_list:
        if type(arch) is tuple:
            virtual_arch, real_arch = arch
            options.append('--generate-code=arch={},code={}'.format(
                virtual_arch, real_arch))
        else:
            options.append('--generate-code=arch={},code={}'.format(
                arch, arch))

    return options
[ "def preferred_virtual_gpu_options(self):\n return self._preferred_virtual_gpu_options", "def init_pycuda():\n drv.init()\n context = drv.Device(0).make_context()\n devprops = { str(k): v for (k, v) in context.get_device().get_attributes().items() }\n cc = str(devprops['COMPUTE_CAPABILITY_MAJOR']) + str(devprops['COMPUTE_CAPABILITY_MINOR'])\n return context, cc", "def gpu_list_desc(use_for=None):\n return ('define which GPUs to use{}: \"all\", \"None\", or a comma-separated list, e.g. \"1,2\"'\n .format('' if use_for is None else ' for ' + use_for))", "def cuda_select_nvcc_arch_flags(cuda_version, cuda_arch_list=\"Auto\", detected=\"\"):\n\t\n\tcuda_known_gpu_architectures = [\"Fermi\", \"Kepler\", \"Maxwell\"]\n\tcuda_common_gpu_architectures = [\"3.0\", \"3.5\", \"5.0\"]\n\tcuda_limit_gpu_architecture = None\n\tcuda_all_gpu_architectures = [\"3.0\", \"3.2\", \"3.5\", \"5.0\"]\n\t\n\tif cuda_ver_cmp(cuda_version, \"7.0\") < 0:\n\t\tcuda_limit_gpu_architecture = \"5.2\"\n\t\n\tif cuda_ver_cmp(cuda_version, \"7.0\") >= 0:\n\t\tcuda_known_gpu_architectures += [\"Kepler+Tegra\", \"Kepler+Tesla\", \"Maxwell+Tegra\"]\n\t\tcuda_common_gpu_architectures += [\"5.2\"]\n\t\t\n\t\tif cuda_ver_cmp(cuda_version, \"8.0\") < 0:\n\t\t\tcuda_common_gpu_architectures += [\"5.2+PTX\"]\n\t\t\tcuda_limit_gpu_architecture = \"6.0\"\n\t\n\tif cuda_ver_cmp(cuda_version, \"8.0\") >= 0:\n\t\tcuda_known_gpu_architectures += [\"Pascal\"]\n\t\tcuda_common_gpu_architectures += [\"6.0\", \"6.1\"]\n\t\tcuda_all_gpu_architectures += [\"6.0\", \"6.1\", \"6.2\"]\n\t\t\n\t\tif cuda_ver_cmp(cuda_version, \"9.0\") < 0:\n\t\t\tcuda_common_gpu_architectures += [\"6.1+PTX\"]\n\t\t\tcuda_limit_gpu_architecture = \"7.0\"\n\t\n\tif cuda_ver_cmp(cuda_version, \"9.0\") >= 0:\n\t\tcuda_known_gpu_architectures += [\"Volta\"]\n\t\tcuda_common_gpu_architectures += [\"7.0\"]\n\t\tcuda_all_gpu_architectures += [\"7.0\", \"7.2\"]\n\t\t\n\t\tif cuda_ver_cmp(cuda_version, \"10.0\") < 0:\n\t\t\tcuda_common_gpu_architectures += [\"7.0+PTX\"]\n\t\t\tcuda_limit_gpu_architecture = \"7.5\"\n\t\n\tif cuda_ver_cmp(cuda_version, \"10.0\") >= 0:\n\t\tcuda_known_gpu_architectures += [\"Turing\"]\n\t\tcuda_common_gpu_architectures += [\"7.5\", \"7.5+PTX\"]\n\t\tcuda_all_gpu_architectures += [\"7.5\"]\n\t\t\n\t\tif cuda_ver_cmp(cuda_version, \"11.0\") < 0:\n\t\t\tcuda_limit_gpu_architecture = \"8.0\"\n\t\n\tif not cuda_arch_list:\n\t\tcuda_arch_list = \"Auto\"\n\t\n\tif cuda_arch_list == \"All\":\n\t\tcuda_arch_list = cuda_known_gpu_architectures\n\telif cuda_arch_list == \"Common\":\n\t\tcuda_arch_list = cuda_common_gpu_architectures\n\telif cuda_arch_list == \"Auto\":\n\t\tif detected:\n\t\t\tif isinstance(detected, list):\n\t\t\t\tcuda_arch_list = detected\n\t\t\telse:\n\t\t\t\tcuda_arch_list = re.sub(\"[ \\t]+\", \";\", detected).split(\";\")\n\t\t\t\n\t\t\tif cuda_limit_gpu_architecture:\n\t\t\t\tfiltered_cuda_arch_list = []\n\t\t\t\tfor arch in cuda_arch_list:\n\t\t\t\t\tif arch:\n\t\t\t\t\t\tif cuda_arch_cmp(arch, cuda_limit_gpu_architecture) >= 0:\n\t\t\t\t\t\t\tfiltered_cuda_arch_list.append(cuda_common_gpu_architectures[-1])\n\t\t\t\t\t\telse:\n\t\t\t\t\t\t\tfiltered_cuda_arch_list.append(arch)\n\t\t\t\tcuda_arch_list = filtered_cuda_arch_list\n\t\telse:\n\t\t\tcuda_arch_list = cuda_common_gpu_architectures\n\telif isinstance(cuda_arch_list, str):\n\t\tcuda_arch_list = re.sub(\"[ \\t]+\", \";\", cuda_arch_list).split(\";\")\n\t\n\tcuda_arch_list = sorted([x for x in set(cuda_arch_list) if x])\n\t\n\tcuda_arch_bin = []\n\tcuda_arch_ptx = 
[]\n\tfor arch_name in cuda_arch_list:\n\t\tarch_bin = []\n\t\tarch_ptx = []\n\t\tadd_ptx = False\n\t\t\n\t\tif arch_name.endswith(\"+PTX\"):\n\t\t\tadd_ptx = True\n\t\t\tarch_name = arch_name[:-len(\"+PTX\")]\n\t\t\n\t\tif re.fullmatch(\"\"\"[0-9]+\\.[0-9](\\([0-9]+\\.[0-9]\\))?\"\"\", arch_name):\n\t\t\tarch_bin = [arch_name]\n\t\t\tarch_ptx = [arch_name]\n\t\telse:\n\t\t\tif arch_name == \"Fermi\": arch_bin=[\"2.0\", \"2.1(2.0)\"]\n\t\t\telif arch_name == \"Kepler+Tegra\": arch_bin=[\"3.2\"]\n\t\t\telif arch_name == \"Kepler+Tesla\": arch_bin=[\"3.7\"]\n\t\t\telif arch_name == \"Kepler\": arch_bin=[\"3.0\", \"3.5\"]; arch_ptx=[\"3.5\"]\n\t\t\telif arch_name == \"Maxwell+Tegra\": arch_bin=[\"5.3\"]\n\t\t\telif arch_name == \"Maxwell\": arch_bin=[\"5.0\", \"5.2\"]; arch_ptx=[\"5.2\"]\n\t\t\telif arch_name == \"Pascal\": arch_bin=[\"6.0\", \"6.1\"]; arch_ptx=[\"6.1\"]\n\t\t\telif arch_name == \"Volta\": arch_bin=[\"7.0\"]; arch_ptx=[\"7.0\"]\n\t\t\telif arch_name == \"Turing\": arch_bin=[\"7.5\"]; arch_ptx=[\"7.5\"]\n\t\t\telse: raise ValueError(\"Unknown CUDA Architecture Name \"+arch_name+\n\t\t\t \" in cuda_select_nvcc_arch_flags()!\")\n\t\t\n\t\tif not arch_bin:\n\t\t\traise ValueError(\"arch_bin wasn't set for some reason\")\n\t\t\n\t\tcuda_arch_bin += arch_bin\n\t\t\n\t\tif add_ptx:\n\t\t\tif not arch_ptx:\n\t\t\t\tarch_ptx = arch_bin\n\t\t\tcuda_arch_ptx += arch_ptx\n\t\t\t\n\tcuda_arch_bin = re.sub (\"\\.\", \"\", \" \".join(cuda_arch_bin))\n\tcuda_arch_ptx = re.sub (\"\\.\", \"\", \" \".join(cuda_arch_ptx))\n\tcuda_arch_bin = re.findall(\"[0-9()]+\", cuda_arch_bin)\n\tcuda_arch_ptx = re.findall(\"[0-9]+\", cuda_arch_ptx)\n\t\n\tif cuda_arch_bin: cuda_arch_bin = sorted(list(set(cuda_arch_bin)))\n\tif cuda_arch_ptx: cuda_arch_ptx = sorted(list(set(cuda_arch_ptx)))\n\t\n\tnvcc_flags = []\n\tnvcc_archs_readable = []\n\t\n\tfor arch in cuda_arch_bin:\n\t\tm = re.match(\"\"\"([0-9]+)\\(([0-9]+)\\)\"\"\", arch)\n\t\tif m:\n\t\t\tnvcc_flags += [\"-gencode\", \"arch=compute_{},code=sm_{}\".format(m[1], m[0])]\n\t\t\tnvcc_archs_readable += [\"sm_\"+m[0]]\n\t\telse:\n\t\t\tnvcc_flags += [\"-gencode\", \"arch=compute_\"+arch+\",code=sm_\"+arch]\n\t\t\tnvcc_archs_readable += [\"sm_\"+arch]\n\t\n\tfor arch in cuda_arch_ptx:\n\t\tnvcc_flags += [\"-gencode\", \"arch=compute_\"+arch+\",code=compute_\"+arch]\n\t\tnvcc_archs_readable += [\"compute_\"+arch]\n\t\n\treturn nvcc_flags, nvcc_archs_readable", "def get_kernel_compile_options(options, precision):\n import numbers\n\n if precision == \"single\":\n literal = \"f\"\n else:\n literal = \"\"\n\n compile_options = []\n for key, value in options.items():\n if value is None:\n compile_options += [\"-D\", \"{0}\".format(key)]\n else:\n if isinstance(value, numbers.Real) and not isinstance(\n value, numbers.Integral\n ):\n value_string = str(value) + literal\n else:\n value_string = str(value)\n compile_options += [\"-D\", \"{0}={1}\".format(key, value_string)]\n\n compile_options += [\"-I\", _INCLUDE_PATH]\n\n # Add precision flag\n\n if precision == \"single\":\n val = 0\n else:\n val = 1\n\n compile_options.append(\"-DPRECISION={0}\".format(val))\n\n return compile_options", "def cpu_options(self) -> pulumi.Output[Optional['outputs.LaunchTemplateCpuOptions']]:\n return pulumi.get(self, \"cpu_options\")", "def getVersionFlags():\r\n return {'CCFLAGS':[ \r\n '-D SZG_MAJOR_VERSION=1',\r\n '-D SZG_MINOR_VERSION=4',\r\n '-D SZG_PATCH_VERSION=0'\r\n ]}", "def device_render_options(self):\n return self.__device_render_options", "def 
get_runtime_options(config: ExperimentConfig):\n xla_options = {}\n if config.runtime.tpu_enable_xla_dynamic_padder is not None:\n xla_options[\"enable_xla_dynamic_padder\"] = (\n config.runtime.tpu_enable_xla_dynamic_padder)\n return tf.distribute.RunOptions(\n experimental_xla_options=tf.tpu.XLAOptions(**xla_options))", "def get_compile_options():\n compile_options = None\n if FLAGS.jax_dump_hlo_graph is not None:\n compile_options = get_xla_client().CompileOptions()\n compile_options.generate_hlo_graph = FLAGS.jax_dump_hlo_graph\n if FLAGS.jax_hlo_profile:\n compile_options = compile_options or get_xla_client().CompileOptions()\n compile_options.hlo_profile = True\n if FLAGS.jax_dump_hlo_unoptimized:\n compile_options = compile_options or get_xla_client().CompileOptions()\n path = _hlo_path(FLAGS.jax_dump_hlo_unoptimized, 'hlo_unoptimized')\n compile_options.dump_unoptimized_hlo_proto_to = path\n if FLAGS.jax_dump_hlo_optimized:\n compile_options = compile_options or get_xla_client().CompileOptions()\n path = _hlo_path(FLAGS.jax_dump_hlo_optimized, 'hlo_optimized')\n compile_options.dump_optimized_hlo_proto_to = path\n if FLAGS.jax_dump_hlo_per_pass:\n compile_options = compile_options or get_xla_client().CompileOptions()\n path = _hlo_path(FLAGS.jax_dump_hlo_per_pass, 'hlo_per_pass')\n compile_options.dump_per_pass_hlo_proto_to = path\n return compile_options", "def get_cuda_info():\n use_cuda = False\n multi_gpu = False\n\n if torch.cuda.is_available() and os.environ['CUDA_VISIBLE_DEVICES'] != \"\":\n gpu_ids = os.environ['CUDA_VISIBLE_DEVICES'].split()\n use_cuda = True\n logging.info('CUDA support is active')\n\n if len(gpu_ids) > 1:\n logging.info('MultiGPU support is active')\n multi_gpu = True\n\n return use_cuda, multi_gpu", "def foptions():\n \n \n opt_vect = np.zeros(18)\n opt_vect[1] = 1e-4\n opt_vect[2] = 1e-4\n opt_vect[3] = 1e-6\n opt_vect[15] = 1e-8\n opt_vect[16] = 0.1\n return opt_vect", "def tvm_callback_cuda_compile(code):\n ptx = nvcc.compile_cuda(code, target=\"ptx\", arch='sm_52') # use old arch for this to work on old GPUs\n return ptx", "def ss_cno_uc2_algorithm(function, mode, nfvi_uuid_list):\n dict_return = {'GPU': [], 'CPU': [], 'nfvi_id': 'ncsrd'}\n\n if function == 'vdetection': # if vdetection then always use GPU if ncsrd is available\n dict_return['GPU'] = ['vdetection']\n\n if nfvi_uuid_list == [] or 'ncsrd' in nfvi_uuid_list:\n dict_return['nfvi_id'] = 'ncsrd'\n else: # ncsrd is not available, then have to use CPU in one of nfvi location\n dict_return['GPU'] = []\n dict_return['CPU'] = ['vdetection'] \n dict_return['nfvi_id'] = nfvi_uuid_list[0]\n\n elif function == 'vspeech_vdetection': # always use GPU if ncsrd is available\n dict_return['GPU'] = ['vspeech', 'vdetection']\n \n if nfvi_uuid_list == [] or 'ncsrd' in nfvi_uuid_list:\n dict_return['nfvi_id'] = 'ncsrd'\n else: # ncsrd is not available, then have to use CPU in one of nfvi location\n dict_return['GPU'] = []\n dict_return['CPU'] = ['vspeech', 'vdetection'] \n dict_return['nfvi_id'] = nfvi_uuid_list[0]\n elif function == 'vspeech': # use GPU in live case, otherwise use CPU\n if 'live' in mode:\n dict_return['GPU'] = ['vspeech']\n\n if nfvi_uuid_list == [] or 'ncsrd' in nfvi_uuid_list:\n dict_return['nfvi_id'] = 'ncsrd'\n else: # ncsrd is not available, then have to use CPU in one of nfvi location\n dict_return['GPU'] = []\n dict_return['CPU'] = ['vspeech']\n dict_return['nfvi_id'] = nfvi_uuid_list[0]\n else: # if recorded case, use CPU\n dict_return['GPU'] = []\n dict_return['CPU'] = 
['vspeech']\n if nfvi_uuid_list == []:\n dict_return['nfvi_id'] = 'tid'\n else:\n dict_return['nfvi_id'] = nfvi_uuid_list[0]\n else: # when function == none\n dict_return['GPU'] = []\n dict_return['CPU'] = []\n if nfvi_uuid_list == []:\n dict_return['nfvi_id'] = 'tid'\n else:\n dict_return['nfvi_id'] = nfvi_uuid_list[0]\n return dict_return", "def generate_params_combination(self):\n # cpu frequency and status\n \n core_status=dict()\n for key in self.big_cores.iterkeys():\n if key==\"core0\":\n core_status[key]=[self.ENABLE]\n else:\n core_status[key]=[self.ENABLE,self.DISABLE]\n core_frequency=self.big_core_freqs[:]\n \n # gpu status\n gpu_status=[self.ENABLE] \n # emmc status \n emc_status=[self.ENABLE] \n # gpu max frequency\n [a,b,c,d]=self.get_indices(self.gpu_freqs)\n gpu_freq=self.gpu_freqs[:] \n #emc max frequency\n \n emc_freq=self.emc_freqs[:]\n \n \"\"\"\n create configurable paramters set before permutation in a varibale named var\n index 0: core0 status\n index 1: core1 status \n index 2: core2 status \n index 3: core3 status \n index 4: core frequency\n index 5,6: gpu status, gpu frequency \n index 7,8: emc status, emc frequency \n \"\"\"\n status_var=[(self.ENABLE,self.DISABLE,self.DISABLE,self.DISABLE),\n (self.ENABLE,self.DISABLE,self.DISABLE,self.ENABLE),\n (self.ENABLE,self.DISABLE,self.ENABLE,self.DISABLE),\n (self.ENABLE,self.DISABLE,self.ENABLE,self.ENABLE),\n (self.ENABLE,self.ENABLE,self.DISABLE,self.DISABLE),\n (self.ENABLE,self.ENABLE,self.DISABLE,self.ENABLE),\n (self.ENABLE,self.ENABLE,self.ENABLE,self.DISABLE),\n (self.ENABLE,self.ENABLE,self.ENABLE,self.ENABLE)\n ]\n var=[\n core_frequency,\n gpu_status, gpu_freq,\n emc_status, emc_freq\n ]\n \n self.params=list(itertools.product(*var))\n self.params=list(itertools.product(status_var,self.params))\n for i in range(len(self.params)):\n self.params[i]=self.params[i][0]+self.params[i][1]", "def gpu_enable():\n return msg(\"GPU: enable\")", "def get_gpu_info():\n cmd = 'nvidia-smi --query-gpu=driver_version,gpu_name --format=csv'\n retcode, result = run_command(cmd)\n\n if retcode != 0:\n logging.error('nvidia-smi did not return as expected:{}'.format(result))\n return {}\n\n lines = result.splitlines()\n gpu_info_line = lines[1]\n if 'Quadro' in gpu_info_line and len(lines) >= 3:\n gpu_info_line = lines[2]\n\n gpu_info = {}\n gpu_info['gpu_driver_version'] = gpu_info_line.split(',')[0].strip()\n gpu_info['gpu_model'] = gpu_info_line.split(',')[1].strip()\n gpu_info['gpu_count'] = len(lines) - 1\n\n return gpu_info", "def usage_short():\n\treturn \"\"\"\nOptions for VC++ backend:\n\n\"\"\"", "def set_gpu():\n if Config.gpu_count == 1:\n os.environ[\"CUDA_VISIBLE_DEVICES\"] = Config.gpu1\n elif Config.gpu_count == 2:\n os.environ[\"CUDA_VISIBLE_DEVICES\"] = Config.gpu1 + ', ' + Config.gpu2\n elif Config.gpu_count == 3:\n os.environ[\"CUDA_VISIBLE_DEVICES\"] = Config.gpu1 + ', ' + Config.gpu2 + ', ' + Config.gpu3\n elif Config.gpu_count == 4:\n os.environ[\"CUDA_VISIBLE_DEVICES\"] = Config.gpu1 + ', ' + Config.gpu2 + ', ' + Config.gpu3 + ', ' + Config.gpu4" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
create Lineups such that there is a 3-way tie amongst the last 3 ranks.
def create_last_place_tie_teams_three_way(self):

    max = 6
    tie_amount = 10.0
    for i in range(1, max + 1):
        user = self.get_user(username=str(i))
        self.fund_user_account(user)

        lineup = Lineup()
        if i <= 3:
            # for 1, 2, 3
            lineup.test_fantasy_points = tie_amount
        else:
            # teams 4, 5, 6 should have unique test_fantasy_points
            lineup.test_fantasy_points = tie_amount + i

        lineup.user = user
        lineup.draft_group = self.draftgroup
        lineup.save()

        bm = BuyinManager(lineup.user)
        bm.buyin(self.contest_pool, lineup)
    Entry.objects.filter(contest_pool=self.contest_pool).update(contest=self.contest)
    self.contest.status = Contest.COMPLETED
    self.contest.save()
[ "def RankPlayers(players):\r\n #Weights:\r\n WIN_PER = 10\r\n AVG_PTS = 4 \r\n AVG_DIFF = 1\r\n TM_WIN_PER = -3\r\n GP = -1\r\n OPP_WIN_PER = 3 \r\n ranks = []\r\n initorder = []\r\n\r\n for i in range(len(players)): #Creating Rank List\r\n ranks.append([players[i][0]])\r\n initorder.append(players[i][0])\r\n players[i][6] = players[i][6] / players[i][3] #Average teammate gp \r\n players[i][8] = players[i][8] / players[i][3] #average opp gp\r\n for _ in range(10): #win %, GP rank, avgPts %, team win %, Teammate GP Rank, opp win %, Opp GP Rank, Wins, Losses, Avg Diff\r\n ranks[i].append(0)\r\n #Easy transfer Data\r\n ranks[i][1] = round(players[i][1]/players[i][3],3)\r\n ranks[i][3] = round(players[i][4]/10,3)\r\n ranks[i][4] = players[i][5]\r\n ranks[i][6] = players[i][7]\r\n ranks[i][8] = players[i][1]\r\n ranks[i][9] = players[i][2]\r\n ranks[i][10] = players[i][9]/10 #Dividing by 10 to get a good multiplier\r\n\r\n #GP rank normalized\r\n players.sort(key=lambda x: x[3], reverse=True) #descending order as to create negative percentile\r\n for i in range(len(players)):\r\n ranks[initorder.index(players[i][0])][2] = round(1/(players[i][3]/players[0][3]),2)\r\n if players[i][3] < 5: #Not enough samples\r\n ranks[initorder.index(players[i][0])].append(10)\r\n elif players[i][3] < 10: #Still not enough samples\r\n ranks[initorder.index(players[i][0])].append(4)\r\n else: #Enough games played\r\n ranks[initorder.index(players[i][0])].append(0)\r\n\r\n #Teammate GP rank normalized\r\n players.sort(key=lambda x: x[6]) \r\n for i in range(len(players)):\r\n ranks[initorder.index(players[i][0])][5] = round((i+1)/len(players),2)\r\n\r\n #opp GP rank normalized\r\n players.sort(key=lambda x: x[8]) #ascending order as to create positive precentile\r\n for i in range(len(players)):\r\n ranks[initorder.index(players[i][0])][7] = round((i+1)/len(players),2)\r\n \r\n for i in range(len(ranks)):\r\n rawscore = ranks[i][1] * WIN_PER + ranks[i][11] * GP + ranks[i][3] * AVG_PTS + ranks[i][4] * TM_WIN_PER + ranks[i][6] * OPP_WIN_PER + ranks[i][10] * AVG_DIFF\r\n ranks[i].append(rawscore)\r\n #THEORETICAL MAX SCORE: 19.5\r\n ranks[i][1] = ranks[i][1] * 100 #Adjusting to readable format\r\n ranks[i][4] = ranks[i][4] * 100\r\n ranks[i][6] = ranks[i][6] * 100\r\n ranks[i][3] = ranks[i][3] * 10\r\n ranks[i][10] = ranks[i][10] * 10\r\n ranks[i][2] = len(ranks) - int(round(ranks[i][2] * len(ranks),0)) \r\n ranks[i][5] = len(ranks) - int(round(ranks[i][5] * len(ranks),0)) + 1\r\n ranks[i][7] = len(ranks) - int(round(ranks[i][7] * len(ranks),0)) + 1\r\n\r\n ranks.sort(key=lambda x: x[2],reverse=True) #Fixing GP Rank\r\n for i in range(len(ranks)):\r\n ranks[i][2] = i + 1\r\n\r\n #Final Ranking\r\n ranks.sort(key=lambda x: x[12],reverse=True) \r\n data={'Name':[i[0] for i in ranks], 'WINS':[i[8] for i in ranks], 'LOSSES':[i[9] for i in ranks],\r\n 'WIN %': [i[1] for i in ranks],'GP Rank':[i[2] for i in ranks],\r\n \"AVG PTS\":[i[3] for i in ranks],\"AVG DIFF\":[i[10] for i in ranks],\r\n \"AVG TM WIN %\":[i[4] for i in ranks],\"AVG TM GP Rank\":[i[5] for i in ranks],\"AVG OPP WIN %\":[i[6] for i in ranks],\"AVG OPP GP Rank\":[i[7] for i in ranks],\r\n \"Ranking Score\":[i[12] for i in ranks]}\r\n #Note: Rankings of GP, TM GP, and OPP GM: 1 means most games played, last means least games played\r\n result=pd.DataFrame(data=data)\r\n result=round(result,4)\r\n result.index += 1\r\n print(result) \r\n\r\n result = result.drop([\"WIN %\", \"GP Rank\", \"AVG TM GP Rank\", \"AVG OPP GP Rank\", \"Ranking Score\"], 
axis=1)\r\n result.to_csv(\"Standings/IndividualRankings.csv\")\r\n\r\n return None", "def formBestLineup(team):\n players = list(map(lambda Batter: Batter.transitionMatrixSimple(), team.batters))\n averagePlayer = team.averagePlayer().transitionMatrixSimple()\n availablePositions = set(range(9))\n bestLineup = [team.averagePlayer()] * 9\n for bestRemaining in range(4):\n worstRemaining = 8 - bestRemaining\n # Expected runs, best placement, worst placement\n bestPerforming = (0, 0, 0)\n for bestPos in availablePositions:\n for worstPos in availablePositions:\n if bestPos != worstPos:\n matrices = [averagePlayer] * 9\n matrices[bestPos] = players[bestRemaining]\n matrices[worstPos] = players[worstRemaining]\n scoreDistribution = simulateMarkovChain(matrices)[:, 216]\n expRuns = 0\n for i in range(21):\n expRuns += i * scoreDistribution[i]\n if expRuns > bestPerforming[0]:\n bestPerforming = (expRuns, bestPos, worstPos)\n availablePositions.remove(bestPerforming[1])\n availablePositions.remove(bestPerforming[2])\n bestLineup[bestPerforming[1]] = team.batters[bestRemaining]\n bestLineup[bestPerforming[2]] = team.batters[worstRemaining]\n bestLineup[availablePositions.pop()] = team.batters[4]\n return bestLineup", "def CreateOpponentStartingLineup(lineup, time, floridaScore, opponentScore):\n global currentOpponentLineup\n global opponentLinups\n currentOpponentLineup = lineup\n opponentLineups.append(lineup)\n opponentLineupNum = opponentLineups.index(lineup)\n floridaLineupNum = 0\n CreateStarterMatchup(floridaLineupNum, opponentLineupNum, time, floridaScore, opponentScore)\n return 0", "def formWorstLineup(team):\n players = list(map(lambda Batter: Batter.transitionMatrixSimple(), team.batters))\n averagePlayer = team.averagePlayer().transitionMatrixSimple()\n availablePositions = set(range(9))\n worstLineup = [team.averagePlayer()] * 9\n for bestRemaining in range(4):\n worstRemaining = 8 - bestRemaining\n # Expected runs, best placement, worst placement\n worstPerforming = (10, 0, 0)\n for bestPos in availablePositions:\n for worstPos in availablePositions:\n if bestPos != worstPos:\n matrices = [averagePlayer] * 9\n matrices[bestPos] = players[bestRemaining]\n matrices[worstPos] = players[worstRemaining]\n scoreDistribution = simulateMarkovChain(matrices)[:, 216]\n expRuns = 0\n for i in range(21):\n expRuns += i * scoreDistribution[i]\n if expRuns < worstPerforming[0]:\n worstPerforming = (expRuns, bestPos, worstPos)\n availablePositions.remove(worstPerforming[1])\n availablePositions.remove(worstPerforming[2])\n worstLineup[worstPerforming[1]] = team.batters[bestRemaining]\n worstLineup[worstPerforming[2]] = team.batters[worstRemaining]\n worstLineup[availablePositions.pop()] = team.batters[4]\n return worstLineup", "def create_rankings():\n\n suit1 = create_suit_deck('H')\n suit2 = create_suit_deck('C')\n\n listing = []\n\n for i in range(0,13):\n #pairs\n global wins\n wins = 0\n sim(suit1[i], suit2[i])\n x = (float(wins) / 5000, str(suit1[i] + ' ' + suit2[i]), 'pair')\n print x\n listing.append(x)\n #offsuit all it\n for j in range(i+1,13):\n global wins\n wins = 0\n sim(suit1[i],suit2[j])\n x= (float(wins)/5000 , str(suit1[i]+' '+ suit2[j]),'Offsuite')\n print x\n listing.append(x)\n #suited\n for k in range(0,i):\n wins = 0\n sim(suit1[i], suit1[k])\n x = (float(wins) / 5000, str(suit1[i] + ' ' + suit1[k]), 'suited')\n print x\n listing.append(x)\n\n df = pd.DataFrame(listing, columns = ('win%','Hand','Suit'))\n df.to_csv('rankings.csv')", "def rank_order():\r\n \r\n numbers = 
[str(i) for i in range(2, 10)] + [\"0\"]\r\n royals = [\"J\", \"Q\", \"K\", \"A\"]\r\n rank_orders = numbers + royals\r\n \r\n return rank_orders", "def test_generate_rankings(self):\n input = \"\"\"Manchester United 3, Chelsea 3\nSwansea City 0, Liverpool 2\nAston Villa 1, Arsenal 2\nChelsea 2, QPR 0\"\"\"\n expected_output = \"\"\"1. Chelsea, 4 pts\n2. Arsenal, 3 pts\n2. Liverpool, 3 pts\n4. Manchester United, 1 pt\n5. Aston Villa, 0 pts\n5. QPR, 0 pts\n5. Swansea City, 0 pts\n\"\"\"\n t = Table()\n for line in input.splitlines(True):\n t.record_result(MatchResult(line))\n output = \"\"\n for line in t.generate_rankings():\n output += line + \"\\n\"\n self.assertMultiLineEqual(expected_output, output)", "def make_set():\n global parent\n parent = [i for i in range(MAX + 5)]\n ranks = [0 for _ in range(MAX + 5)]", "def get_rank_chart(self):\n ranks = [(now(), self.rank)]\n\n for match in self.matches.order_by('-played'):\n if self.user == match.winner:\n ranks.insert(0, (match.played, match.winner_rank))\n else:\n ranks.insert(0, (match.played, match.loser_rank))\n\n if self.user == match.challenger:\n ranks.insert(0, (match.played, match.challenger_rank))\n else:\n ranks.insert(0, (match.played, match.opponent_rank))\n\n chart = pygal.DateTimeLine(\n title='Rank Movements',\n x_label_rotation=35,\n x_title='Date Played',\n y_title='Rank',\n range=(1, UserProfile.objects.filter(active=True).aggregate(max_rank=Max('rank'))['max_rank']),\n inverse_y_axis=True,\n show_legend=False,\n truncate_label=-1,\n x_value_formatter=lambda dt: dt.strftime('%b. %d, %Y, %I:%M %p'),\n style=CleanStyle(\n font_family='googlefont:Raleway',\n ),\n )\n chart.add('', ranks)\n return chart.render_data_uri()", "def sort_n_rank(rows, stat, highest_is_first=True):\n # sort rows\n rows = sorted(rows, key=lambda x: (float(x.get(stat)), float(x.get('k/d')), x.get('player'), x.get('team')), reverse=highest_is_first)\n\n # add rank to each row\n val, rank = 0, 0\n for i, row in enumerate(rows):\n if i == 0 or val != row[stat]:\n row['rank'] = rank = i+1\n val = row[stat]\n else:\n row['rank'] = rank\n return rows", "def _loess_rank(self):\n self.data.loc[(self.data.LOESS_z <= -2), 'LOESS_rank'] = -2\n self.data.loc[(self.data.LOESS_z > -2) &\n (self.data.LOESS_z <= -1), 'LOESS_rank'] = -1\n self.data.loc[(self.data.LOESS_z > -1) &\n (self.data.LOESS_z <= +1), 'LOESS_rank'] = 0\n self.data.loc[(self.data.LOESS_z > +1) &\n (self.data.LOESS_z <= +2), 'LOESS_rank'] = 1\n self.data.loc[(self.data.LOESS_z > +2), 'LOESS_rank'] = 2", "def wind_rose(self, n):\n ct = List(len(self._week_list[n][2].one_way))\n node = self._week_list[n][2].one_way.head()\n while node is not None:\n ct.add(node.item)\n node = node.next\n dg = List(len(self._week_list[n][2].one_way))\n node = self._week_list[n][2].second_way.head()\n while node is not None:\n dg.add(node.item)\n node = node.next\n wr = LinkedList(ct)\n wr.head().next = dg\n return wr", "def TrianglePrior():\n suite = Euro()\n for x in range(0, 51):\n suite.set(x, x)\n for x in range(51, 101):\n suite.set(x, 100 - x)\n suite.normalize()\n return suite", "def __create_lineups_with_fantasy_points(self, contest_pool, lineup_points=[]):\n\n max = contest_pool.entries\n for i in range(1, max + 1):\n # get the user for the lineup\n user = self.get_user(username=str(i))\n self.fund_user_account(user)\n\n # set the rest of the lineup properties\n lineup = Lineup()\n lineup.fantasy_points = lineup_points[i - 1]\n lineup.user = user\n lineup.draft_group = self.draftgroup\n lineup.save()\n\n # 
buy this lineup into the contest\n bm = BuyinManager(lineup.user)\n bm.buyin(self.contest_pool, lineup)\n Entry.objects.filter(contest_pool=self.contest_pool).update(contest=self.contest)\n # set the contest as payout-able\n self.contest.status = Contest.COMPLETED\n self.contest.save()", "def generate_ranking(self, scores):\n ranking = sorted(\n range(self.nplayers),\n key=lambda i: numpy.median(scores[i]))\n return ranking", "def createNewGroupRow(currGroup, nextGroup, pointsToGather, endPoint):\n newRow = {}\n newRow[\"Region No\"] = currGroup[\"Region No\"]\n currPoint = 1\n #Sort out the Group Points\n for currPointIndex in range(len(pointsToGather[0])):\n currGroupPoint = pointsToGather[0][currPointIndex]\n newRow[f\"Point {currPoint + currPointIndex} Orig Rt\"] = currGroup[f\"Point {currGroupPoint} Orig Rt\"]\n newRow[f\"Point {currPoint + currPointIndex} Agent Difference\"] = currGroup[f\"Point {currGroupPoint} Agent Difference\"]\n newRow[f\"Point {currPoint + currPointIndex} Action to Next Point\"] = currGroup[f\"Point {currGroupPoint} Action to Next Point\"]\n \n currPoint += len(pointsToGather[0])\n\n for currPointIndex in range(len(pointsToGather[1])):\n currGroupPoint = pointsToGather[1][currPointIndex]\n newRow[f\"Point {currPoint + currPointIndex} Orig Rt\"] = nextGroup[f\"Point {currGroupPoint} Orig Rt\"]\n newRow[f\"Point {currPoint + currPointIndex} Agent Difference\"] = nextGroup[f\"Point {currGroupPoint} Agent Difference\"]\n newRow[f\"Point {currPoint + currPointIndex} Action to Next Point\"] = nextGroup[f\"Point {currGroupPoint} Action to Next Point\"]\n \n #Sort out the Goal Point and Next Point\n goalPoint = pointsToGather[1][currPointIndex]+1\n\n newRow[f\"Goal Point Orig Rt\"] = nextGroup[f\"Point {goalPoint} Orig Rt\"]\n newRow[f\"Goal Point Agent Difference\"] = nextGroup[f\"Point {goalPoint} Agent Difference\"]\n\n if endPoint:\n newRow[f\"Next Point Rt Orig\"] = nextGroup[\"Goal Point Orig Rt\"]\n newRow[f\"Next Point Agent Difference\"] = nextGroup[\"Goal Point Agent Difference\"]\n newRow[f\"Agent Action to Next Point\"] = nextGroup[f\"Point {goalPoint} Action to Next Point\"]\n else:\n goalPoint += 1\n newRow[f\"Next Point Rt Orig\"] = nextGroup[f\"Point {goalPoint} Orig Rt\"]\n newRow[f\"Next Point Agent Difference\"] = nextGroup[f\"Point {goalPoint} Agent Difference\"]\n newRow[f\"Agent Action to Next Point\"] = nextGroup[f\"Point {goalPoint} Action to Next Point\"]\n\n return newRow", "def createTechLines(self):\n # create tech lines list\n for techID, myTech in self.myTech.iteritems():\n for itemID in myTech.preTechs:\n myTech2 = self.myTech[itemID]\n if myTech.id != myTech2.id:\n x1 = myTech.x\n y1 = myTech.y\n x2 = myTech2.x\n y2 = myTech2.y\n self.techLines.append((x1, y1, x2, y2,myTech.complete))", "def _create_ranks(distances):\n\tranks = np.zeros(distances.shape,'uint64')\n\trow,col = distances.shape\n\t_lib.create_ranks(distances,row,col,ranks)\n\treturn ranks", "def _correct_drawing_for_n_top_qubit_additions(self, n=1):\n self._draw.draw_circuit.correct_drawing_for_n_top_qubit_additions(self, n)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
contest is the contest to associate lineups with. lineup_points is an array of the points to give to the lineups, in creation order.
def __create_lineups_with_fantasy_points(self, contest_pool, lineup_points=[]):

    max = contest_pool.entries
    for i in range(1, max + 1):
        # get the user for the lineup
        user = self.get_user(username=str(i))
        self.fund_user_account(user)

        # set the rest of the lineup properties
        lineup = Lineup()
        lineup.fantasy_points = lineup_points[i - 1]
        lineup.user = user
        lineup.draft_group = self.draftgroup
        lineup.save()

        # buy this lineup into the contest
        bm = BuyinManager(lineup.user)
        bm.buyin(self.contest_pool, lineup)
    Entry.objects.filter(contest_pool=self.contest_pool).update(contest=self.contest)
    # set the contest as payout-able
    self.contest.status = Contest.COMPLETED
    self.contest.save()
[ "def CreateOpponentStartingLineup(lineup, time, floridaScore, opponentScore):\n global currentOpponentLineup\n global opponentLinups\n currentOpponentLineup = lineup\n opponentLineups.append(lineup)\n opponentLineupNum = opponentLineups.index(lineup)\n floridaLineupNum = 0\n CreateStarterMatchup(floridaLineupNum, opponentLineupNum, time, floridaScore, opponentScore)\n return 0", "def __run_payouts(self, lineup_points, lineup_ranks, payout_ranks):\n self.__create_lineups_with_fantasy_points(self.contest_pool, lineup_points=lineup_points)\n pm = PayoutManager()\n pm.payout(finalize_score=False)\n\n # test payout ranks\n payouts = Payout.objects.order_by('contest', '-rank')\n i = 0\n for payout in payouts:\n msg = str(payout), 'rank:%s' % payout.rank, ' should be payout_ranks[%s]:%s' % (\n str(payout.rank), str(payout_ranks[i]))\n logger.info(msg)\n i += 1\n i = 0\n for payout in payouts:\n # print(str(payout), 'rank:%s' % payout.rank, ' should be lineup_rank[%s]:%s' % (str(payout.rank), str(lineup_ranks[i])) )\n self.assertEquals(payout.rank, payout_ranks[i])\n i += 1\n\n # test Entry ranks (each distinct buyin)\n lineups = Lineup.objects.order_by('fantasy_points') # ascending\n i = 0\n for lineup in lineups:\n for entry in Entry.objects.filter(lineup=lineup):\n msg = (' ', str(entry), 'entry.final_rank:', entry.final_rank,\n ' should be entry rank:', lineup_ranks[i])\n logger.info(msg)\n self.assertEquals(entry.final_rank, lineup_ranks[i])\n i += 1\n\n self.validate_side_effects_of_transaction()", "def snapLineToPoints(self, pointshp, lineshp, outshpdir):\n pass", "def assign_points_line_plus_plus(points, starts, goals, v_max):\n routes = []\n points = points.copy()\n distance_limit = 3\n for i in range(len(starts)):\n routes.append([starts[i], goals[i]])\n counter = 0\n\n lonely_agents = []\n while len(lonely_agents) < len(starts):\n for agent_index in range(len(routes)):\n if agent_index in lonely_agents:\n continue\n agent_route = routes[agent_index]\n #print(agent_route)\n close_poi = None\n min_distance = float(\"infinity\")\n point_index = 0\n for p_index in range(len(points)):\n for line_point_ind in range(len(agent_route)-1):\n line = find_line_eq(agent_route[line_point_ind].xy, agent_route[line_point_ind + 1].xy)\n distance, close_point = dist_point_to_line(points[p_index].xy, line)\n\n if not (starts[agent_index].xy[1] < close_point[1] < goals[agent_index].xy[1] or goals[agent_index].xy[1] < close_point[1] < starts[agent_index].xy[1]):\n distance = min(calc_distance(points[p_index].xy, starts[agent_index].xy), calc_distance(points[p_index].xy, goals[agent_index].xy))\n\n if distance < min_distance:\n min_distance = distance\n close_poi = points[p_index]\n point_index = line_point_ind\n if min_distance < distance_limit:\n routes[agent_index].insert(point_index+1, close_poi)\n points.remove(close_poi)\n else:\n lonely_agents.append(agent_index)\n print(\"appending agent: \", agent_index)\n\n print(\"points left: \", len(points))\n\n if len(points) > 0:\n for i in range(len(points)):\n min_distance = float(\"infinity\")\n min_index = -1\n min_point_index = -1\n for j in range(len(starts)):\n point_list = routes[j]\n for p in range(len(point_list) - 1):\n line = find_line_eq(point_list[p].xy, point_list[p + 1].xy)\n distance, close_poi = dist_point_to_line(points[i].xy, line)\n if not (starts[j].xy[1] < close_poi[1] < goals[j].xy[1] or goals[j].xy[1] < close_poi[1] <\n starts[j].xy[1]):\n distance = min(calc_distance(points[i].xy, starts[j].xy),\n calc_distance(points[i].xy, 
goals[j].xy))\n counter += 1\n if distance < min_distance:\n min_distance = distance\n min_index = j\n min_point_index = p\n\n routes[min_index].insert(min_point_index + 1, points[i])\n\n state = State([], v_max)\n print(counter)\n for i in range(len(routes)):\n route = routes[i]\n route_object = Route(route[0], route[-1], route[1:-1])\n state.add_route(route_object)\n\n return state", "def mlblineup(self, optteam):\n \n # test for valid teams.\n optteam = self._validteams(optteam)\n if not optteam: # team is not found in aliases or validteams.\n #print(\"ERROR: Team not found. Valid teams are: {0}\".format(self._allteams()))\n return\n # create url and fetch lineup page.\n url = self._b64decode('aHR0cDovL2Jhc2ViYWxscHJlc3MuY29tL2xpbmV1cF90ZWFtLnBocD90ZWFtPQ==') + optteam\n print \"url=\", url\n html = self._httpget(url)\n if not html:\n self.log.error(\"ERROR opening {0}\".format(url))\n return\n # sanity check.\n if 'No game today' in html:\n print(\"ERROR: No game today for {0}\".format(optteam))\n return\n # process html. this is kinda icky.\n soup = BeautifulSoup(html, convertEntities=BeautifulSoup.HTML_ENTITIES, fromEncoding='utf-8')\n div = soup.find('div', attrs={'class':'team-lineup highlight'})\n divs = div.findAll('div') \n # 20140330 - had to fix this again.\n gmdate = divs[1].getText() # date of game.\n seconddiv = None\n otherpitcher = ''\n if len(divs) > 3:\n seconddiv = divs[3] # opp pitcher.\n otherpitcher = seconddiv.getText() # opp pitcher and team.\n lineup = div.find('div', attrs={'class':'game-lineup'})\n # sanity check.\n if \"No lineup yet\" in lineup.getText():\n gmdate = 'No Lineup'\n else: # div is a collection of divs, each div = person in lineup.\n lineup = lineup.findAll('div')\n lineup = [i.getText(separator=' ').encode('utf-8') for i in lineup]\n # output.\n record = {}\n record['team'] = optteam\n record['gmdate'] = gmdate\n record['otherPitcher'] = otherpitcher\n record['lineup'] = []\n #print record\n #print lineup\n if record['gmdate'] in ['No Game', 'No Lineup']:\n return record\n for item in lineup:\n player = {}\n item=item.rstrip()\n print \"item=\", item, \"x\"\n if item == '':\n continue\n # 8. Ryan Flaherty (L) 2B\n p = re.compile(\" ?(\\d)\\. ((\\w+ )+) \\(.\\) (\\w+)\")\n p = re.compile(\" ?(\\d)\\. 
([\\w ]+) \\((.)\\) (\\w+)\")\n m = p.match(item)\n if m:\n player['batting_order'] = m.group(1)\n player['name'] = m.group(2)\n player['bats_lr'] = m.group(3)\n player['position'] = m.group(4)\n record['lineup'].append(player)\n return record", "def optimize_lineup_from_bench(self):\n if len(self.bench) == 0:\n return\n # Helper to filter the players\n def is_included(plyr):\n if self.ignore_status:\n return plyr['status'] == '' or not plyr['status'].startswith('IL')\n return plyr['status'] == ''\n\n ppool = pd.DataFrame(\n data=[e for e in self.bench if is_included(e)], columns=self.bench[0].index)\n ldf = pd.DataFrame(\n data=[e for e in self.lineup if is_included(e)], columns=self.lineup[0].index)\n ppool = pd.concat([ppool, ldf], ignore_index=True, sort=False)\n optimizer_func = self._get_lineup_optimizer_function()\n new_lineup = optimizer_func(self.cfg, self.score_comparer,\n self.my_team_bldr, ppool, [])\n if new_lineup:\n self._set_new_lineup_and_bench(new_lineup.get_roster(), [])", "def extractLineup(self) -> None:\r\n for link in self._extracted_links:\r\n self.root_url = \"https://int.soccerway.com{}\".format(link)\r\n self.setSoup()\r\n if self.setLineUpTables(): #any error during locating the Lineup, the page is assumed to be invalid\r\n continue\r\n tables = self._line_up_tables\r\n current_match = []\r\n\r\n for team in tables:\r\n current_team = []\r\n for starting_player in team.findAll('tr')[:11]: #Gets first 11 instances of <tr> table row tag\r\n for player_detail in starting_player.findAll('td'): #Iterates through each Player attribute\r\n try:\r\n if player_detail['class'] == ['player', 'large-link']:\r\n player_name = player_detail.a.get_text() #Extracts player name from <a> tag\r\n current_team.append(player_name)\r\n except KeyError as e:\r\n print(\"player detail not found\", e)\r\n continue\r\n current_match.append(current_team)\r\n\r\n match_info = tuple(self.collectClubsResult()) #converts List to tuple of match info, to be a Dict Key\r\n self._extracted_matches[match_info] = current_match #Stores in Dict {Tuple(MATCH INFO) : List of player names}\r", "def drawPredPoints(self, srcImg):\r\n\r\n points = self._allLanePoints\r\n #print(\"points: \", points.shape)\r\n color_map = self.colorMap()\r\n decodeImage = srcImg.copy()\r\n for c_index in range(0, self._regionNum): # dont not handle background\r\n sigleChannel = points[c_index]\r\n pNum = len(sigleChannel)\r\n #print(\"c_index--pNum\", c_index, pNum)\r\n for p_index in range(0, pNum):\r\n x, y = sigleChannel[p_index] \r\n laneType = self._laneList[c_index]\r\n cv2.circle(decodeImage, (int(x), int(y)), 2, color_map[laneType], 2)\r\n \r\n \"\"\"\r\n if not len(self._all_ext_points) == 0:\r\n ext_x, ext_y = self._all_ext_points[c_index]\r\n cv2.circle(decodeImage, (ext_x, ext_y), 4, (255, 245, 152), 4)\r\n \"\"\"\r\n cv2.imwrite(self._path + self._imgName + \"_predPoints.jpg\", decodeImage)", "def test_processed_points_calculation(self):\n\n assert self.test_shape.processed_points == [\n (49.937460888595446, 2.5, \"circle\"),\n (43.300748759659555, 25.000903120744287, \"circle\"),\n (27.1320420790315, 41.99824154201773, \"straight\"),\n (77.154447582418, 128.6358861991937, \"circle\"),\n (129.90375269002172, 75.00010024693078, \"circle\"),\n (149.97916521970643, 2.5, \"straight\"),\n (49.937460888595446, 2.5, \"circle\"),\n ]", "def optimize_lineup_from_projections(espn: BaseballApi, hitter_projections: Dict[str, Stats], notifier: Notifier):\n lineup = espn.lineup()\n LOGGER.info(f\"Current lineup: 
{lineup}\")\n l_settings = espn.lineup_settings()\n s_settings = espn.scoring_settings()\n hitting_settings = filter(\n lambda s: s.stat\n not in {\n BaseballStat.K,\n BaseballStat.W,\n BaseballStat.ERA,\n BaseballStat.WHIP,\n BaseballStat.SV,\n },\n s_settings,\n )\n\n possibles = possible_lineup_totals(lineup, l_settings, hitter_projections)\n best_pa = best_for_stat(lineup, possibles, ScoringSetting(BaseballStat.PA, False, 0.0))\n threshold = best_pa.stats.value_for_stat(BaseballStat.PA) * 0.95\n candidates = above_threshold_for_stat(\n possibles, ScoringSetting(BaseballStat.PA, False, 0.0), threshold\n )\n\n num_candidates = len(candidates)\n LOGGER.info(\n f\"found {num_candidates} candidates within 95% of max PA's (above threshold {threshold})\"\n )\n best_list = best_lineups(lineup, candidates, hitting_settings)\n most_pas_from_best = best_for_stat(\n lineup, best_list, ScoringSetting(BaseballStat.PA, False, 0.0)\n )\n LOGGER.info(f\"Using lineup {most_pas_from_best.lineup}\")\n\n hitting_transitions = lineup.transitions(most_pas_from_best.lineup)\n pitching_transitions = optimal_pitching_transitions(lineup, espn)\n notifier.notify_set_lineup(\n espn.team_name(),\n most_pas_from_best,\n hitting_transitions + pitching_transitions,\n s_settings,\n )\n if len(hitting_transitions + pitching_transitions) == 0:\n LOGGER.info(\"no transitions to execute\")\n else:\n espn.execute_transitions(hitting_transitions + pitching_transitions)", "def addLineGaugePoints(self, line, line_segments):\n points = self.points\n new_points = {}\n field, endpoints = line\n comm = Comm.get().comm.tompi4py()\n\n def addPoint(points, field, point):\n point = tuple(point)\n if point in points:\n if self.isLineIntegralGauge:\n no_output = points[point]['no_output'] if 'no_output' in points[point] else set()\n points[point]['no_output'] = no_output.union(set((field,)) - points[point]['fields'])\n points[point]['fields'].update((field,))\n else:\n ignore1, nearestNode, ignore2 = self.getLocalNearestNode(point)\n if self.isLineIntegralGauge:\n points[point] = {'fields':set((field,)), 'no_output': set((field,)),\n 'nearest_node': nearestNode,\n 'owning_proc': comm.rank}\n else:\n points[point] = {'fields':set((field,)),\n 'nearest_node': nearestNode,\n 'owning_proc': comm.rank}\n new_points[point] = points[point]\n\n for segment in line_segments:\n logEvent(\"Processing segment [ %e %e %e ] to [ %e %e %e ]\" % (\n segment[0][0], segment[0][1], segment[0][2],\n segment[1][0], segment[1][1], segment[1][2]), 5)\n startPoint, endPoint = segment\n # only add both sides of segment to line integral gauges and first segment\n if self.isLineIntegralGauge or all(startPoint == endpoints[0]):\n addPoint(points, field, startPoint)\n addPoint(points, field, endPoint)\n\n if self.isLineGauge:\n new_points = comm.gather(new_points)\n if comm.rank == 0:\n for new_points_i in new_points:\n points.update(new_points_i)\n # resort points\n points = OrderedDict(sorted(points.items()))\n self.points = comm.bcast(points)", "def assign_points_line(points, starts, goals, v_max):\n routes = []\n\n for i in range(len(starts)):\n routes.append([])\n\n for i in range(len(points)):\n min_distance = float(\"infinity\")\n min_index = -1\n\n for j in range(len(starts)):\n line = find_line_eq(starts[j].xy, goals[j].xy)\n distance, close_point = dist_point_to_line(points[i].xy, line)\n if not (starts[j].xy[1] < close_point[1] < goals[j].xy[1] or goals[j].xy[1] < close_point[1] < starts[j].xy[1]):\n distance = min(calc_distance(points[i].xy, 
starts[j].xy), calc_distance(points[i].xy, goals[j].xy))\n\n if distance < min_distance:\n min_distance = distance\n min_index = j\n\n routes[min_index].append(points[i])\n state = State([], v_max)\n\n for i in range(len(routes)):\n route = routes[i]\n route_object = Route(starts[i], goals[i], route)\n state.add_route(route_object)\n\n return state", "def create_last_place_tie_teams_three_way(self):\n\n max = 6\n tie_amount = 10.0\n for i in range(1, max + 1):\n user = self.get_user(username=str(i))\n self.fund_user_account(user)\n\n lineup = Lineup()\n if i <= 3:\n # for 1, 2, 3\n lineup.test_fantasy_points = tie_amount\n else:\n # teams 4, 5, 6 should have unique test_fantasy_points\n lineup.test_fantasy_points = tie_amount + i\n\n lineup.user = user\n lineup.draft_group = self.draftgroup\n lineup.save()\n\n bm = BuyinManager(lineup.user)\n bm.buyin(self.contest_pool, lineup)\n Entry.objects.filter(contest_pool=self.contest_pool).update(contest=self.contest)\n self.contest.status = Contest.COMPLETED\n self.contest.save()", "def triangulate(self):\n # pre-condition: we should have at least 3 points\n i=0\n lista_t=[]\n assert len(self.points) > 2\n #print self.points[1].x\n gen=group3(len(self.points))\n # print range(math.factorial(len(self.points))/(math.factorial(3)*math.factorial(len(self.points)-3)))\n #print math.factorial(3)\n #print math.factorial(len(self.points)-3)\n for ite in range(math.factorial(len(self.points))/(math.factorial(3)*math.factorial(len(self.points)-3))):\n pos1,pos2,pos3=next(gen)\n #temp=[(self.points[pos1],(self.points[pos2].x,self.points[pos2].y),(self.points[pos3].x,self.points[pos3].y)]\n t=Triangle(self.points[pos1],self.points[pos2],self.points[pos3])\n if not self.are_collinear(self.points[pos1],self.points[pos2],self.points[pos3]):\n #print 'hello'\n # print self.points[pos1],self.points[pos2],self.points[pos3]\n # print self.points[1]\n # print t,pos1,pos2,pos3\n if self.is_delaunay(t):\n #print t\n self.triangles.append(t)\n #print lista_t \n \n # Your implementation here", "def testLeadersInRound(self):\n # Test one user\n profile = self.users[0].get_profile()\n profile.add_points(10,\n datetime.datetime.today() - datetime.timedelta(minutes=1), \"Test\")\n profile.save()\n\n self.assertEqual(\n player_mgr.points_leader(round_name=self.current_round), profile,\n \"Current leader is not the leading user.\")\n\n # Have another user move ahead in points\n profile2 = self.users[1].get_profile()\n profile2.add_points(profile.points() + 1, datetime.datetime.today(),\n \"Test\")\n profile2.save()\n\n self.assertEqual(\n player_mgr.points_leader(round_name=self.current_round), profile2,\n \"User 2 should be the leading profile.\")\n\n # Have this user get the same amount of points,\n # but an earlier award date.\n profile3 = self.users[2].get_profile()\n profile3.add_points(profile2.points(),\n datetime.datetime.today() - datetime.timedelta(minutes=1), \"Test\")\n profile3.save()\n\n self.assertEqual(\n player_mgr.points_leader(round_name=self.current_round), profile2,\n \"User 2 should still be the leading profile.\")", "def sum_bench_points(league: League, lineup: list) -> float:\n return np.sum([player.points for player in lineup if player.slot_position == \"BE\"])", "def get_punch_lines_data():\n punch_lines = []\n for joke in jokes:\n punch_lines.append('{0} {1}'.format(joke[1], joke[2]))\n return punch_lines", "def ugb_line_etl():\r\n with arcetl.ArcETL(\"UGB Lines\") as etl:\r\n etl.extract(dataset.UGB.path(\"pub\"))\r\n 
etl.transform(arcetl.convert.polygons_to_lines, topological=True)\r\n etl.load(dataset.UGB_LINE.path())", "def _save_line_image(image: np.ndarray, line_pts: np.ndarray, save_prefix: os.path):\n save_image = copy.deepcopy(image)\n for (x1, y1, x2, y2) in line_pts:\n save_image = cv2.line(save_image, (x1, y1),\n (x2, y2), (255, 0, 0), thickness=2)\n save_image_file = save_prefix + '.jpg'\n cv2.imwrite(save_image_file, save_image)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
helper method that a) creates lineups with the points in 'lineup_points', b) does payouts, c) ensures all the ranks are set as expected based on the ranks in lineup_ranks and payout_ranks
def __run_payouts(self, lineup_points, lineup_ranks, payout_ranks):

    self.__create_lineups_with_fantasy_points(self.contest_pool, lineup_points=lineup_points)
    pm = PayoutManager()
    pm.payout(finalize_score=False)

    # test payout ranks
    payouts = Payout.objects.order_by('contest', '-rank')
    i = 0
    for payout in payouts:
        msg = str(payout), 'rank:%s' % payout.rank, ' should be payout_ranks[%s]:%s' % (
            str(payout.rank), str(payout_ranks[i]))
        logger.info(msg)
        i += 1
    i = 0
    for payout in payouts:
        # print(str(payout), 'rank:%s' % payout.rank, ' should be lineup_rank[%s]:%s' % (str(payout.rank), str(lineup_ranks[i])) )
        self.assertEquals(payout.rank, payout_ranks[i])
        i += 1

    # test Entry ranks (each distinct buyin)
    lineups = Lineup.objects.order_by('fantasy_points')  # ascending
    i = 0
    for lineup in lineups:
        for entry in Entry.objects.filter(lineup=lineup):
            msg = (' ', str(entry), 'entry.final_rank:', entry.final_rank,
                   ' should be entry rank:', lineup_ranks[i])
            logger.info(msg)
            self.assertEquals(entry.final_rank, lineup_ranks[i])
        i += 1

    self.validate_side_effects_of_transaction()
[ "def __create_lineups_with_fantasy_points(self, contest_pool, lineup_points=[]):\n\n max = contest_pool.entries\n for i in range(1, max + 1):\n # get the user for the lineup\n user = self.get_user(username=str(i))\n self.fund_user_account(user)\n\n # set the rest of the lineup properties\n lineup = Lineup()\n lineup.fantasy_points = lineup_points[i - 1]\n lineup.user = user\n lineup.draft_group = self.draftgroup\n lineup.save()\n\n # buy this lineup into the contest\n bm = BuyinManager(lineup.user)\n bm.buyin(self.contest_pool, lineup)\n Entry.objects.filter(contest_pool=self.contest_pool).update(contest=self.contest)\n # set the contest as payout-able\n self.contest.status = Contest.COMPLETED\n self.contest.save()", "def testTeamRankWithPoints(self):\n user = User(username=\"test_user\", password=\"changeme\")\n user.save()\n group = Group(name=\"Test group\")\n group.save()\n team = Team(name=\"A\", group=group)\n team.save()\n\n profile = user.get_profile()\n profile.team = team\n\n # Check that the user is ranked last if they haven't done anything.\n rank = 1\n self.assertEqual(profile.team_rank(), rank,\n \"Check that the user is ranked last.\")\n\n # Make the user number 1 overall.\n top_user = Profile.objects.all()[0]\n profile.add_points(top_user.points() + 1, datetime.datetime.today(),\n \"Test\")\n profile.save()\n\n self.assertEqual(profile.team_rank(), 1,\n \"Check that the user is number 1.\")\n\n user2 = User(username=\"test_user2\", password=\"changeme\")\n user2.save()\n\n profile2 = user2.get_profile()\n profile2.add_points(profile.points() + 1, datetime.datetime.today(),\n \"Test\")\n profile2.save()\n\n self.assertEqual(profile.team_rank(), 1,\n \"Check that the user is still number 1 if the new \"\n \"profile is not on the same team.\")\n\n profile2.team = team\n profile2.save()\n\n self.assertEqual(profile.team_rank(), 2,\n \"Check that the user is now rank 2.\")", "def CreateOpponentStartingLineup(lineup, time, floridaScore, opponentScore):\n global currentOpponentLineup\n global opponentLinups\n currentOpponentLineup = lineup\n opponentLineups.append(lineup)\n opponentLineupNum = opponentLineups.index(lineup)\n floridaLineupNum = 0\n CreateStarterMatchup(floridaLineupNum, opponentLineupNum, time, floridaScore, opponentScore)\n return 0", "def possible_lineups(self, lineup_settings, slots_to_fill):\n\n # don't use injured players to build possible lineups\n possible_starters = list(\n filter(\n lambda p: p not in self.injured(),\n self.players()\n )\n )\n\n initial_node = LineupSearchNode(\n Lineup(dict(), self.slot_enum), possible_starters, slots_to_fill\n )\n # stack of nodes representing the frontier of the search graph\n frontier = [initial_node]\n max_starters = lineup_settings.total_for_slots(slots_to_fill)\n\n all_starters = dict()\n total_proc = 0\n max_stack = 0\n total_stack = 0\n\n start_time = time.time()\n LOGGER.info(\"generating all possible lineups\")\n\n while len(frontier) != 0:\n total_stack += len(frontier)\n node = frontier.pop(0)\n successors = node.successors(lineup_settings)\n total_proc += 1\n if total_proc % 1000 == 0:\n LOGGER.debug(f\"processed {total_proc}\")\n max_stack = max(max_stack, len(frontier))\n for successor in successors:\n starters = successor.lineup.starters()\n if len(starters) == max_starters:\n self.add_lineup_to_unique_starters(successor.lineup, all_starters)\n elif not successor.all_slots_filled():\n frontier.insert(0, successor)\n end_time = time.time()\n info_dict = {\n \"starters\": len(all_starters),\n \"total\": 
total_proc,\n \"max_stack\": max_stack,\n \"avg_stack\": total_stack / float(total_proc),\n \"elapsed\": end_time - start_time,\n }\n LOGGER.info(\n \"possible starting combos: %(starters)d / %(total)d lineups,\"\n \" max stack: %(max_stack)d, avg stack: %(avg_stack).3f, time: %(elapsed).3fs\",\n info_dict,\n )\n return all_starters.values()", "def testOverallRankWithPoints(self):\n user = User(username=\"test_user\", password=\"changeme\")\n user.save()\n profile = user.get_profile()\n\n # Check if the rank works if the user has done nothing.\n rank = 1\n self.assertEqual(profile.overall_rank(), rank,\n \"Check that the user is at least tied for last.\")\n\n # Make the user ranked 1st.\n top_user = Profile.objects.all()[0]\n profile.add_points(top_user.points() + 1, datetime.datetime.today(),\n \"Test\")\n profile.save()\n\n self.assertEqual(profile.overall_rank(), 1,\n \"Check that the user is number 1.\")\n\n user2 = User(username=\"test_user2\", password=\"changeme\")\n user2.save()\n\n profile2 = user2.get_profile()\n profile2.add_points(profile.points() + 1, datetime.datetime.today(),\n \"Test\")\n profile2.save()\n\n self.assertEqual(profile.overall_rank(), 2,\n \"Check that the user is now rank 2.\")", "def create_rankings():\n\n suit1 = create_suit_deck('H')\n suit2 = create_suit_deck('C')\n\n listing = []\n\n for i in range(0,13):\n #pairs\n global wins\n wins = 0\n sim(suit1[i], suit2[i])\n x = (float(wins) / 5000, str(suit1[i] + ' ' + suit2[i]), 'pair')\n print x\n listing.append(x)\n #offsuit all it\n for j in range(i+1,13):\n global wins\n wins = 0\n sim(suit1[i],suit2[j])\n x= (float(wins)/5000 , str(suit1[i]+' '+ suit2[j]),'Offsuite')\n print x\n listing.append(x)\n #suited\n for k in range(0,i):\n wins = 0\n sim(suit1[i], suit1[k])\n x = (float(wins) / 5000, str(suit1[i] + ' ' + suit1[k]), 'suited')\n print x\n listing.append(x)\n\n df = pd.DataFrame(listing, columns = ('win%','Hand','Suit'))\n df.to_csv('rankings.csv')", "def optimize_lineup_from_projections(espn: BaseballApi, hitter_projections: Dict[str, Stats], notifier: Notifier):\n lineup = espn.lineup()\n LOGGER.info(f\"Current lineup: {lineup}\")\n l_settings = espn.lineup_settings()\n s_settings = espn.scoring_settings()\n hitting_settings = filter(\n lambda s: s.stat\n not in {\n BaseballStat.K,\n BaseballStat.W,\n BaseballStat.ERA,\n BaseballStat.WHIP,\n BaseballStat.SV,\n },\n s_settings,\n )\n\n possibles = possible_lineup_totals(lineup, l_settings, hitter_projections)\n best_pa = best_for_stat(lineup, possibles, ScoringSetting(BaseballStat.PA, False, 0.0))\n threshold = best_pa.stats.value_for_stat(BaseballStat.PA) * 0.95\n candidates = above_threshold_for_stat(\n possibles, ScoringSetting(BaseballStat.PA, False, 0.0), threshold\n )\n\n num_candidates = len(candidates)\n LOGGER.info(\n f\"found {num_candidates} candidates within 95% of max PA's (above threshold {threshold})\"\n )\n best_list = best_lineups(lineup, candidates, hitting_settings)\n most_pas_from_best = best_for_stat(\n lineup, best_list, ScoringSetting(BaseballStat.PA, False, 0.0)\n )\n LOGGER.info(f\"Using lineup {most_pas_from_best.lineup}\")\n\n hitting_transitions = lineup.transitions(most_pas_from_best.lineup)\n pitching_transitions = optimal_pitching_transitions(lineup, espn)\n notifier.notify_set_lineup(\n espn.team_name(),\n most_pas_from_best,\n hitting_transitions + pitching_transitions,\n s_settings,\n )\n if len(hitting_transitions + pitching_transitions) == 0:\n LOGGER.info(\"no transitions to execute\")\n else:\n 
espn.execute_transitions(hitting_transitions + pitching_transitions)", "def testLeadersInRound(self):\n # Test one user\n profile = self.users[0].get_profile()\n profile.add_points(10,\n datetime.datetime.today() - datetime.timedelta(minutes=1), \"Test\")\n profile.save()\n\n self.assertEqual(\n player_mgr.points_leader(round_name=self.current_round), profile,\n \"Current leader is not the leading user.\")\n\n # Have another user move ahead in points\n profile2 = self.users[1].get_profile()\n profile2.add_points(profile.points() + 1, datetime.datetime.today(),\n \"Test\")\n profile2.save()\n\n self.assertEqual(\n player_mgr.points_leader(round_name=self.current_round), profile2,\n \"User 2 should be the leading profile.\")\n\n # Have this user get the same amount of points,\n # but an earlier award date.\n profile3 = self.users[2].get_profile()\n profile3.add_points(profile2.points(),\n datetime.datetime.today() - datetime.timedelta(minutes=1), \"Test\")\n profile3.save()\n\n self.assertEqual(\n player_mgr.points_leader(round_name=self.current_round), profile2,\n \"User 2 should still be the leading profile.\")", "def RankPlayers(players):\r\n #Weights:\r\n WIN_PER = 10\r\n AVG_PTS = 4 \r\n AVG_DIFF = 1\r\n TM_WIN_PER = -3\r\n GP = -1\r\n OPP_WIN_PER = 3 \r\n ranks = []\r\n initorder = []\r\n\r\n for i in range(len(players)): #Creating Rank List\r\n ranks.append([players[i][0]])\r\n initorder.append(players[i][0])\r\n players[i][6] = players[i][6] / players[i][3] #Average teammate gp \r\n players[i][8] = players[i][8] / players[i][3] #average opp gp\r\n for _ in range(10): #win %, GP rank, avgPts %, team win %, Teammate GP Rank, opp win %, Opp GP Rank, Wins, Losses, Avg Diff\r\n ranks[i].append(0)\r\n #Easy transfer Data\r\n ranks[i][1] = round(players[i][1]/players[i][3],3)\r\n ranks[i][3] = round(players[i][4]/10,3)\r\n ranks[i][4] = players[i][5]\r\n ranks[i][6] = players[i][7]\r\n ranks[i][8] = players[i][1]\r\n ranks[i][9] = players[i][2]\r\n ranks[i][10] = players[i][9]/10 #Dividing by 10 to get a good multiplier\r\n\r\n #GP rank normalized\r\n players.sort(key=lambda x: x[3], reverse=True) #descending order as to create negative percentile\r\n for i in range(len(players)):\r\n ranks[initorder.index(players[i][0])][2] = round(1/(players[i][3]/players[0][3]),2)\r\n if players[i][3] < 5: #Not enough samples\r\n ranks[initorder.index(players[i][0])].append(10)\r\n elif players[i][3] < 10: #Still not enough samples\r\n ranks[initorder.index(players[i][0])].append(4)\r\n else: #Enough games played\r\n ranks[initorder.index(players[i][0])].append(0)\r\n\r\n #Teammate GP rank normalized\r\n players.sort(key=lambda x: x[6]) \r\n for i in range(len(players)):\r\n ranks[initorder.index(players[i][0])][5] = round((i+1)/len(players),2)\r\n\r\n #opp GP rank normalized\r\n players.sort(key=lambda x: x[8]) #ascending order as to create positive precentile\r\n for i in range(len(players)):\r\n ranks[initorder.index(players[i][0])][7] = round((i+1)/len(players),2)\r\n \r\n for i in range(len(ranks)):\r\n rawscore = ranks[i][1] * WIN_PER + ranks[i][11] * GP + ranks[i][3] * AVG_PTS + ranks[i][4] * TM_WIN_PER + ranks[i][6] * OPP_WIN_PER + ranks[i][10] * AVG_DIFF\r\n ranks[i].append(rawscore)\r\n #THEORETICAL MAX SCORE: 19.5\r\n ranks[i][1] = ranks[i][1] * 100 #Adjusting to readable format\r\n ranks[i][4] = ranks[i][4] * 100\r\n ranks[i][6] = ranks[i][6] * 100\r\n ranks[i][3] = ranks[i][3] * 10\r\n ranks[i][10] = ranks[i][10] * 10\r\n ranks[i][2] = len(ranks) - int(round(ranks[i][2] * len(ranks),0)) \r\n 
ranks[i][5] = len(ranks) - int(round(ranks[i][5] * len(ranks),0)) + 1\r\n ranks[i][7] = len(ranks) - int(round(ranks[i][7] * len(ranks),0)) + 1\r\n\r\n ranks.sort(key=lambda x: x[2],reverse=True) #Fixing GP Rank\r\n for i in range(len(ranks)):\r\n ranks[i][2] = i + 1\r\n\r\n #Final Ranking\r\n ranks.sort(key=lambda x: x[12],reverse=True) \r\n data={'Name':[i[0] for i in ranks], 'WINS':[i[8] for i in ranks], 'LOSSES':[i[9] for i in ranks],\r\n 'WIN %': [i[1] for i in ranks],'GP Rank':[i[2] for i in ranks],\r\n \"AVG PTS\":[i[3] for i in ranks],\"AVG DIFF\":[i[10] for i in ranks],\r\n \"AVG TM WIN %\":[i[4] for i in ranks],\"AVG TM GP Rank\":[i[5] for i in ranks],\"AVG OPP WIN %\":[i[6] for i in ranks],\"AVG OPP GP Rank\":[i[7] for i in ranks],\r\n \"Ranking Score\":[i[12] for i in ranks]}\r\n #Note: Rankings of GP, TM GP, and OPP GM: 1 means most games played, last means least games played\r\n result=pd.DataFrame(data=data)\r\n result=round(result,4)\r\n result.index += 1\r\n print(result) \r\n\r\n result = result.drop([\"WIN %\", \"GP Rank\", \"AVG TM GP Rank\", \"AVG OPP GP Rank\", \"Ranking Score\"], axis=1)\r\n result.to_csv(\"Standings/IndividualRankings.csv\")\r\n\r\n return None", "def snapLineToPoints(self, pointshp, lineshp, outshpdir):\n pass", "def mlblineup(self, optteam):\n \n # test for valid teams.\n optteam = self._validteams(optteam)\n if not optteam: # team is not found in aliases or validteams.\n #print(\"ERROR: Team not found. Valid teams are: {0}\".format(self._allteams()))\n return\n # create url and fetch lineup page.\n url = self._b64decode('aHR0cDovL2Jhc2ViYWxscHJlc3MuY29tL2xpbmV1cF90ZWFtLnBocD90ZWFtPQ==') + optteam\n print \"url=\", url\n html = self._httpget(url)\n if not html:\n self.log.error(\"ERROR opening {0}\".format(url))\n return\n # sanity check.\n if 'No game today' in html:\n print(\"ERROR: No game today for {0}\".format(optteam))\n return\n # process html. this is kinda icky.\n soup = BeautifulSoup(html, convertEntities=BeautifulSoup.HTML_ENTITIES, fromEncoding='utf-8')\n div = soup.find('div', attrs={'class':'team-lineup highlight'})\n divs = div.findAll('div') \n # 20140330 - had to fix this again.\n gmdate = divs[1].getText() # date of game.\n seconddiv = None\n otherpitcher = ''\n if len(divs) > 3:\n seconddiv = divs[3] # opp pitcher.\n otherpitcher = seconddiv.getText() # opp pitcher and team.\n lineup = div.find('div', attrs={'class':'game-lineup'})\n # sanity check.\n if \"No lineup yet\" in lineup.getText():\n gmdate = 'No Lineup'\n else: # div is a collection of divs, each div = person in lineup.\n lineup = lineup.findAll('div')\n lineup = [i.getText(separator=' ').encode('utf-8') for i in lineup]\n # output.\n record = {}\n record['team'] = optteam\n record['gmdate'] = gmdate\n record['otherPitcher'] = otherpitcher\n record['lineup'] = []\n #print record\n #print lineup\n if record['gmdate'] in ['No Game', 'No Lineup']:\n return record\n for item in lineup:\n player = {}\n item=item.rstrip()\n print \"item=\", item, \"x\"\n if item == '':\n continue\n # 8. Ryan Flaherty (L) 2B\n p = re.compile(\" ?(\\d)\\. ((\\w+ )+) \\(.\\) (\\w+)\")\n p = re.compile(\" ?(\\d)\\. 
([\\w ]+) \\((.)\\) (\\w+)\")\n m = p.match(item)\n if m:\n player['batting_order'] = m.group(1)\n player['name'] = m.group(2)\n player['bats_lr'] = m.group(3)\n player['position'] = m.group(4)\n record['lineup'].append(player)\n return record", "def checkPlayerRankPoints(self):\n numOfNan = self.matches[self.matches['winner_rank_points'].isnull() | self.matches['loser_rank_points'].isnull()].shape[0]\n self.matches[self.matches['winner_rank_points'].isnull() | self.matches['loser_rank_points'].isnull()].shape[0]\n print(\"Sanity checking winner_rank_points and loser_rank_points: \" + str(numOfNan))\n\n \"\"\"Fill NaN players rank points with 0\"\"\"\n self.matches['winner_rank_points'] = self.matches['winner_rank_points'].fillna(0)\n self.matches['loser_rank_points'] = self.matches['loser_rank_points'].fillna(0)", "def possible_lineup_totals(lineup, lineup_settings, projections):\n possibles = lineup.possible_lineups(lineup_settings, BaseballSlot.hitting_slots())\n return list(\n map(lambda l: LineupTotal.total_from_projections(l, projections), possibles)\n )", "def create_last_place_tie_teams_three_way(self):\n\n max = 6\n tie_amount = 10.0\n for i in range(1, max + 1):\n user = self.get_user(username=str(i))\n self.fund_user_account(user)\n\n lineup = Lineup()\n if i <= 3:\n # for 1, 2, 3\n lineup.test_fantasy_points = tie_amount\n else:\n # teams 4, 5, 6 should have unique test_fantasy_points\n lineup.test_fantasy_points = tie_amount + i\n\n lineup.user = user\n lineup.draft_group = self.draftgroup\n lineup.save()\n\n bm = BuyinManager(lineup.user)\n bm.buyin(self.contest_pool, lineup)\n Entry.objects.filter(contest_pool=self.contest_pool).update(contest=self.contest)\n self.contest.status = Contest.COMPLETED\n self.contest.save()", "def assign_points_line_plus_plus(points, starts, goals, v_max):\n routes = []\n points = points.copy()\n distance_limit = 3\n for i in range(len(starts)):\n routes.append([starts[i], goals[i]])\n counter = 0\n\n lonely_agents = []\n while len(lonely_agents) < len(starts):\n for agent_index in range(len(routes)):\n if agent_index in lonely_agents:\n continue\n agent_route = routes[agent_index]\n #print(agent_route)\n close_poi = None\n min_distance = float(\"infinity\")\n point_index = 0\n for p_index in range(len(points)):\n for line_point_ind in range(len(agent_route)-1):\n line = find_line_eq(agent_route[line_point_ind].xy, agent_route[line_point_ind + 1].xy)\n distance, close_point = dist_point_to_line(points[p_index].xy, line)\n\n if not (starts[agent_index].xy[1] < close_point[1] < goals[agent_index].xy[1] or goals[agent_index].xy[1] < close_point[1] < starts[agent_index].xy[1]):\n distance = min(calc_distance(points[p_index].xy, starts[agent_index].xy), calc_distance(points[p_index].xy, goals[agent_index].xy))\n\n if distance < min_distance:\n min_distance = distance\n close_poi = points[p_index]\n point_index = line_point_ind\n if min_distance < distance_limit:\n routes[agent_index].insert(point_index+1, close_poi)\n points.remove(close_poi)\n else:\n lonely_agents.append(agent_index)\n print(\"appending agent: \", agent_index)\n\n print(\"points left: \", len(points))\n\n if len(points) > 0:\n for i in range(len(points)):\n min_distance = float(\"infinity\")\n min_index = -1\n min_point_index = -1\n for j in range(len(starts)):\n point_list = routes[j]\n for p in range(len(point_list) - 1):\n line = find_line_eq(point_list[p].xy, point_list[p + 1].xy)\n distance, close_poi = dist_point_to_line(points[i].xy, line)\n if not (starts[j].xy[1] < 
close_poi[1] < goals[j].xy[1] or goals[j].xy[1] < close_poi[1] <\n starts[j].xy[1]):\n distance = min(calc_distance(points[i].xy, starts[j].xy),\n calc_distance(points[i].xy, goals[j].xy))\n counter += 1\n if distance < min_distance:\n min_distance = distance\n min_index = j\n min_point_index = p\n\n routes[min_index].insert(min_point_index + 1, points[i])\n\n state = State([], v_max)\n print(counter)\n for i in range(len(routes)):\n route = routes[i]\n route_object = Route(route[0], route[-1], route[1:-1])\n state.add_route(route_object)\n\n return state", "def test_rank_list_with_offset(self):\n url = \"?filter[time_scope_units]=month&filter[time_scope_value]=-1&filter[resolution]=monthly&filter[limit]=1&filter[offset]=1&group_by[payer_tenant_id]=*\" # noqa: E501\n query_params = self.mocked_query_params(url, OCICostView)\n handler = OCIReportQueryHandler(query_params)\n ranks = [\n {\"payer_tenant_id\": \"1\", \"rank\": 1, \"source_uuid\": [\"1\"]},\n {\"payer_tenant_id\": \"2\", \"rank\": 2, \"source_uuid\": [\"1\"]},\n {\"payer_tenant_id\": \"3\", \"rank\": 3, \"source_uuid\": [\"1\"]},\n {\"payer_tenant_id\": \"4\", \"rank\": 4, \"source_uuid\": [\"1\"]},\n ]\n\n data_list = [\n {\n \"payer_tenant_id\": \"1\",\n \"date\": \"2022-05\",\n \"cost_markup\": 1,\n \"cost_raw\": 1,\n \"cost_total\": 1,\n \"cost_usage\": 1,\n \"infra_markup\": 1,\n \"infra_raw\": 1,\n \"infra_total\": 1,\n \"infra_usage\": 1,\n \"sup_markup\": 1,\n \"sup_raw\": 1,\n \"sup_total\": 1,\n \"sup_usage\": 1,\n \"cost_units\": \"USD\",\n \"source_uuid\": [\"1\"],\n },\n {\n \"payer_tenant_id\": \"2\",\n \"date\": \"2022-05\",\n \"cost_markup\": 1,\n \"cost_raw\": 1,\n \"cost_total\": 1,\n \"cost_usage\": 1,\n \"infra_markup\": 1,\n \"infra_raw\": 1,\n \"infra_total\": 1,\n \"infra_usage\": 1,\n \"sup_markup\": 1,\n \"sup_raw\": 1,\n \"sup_total\": 1,\n \"sup_usage\": 1,\n \"cost_units\": \"USD\",\n \"source_uuid\": [\"1\"],\n },\n {\n \"payer_tenant_id\": \"3\",\n \"date\": \"2022-05\",\n \"cost_markup\": 1,\n \"cost_raw\": 1,\n \"cost_total\": 1,\n \"cost_usage\": 1,\n \"infra_markup\": 1,\n \"infra_raw\": 1,\n \"infra_total\": 1,\n \"infra_usage\": 1,\n \"sup_markup\": 1,\n \"sup_raw\": 1,\n \"sup_total\": 1,\n \"sup_usage\": 1,\n \"cost_units\": \"USD\",\n \"source_uuid\": [\"1\"],\n },\n {\n \"payer_tenant_id\": \"4\",\n \"date\": \"2022-05\",\n \"cost_markup\": 1,\n \"cost_raw\": 1,\n \"cost_total\": 1,\n \"cost_usage\": 1,\n \"infra_markup\": 1,\n \"infra_raw\": 1,\n \"infra_total\": 1,\n \"infra_usage\": 1,\n \"sup_markup\": 1,\n \"sup_raw\": 1,\n \"sup_total\": 1,\n \"sup_usage\": 1,\n \"cost_units\": \"USD\",\n \"source_uuid\": [\"1\"],\n },\n ]\n expected = [\n {\n \"payer_tenant_id\": \"2\",\n \"date\": \"2022-05\",\n \"cost_markup\": 1,\n \"cost_raw\": 1,\n \"cost_total\": 1,\n \"cost_usage\": 1,\n \"infra_markup\": 1,\n \"infra_raw\": 1,\n \"infra_total\": 1,\n \"infra_usage\": 1,\n \"sup_markup\": 1,\n \"sup_raw\": 1,\n \"sup_total\": 1,\n \"sup_usage\": 1,\n \"cost_units\": \"USD\",\n \"source_uuid\": [\"1\"],\n \"rank\": 2,\n },\n ]\n ranked_list = handler._ranked_list(data_list, ranks)\n for i in range(len(ranked_list)):\n for key in ranked_list[i]:\n self.assertEqual(ranked_list[i][key], expected[i][key])", "def formBestLineup(team):\n players = list(map(lambda Batter: Batter.transitionMatrixSimple(), team.batters))\n averagePlayer = team.averagePlayer().transitionMatrixSimple()\n availablePositions = set(range(9))\n bestLineup = [team.averagePlayer()] * 9\n for bestRemaining in range(4):\n 
worstRemaining = 8 - bestRemaining\n # Expected runs, best placement, worst placement\n bestPerforming = (0, 0, 0)\n for bestPos in availablePositions:\n for worstPos in availablePositions:\n if bestPos != worstPos:\n matrices = [averagePlayer] * 9\n matrices[bestPos] = players[bestRemaining]\n matrices[worstPos] = players[worstRemaining]\n scoreDistribution = simulateMarkovChain(matrices)[:, 216]\n expRuns = 0\n for i in range(21):\n expRuns += i * scoreDistribution[i]\n if expRuns > bestPerforming[0]:\n bestPerforming = (expRuns, bestPos, worstPos)\n availablePositions.remove(bestPerforming[1])\n availablePositions.remove(bestPerforming[2])\n bestLineup[bestPerforming[1]] = team.batters[bestRemaining]\n bestLineup[bestPerforming[2]] = team.batters[worstRemaining]\n bestLineup[availablePositions.pop()] = team.batters[4]\n return bestLineup", "def createNewGroupRow(currGroup, nextGroup, pointsToGather, endPoint):\n newRow = {}\n newRow[\"Region No\"] = currGroup[\"Region No\"]\n currPoint = 1\n #Sort out the Group Points\n for currPointIndex in range(len(pointsToGather[0])):\n currGroupPoint = pointsToGather[0][currPointIndex]\n newRow[f\"Point {currPoint + currPointIndex} Orig Rt\"] = currGroup[f\"Point {currGroupPoint} Orig Rt\"]\n newRow[f\"Point {currPoint + currPointIndex} Agent Difference\"] = currGroup[f\"Point {currGroupPoint} Agent Difference\"]\n newRow[f\"Point {currPoint + currPointIndex} Action to Next Point\"] = currGroup[f\"Point {currGroupPoint} Action to Next Point\"]\n \n currPoint += len(pointsToGather[0])\n\n for currPointIndex in range(len(pointsToGather[1])):\n currGroupPoint = pointsToGather[1][currPointIndex]\n newRow[f\"Point {currPoint + currPointIndex} Orig Rt\"] = nextGroup[f\"Point {currGroupPoint} Orig Rt\"]\n newRow[f\"Point {currPoint + currPointIndex} Agent Difference\"] = nextGroup[f\"Point {currGroupPoint} Agent Difference\"]\n newRow[f\"Point {currPoint + currPointIndex} Action to Next Point\"] = nextGroup[f\"Point {currGroupPoint} Action to Next Point\"]\n \n #Sort out the Goal Point and Next Point\n goalPoint = pointsToGather[1][currPointIndex]+1\n\n newRow[f\"Goal Point Orig Rt\"] = nextGroup[f\"Point {goalPoint} Orig Rt\"]\n newRow[f\"Goal Point Agent Difference\"] = nextGroup[f\"Point {goalPoint} Agent Difference\"]\n\n if endPoint:\n newRow[f\"Next Point Rt Orig\"] = nextGroup[\"Goal Point Orig Rt\"]\n newRow[f\"Next Point Agent Difference\"] = nextGroup[\"Goal Point Agent Difference\"]\n newRow[f\"Agent Action to Next Point\"] = nextGroup[f\"Point {goalPoint} Action to Next Point\"]\n else:\n goalPoint += 1\n newRow[f\"Next Point Rt Orig\"] = nextGroup[f\"Point {goalPoint} Orig Rt\"]\n newRow[f\"Next Point Agent Difference\"] = nextGroup[f\"Point {goalPoint} Agent Difference\"]\n newRow[f\"Agent Action to Next Point\"] = nextGroup[f\"Point {goalPoint} Action to Next Point\"]\n\n return newRow", "def select_lineup(\r\n num_guard: int,\r\n num_forward: int,\r\n num_center: int,\r\n salaryYear: int,\r\n budget: float,\r\n points: float,\r\n twos: float,\r\n threes: float,\r\n free_throw: float,\r\n def_reb: float,\r\n off_reb: float,\r\n ato: float,\r\n steals: float,\r\n blocks: float,\r\n):\r\n num_combo = 5\r\n output = []\r\n used_combos = []\r\n while len(output) < 5:\r\n if num_combo == 40:\r\n print(\"Invalid input. 
Please try a higher budget or change a field.\")\r\n break\r\n\r\n guards = get_position_combinations(\r\n \"guard\",\r\n num_combo,\r\n num_guard,\r\n salaryYear,\r\n points,\r\n twos,\r\n threes,\r\n free_throw,\r\n def_reb,\r\n off_reb,\r\n ato,\r\n steals,\r\n blocks,\r\n )\r\n forwards = get_position_combinations(\r\n \"forward\",\r\n num_combo,\r\n num_forward,\r\n salaryYear,\r\n points,\r\n twos,\r\n threes,\r\n free_throw,\r\n def_reb,\r\n off_reb,\r\n ato,\r\n steals,\r\n blocks,\r\n )\r\n centers = get_position_combinations(\r\n \"center\",\r\n num_combo,\r\n num_center,\r\n salaryYear,\r\n points,\r\n twos,\r\n threes,\r\n free_throw,\r\n def_reb,\r\n off_reb,\r\n ato,\r\n steals,\r\n blocks,\r\n )\r\n num_combo += 1\r\n\r\n all_name = [guards[0], forwards[0], centers[0]]\r\n all_name_combos = list(product(*all_name))\r\n\r\n all_salary = [guards[2], forwards[2], centers[2]]\r\n all_salary_combos = list(product(*all_salary))\r\n all_salary_combos_sum = []\r\n for i in all_salary_combos:\r\n all_salary_combos_sum.append(sum(i))\r\n\r\n salary_index = []\r\n for i in range(len(all_salary_combos_sum)):\r\n if all_salary_combos_sum[i] < (budget * 1000000):\r\n salary_index.append(i)\r\n\r\n if salary_index == []:\r\n continue\r\n\r\n all_rating = [guards[1], forwards[1], centers[1]]\r\n all_rating_combos = list(product(*all_rating))\r\n all_rating_combos_sum = []\r\n for i in all_rating_combos:\r\n all_rating_combos_sum.append(sum(i))\r\n\r\n while len(output) < 5 and salary_index != []:\r\n best_index = 0\r\n best_rating = 0\r\n for i in salary_index:\r\n if all_rating_combos_sum[i] > best_rating:\r\n best_rating = all_rating_combos_sum[i]\r\n best_index = i\r\n\r\n if all_name_combos[best_index] in used_combos:\r\n salary_index.remove(best_index)\r\n else:\r\n output.append(\r\n [all_name_combos[best_index], all_salary_combos_sum[best_index]]\r\n )\r\n used_combos.append(all_name_combos[best_index])\r\n salary_index.remove(best_index)\r\n\r\n return output" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Draw the colorbar for range maps.
def draw_colorbar(): print('draw colorbar') depth_bar = np.tile(np.linspace(vmin, vmax, 100), (BAR_WIDTH, 1)) depth_bar = np.flipud(depth_bar.T) plt.imshow(depth_bar, cmap='jet') plt.box(False) plt.axis('off') plt.show()
[ "def colorbar(self):\n if self.s1:\n ax_cb = plt.subplot(self.gs[1])\n else:\n print('must create plot before adding colorbar')\n return\n if self.alt_zi == 'int':\n ticks = np.linspace(-1,1,21)\n # find the intersection of the range of data displayed and ticks\n ticks = [ticki for ticki in ticks if ticki >= \n min(self.zi_norm.min(), self.znull) and \n ticki <= max(self.znull, self.zi_norm.max())]\n self.p1.colorbar(self.cax, ticks=ticks, cax=ax_cb)\n elif self.alt_zi == '+':\n ticks = np.linspace(self.zmin,\n self.zmax,\n self.contour_n + 2)\n # find the intersection of the range of data displayed and ticks\n self.p1.colorbar(self.cax, ticks=ticks, cax=ax_cb)\n elif self.alt_zi == '-':\n ticks = np.linspace(self.zmin,\n self.zmax,\n self.contour_n + 2)\n # find the intersection of the range of data displayed and ticks\n self.p1.colorbar(self.cax, ticks=ticks, cax=ax_cb)\n elif self.alt_zi == 'amp':\n ampmin = self.floor\n ampmax = self.ceiling\n ticks = np.linspace(ampmin, ampmax,\n num=self.contour_n + 2)\n # determine how much precision is necessary in the ticks:\n self.p1.colorbar(self.cax, ticks=ticks, cax=ax_cb)\n elif self.alt_zi == 'log':\n # the colorbar range\n # not sure whether to define the range using masked array or\n # full array\n #logmin = np.log10(masked.min() / (self.zmax - masked.min()))\n logmin = self.floor\n logmax = self.ceiling\n ticks = np.linspace(logmin,logmax,num=11)\n # determine how much precision is necessary in the ticks:\n decimals = int(np.floor(-np.log10(np.abs(\n ticks[-1]-ticks[0])))) + 2\n ticklabels = np.around(ticks,decimals)\n self.p1.colorbar(self.cax, ticks=ticks, cax=ax_cb).ax.set_yticklabels(ticklabels)\n \"\"\"\n decimals = int(np.floor(-np.log10(np.abs(\n ticks[-1]-ticks[0])))) + 2\n ticklabels = np.around(ticks,decimals)\n self.p1.colorbar(self.cax, ticks=ticks, cax=ax_cb).ax.set_yticklabels(ticklabels)\n \"\"\"\n elif self.alt_zi in [None, 'raw']: # raw colorbar\n ticks = np.linspace(min([self.znull, self.zmin]),\n max(self.znull, self.zmax),num=11)\n decimals = int(np.floor(-np.log10(np.abs(\n ticks[-1]-ticks[0])))) + 2\n ticklabels = np.around(ticks,decimals)\n self.p1.colorbar(self.cax, ticks=ticks, cax=ax_cb).ax.set_yticklabels(ticklabels)\n #self.p1.colorbar(self.cax, ticks=ticks, cax=ax_cb)\n else: #could not determine colorbar type\n print('color scale used not recognized: cannot produce colorbar')", "def colorbar(self):\n if self.s1:\n ax_cb = plt.subplot(self.gs[1])\n else:\n print 'must create plot before adding colorbar'\n return\n if self.alt_zi == 'int':\n ticks = np.linspace(-1,1,21)\n # find the intersection of the range of data displayed and ticks\n ticks = [ticki for ticki in ticks if ticki >= \n min(self.zi_norm.min(), self.znull) and \n ticki <= max(self.znull, self.zi_norm.max())]\n self.p1.colorbar(self.cax, ticks=ticks, cax=ax_cb)\n elif self.alt_zi == 'amp':\n ticks = np.linspace(0,1,11)\n self.p1.colorbar(self.cax, ticks=ticks, cax=ax_cb)\n elif self.alt_zi == 'log':\n # mask zi's negative and zero elements\n masked = np.ma.masked_less_equal(self.zi-self.znull, 0.)\n # the colorbar range\n # not sure whether to define the range using masked array or\n # full array\n logmin = np.log10(masked.min() / (self.zmax - masked.min()))\n ticks = np.linspace(logmin,0,num=11)\n # determine how much precision is necessary in the ticks:\n decimals = int(np.floor(-np.log10(np.abs(\n ticks[-1]-ticks[0])))) + 2\n ticklabels = np.around(ticks,decimals)\n self.p1.colorbar(self.cax, ticks=ticks).ax.set_yticklabels(ticklabels)\n elif 
self.alt_zi in [None, 'raw']: # raw colorbar\n ticks = np.linspace(min([self.znull, self.zmin]),max(self.znull, self.zmax),num=11)\n decimals = int(np.floor(-np.log10(np.abs(\n ticks[-1]-ticks[0])))) + 2\n ticklabels = np.around(ticks,decimals)\n self.p1.colorbar(self.cax, ticks=ticks, cax=ax_cb).ax.set_yticklabels(ticklabels)\n #self.p1.colorbar(self.cax, ticks=ticks, cax=ax_cb)\n else: #could not determine colorbar type\n print 'color scale used not recognized: cannot produce colorbar'", "def geoplot_listed_colormap(gdf,column,bound,ax=None,cmap='Reds',cmap_display=False,fig=None): \n # bounds\n vmin = gdf[column].min()\n vmax = gdf[column].max()\n bounds = [vmin] + bound + [vmax]\n \n def bound_value(x,bounds):\n for i,(bmin,bmax) in enumerate(zip(bounds[:-1],bounds[1:])):\n if i == 0 and x == bmin: return i\n elif bmin < x <= bmax: return i\n \n gdf['csign'] = gdf[column].apply(lambda x: bound_value(x,bounds))\n \n # colormap\n c = mpl.cm.get_cmap(cmap, 256)\n colors = c(np.linspace(0, 1, len(bounds)-1))\n cmap = ListedColormap(colors)\n gdf.plot(ax=ax,column='csign',cmap=cmap)\n \n # colorbar\n if cmap_display:\n divider = make_axes_locatable(ax)\n cax = divider.append_axes(\"right\", size=\"5%\", pad=0.1)\n cbar = fig.colorbar(mappable=mpl.cm.ScalarMappable(cmap=cmap), cax=cax,ticks=np.linspace(0,1,len(bounds)))\n cbar.ax.set_yticklabels(bounds) # vertically oriented colorbar", "def draw_cmap(cmap):\n img = plt.imshow(np.array([[0,1]]), cmap=cmap)\n cbar = plt.colorbar(orientation='horizontal', cmap=cmap)\n img.axes.remove()\n return cbar", "def refresh_colorbar(self, cb_min, cb_max, width = None, height = None, xMin = None, yMin = None):\n\n if width is None:\n width = self.width\n else:\n self.width = width\n\n# FIXME: Until now, if you want to refresh the colorbar, a new QPainter\n# object has been created, but I think that it is not necassary.\n# I have to figure out how to use the created object properly.\n p = pg.QtGui.QPainter(self.pic)\n p.drawRect(self.boundingRect())\n p.setPen(pg.mkPen('k'))\n grad = pg.QtGui.QLinearGradient(width/2.0, cb_min*1.0, width/2.0, cb_max*1.0)\n for stop, color in zip(self.stops, self.colors):\n grad.setColorAt(1.0 - stop, pg.QtGui.QColor(*[255*c for c in color]))\n p.setBrush(pg.QtGui.QBrush(grad))\n if xMin is None:\n p.drawRect(pg.QtCore.QRectF(0, cb_min, width, cb_max-cb_min))\n else:\n # If this picture whants to be set in a plot, which is going to be\n # saved:\n p.drawRect(pg.QtCore.QRectF(xMin, yMin, width, height))\n p.end()\n\n vb = self.getViewBox()\n # check whether a viewbox is already created for this object. 
If yes,\n # then it should be adjusted according to the full screen.\n if vb is not None:\n vb.updateAutoRange()\n vb.enableAutoRange()", "def make_colorbar(cmap,vmin=None,vmax=None,norm=None,**kw):\n if norm is None:\n if vmin is None:\n vmin = 0.0\n if vmax is None:\n vmax = 1.0\n norm = Normalize(vmin,vmax)\n if vmin is None:\n vmin = norm.vmin\n if vmax is None:\n vmax = norm.vmax\n sm = ScalarMappable(norm,cmap=cmap)\n sm.set_array([vmin,vmax])\n return colorbar(sm,**kw)", "def update_colorbar():\n\n for key in colormap_dict:\n if cbar.value == key:\n cmap = log_cmap(\"counts\", palette=colormap_dict[cbar.value], low=1, high=col_max)\n color_bar.color_mapper.palette = colormap_dict[cbar.value]", "def RangeContourChart(self, \n η_overall = np.linspace(0.1,0.8,20), \n ϵ_fuel = Q_( np.linspace(.150,35,21), ureg['kWh/kg']),\n range_units='km', show_title=True):\n \n Range = lambda η_o, ϵ_f: ( np.log( self.Aircraft['Max Take Off Weight'] /\n self.FinalWeight ) / self._g *\n self.Lift2Drag * η_o * ϵ_f ).to(ureg[range_units])\n \n #Range2DArray = np.array([[Range(η, ϵ).magnitude for ϵ in ϵ_fuel] for η in η_overall])\n Range2DArray = np.array([[ (Range(η, ϵ) / self.Aircraft['Range']).to('') for ϵ in ϵ_fuel] for η in η_overall])\n\n plt.figure(figsize=(10,8))\n plt.contourf(ϵ_fuel.magnitude, η_overall, Range2DArray, 20)\n plt.colorbar()\n cs=plt.contour(ϵ_fuel.magnitude, η_overall, Range2DArray, \n #levels=[self.Aircraft['Range'].to(ureg(range_units)).magnitude], colors=['w'])\n levels=[0.5, 0.75, 1], colors=['w'], linestyles=[':','--','-'])\n plt.clabel(cs)\n plt.xlabel('Unstored Mass-Specific Energy of Storage Media (kWh/kg)', fontsize=18)\n plt.ylabel('Overall Propulsion System Efficiency', fontsize=18)\n plt.xticks(fontsize=14); plt.yticks(fontsize=14)\n if show_title:\n plt.title(self.Type + ': Range / Current Range Contours', fontsize=18)\n \n # Plot current fuel & efficiency point\n plt.plot(self.Fuel.lower_heating_value.to('kWh/kg'), self.OverallEfficiency, marker='o', markeredgecolor='w', \n markerfacecolor='w', markersize=14)\n \n # Plot fuel lines\n self._drawFuelLines(η_overall)\n \n plt.show()", "def create_cmap_scatter_colorbar(colormap, select_tool=False):\n colorbar = ColorBar(index_mapper=LinearMapper(range=colormap.range),\n color_mapper=colormap,\n orientation='v',\n resizable='v',\n width=30,\n padding=20)\n if select_tool:\n colorbar.tools.append(RangeSelection(component=colorbar))\n colorbar.overlays.append(RangeSelectionOverlay(component=colorbar,\n border_color=\"white\",\n alpha=0.8,\n fill_color=\"lightgray\"))\n return colorbar", "def plot_brain_colorbar(\n ax,\n clim,\n colormap=\"auto\",\n transparent=True,\n orientation=\"vertical\",\n label=\"Activation\",\n bgcolor=\"0.5\",\n):\n from matplotlib.colorbar import ColorbarBase\n from matplotlib.colors import Normalize\n\n mapdata = _process_clim(clim, colormap, transparent)\n ticks = _get_map_ticks(mapdata)\n colormap, lims = _linearize_map(mapdata)\n del mapdata\n norm = Normalize(vmin=lims[0], vmax=lims[2])\n cbar = ColorbarBase(\n ax, cmap=colormap, norm=norm, ticks=ticks, label=label, orientation=orientation\n )\n # make the colorbar background match the brain color\n cbar.ax.set(facecolor=bgcolor)\n # remove the colorbar frame except for the line containing the ticks\n cbar.outline.set_visible(False)\n cbar.ax.set_frame_on(True)\n for key in (\"left\", \"top\", \"bottom\" if orientation == \"vertical\" else \"right\"):\n ax.spines[key].set_visible(False)\n return cbar", "def view_colormap(cmap, axes=None, 
qminmax=None, fontsize=8):\n colors = cmap(np.arange(cmap.N))\n if axes is None:\n fig = plt.figure(); axes = fig.gca();\n plt.yticks([])\n if qminmax is None:\n plt.xticks([])\n elif len(qminmax)==2:\n plt.xticks([0,10],[\"%.2f μm$^{-1}$\" % qminmax[0], \"%.2f μm$^{-1}$\" % qminmax[1]], fontsize=fontsize) \n axes.imshow([colors], extent=[0,10,0,1])", "def __init__(self, cmap='jet', vrange=(0,1), xtol=1.0/255):\n self.xtol = xtol\n self.cmap = safe_cmap(cmap)\n self.vrange = vrange\n\n self.x = np.linspace(0.0, 1.0, 1.0/self.xtol)\n self.y = self.cmap(self.x)", "def colorbar_only(vmin,vmax,outname='colorbar.png',figsize=(4,1),\n cbsize=[0.05,0.5,0.9,0.2],cmap=None, label='cm/yr',\n orient='horizontal',extend='both',transparent=0,show=False):\n print(vmin, vmax)\n if orient == 'vertical':\n figsize = (1,4)\n cbsize = [0.05,0.05,0.1,0.9]\n\n fig = plt.figure(figsize=figsize)\n ax = fig.add_axes(cbsize)\n\n if cmap == None:\n cmap = mpl.cm.jet\n norm = mpl.colors.Normalize(vmin=vmin, vmax=vmax)\n\n cb = mpl.colorbar.ColorbarBase(ax,\n cmap=cmap,\n norm=norm,\n extend=extend,\n orientation=orient,\n )\n cb.set_label(label)\n\n #Show & save\n if show:\n plt.show()\n\n plt.savefig(outname,\n transparent=transparent,\n bbox_inches='tight', #doesn't work for ps output...\n )\n print('output {}'.format(outname))", "def draw_heatmap(scores, goal_class, wt_scores, startseq_chars, wdir, logfile, f_width=50, f_height=4, res=300):\n\n ccmap = mplcolors.LinearSegmentedColormap('by_cmap', cdict)\n aas = ['A', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'K', 'L', 'M', 'N', 'P', 'Q', 'R', 'S', 'T', 'V', 'W', 'Y']\n fig, ax = plt.subplots(figsize=(f_width, f_height), dpi=res)\n\n for idx in range(scores.shape[0]):\n for idy in range(scores.shape[1]):\n scores[idx, idy, goal_class] = scores[idx, idy, goal_class] - wt_scores[goal_class]\n\n\n pcm = ax.imshow(scores[:, :-1, goal_class], origin='lower',\n aspect='auto',\n cmap = ccmap)\n clb = fig.colorbar(pcm, ax=ax)\n clb.set_label('Mutant score', y=0.5)\n\n xtickvals = []\n xtickvalsminor = []\n ytickvals = []\n xticks = []\n xticksminor = []\n yticks = []\n\n for pos in range(scores.shape[1]-1):\n xtickvalsminor.append(startseq_chars[pos])\n xticksminor.append(pos)\n xtickvals.append(pos + 1) # 1-based counting for sequence position\n xticks.append(pos)\n for aa in range(20):\n ytickvals.append(aas[aa])\n yticks.append(aa)\n\n # plt.title('Predicted score for amino acid exchanges for label {}'.format(goal_class))\n plt.ylabel('Amino Acid')\n #plt.xlabel('Sequence Position')\n\n plt.tick_params(axis='y',\n which='both',\n left='off')\n plt.tick_params(axis='x',\n which='major',\n bottom='off',\n labelbottom='on',\n labelsize=4,\n pad=15)\n ax.tick_params( axis='x',\n which='minor',\n bottom='off',\n labelbottom='on')\n\n ax.set_xticks(xticks, minor=False)\n ax.set_xticklabels(xtickvals, minor=False, rotation='vertical')\n\n ax.set_xticklabels(xtickvalsminor, minor=True)\n ax.set_xticks(xticksminor, minor=True)\n\n ax.set_yticks(yticks, minor=False)\n ax.set_yticklabels(ytickvals, minor=False, rotation=0)\n\n ax.spines['top'].set_visible(True)\n ax.spines['right'].set_visible(True)\n\n plt.savefig(os.path.join(wdir, 'walk_heatmap_goal-{}'.format(goal_class)))\n plt.close()", "def draw(self, **kwargs):\n # Set the axes aspect to be equal\n self.ax.set_aspect(\"equal\")\n\n # Generate a mask for the upper triangle\n mask = np.zeros_like(self.ranks_, dtype=bool)\n mask[np.triu_indices_from(mask)] = True\n\n # Draw the heatmap\n # TODO: Move mesh to a property so the 
colorbar can be finalized\n data = np.ma.masked_where(mask, self.ranks_)\n mesh = self.ax.pcolormesh(data, cmap=self.colormap, vmin=-1, vmax=1)\n\n # Set the Axis limits\n self.ax.set(xlim=(0, data.shape[1]), ylim=(0, data.shape[0]))\n\n # Add the colorbar\n cb = self.ax.figure.colorbar(mesh, None, self.ax)\n cb.outline.set_linewidth(0)\n\n # Reverse the rows to get the lower left triangle\n self.ax.invert_yaxis()\n\n # Add ticks and tick labels\n self.ax.set_xticks(np.arange(len(self.ranks_)) + 0.5)\n self.ax.set_yticks(np.arange(len(self.ranks_)) + 0.5)\n if self.show_feature_names_:\n self.ax.set_xticklabels(self.features_, rotation=90)\n self.ax.set_yticklabels(self.features_)\n else:\n self.ax.set_xticklabels([])\n self.ax.set_yticklabels([])", "def plot_colormap(self, freq = 1000, total_pres = True, dinrange = 20):\n id_f = np.where(self.controls.freq <= freq)\n id_f = id_f[0][-1]\n # color parameter\n if total_pres:\n color_par = 20*np.log10(np.abs(self.pres_s[0][:,id_f])/np.amax(np.abs(self.pres_s[0][:,id_f])))\n else:\n r1 = np.linalg.norm(self.sources.coord - self.receivers.coord, axis = 1)\n color_par = np.abs(self.pres_s[0][:,id_f]-\\\n np.exp(-1j * self.controls.k0[id_f] * r1) / r1)\n color_par = 20*np.log10(color_par/np.amax(color_par))\n\n # Create triangulazition\n triang = tri.Triangulation(self.receivers.coord[:,0], self.receivers.coord[:,2])\n # Figure\n fig = plt.figure() #figsize=(8, 8)\n # fig = plt.figure()\n fig.canvas.set_window_title('pressure color map')\n plt.title('Reference |P(f)| (BEM sim)')\n # p = plt.tricontourf(triang, color_par, np.linspace(-15, 0, 15), cmap = 'seismic')\n p = plt.tricontourf(triang, color_par, np.linspace(-dinrange, 0, int(dinrange)), cmap = 'seismic')\n fig.colorbar(p)\n plt.xlabel(r'$x$ [m]')\n plt.ylabel(r'$z$ [m]')\n return plt", "def add_colormaps(colormaps, data_limits=[0.0,1.0], data_break=0.5, \n colormap_name=\"JohnDoe\"):\n\n lhs_dict = colormaps[0]._segmentdata\n rhs_dict = colormaps[1]._segmentdata\n new_dict = dict(red=[], green=[], blue=[], alpha=[])\n\n # Add first colorbar\n for key in rhs_dict:\n val_list = rhs_dict[key]\n for val in val_list:\n new_dict[key].append((val[0] * 0.5, val[1], val[2]))\n\n if 'alpha' not in list(rhs_dict.keys()):\n new_dict['alpha'].append((0.0,1.0,1.0))\n\n # Add second colorbar\n for key in lhs_dict:\n val_list = lhs_dict[key]\n for val in val_list:\n new_dict[key].append(((val[0] + 1.0) * 0.5, val[1], val[2]))\n\n if 'alpha' not in list(lhs_dict.keys()):\n new_dict['alpha'].append((1.0,1.0,1.0))\n\n N = 256\n gamma = 1.0\n\n cmap = colors.LinearSegmentedColormap(colormap_name, new_dict, N, gamma)\n\n # Compute new norm object\n bounds = numpy.empty(N)\n bounds[:int(N / 2)] = numpy.linspace(data_limits[0], data_break, int(N / 2))\n bounds[int(N / 2):] = numpy.linspace(data_break, data_limits[1], \n int(N / 2) + N % 2)\n #norm = colors.BoundaryNorm(boundaries=bounds, ncolors=N)\n\n try:\n # Use this norm for proportional pw linear with better tick mark locations:\n norm = colors.TwoSlopeNorm(data_break, data_limits[0], data_limits[1])\n except AttributeError:\n # older matplotlib.colors did not have TwoSlopeNorm, revert to old:\n norm = colors.BoundaryNorm(boundaries=bounds, ncolors=N)\n\n return cmap, norm", "def colormap_range(image, vrange='indep1', cmap=None):\n # this will clip the colormap\n\n # flatimg is one long 1d array, which enables the min, max, mean, std, and percentile calls to\n # operate on the values from each of the images simultaneously.\n flatimg = 
np.concatenate([i.flatten() for i in image]).flatten()\n\n if isinstance(vrange, str):\n if vrange[:4] == 'auto':\n if vrange == 'auto0':\n M = np.nanmax([np.abs(np.nanmin(flatimg)), np.abs(np.nanmax(flatimg))])\n vrange_list = [-M, M]\n elif vrange == 'auto1' or vrange == 'auto':\n vrange_list = [np.nanmin(flatimg), np.nanmax(flatimg)]\n elif vrange == 'auto2':\n vrange_list = [flatimg.nanmean() - 2 * flatimg.nanstd(),\n flatimg.nanmean() + 2 * flatimg.nanstd()]\n elif vrange == 'auto3':\n p1 = np.nanpercentile(flatimg, 10)\n p2 = np.nanpercentile(flatimg, 90)\n vrange_list = [p1-(p2-p1)/8.0, p2+(p2-p1)/8.0]\n\n # make sure to return as many ranges as there are images\n vrange_list = [vrange_list] * len(image)\n\n elif vrange[:5] == 'indep':\n # get independent vrange by calling this function one image at a time\n vrange_list = [colormap_range(im, vrange.replace('indep', 'auto'))[0][0] for im in image]\n else:\n vrange_list, _ = colormap_range(image, vrange='auto1')\n warnings.warn('Unknown vrange argument, using auto1 instead')\n else:\n # in this case, we've been passed two numbers, either as a list or tuple\n if len(vrange) != 2:\n raise Exception(\"If you're passing numbers to vrange, there must be 2 of them!\")\n vrange_list = [tuple(vrange)] * len(image)\n\n # double check that we're returning the right number of vranges\n assert len(image) == len(vrange_list)\n\n if cmap is None:\n if '0' in vrange:\n cmap = cm.RdBu\n else:\n cmap = cm.gray\n\n return vrange_list, cmap", "def __init__(self, parent: QtWidgets.QWidget, map):\n QtWidgets.QDialog.__init__(self, parent)\n main_layout = QtWidgets.QVBoxLayout(self)\n self.layout = QtWidgets.QHBoxLayout()\n main_layout.addLayout(self.layout)\n button_layout = QtWidgets.QHBoxLayout()\n main_layout.addLayout(button_layout)\n self.button_cancel = QtWidgets.QPushButton(\"Cancel\")\n self.button_cancel.clicked.connect(lambda _: self.done(0))\n button_layout.addStretch()\n button_layout.addWidget(self.button_cancel)\n\n self.maps = plt.colormaps()\n self.buttons = []\n self.setWindowTitle(\"Select colormap\")\n\n # Have colormaps separated into categories:\n # http://matplotlib.org/examples/color/colormaps_reference.html\n cmaps = [('Perceptually Uniform Sequential', [\n 'viridis', 'plasma', 'inferno', 'magma']),\n ('Sequential', [\n 'Greys', 'Purples', 'Blues', 'Greens', 'Oranges', 'Reds',\n 'YlOrBr', 'YlOrRd', 'OrRd', 'PuRd', 'RdPu', 'BuPu',\n 'GnBu', 'PuBu', 'YlGnBu', 'PuBuGn', 'BuGn', 'YlGn']),\n ('Simple Colors', [\n 'gray', 'red', 'orange', 'yellow', 'lime', 'green', 'mint', 'cyan', 'navy', 'blue', 'purple', 'magenta', 'grape']),\n ('Sequential (2)', [\n 'binary', 'gist_yarg', 'gist_gray', 'gray', 'bone', 'pink',\n 'spring', 'summer', 'autumn', 'winter', 'cool', 'Wistia',\n 'hot', 'afmhot', 'gist_heat', 'copper']),\n ('Diverging', [\n 'PiYG', 'PRGn', 'BrBG', 'PuOr', 'RdGy', 'RdBu',\n 'RdYlBu', 'RdYlGn', 'Spectral', 'coolwarm', 'bwr', 'seismic']),\n ('Qualitative', [\n 'Pastel1', 'Pastel2', 'Paired', 'Accent',\n 'Dark2', 'Set1', 'Set2', 'Set3',\n 'tab10', 'tab20', 'tab20b', 'tab20c']),\n ('Miscellaneous', [\n 'turbo', 'flag', 'prism', 'ocean', 'gist_earth', 'terrain', 'gist_stern',\n 'gnuplot', 'gnuplot2', 'CMRmap', 'cubehelix', 'brg', 'hsv',\n 'gist_rainbow', 'rainbow', 'nipy_spectral', 'gist_ncar'])]\n\n for cmap_category, cmap_list in cmaps:\n layout = QtWidgets.QVBoxLayout()\n label = QtWidgets.QLabel(cmap_category)\n layout.addWidget(label)\n label.setFixedWidth(150)\n for cmap in cmap_list:\n button = QtWidgets.QPushButton(cmap)\n 
button.setStyleSheet(\"text-align: center; border: 2px solid black; \"+self.getBackground(cmap))\n button.clicked.connect(lambda _, cmap=cmap: self.buttonClicked(cmap))\n self.buttons.append(button)\n layout.addWidget(button)\n layout.addStretch()\n self.layout.addLayout(layout)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Test that a Gen3Auth instance can be initialized when the required parameters are included.
def test_auth_init_outside_workspace(): # missing parameters with pytest.raises(ValueError): Gen3Auth() # working initialization endpoint = "localhost" refresh_token = "my-refresh-token" auth = Gen3Auth(endpoint=endpoint, refresh_token=refresh_token) assert auth._endpoint == endpoint assert auth._refresh_token == refresh_token assert auth._use_wts == False
[ "def test_auth_init_outside_workspace():\n # working initialization\n auth = gen3.auth.Gen3Auth(refresh_token=test_key)\n assert auth.endpoint == test_endpoint\n assert auth._refresh_token == test_key\n assert auth._use_wts == False", "def test_auth_init_with_both_endpoint_and_idp():\n with pytest.raises(ValueError):\n auth = gen3.auth.Gen3Auth(endpoint=\"https://caninedc.org\", idp=\"canine-google\")", "def test_init(self):\n self.assertEqual(self.new_credentials.account,\"Swift\")\n self.assertEqual(self.new_credentials.username,\"Gracegee\")\n self.assertEqual(self.new_credentials.password,\"grcee\")", "def test_auth(self):\n pass", "def test_auth_init_with_client_credentials_no_endpoint():\n client_id = \"id\"\n client_secret = \"secret\"\n with pytest.raises(ValueError, match=\"'endpoint' must be specified\"):\n gen3.auth.Gen3Auth(client_credentials=(client_id, client_secret))", "def test_init(self):\n assert self.registration_behaviour.is_registered is False\n assert self.registration_behaviour.registration_in_progress is False\n assert self.registration_behaviour.failed_registration_msg is None\n assert self.registration_behaviour._nb_retries == 0", "def test_trust_init(self):\n\n self._stubs_v3(method='trust')\n cfg.CONF.set_override('deferred_auth_method', 'trusts')\n self.m.ReplayAll()\n\n ctx = utils.dummy_context()\n ctx.username = None\n ctx.password = None\n ctx.auth_token = None\n ctx.trust_id = 'atrust123'\n ctx.trustor_user_id = 'trustor_user_id'\n heat_ks_client = heat_keystoneclient.KeystoneClient(ctx)\n self.assertIsNotNone(heat_ks_client.client)\n self.assertIsNone(ctx.auth_token)", "def test_trust_init(self):\r\n\r\n self._stubs_v3(method='trust')\r\n cfg.CONF.set_override('deferred_auth_method', 'trusts')\r\n self.m.ReplayAll()\r\n\r\n ctx = utils.dummy_context()\r\n ctx.username = None\r\n ctx.password = None\r\n ctx.auth_token = None\r\n ctx.trust_id = 'atrust123'\r\n ctx.trustor_user_id = 'trustor_user_id'\r\n heat_ks_client = heat_keystoneclient.KeystoneClient(ctx)\r\n self.assertIsNotNone(heat_ks_client.client)", "def test_keys(self):\n self.assert_requires_auth(self.instance.keys)", "def _required_auth_info(cls):", "def test_init_v3_token_auth_ref_v3(self):\n\n expected_auth_ref = {'auth_token': 'ctx_token',\n 'expires': '456',\n 'version': 'v3',\n 'methods': []}\n self._stubs_v3(method='auth_ref', auth_ref=expected_auth_ref)\n self.m.ReplayAll()\n\n ctx = utils.dummy_context()\n ctx.username = None\n ctx.password = None\n ctx.trust_id = None\n ctx.auth_token = 'ctx_token'\n ctx.auth_token_info = {'token': {'expires': '456', 'methods': []}}\n heat_ks_client = heat_keystoneclient.KeystoneClient(ctx)\n heat_ks_client.client\n self.assertIsNotNone(heat_ks_client._client)", "def test_initialization_with_auth_headers(self):\n api = Mint(use_rest_client=True, api_key=\"abc123\", cookies=\"chocolate-chip\")\n self.assertEqual(api.rest_client.__class__, RESTClient)\n self.assertEqual(\n # main method all endpoint requests route through\n getattr(api, \"request\").__self__.__class__,\n RESTClient,\n )\n self.assertIsNone(api.browser)", "def test_init_key():\n key = 'test_key'\n\n client = GiftbitClient(api_key=key)\n\n assert client.api_key == key", "def test_auth_init_with_endpoint_that_matches_multiple_idp():\n wts_token = test_key_wts[\"api_key\"]\n\n def _mock_request(url, **kwargs):\n mocked_response = MagicMock(requests.Response)\n\n if url.endswith(\"?idp=test-google\"):\n mocked_response.status_code = 200\n mocked_response.json.return_value = {\"token\": 
wts_token}\n elif url.endswith(\"/external_oidc/\"):\n mocked_response.status_code = 200\n mocked_response.json.return_value = {\n \"providers\": [\n {\n \"base_url\": \"https://test-commons.org\",\n \"idp\": \"test-google\",\n \"name\": \"test Google Login\",\n \"refresh_token_expiration\": None,\n \"urls\": [\n {\n \"name\": \"test Google Login\",\n \"url\": \"https://test-commons.org/wts/oauth2/authorization_url?idp=test-google\",\n }\n ],\n },\n {\n \"base_url\": \"https://test-commons.org\",\n \"idp\": \"test-google-2\",\n \"name\": \"test Google Login\",\n \"refresh_token_expiration\": None,\n \"urls\": [\n {\n \"name\": \"test Google Login\",\n \"url\": \"https://test-commons.org/wts/oauth2/authorization_url?idp=test-google-2\",\n }\n ],\n },\n ]\n }\n elif url.endswith(\"/access_token\"):\n mocked_response.status_code = 200\n mocked_response.json.return_value = test_access_token\n else:\n mocked_response.status_code = 400\n\n return mocked_response\n\n with patch(\"gen3.auth.requests.post\") as mock_request_post:\n with patch(\"gen3.auth.requests.get\") as mock_request_get:\n mock_request_post.side_effect = _mock_request\n mock_request_get.side_effect = _mock_request\n\n with open(test_cred_file_name, \"w\") as f:\n json.dump(test_key, f)\n\n with pytest.raises(ValueError):\n auth = gen3.auth.Gen3Auth(\n endpoint=test_external_endpoint, refresh_file=test_cred_file_name\n )\n # auth object should not initialize successfully\n assert not auth\n\n if os.path.isfile(test_cred_file_name):\n os.remove(test_cred_file_name)", "def test_init(self):\n iiq = insightiq_api.InsightiqApi(username='pat', password='a')\n\n self.fake_renew_session.assert_called()", "def setUp(self):\n self.credentialFactory = digest.DigestCredentialFactory('md5',\n 'test realm')", "def test_stores_authentication_details(self):\n plugin = session.CratonAuth(username=TEST_USERNAME_0,\n project_id=TEST_PROJECT_0,\n token=TEST_TOKEN_0)\n self.assertEqual(TEST_USERNAME_0, plugin.username)\n self.assertEqual(TEST_PROJECT_0, plugin.project_id)\n self.assertEqual(TEST_TOKEN_0, plugin.token)", "def setUp(self):\n\n self.new_credentials = Credentials(\"Swift\",\"Gracegee\",\"grcee\")", "def test_init_v3_password(self):\r\n\r\n self._stubs_v3(method='password')\r\n self.m.ReplayAll()\r\n\r\n ctx = utils.dummy_context()\r\n ctx.auth_token = None\r\n ctx.trust_id = None\r\n heat_ks_client = heat_keystoneclient.KeystoneClient(ctx)\r\n client = heat_ks_client.client\r\n self.assertIsNotNone(client)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Lists all clusters owned by a project in either the specified zone or all zones.
def list_clusters( self, project_id, zone, parent=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): # Wrap the transport method to add retry and timeout logic. if "list_clusters" not in self._inner_api_calls: self._inner_api_calls[ "list_clusters" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.list_clusters, default_retry=self._method_configs["ListClusters"].retry, default_timeout=self._method_configs["ListClusters"].timeout, client_info=self._client_info, ) request = cluster_service_pb2.ListClustersRequest( project_id=project_id, zone=zone, parent=parent ) if metadata is None: metadata = [] metadata = list(metadata) try: routing_header = [("parent", parent)] except AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) return self._inner_api_calls["list_clusters"]( request, retry=retry, timeout=timeout, metadata=metadata )
[ "def get_cluster_list(self):\n LOG.info(\"Getting clusters\")\n return self.client.request(constants.GET,\n constants.GET_CLUSTER.format\n (self.server_ip), payload=None,\n querystring=constants.\n SELECT_ID_AND_NAME)", "def get_clusters(self) -> List[Dict]:\n\n \"\"\"\n GET /v1/clusters HTTP/1.1\n Host: containers.bluemix.net\n Accept: application/json\n Authorization: [PRIVATE DATA HIDDEN]\n Content-Type: application/json\n X-Region: au-syd\n \"\"\"\n # returns 200 OK on success\n\n resp = self.session.get(\n \"{0}/v1/clusters\".format(self.endpoint_url),\n headers={\"X-Region\": self.region, \"Accept\": \"application/json\"},\n )\n\n if resp.status_code != 200:\n raise Exception(\n \"error getting clusters: code=%d body=%r\"\n % (resp.status_code, resp.text)\n )\n\n return resp.json()", "def list(cls, api_client, **kwargs):\n\n cmd = {}\n cmd.update(kwargs)\n if 'account' in kwargs.keys() and 'domainid' in kwargs.keys():\n cmd['listall'] = True\n return super(Zone, cls).list(api_client.listZones(**cmd).get('zone'))", "def list_vm_clusters(lconfig, exa_infra_id):\n # Query (see https://docs.cloud.oracle.com/en-us/iaas/Content/Search/Concepts/querysyntax.htm)\n query = f\"query vmcluster resources\"\n\n DatabaseClient = oci.database.DatabaseClient(lconfig)\n\n SearchClient = oci.resource_search.ResourceSearchClient(lconfig)\n response = SearchClient.search_resources(oci.resource_search.models.StructuredSearchDetails(type=\"Structured\", query=query))\n for item in response.data.items:\n response2 = DatabaseClient.get_cloud_vm_cluster(item.identifier)\n vm_cluster = response2.data\n cpt_name = get_cpt_name_from_id(item.compartment_id)\n if vm_cluster.cloud_exadata_infrastructure_id == exa_infra_id:\n if vm_cluster.lifecycle_state == \"AVAILABLE\":\n COLOR_STATUS = COLOR_GREEN\n else:\n COLOR_STATUS = COLOR_YELLOW\n print (\" VM cluster : \"+COLOR_RED+f\"{vm_cluster.display_name:25s} \"+COLOR_YELLOW+f\"{vm_cluster.cpu_core_count:3} OCPUs \",end=\"\")\n print (COLOR_STATUS+f\"{vm_cluster.lifecycle_state:45s} \"+COLOR_NORMAL, end=\"\")\n if show_ocids:\n print (COLOR_NORMAL+f\"{vm_cluster.id} \")\n else:\n print (\"\")\n print (\" cpt : \"+COLOR_GREEN+f\"{cpt_name} \"+COLOR_NORMAL)\n list_dbhomes (lconfig, vm_cluster.id, vm_cluster.compartment_id)", "def get_cluster_list(self, context, filters=None, limit=None,\n marker=None, sort_key=None, sort_dir=None):", "def list_operations(\n self,\n project_id,\n zone,\n parent=None,\n retry=google.api_core.gapic_v1.method.DEFAULT,\n timeout=google.api_core.gapic_v1.method.DEFAULT,\n metadata=None,\n ):\n # Wrap the transport method to add retry and timeout logic.\n if \"list_operations\" not in self._inner_api_calls:\n self._inner_api_calls[\n \"list_operations\"\n ] = google.api_core.gapic_v1.method.wrap_method(\n self.transport.list_operations,\n default_retry=self._method_configs[\"ListOperations\"].retry,\n default_timeout=self._method_configs[\"ListOperations\"].timeout,\n client_info=self._client_info,\n )\n\n request = cluster_service_pb2.ListOperationsRequest(\n project_id=project_id, zone=zone, parent=parent\n )\n if metadata is None:\n metadata = []\n metadata = list(metadata)\n try:\n routing_header = [(\"parent\", parent)]\n except AttributeError:\n pass\n else:\n routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(\n routing_header\n )\n metadata.append(routing_metadata)\n\n return self._inner_api_calls[\"list_operations\"](\n request, retry=retry, timeout=timeout, metadata=metadata\n )", "def get_cluster(ctx, 
name, region, verbosity):\n cp = ControlPlane(name, region=region)\n ci = cp.query()\n headers = ['NAME', 'ENDPOINT', 'VPC', 'SUBNETS']\n print(tabulate([[ci.name, ci.endpoint, ci.vpc, ','.join(ci.subnets)]], headers, tablefmt='plain'))", "def get_instances_in_zone(self, zone):\n\t\t\n\t\ttry:\n\t\t\tinstances = self.compute.instances().list(project=self.project, zone=zone).execute()['items']\n\t\texcept KeyError:\n\t\t\tinstances = []\n\t\treturn instances", "def retrieve_all_instances(self):\n zones = []\n instances = []\n # Retrieve all Zones, given Project ID\n try:\n request = self.service.zones().list(project=self.project)\n response = request.execute()\n for zone in response.get('items', []):\n zones.append(zone['name'])\n except Exception as e:\n raise e\n # Retrieve all Instances in each Zone\n try:\n for zone in zones:\n request = self.service.instances().list(project=self.project, zone=zone)\n response = request.execute()\n for instance in response.get('items', []):\n instances.append(instance)\n self.__class__.all_instances = instances\n except Exception as e:\n raise e", "def availability_zone_list(request):\n az_manager = moganclient(request).availability_zone\n return az_manager.list()", "def get_cluster_names():\n token = get_session()\n headers = {\n 'Cookie': 'argocd.token={}'.format(token)\n }\n r = requests.get('https://build.osinfra.cn/api/v1/clusters', headers=headers)\n if r.status_code != 200:\n print('Cannot get cluster names because GET request failed.')\n print(r.status_code, r.json())\n sys.exit(1)\n cluster_names = []\n for i in r.json()['items']:\n cluster_names.append(i['name'])\n return cluster_names", "def cli_cosmosdb_managed_cassandra_cluster_list(client,\r\n resource_group_name=None):\r\n\r\n if resource_group_name is None:\r\n return client.list_by_subscription()\r\n\r\n return client.list_by_resource_group(resource_group_name)", "def ex_list_zones(self):\r\n list_zones = []\r\n request = '/zones'\r\n response = self.connection.request(request, method='GET').object\r\n list_zones = [self._to_zone(z) for z in response['items']]\r\n return list_zones", "def _get_list_zone_object(self):\n return self.rad_connection.list_objects(zonemgr.Zone())", "def listInstanceGroups(compute, project, zone):\n response = compute.instanceGroups().list(project=project, zone=zone).execute()\n pprint.pprint(response)\n return response", "def do_kube_cluster_list(cc, args):\n versions = cc.kube_cluster.list()\n fields = ['cluster_name', 'cluster_version', 'cluster_api_endpoint']\n labels = fields\n utils.print_list(versions, fields, labels, sortby=0)", "def get_cluster_stats(self, context, project_id):", "def _cluster_list():\n\n CLUSTER_TABLE = storage.get_cluster_table()\n clusters = []\n cluster_items = CLUSTER_TABLE.scan()\n\n for cluster in cluster_items['Items']:\n clusters.append(cluster['id'])\n\n print(f'tracked clusters: {clusters}')\n\n return clusters", "def host_cluster_find_all(context, filters=None, session=None):\n query = model_query(context, pvc_models.HmcHostClustersDTO,\n session=session)\n # Add in any provided filters to the overall query being performed\n if filters:\n query = query.filter_by(**filters)\n return result_set_as_dict(query.all())" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Updates the version and/or image type for the specified node pool.
def update_node_pool( self, project_id, zone, cluster_id, node_pool_id, node_version, image_type, name=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): # Wrap the transport method to add retry and timeout logic. if "update_node_pool" not in self._inner_api_calls: self._inner_api_calls[ "update_node_pool" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.update_node_pool, default_retry=self._method_configs["UpdateNodePool"].retry, default_timeout=self._method_configs["UpdateNodePool"].timeout, client_info=self._client_info, ) request = cluster_service_pb2.UpdateNodePoolRequest( project_id=project_id, zone=zone, cluster_id=cluster_id, node_pool_id=node_pool_id, node_version=node_version, image_type=image_type, name=name, ) if metadata is None: metadata = [] metadata = list(metadata) try: routing_header = [("name", name)] except AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) return self._inner_api_calls["update_node_pool"]( request, retry=retry, timeout=timeout, metadata=metadata )
[ "def update_agent_pool(self, pool, pool_id):\n route_values = {}\n if pool_id is not None:\n route_values['poolId'] = self._serialize.url('pool_id', pool_id, 'int')\n content = self._serialize.body(pool, 'TaskAgentPool')\n response = self._send(http_method='PATCH',\n location_id='a8c47e17-4d56-4a56-92bb-de7ea7dc65be',\n version='6.0-preview.1',\n route_values=route_values,\n content=content)\n return self._deserialize('TaskAgentPool', response)", "def update_pool(self, context, old_pool, pool, service):\n try:\n service_pending = \\\n self.lbdriver.update_pool(old_pool, pool, service)\n self.cache.put(service, self.agent_host)\n if service_pending:\n self.needs_resync = True\n except q_exception.NeutronException as exc:\n LOG.error(\"NeutronException: %s\" % exc.msg)\n except Exception as exc:\n LOG.error(\"Exception: %s\" % exc.message)", "def put(self, id, pool_):\n pool = pool_.pool\n context = pecan_request.context.get('octavia_context')\n with context.session.begin():\n db_pool = self._get_db_pool(context.session, id,\n show_deleted=False)\n\n project_id, provider = self._get_lb_project_id_provider(\n context.session, db_pool.load_balancer_id)\n\n self._auth_validate_action(context, project_id, constants.RBAC_PUT)\n\n if pool.tls_versions is None:\n pool.tls_versions = CONF.api_settings.default_pool_tls_versions\n if pool.tls_ciphers is None:\n pool.tls_ciphers = CONF.api_settings.default_pool_ciphers\n\n if (pool.session_persistence and\n not pool.session_persistence.type and\n db_pool.session_persistence and\n db_pool.session_persistence.type):\n pool.session_persistence.type = db_pool.session_persistence.type\n\n self._validate_pool_PUT(pool, db_pool)\n\n # Load the driver early as it also provides validation\n driver = driver_factory.get_driver(provider)\n\n with context.session.begin():\n self._test_lb_and_listener_statuses(\n context.session, lb_id=db_pool.load_balancer_id,\n listener_ids=self._get_affected_listener_ids(db_pool))\n\n # Prepare the data for the driver data model\n pool_dict = pool.to_dict(render_unsets=False)\n pool_dict['id'] = id\n provider_pool_dict = (\n driver_utils.pool_dict_to_provider_dict(pool_dict))\n\n # Also prepare the baseline object data\n old_provider_pool = driver_utils.db_pool_to_provider_pool(\n db_pool, for_delete=True)\n\n # Dispatch to the driver\n LOG.info(\"Sending update Pool %s to provider %s\", id, driver.name)\n driver_utils.call_provider(\n driver.name, driver.pool_update,\n old_provider_pool,\n driver_dm.Pool.from_dict(provider_pool_dict))\n\n # Update the database to reflect what the driver just accepted\n pool.provisioning_status = constants.PENDING_UPDATE\n db_pool_dict = pool.to_dict(render_unsets=False)\n self.repositories.update_pool_and_sp(context.session, id,\n db_pool_dict)\n\n # Force SQL alchemy to query the DB, otherwise we get inconsistent\n # results\n context.session.expire_all()\n with context.session.begin():\n db_pool = self._get_db_pool(context.session, id)\n result = self._convert_db_to_type(db_pool, pool_types.PoolResponse)\n return pool_types.PoolRootResponse(pool=result)", "def test_update_cloud_pool(self):\n pass", "def update_subnet_pool(self, subnet_pool, **attrs):\n return self._update(_subnet_pool.SubnetPool, subnet_pool, **attrs)", "def update_pool_member(self, pool_member, pool, **attrs):\n poolobj = self._get_resource(_pool.Pool, pool)\n return self._update(\n _pool_member.PoolMember, pool_member, pool_id=poolobj.id, **attrs\n )", "def post_update_azure_node_pool(\n self, response: 
operations_pb2.Operation\n ) -> operations_pb2.Operation:\n return response", "def pre_update_azure_node_pool(\n self,\n request: azure_service.UpdateAzureNodePoolRequest,\n metadata: Sequence[Tuple[str, str]],\n ) -> Tuple[azure_service.UpdateAzureNodePoolRequest, Sequence[Tuple[str, str]]]:\n return request, metadata", "def put(self, request, *args, **kwargs):\n\n server_pools = request.DATA\n json_validate(SPECS.get('pool_put')).validate(server_pools)\n verify_ports(server_pools)\n locks_list = facade.create_lock(server_pools.get('server_pools'))\n try:\n response = facade_pool_deploy.update_real_pool(server_pools, request.user)\n except Exception, exception:\n log.error(exception)\n raise rest_exceptions.NetworkAPIException(exception)\n finally:\n facade.destroy_lock(locks_list)\n return Response(response)", "def __update_version(edge_type, motif_node, motif_node_dict):\n\t# if updating version is not needed, simply return the original node\n\tif __filter_update_node(edge_type):\n\t\treturn motif_node, None\n\n\t# if not motif_node.mn_has_outgoing:\n\t# \treturn motif_node, None\n\n\tnew_motif_node = MotifNode(motif_node.mn_ty)\n\tnew_motif_node.mn_has_name_recorded = motif_node.mn_has_name_recorded\n\tnew_motif_node.mn_kernel_version = motif_node.mn_kernel_version\n\tnew_motif_node.mn_is_initialized = motif_node.mn_is_initialized\n\tif motif_node.mn_ty == 'task':\n\t\tmotif_edge = MotifEdge(motif_node, new_motif_node, relation_to_str('RL_VERSION_TASK'))\n\telse:\n\t\tmotif_edge = MotifEdge(motif_node, new_motif_node, relation_to_str('RL_VERSION'))\n\tdict_key = getKeyByValue(motif_node_dict, motif_node)\n\tif dict_key:\n\t\tmotif_node_dict[dict_key].append(new_motif_node)\n\treturn new_motif_node, create_leaf_node(motif_edge)", "def resize_nodepool(pool, new_size, project, zone, cluster):\n cmd = [\n 'gcloud', 'container', 'clusters', 'resize', cluster,\n '--zone', zone, '--project', project, '--node-pool', pool,\n '--num-nodes', str(new_size), '--quiet',\n ]\n print(cmd)\n subprocess.call(cmd)", "def update_pool_status(self, context, pool_id=None,\n provisioning_status=plugin_constants.ERROR,\n operating_status=None):\n with context.session.begin(subtransactions=True):\n try:\n pool = self.driver.plugin.db.get_pool(\n context,\n pool_id\n )\n if (pool.provisioning_status !=\n plugin_constants.PENDING_DELETE):\n self.driver.plugin.db.update_status(\n context,\n models.PoolV2,\n pool_id,\n provisioning_status,\n operating_status\n )\n except Exception as e:\n LOG.error('Exception: update_pool_status: %s',\n e.message)", "def update(self, storage_pool_id, name=None, is_protected=None):\n payload = {\n \"name\": name,\n \"isProtected\": is_protected\n }\n # FIXME: API throws error if 'isColdStorageEnabled' and 'description' fields are sent\n log.info(\"Updating storage pool ID '{}'\".format(storage_pool_id))\n return self.conn.put('vdc/data-services/varrays/{}'.format(storage_pool_id),\n json_payload=payload)", "def node_version(self):\n node_info, err = self.pools()\n if err:\n return None, err\n\n return node_info[\"implementationVersion\"].split(\n \"-\")[0], None if \"implementationVersion\" in node_info else (None, \"unexpected JSON returned from /pools\")", "def _update_nprocesses(pool, processes):\n # FIXME: Catch ERRNO 11:\n # OSError: [Errno 11] Resource temporarily unavailable\n if pool._processes < processes:\n pool._processes = processes\n pool._repopulate_pool()", "def pool_types(self, pool_types):\n\n self._pool_types = pool_types", "def update_agent_pool(name, params, 
bucket=CAMD_S3_BUCKET):\n client = boto3.client(\"s3\")\n prefix = \"{}/{}\".format(META_AGENT_PREFIX, name)\n agent_pool = MetaAgentCampaign.load_pickled_objects(name, bucket)[0]\n agent_pool.extend(params)\n\n pickled_agent = pickle.dumps(agent_pool)\n client.put_object(\n Bucket=bucket, Key=\"{}/agent_pool.pickle\".format(prefix), Body=pickled_agent\n )", "def ex_targetpool_add_node(self, targetpool, node):\r\n if not hasattr(targetpool, 'name'):\r\n targetpool = self.ex_get_targetpool(targetpool)\r\n if not hasattr(node, 'name'):\r\n node = self.ex_get_node(node, 'all')\r\n\r\n targetpool_data = {'instances': [{'instance': node.extra['selfLink']}]}\r\n\r\n request = '/regions/%s/targetPools/%s/addInstance' % (\r\n targetpool.region.name, targetpool.name)\r\n self.connection.async_request(request, method='POST',\r\n data=targetpool_data)\r\n targetpool.nodes.append(node)\r\n return True", "def _populate_pool_vm_configuration(config):\n # type: (dict) -> dict\n conf = pool_vm_configuration(config, 'platform_image')\n if 'publisher' in conf:\n publisher = conf['publisher'].lower()\n offer = conf['offer'].lower()\n sku = str(conf['sku']).lower()\n # auto convert windows native if detected\n if publisher == 'microsoftwindowsserver':\n vm_config = PoolVmPlatformImageSettings(\n publisher=publisher,\n offer=offer,\n sku=sku,\n version=_kv_read_checked(conf, 'version', default='latest'),\n native=True,\n license_type=_kv_read_checked(conf, 'license_type'),\n )\n elif publisher == 'microsoft-azure-batch':\n # auto convert linux native if detected\n vm_config = PoolVmPlatformImageSettings(\n publisher=publisher,\n offer=offer,\n sku=sku,\n version=_kv_read_checked(conf, 'version', default='latest'),\n native=True,\n license_type=None,\n )\n else:\n vm_config = PoolVmPlatformImageSettings(\n publisher=publisher,\n offer=offer,\n sku=sku,\n version=_kv_read_checked(conf, 'version', default='latest'),\n native=False,\n license_type=None,\n )\n # auto convert vm config to native if specified\n if not vm_config.native and _kv_read(conf, 'native', default=False):\n vm_size = _pool_vm_size(config)\n if (vm_config.publisher == 'canonical' and\n vm_config.offer == 'ubuntuserver' and\n vm_config.sku == '16.04-lts'):\n vm_config = PoolVmPlatformImageSettings(\n publisher='microsoft-azure-batch',\n offer='ubuntu-server-container{}'.format(\n '-rdma' if is_rdma_pool(vm_size) else ''),\n sku=vm_config.sku.replace('.', '-'),\n version='latest',\n native=True,\n license_type=None,\n )\n elif (vm_config.publisher == 'openlogic' and\n vm_config.offer.startswith('centos') and\n (vm_config.sku == '7.4' or vm_config.sku == '7.5' or\n vm_config.sku == '7.6' or vm_config.sku == '7.7')):\n vm_config = PoolVmPlatformImageSettings(\n publisher='microsoft-azure-batch',\n offer='centos-container{}'.format(\n '-rdma' if is_rdma_pool(vm_size) else ''),\n sku=vm_config.sku.replace('.', '-'),\n version='latest',\n native=True,\n license_type=None,\n )\n return vm_config\n else:\n conf = pool_vm_configuration(config, 'custom_image')\n node_agent = conf['node_agent'].lower()\n if node_agent == 'batch.node.windows amd64':\n native = True\n license_type = _kv_read_checked(conf, 'license_type')\n else:\n native = _kv_read(conf, 'native', default=False)\n license_type = None\n return PoolVmCustomImageSettings(\n arm_image_id=_kv_read_checked(conf, 'arm_image_id'),\n node_agent=node_agent,\n native=native,\n license_type=license_type,\n )" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Sets the autoscaling settings for the specified node pool.
def set_node_pool_autoscaling( self, project_id, zone, cluster_id, node_pool_id, autoscaling, name=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): # Wrap the transport method to add retry and timeout logic. if "set_node_pool_autoscaling" not in self._inner_api_calls: self._inner_api_calls[ "set_node_pool_autoscaling" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.set_node_pool_autoscaling, default_retry=self._method_configs["SetNodePoolAutoscaling"].retry, default_timeout=self._method_configs["SetNodePoolAutoscaling"].timeout, client_info=self._client_info, ) request = cluster_service_pb2.SetNodePoolAutoscalingRequest( project_id=project_id, zone=zone, cluster_id=cluster_id, node_pool_id=node_pool_id, autoscaling=autoscaling, name=name, ) if metadata is None: metadata = [] metadata = list(metadata) try: routing_header = [("name", name)] except AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) return self._inner_api_calls["set_node_pool_autoscaling"]( request, retry=retry, timeout=timeout, metadata=metadata )
[ "def pool_autoscale_settings(config):\n # type: (dict) -> PoolAutoscaleSettings\n conf = pool_specification(config)\n conf = _kv_read_checked(conf, 'autoscale', {})\n ei = _kv_read_checked(conf, 'evaluation_interval')\n if util.is_not_empty(ei):\n ei = util.convert_string_to_timedelta(ei)\n else:\n ei = datetime.timedelta(minutes=15)\n scenconf = _kv_read_checked(conf, 'scenario')\n if scenconf is not None:\n mvc = _kv_read_checked(scenconf, 'maximum_vm_count')\n if mvc is None:\n raise ValueError('maximum_vm_count must be specified')\n mvipe = _kv_read_checked(\n scenconf, 'maximum_vm_increment_per_evaluation', default={})\n ndo = _kv_read_checked(\n scenconf, 'node_deallocation_option', 'taskcompletion')\n if (ndo is not None and\n ndo not in (\n 'requeue', 'terminate', 'taskcompletion', 'retaineddata')):\n raise ValueError(\n 'invalid node_deallocation_option: {}'.format(ndo))\n sli = _kv_read_checked(scenconf, 'sample_lookback_interval')\n if util.is_not_empty(sli):\n sli = util.convert_string_to_timedelta(sli)\n else:\n sli = datetime.timedelta(minutes=10)\n tr = _kv_read_checked(scenconf, 'time_ranges', default={})\n trweekday = _kv_read_checked(tr, 'weekdays', default={})\n trworkhour = _kv_read_checked(tr, 'work_hours', default={})\n scenario = PoolAutoscaleScenarioSettings(\n name=_kv_read_checked(scenconf, 'name').lower(),\n maximum_vm_count=_pool_vm_count(config, conf=mvc),\n maximum_vm_increment_per_evaluation=_pool_vm_count(\n config, conf=mvipe),\n node_deallocation_option=ndo,\n sample_lookback_interval=sli,\n required_sample_percentage=_kv_read(\n scenconf, 'required_sample_percentage', 70),\n rebalance_preemption_percentage=_kv_read(\n scenconf, 'rebalance_preemption_percentage', None),\n bias_last_sample=_kv_read(\n scenconf, 'bias_last_sample', True),\n bias_node_type=_kv_read_checked(\n scenconf, 'bias_node_type', 'auto').lower(),\n weekday_start=_kv_read(trweekday, 'start', default=1),\n weekday_end=_kv_read(trweekday, 'end', default=5),\n workhour_start=_kv_read(trworkhour, 'start', default=8),\n workhour_end=_kv_read(trworkhour, 'end', default=17),\n )\n else:\n scenario = None\n return PoolAutoscaleSettings(\n evaluation_interval=ei,\n formula=_kv_read_checked(conf, 'formula'),\n scenario=scenario,\n )", "def _add_auto_scaling(self):\n auto_scaling_group = self.fargate_service.service.auto_scale_task_count(\n min_capacity=2,\n max_capacity=10\n )\n auto_scaling_group.scale_on_cpu_utilization(\n 'CpuScaling',\n target_utilization_percent=50,\n scale_in_cooldown=core.Duration.seconds(60),\n scale_out_cooldown=core.Duration.seconds(60)\n )", "def resize_nodepool(pool, new_size, project, zone, cluster):\n cmd = [\n 'gcloud', 'container', 'clusters', 'resize', cluster,\n '--zone', zone, '--project', project, '--node-pool', pool,\n '--num-nodes', str(new_size), '--quiet',\n ]\n print(cmd)\n subprocess.call(cmd)", "def aws_update_autoscaler():\r\n ami_id = aws_create_ami_from()\r\n cur_date = time.strftime('%Y%m%d', time.gmtime())\r\n lcName = 'ns11-%s' % cur_date\r\n lc = LaunchConfiguration(name=lcName, \r\n image_id=ami_id, instance_type=env.aws.get('instance_type'),\r\n key_name=env.aws.get('key_pair'), \r\n security_groups=env.aws.get('security_groups'))\r\n env.asConn.create_launch_configuration(lc)\r\n print \"Created launchConfiguration %s\" % lcName\r\n \r\n ag = AutoScalingGroup(\r\n connection=env.asConn,\r\n launch_config=lc, \r\n group_name=env.aws.get('as_group'), load_balancers=env.aws.get('balancers'),\r\n 
availability_zones=env.aws.get('availability_zones'))\r\n # min_size=env.aws.get('min_size'), max_size=env.aws.get('max_size'))\r\n ag.update()\r\n # env.asConn.create_auto_scaling_group(ag) \r\n print \"Added launchConfiguration %s to group %s (updated AutoScaleGroup)\" % (lcName, env.aws.get('as_group'))", "def set_pool(self, value):\n self.gui.spn_pool.setValue(value)", "def set_pool_connections(self, pool_connections):\n CheckValue.check_int_gt_zero(pool_connections, 'pool_connections')\n self._pool_connections = pool_connections\n return self", "def set_cpus(self, num_cpus):\n if self.batch:\n self.batch_settings.batch_args[\"cpus-per-task\"] = num_cpus\n for db in self:\n db.run_settings.set_cpus_per_task(num_cpus)", "def set_auto_scaling (self, auto_scale_opt=None):\n txt = True\n if auto_scale_opt is not None:\n if int(auto_scale_opt) not in range(4):\n raise TypeError(\"Auto scale option must be in:\" + np.array(range(4), dtype='a1'))\n self._auto_scale_opt = auto_scale_opt\n txt = False\n else:\n self._auto_scale_opt += 1\n if self._auto_scale_opt == 4: self._auto_scale_opt = 0\n\n if self._auto_scale_opt == 0: # auto x and y\n if txt: print \"Auto scale X and Y axis\"\n self._auto_scale_x = True\n self._auto_scale_y = True\n elif self._auto_scale_opt == 1: # auto x\n if txt: print \"Auto scale X axis only\"\n self._auto_scale_x = True\n self._auto_scale_y = False\n elif self._auto_scale_opt == 2: # auto y\n if txt: print \"Auto scale Y axis only\"\n self._auto_scale_x = False\n self._auto_scale_y = True\n elif self._auto_scale_opt == 3: # auto off\n if txt: print \"Auto scale axis off\"\n self._auto_scale_x = False\n self._auto_scale_y = False", "def set_node_pool_management(\n self,\n project_id,\n zone,\n cluster_id,\n node_pool_id,\n management,\n name=None,\n retry=google.api_core.gapic_v1.method.DEFAULT,\n timeout=google.api_core.gapic_v1.method.DEFAULT,\n metadata=None,\n ):\n # Wrap the transport method to add retry and timeout logic.\n if \"set_node_pool_management\" not in self._inner_api_calls:\n self._inner_api_calls[\n \"set_node_pool_management\"\n ] = google.api_core.gapic_v1.method.wrap_method(\n self.transport.set_node_pool_management,\n default_retry=self._method_configs[\"SetNodePoolManagement\"].retry,\n default_timeout=self._method_configs[\"SetNodePoolManagement\"].timeout,\n client_info=self._client_info,\n )\n\n request = cluster_service_pb2.SetNodePoolManagementRequest(\n project_id=project_id,\n zone=zone,\n cluster_id=cluster_id,\n node_pool_id=node_pool_id,\n management=management,\n name=name,\n )\n if metadata is None:\n metadata = []\n metadata = list(metadata)\n try:\n routing_header = [(\"name\", name)]\n except AttributeError:\n pass\n else:\n routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(\n routing_header\n )\n metadata.append(routing_metadata)\n\n return self._inner_api_calls[\"set_node_pool_management\"](\n request, retry=retry, timeout=timeout, metadata=metadata\n )", "def config_scale(self, cnf={}, **kwargs):\n self._scale.config(cnf, **kwargs)\n # Update self._variable limits in case the ones of the scale have changed\n self._variable.configure(high=self._scale['to'],\n low=self._scale['from'])\n if 'orient' in cnf or 'orient' in kwargs:\n self._grid_widgets()", "def set_current_pool(self):\n self.ctl.fill_spn_pool()", "def set_fresnel(node_tree, bias_scale):\n\n node_tree.nodes[Glass.ADD_ENV_GROUP_NODE].inputs['Fresnel Bias'].default_value = bias_scale[0]\n 
node_tree.nodes[Glass.ADD_ENV_GROUP_NODE].inputs['Fresnel Scale'].default_value = bias_scale[1]", "def test_set_nat_pool_port_range(self, patch5):\n patch5.return_value = None\n self.assertEqual(self.cgn.set_nat_pool(name='pool1', addr='30.0.0.0/24',\n addr_low='30.0.0.1', addr_high='30.0.0.100',\n port_low='1000', port_high='2000',\n port_range_random=1, snmp_trap_low='1',\n snmp_trap_high='10',\n host_addr_base='10.0.0.1'), True)", "def test_set_nat_pool(self, patch4):\n patch4.return_value = None\n self.assertEqual(self.cgn.set_nat_pool(name='pool1', addr='30.0.0.0/24',\n addr_low='30.0.0.1', addr_high='30.0.0.100',\n port_low='1000', port_high='2000'), True)", "def scale(self, scale_factors):\r\n arg_str = p2e._base._util._convert_args_to_string(\"node.scale\", self._node._eco_id,\r\n scale_factors[0], \r\n scale_factors[1], \r\n scale_factors[2])\r\n p2e._app.Exec(arg_str)", "def update_subnet_pool(self, subnet_pool, **attrs):\n return self._update(_subnet_pool.SubnetPool, subnet_pool, **attrs)", "def set_up_omp_mpi_scaling_tests(scaling_root:str):\n\n input_xml = inputs_set2.input_xml\n zr_basis_xml = inputs_set2.zr_basis_xml\n o_basis_xml = inputs_set2.o_basis_xml\n\n # Check GW input script settings\n match = re.search('nempty=\"(.+?)\"', input_xml)\n n_empty = int(re.findall(r'\\d+', match.group())[0])\n assert n_empty == 100, \"n_empty != 100\"\n\n match = re.search('ngridq=\"(.+?)\"', input_xml)\n q_grid = [int(q) for q in re.findall(r'\\d+', match.group())]\n assert q_grid == [8, 8, 8], \"q_grid != [8, 8, 8]\"\n\n # Slurm script settings\n env_vars = OrderedDict([('EXE', '/users/sol/abuccheri/exciting/bin/excitingmpismp'),\n ('OUT', 'terminal.out'),\n ('export MKL_NUM_THREADS', '1'),\n ('export I_MPI_PIN_DOMAIN', 'sock')\n ])\n\n module_envs = ['intel/2019']\n\n # Cores per node\n ntasks_per_node = 4\n\n # OMP threads per MPI rank\n cpus_per_task = 9\n\n # Nodes to use in scaling tests\n # Dune 3 only appears to have 10 nodes available from nodes 181 - 196\n nodes = np.arange(1, 10+1)\n\n # These nodes differ in memory or processor to the rest of Dune 3\n # hence exclude 197 - 208\n exclude_nodes = ['node' + str(id) for id in range(197, 208 + 1)]\n\n # Timing in days, where key = node_count\n times = { 1: [4, 0, 0, 0],\n 2: [4, 0, 0, 0],\n 3: [4, 0, 0, 0],\n\n 4: [2, 0, 0, 0],\n 5: [2, 0, 0, 0],\n 6: [2, 0, 0, 0],\n\n 7: [1, 0, 0, 0],\n 8: [1, 0, 0, 0],\n 9: [1, 0, 0, 0],\n 10: [1, 0, 0, 0],\n 11: [1, 0, 0, 0],\n 12: [1, 0, 0, 0],\n 13: [1, 0, 0, 0],\n 14: [1, 0, 0, 0]}\n\n for node_count in nodes:\n job_dir = scaling_root + '/n_nodes_' + str(node_count)\n print(\"Writing files to:\", job_dir)\n\n Path(job_dir).mkdir(parents=True, exist_ok=True)\n\n write_file(job_dir + '/input.xml', input_xml)\n write_file(job_dir + '/Zr.xml', zr_basis_xml)\n write_file(job_dir + '/O.xml', o_basis_xml)\n\n slurm_directives = slurm.set_slurm_directives(job_name='scaling-omp-mpi-GW',\n time=times[node_count],\n partition='all',\n exclusive=True,\n nodes=node_count,\n ntasks_per_node=ntasks_per_node,\n cpus_per_task=cpus_per_task,\n hint='nomultithread',\n exclude=exclude_nodes)\n write_file(job_dir + '/run.sh', slurm.set_slurm_script(slurm_directives, env_vars, module_envs))\n\n return", "def rescale_image_using_pooling(image: tf.Tensor,\n scale: int, ):\n image = tf.cast(image, tf.float32)\n return tf.nn.pool(image, [scale, scale], \"AVG\", \"VALID\",\n strides=[scale, scale])", "def put_managed_scaling_policy(ClusterId=None, ManagedScalingPolicy=None):\n pass" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Sets the logging service for a specific cluster.
def set_logging_service( self, project_id, zone, cluster_id, logging_service, name=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): # Wrap the transport method to add retry and timeout logic. if "set_logging_service" not in self._inner_api_calls: self._inner_api_calls[ "set_logging_service" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.set_logging_service, default_retry=self._method_configs["SetLoggingService"].retry, default_timeout=self._method_configs["SetLoggingService"].timeout, client_info=self._client_info, ) request = cluster_service_pb2.SetLoggingServiceRequest( project_id=project_id, zone=zone, cluster_id=cluster_id, logging_service=logging_service, name=name, ) if metadata is None: metadata = [] metadata = list(metadata) try: routing_header = [("name", name)] except AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) return self._inner_api_calls["set_logging_service"]( request, retry=retry, timeout=timeout, metadata=metadata )
[ "def set_monitoring_service(\n self,\n project_id,\n zone,\n cluster_id,\n monitoring_service,\n name=None,\n retry=google.api_core.gapic_v1.method.DEFAULT,\n timeout=google.api_core.gapic_v1.method.DEFAULT,\n metadata=None,\n ):\n # Wrap the transport method to add retry and timeout logic.\n if \"set_monitoring_service\" not in self._inner_api_calls:\n self._inner_api_calls[\n \"set_monitoring_service\"\n ] = google.api_core.gapic_v1.method.wrap_method(\n self.transport.set_monitoring_service,\n default_retry=self._method_configs[\"SetMonitoringService\"].retry,\n default_timeout=self._method_configs[\"SetMonitoringService\"].timeout,\n client_info=self._client_info,\n )\n\n request = cluster_service_pb2.SetMonitoringServiceRequest(\n project_id=project_id,\n zone=zone,\n cluster_id=cluster_id,\n monitoring_service=monitoring_service,\n name=name,\n )\n if metadata is None:\n metadata = []\n metadata = list(metadata)\n try:\n routing_header = [(\"name\", name)]\n except AttributeError:\n pass\n else:\n routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(\n routing_header\n )\n metadata.append(routing_metadata)\n\n return self._inner_api_calls[\"set_monitoring_service\"](\n request, retry=retry, timeout=timeout, metadata=metadata\n )", "def log_cluster(self):\n pass", "def configure_services(cluster):\n services = cluster.get_all_services()\n\n for service in services:\n service_type = service.type\n if service_type == 'HDFS':\n print \"Configuring HDFS for Kerberos.\"\n service.update_config(\n {'hadoop_security_authentication': 'kerberos',\n 'hadoop_security_authorization': 'true'}\n )\n\n role_cfgs = service.get_all_role_config_groups()\n\n for role_cfg in role_cfgs:\n if role_cfg.roleType == 'DATANODE':\n role_cfg.update_config(\n {'dfs_datanode_port': '1004',\n 'dfs_datanode_http_port': '1006',\n 'dfs_datanode_data_dir_perm': '700'}\n )\n elif service_type == 'HBASE':\n print \"Configuring HBase for Kerberos.\"\n service.update_config(\n {'hbase_security_authentication': 'kerberos',\n 'hbase_security_authorization': 'true'}\n )\n elif service_type == 'ZOOKEEPER':\n print \"Configuring ZooKeeper for Kerberos.\"\n service.update_config(\n {'enableSecurity': 'true'}\n )\n elif service_type == 'SOLR':\n print \"Configuring Solr for Kerberos.\"\n service.update_config(\n {'solr_security_authentication': 'kerberos'}\n )\n elif service_type == 'KS_INDEXER':\n # API version 10 came out with CM 5.4, which is necessary to make this configuration\n # change.\n if API_CURRENT_VERSION >= 10:\n print \"Configuring KeyStoreIndexer for Kerberos.\"\n service.update_config(\n {'hbase_indexer_security_authentication': 'kerberos'}\n )\n elif service_type == 'HUE':\n kt_renewer_role = service.get_roles_by_type('KT_RENEWER')\n hue_server_role = service.get_roles_by_type('HUE_SERVER')\n\n if hue_server_role and not kt_renewer_role:\n print \"Configuring Hue for Kerberos.\"\n service.create_role('KT_RENEWER-1', 'KT_RENEWER',\n hue_server_role[0].hostRef.hostId)", "def set_logger(l):\n global _logger\n _logger = l", "def set_cluster(self, value):\n singleton = \"Singleton\"\n if isinstance(value, str):\n value = value.strip()\n\n # PhagesDB-output format.\n if value.capitalize() == singleton:\n self.cluster = singleton\n else:\n self.cluster = value\n\n # MySQL database-output format\n if value is None:\n self.cluster = singleton", "def put_tomcatlog_service():\n put(\"/usr/local/bin/tomcatlog.sh\", \"/usr/local/bin/tomcatlog.sh\")\n run(\"chmod +x /usr/local/bin/tomcatlog.sh\")\n 
put(\"/usr/lib/systemd/system/tomcatlogrights.service\", \"/usr/lib/systemd/system/tomcatlogrights.service\")\n run(\"systemctl daemon-reload\")\n run(\"systemctl enable tomcatlogrights\")\n run(\"systemctl start tomcatlogrights\")", "def set_logger(self, logger): \n self.logger = logger\n self.agent.set_logger(logger) #share logger with agent", "def setup_logger(service, default_level=logging.INFO):\n logger = logging.getLogger('mimic.' + service)\n logger.setLevel(default_level)\n logger.addHandler(logging.StreamHandler())\n\n return logger", "def set_logger(self, logger):\n self.LOGGER = logger", "def request_log_service_output(self, request_log_service_output: ConfigNodePropertyString):\n\n self._request_log_service_output = request_log_service_output", "def cluster_user(self, cluster_user):\n\n self._cluster_user = cluster_user", "def request_log_service_format(self, request_log_service_format: ConfigNodePropertyString):\n\n self._request_log_service_format = request_log_service_format", "def request_log_service_onentry(self, request_log_service_onentry: ConfigNodePropertyBoolean):\n\n self._request_log_service_onentry = request_log_service_onentry", "def set_cluster(self, data):\n cluster = Cluster(data['name'])\n for host in data['hosts']:\n cluster.add_host(**host)\n self._cluster = cluster", "def set_connection_logging(self, loadbalancer, val):\r\n loadbalancer.connection_logging = val", "def setLogger(self, filePathName='%s/data/%s/%s.log' % (gv.logLocation, getpass.getuser(), getpass.getuser()),\r\n name='pcsLogger', fresh=0):\r\n self.logger = pcsLogger.pcs_logger(filePathName=filePathName, name=name, fresh=fresh)", "def set_log(self, log_path):\n self.log = log_path", "def configure_service_set(ipsec_obj, **kwargs):\n return ipsec_obj.set_ss(**kwargs)", "def _set_logging(self):\n logging.basicConfig(**self.settings[\"general\"][\"logging\"])\n log.info(\n \"Setting logging config: {!r}\".format(self.settings[\"general\"][\"logging\"])\n )" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Sets the monitoring service for a specific cluster.
def set_monitoring_service( self, project_id, zone, cluster_id, monitoring_service, name=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): # Wrap the transport method to add retry and timeout logic. if "set_monitoring_service" not in self._inner_api_calls: self._inner_api_calls[ "set_monitoring_service" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.set_monitoring_service, default_retry=self._method_configs["SetMonitoringService"].retry, default_timeout=self._method_configs["SetMonitoringService"].timeout, client_info=self._client_info, ) request = cluster_service_pb2.SetMonitoringServiceRequest( project_id=project_id, zone=zone, cluster_id=cluster_id, monitoring_service=monitoring_service, name=name, ) if metadata is None: metadata = [] metadata = list(metadata) try: routing_header = [("name", name)] except AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) return self._inner_api_calls["set_monitoring_service"]( request, retry=retry, timeout=timeout, metadata=metadata )
[ "def set_cluster(self, data):\n cluster = Cluster(data['name'])\n for host in data['hosts']:\n cluster.add_host(**host)\n self._cluster = cluster", "def set_cluster_for_vios(self, vios_id, cluster_id, cluster_name):\n # first update the vios_keyed dict\n if vios_id not in self.vios_keyed:\n self.vios_keyed[vios_id] = {}\n old = self.vios_keyed[vios_id]['last_cluster'] = None\n else:\n old = self.vios_keyed[vios_id]['last_cluster']\n if old != cluster_id:\n LOG.info(_(\"VIOS id %(id)s changed membership from cluster ID \"\n \"%(old)s to %(new)s with display name '%(name)s'.\") %\n dict(id=vios_id, old=old, new=cluster_id,\n name=cluster_name))\n self.vios_keyed[vios_id]['last_cluster'] = cluster_id\n # remove from the cluster side too\n if old is not None and old in self.cluster_keyed:\n if vios_id in self.cluster_keyed[old]['set']:\n self.cluster_keyed[old]['set'].remove(vios_id)\n ###################################################################\n # set_cluster_seq is the collection sequence number that the VIOS\n # last reported the cluster membership for.\n # trust_seq is reset to 0 when the VIOS reports as a member for a\n # Cluster feed request. It is bumped up independently during a VIOS\n # feed request (which occurs prior to cluster feed in a topology\n # collection) if the VIOS has a good state and rmc_state.\n # This means that if the VIOS has not reported as being a member of\n # the cluster for some number of iterations, but the trust_seq\n # has bumped up to some small number, then we can \"trust\" that\n # the vios really is not a member of the cluster, and not just\n # experiencing a connectivity problem due to the network or heavy\n # load.\n ###################################################################\n self.vios_keyed[vios_id]['trust_seq'] = 0 # reset\n if cluster_id is None:\n self.vios_keyed[vios_id]['set_cluster_seq'] =\\\n (self.sequence_num - 1) # set sequence in past for None case\n return # Don't need to update cluster_keyed dict.\n self.vios_keyed[vios_id]['set_cluster_seq'] = self.sequence_num\n\n # Now update the cluster_keyed dict\n if cluster_id not in self.cluster_keyed:\n entry = {'set_cluster_seq': self.sequence_num, 'set': set()}\n self.cluster_keyed[cluster_id] = entry\n else:\n entry = self.cluster_keyed[cluster_id]\n entry['display_name'] = cluster_name\n LOG.debug(\"Vios_id=%s, Vios_keyed after update=%s, Cluster entry \"\n \"before update for cluster %s: %s.\" %\n (vios_id, self.vios_keyed[vios_id], cluster_id, entry))\n if entry['set_cluster_seq'] != self.sequence_num:\n # new topology collection sequence - reset membership\n entry['set'] = set()\n entry['set'].add(vios_id)\n entry['set_cluster_seq'] = self.sequence_num\n LOG.debug(\"Reset %s cluster membership for sequence %d to %s.\" %\n (cluster_id, self.sequence_num, entry['set']))\n else:\n entry['set'].add(vios_id)\n LOG.debug(\"Add VIOS %s to cluster %s: %s.\" %\n (vios_id, cluster_name, entry['set']))", "def cluster_user(self, cluster_user):\n\n self._cluster_user = cluster_user", "def configure_service_set(ipsec_obj, **kwargs):\n return ipsec_obj.set_ss(**kwargs)", "def set_start_cluster(self, start_cluster: int) -> None:\r\n self.start_cluster = start_cluster", "def set_logging_service(\n self,\n project_id,\n zone,\n cluster_id,\n logging_service,\n name=None,\n retry=google.api_core.gapic_v1.method.DEFAULT,\n timeout=google.api_core.gapic_v1.method.DEFAULT,\n metadata=None,\n ):\n # Wrap the transport method to add retry and timeout logic.\n if \"set_logging_service\" not in 
self._inner_api_calls:\n self._inner_api_calls[\n \"set_logging_service\"\n ] = google.api_core.gapic_v1.method.wrap_method(\n self.transport.set_logging_service,\n default_retry=self._method_configs[\"SetLoggingService\"].retry,\n default_timeout=self._method_configs[\"SetLoggingService\"].timeout,\n client_info=self._client_info,\n )\n\n request = cluster_service_pb2.SetLoggingServiceRequest(\n project_id=project_id,\n zone=zone,\n cluster_id=cluster_id,\n logging_service=logging_service,\n name=name,\n )\n if metadata is None:\n metadata = []\n metadata = list(metadata)\n try:\n routing_header = [(\"name\", name)]\n except AttributeError:\n pass\n else:\n routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(\n routing_header\n )\n metadata.append(routing_metadata)\n\n return self._inner_api_calls[\"set_logging_service\"](\n request, retry=retry, timeout=timeout, metadata=metadata\n )", "def cluster_sync_service_interval(self, cluster_sync_service_interval: ConfigNodePropertyInteger):\n\n self._cluster_sync_service_interval = cluster_sync_service_interval", "async def do_start_cluster(self, cluster):\n raise NotImplementedError", "def clustered(self, clustered):\n\n self._clustered = clustered", "def set_cluster(self, value):\n singleton = \"Singleton\"\n if isinstance(value, str):\n value = value.strip()\n\n # PhagesDB-output format.\n if value.capitalize() == singleton:\n self.cluster = singleton\n else:\n self.cluster = value\n\n # MySQL database-output format\n if value is None:\n self.cluster = singleton", "def cluster_password(self, cluster_password):\n\n self._cluster_password = cluster_password", "def select_cluster(self, clusters):\n pass", "def configure_services(cluster):\n services = cluster.get_all_services()\n\n for service in services:\n service_type = service.type\n if service_type == 'HDFS':\n print \"Configuring HDFS for Kerberos.\"\n service.update_config(\n {'hadoop_security_authentication': 'kerberos',\n 'hadoop_security_authorization': 'true'}\n )\n\n role_cfgs = service.get_all_role_config_groups()\n\n for role_cfg in role_cfgs:\n if role_cfg.roleType == 'DATANODE':\n role_cfg.update_config(\n {'dfs_datanode_port': '1004',\n 'dfs_datanode_http_port': '1006',\n 'dfs_datanode_data_dir_perm': '700'}\n )\n elif service_type == 'HBASE':\n print \"Configuring HBase for Kerberos.\"\n service.update_config(\n {'hbase_security_authentication': 'kerberos',\n 'hbase_security_authorization': 'true'}\n )\n elif service_type == 'ZOOKEEPER':\n print \"Configuring ZooKeeper for Kerberos.\"\n service.update_config(\n {'enableSecurity': 'true'}\n )\n elif service_type == 'SOLR':\n print \"Configuring Solr for Kerberos.\"\n service.update_config(\n {'solr_security_authentication': 'kerberos'}\n )\n elif service_type == 'KS_INDEXER':\n # API version 10 came out with CM 5.4, which is necessary to make this configuration\n # change.\n if API_CURRENT_VERSION >= 10:\n print \"Configuring KeyStoreIndexer for Kerberos.\"\n service.update_config(\n {'hbase_indexer_security_authentication': 'kerberos'}\n )\n elif service_type == 'HUE':\n kt_renewer_role = service.get_roles_by_type('KT_RENEWER')\n hue_server_role = service.get_roles_by_type('HUE_SERVER')\n\n if hue_server_role and not kt_renewer_role:\n print \"Configuring Hue for Kerberos.\"\n service.create_role('KT_RENEWER-1', 'KT_RENEWER',\n hue_server_role[0].hostRef.hostId)", "def service_unit(self, service_unit):\n\n self._service_unit = service_unit", "def update_cluster(self, cluster_id, values):", "def 
change_cluster_name(self, cluster_name):\n\n if not self.status.is_running:\n raise RuntimeError(_(\"Cannot change the cluster name. \"\n \"The service is not running.\"))\n\n LOG.debug(\"Changing the cluster name to '%s'.\", cluster_name)\n\n # Update the in-database value.\n self.__reset_cluster_name(cluster_name)\n\n # Update the configuration property.\n self._update_cluster_name_property(cluster_name)\n\n self.restart()", "def modify_cluster(ClusterId=None, StepConcurrencyLevel=None):\n pass", "def cluster_sync_service_timeout(self, cluster_sync_service_timeout: ConfigNodePropertyInteger):\n\n self._cluster_sync_service_timeout = cluster_sync_service_timeout", "def cluster_notification_interval(self, cluster_notification_interval):\n\n self._cluster_notification_interval = cluster_notification_interval" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Sets the addons for a specific cluster.
def set_addons_config( self, project_id, zone, cluster_id, addons_config, name=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): # Wrap the transport method to add retry and timeout logic. if "set_addons_config" not in self._inner_api_calls: self._inner_api_calls[ "set_addons_config" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.set_addons_config, default_retry=self._method_configs["SetAddonsConfig"].retry, default_timeout=self._method_configs["SetAddonsConfig"].timeout, client_info=self._client_info, ) request = cluster_service_pb2.SetAddonsConfigRequest( project_id=project_id, zone=zone, cluster_id=cluster_id, addons_config=addons_config, name=name, ) if metadata is None: metadata = [] metadata = list(metadata) try: routing_header = [("name", name)] except AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) return self._inner_api_calls["set_addons_config"]( request, retry=retry, timeout=timeout, metadata=metadata )
[ "def addons(self, value):\n if self._addons:\n raise RuntimeError(\"AddonManager already set!\")\n self._addons = value", "def set_cluster(self, data):\n cluster = Cluster(data['name'])\n for host in data['hosts']:\n cluster.add_host(**host)\n self._cluster = cluster", "def update_cluster(self, cluster_id, values):", "def set(self, cluster: str, namespace: str, type: str, labels: Labels) -> Labels:\n with self._lock:\n self._ns(cluster, namespace)[type] = labels\n return labels", "def select_cluster(self, clusters):\n pass", "def ZaServer_setZnsCluster(self, *args):\n\n info = None\n\n cmd = {\n \"cmd\":\"$ZaServer.setZnsCluster\",\n \"args\": {\n\t \"serverId\": self._cfg[\"serverId\"],\n\t \"znsClusterId\": None\n\t }\n }\n\n if len(args) > 0:\n cmd['args']['znsClusterId'] = args[0]\n\t self._cfg[\"znsClusterId\"] = args[0]\n else:\n\t self._cfg[\"znsClusterId\"] = 'default'\n\n # print \"\\n>>>\", cmd\n\n info = self.makeZapyRequest(json.dumps(cmd))\n\n # print \"\\n<<<\", info\n\n return info", "def test_add_cluster_admin(self):\n with requests_mock.Mocker() as m:\n m.register_uri(\n requests_mock.POST,\n \"http://localhost:8086/cluster_admins\"\n )\n\n cli = InfluxDBClient(database='db')\n cli.add_cluster_admin(\n new_username='paul',\n new_password='laup'\n )\n\n self.assertDictEqual(\n json.loads(m.last_request.body),\n {\n 'name': 'paul',\n 'password': 'laup'\n }\n )", "def set_up_addon_profiles(self, mc: ManagedCluster) -> ManagedCluster:\n addon_consts = self.context.get_addon_consts()\n CONST_GITOPS_ADDON_NAME = addon_consts.get(\"CONST_GITOPS_ADDON_NAME\")\n\n mc = super().set_up_addon_profiles(mc)\n addon_profiles = mc.addon_profiles\n addons = self.context.get_enable_addons()\n if \"gitops\" in addons:\n addon_profiles[\n CONST_GITOPS_ADDON_NAME\n ] = self.build_gitops_addon_profile()\n mc.addon_profiles = addon_profiles\n\n if \"web_application_routing\" in addons:\n if mc.ingress_profile is None:\n mc.ingress_profile = self.models.ManagedClusterIngressProfile()\n mc.ingress_profile.web_app_routing = self.build_web_app_routing_profile()\n\n return mc", "def modify_cluster(ClusterId=None, StepConcurrencyLevel=None):\n pass", "def cmd_node_update_cluster(self, args):\n node_id = args[0]\n cluster_id = args[1]\n data = {'cluster_id': cluster_id}\n self._update_obj(node_id, 'node', data)", "def set_selected_clusters(self, clusters):\n self.set_selected_items(list(map(self.item, clusters)))", "def init_multicluster_ocsci_conf(args, nclusters):\n parser = argparse.ArgumentParser(add_help=False)\n # Dynamically adding the argument --cluster$i to enforce\n # user's to pass --cluster$i param followed by normal cluster conf\n # options so that separation of per cluster conf will be easier\n for i in range(nclusters):\n parser.add_argument(\n f\"--cluster{i+1}\",\n required=True,\n action=\"store_true\",\n help=(\n \"Index argument for per cluster args, \"\n \"this marks the start of the cluster{i} args\"\n \"any args between --cluster{i} and --cluster{i+1} will be\",\n \"considered as arguments for cluster{i}\",\n ),\n )\n\n # Parsing just to enforce `nclusters` number of --cluster{i} arguments are passed\n _, _ = parser.parse_known_args(args[2:])\n multicluster_conf, common_argv = tokenize_per_cluster_args(args[2:], nclusters)\n\n # We need to seperate common arguments and cluster specific arguments\n framework.config.multicluster = True\n framework.config.nclusters = nclusters\n framework.config.init_cluster_configs()\n framework.config.reset_ctx()\n for index in 
range(nclusters):\n framework.config.switch_ctx(index)\n process_ocsci_conf(common_argv + multicluster_conf[index][1:])\n for arg in range(len(multicluster_conf[index][1:])):\n if multicluster_conf[index][arg + 1].startswith(\"--\"):\n multicluster_conf[index][\n arg + 1\n ] = f\"{multicluster_conf[index][arg+1]}{index + 1}\"\n framework.config.multicluster_args.append(multicluster_conf[index][1:])\n check_config_requirements()\n framework.config.multicluster_common_args.append(common_argv)\n # Set context to default_cluster_context_index\n framework.config.switch_default_cluster_ctx()\n # Set same run_id across all clusters\n # there is a race condition in which multiple run id's could be generated\n universal_run_id = framework.config.RUN[\"run_id\"]\n for cluster in framework.config.clusters:\n cluster.RUN[\"run_id\"] = universal_run_id", "def update_clusters(self, updates):\n if not updates:\n return\n with self.db.begin() as conn:\n conn.execute(\n clusters.update().where(clusters.c.id == sa.bindparam(\"_id\")),\n [{\"_id\": c.id, **u} for c, u in updates],\n )\n for c, u in updates:\n for k, v in u.items():\n setattr(c, k, v)", "def setup_cluster_or_multicore(self):\n if self.cluster_mode == 1:\n cluster_name = self.options['cluster_type']\n try:\n self.cluster = cluster.from_name[cluster_name](**self.options)\n except KeyError:\n # Check if a plugin define this type of cluster\n # check for PLUGIN format\n cluster_class = misc.from_plugin_import(self.plugin_path, \n 'new_cluster', cluster_name,\n info = 'cluster handling will be done with PLUGIN: %{plug}s' )\n if cluster_class:\n self.cluster = cluster_class(**self.options)\n \n if self.cluster_mode == 2:\n try:\n import multiprocessing\n if not self.nb_core:\n try:\n self.nb_core = int(self.options['nb_core'])\n except TypeError:\n self.nb_core = multiprocessing.cpu_count()\n logger.info('Using %d cores' % self.nb_core)\n except ImportError:\n self.nb_core = 1\n logger.warning('Impossible to detect the number of cores => Using One.\\n'+\n 'Use set nb_core X in order to set this number and be able to'+\n 'run in multicore.')\n\n self.cluster = cluster.MultiCore(**self.options)", "def configure_cluster(control_node, agent_nodes):\n return sequence([\n run_remotely(\n username='root',\n address=control_node,\n commands=task_enable_flocker_control(),\n ),\n sequence([\n sequence([\n Effect(Func(lambda node=node: configure_ssh(node, 22))),\n run_remotely(\n username='root',\n address=node,\n commands=task_enable_flocker_agent(\n node_name=node,\n control_node=control_node,\n ),\n ),\n ]) for node in agent_nodes\n ])\n ])", "def cluster_reconnect_attempts(self, cluster_reconnect_attempts):\n\n self._cluster_reconnect_attempts = cluster_reconnect_attempts", "async def _addcluster(self, ctx: commands.Context,\n clustername,\n joinchannel: discord.TextChannel,\n leavechannel: discord.TextChannel,\n adminlogchannel: discord.TextChannel,\n globalchatchannel: discord.TextChannel):\n async with self.config.guild(ctx.guild).clusters() as clusters:\n if clustername in clusters.keys():\n await ctx.send(\"Cluster already exists\")\n else:\n clusters[clustername.lower()] = {\n \"joinchannel\": joinchannel.id,\n \"leavechannel\": leavechannel.id,\n \"adminlogchannel\": adminlogchannel.id,\n \"globalchatchannel\": globalchatchannel.id,\n \"servertoserver\": False,\n \"servers\": {}\n }\n await ctx.send(f\"**{clustername}** has been added to the list of clusters.\")", "def add(self, node, cluster):\n self.assign(node, cluster, True)", "def 
set_cluster_for_vios(self, vios_id, cluster_id, cluster_name):\n # first update the vios_keyed dict\n if vios_id not in self.vios_keyed:\n self.vios_keyed[vios_id] = {}\n old = self.vios_keyed[vios_id]['last_cluster'] = None\n else:\n old = self.vios_keyed[vios_id]['last_cluster']\n if old != cluster_id:\n LOG.info(_(\"VIOS id %(id)s changed membership from cluster ID \"\n \"%(old)s to %(new)s with display name '%(name)s'.\") %\n dict(id=vios_id, old=old, new=cluster_id,\n name=cluster_name))\n self.vios_keyed[vios_id]['last_cluster'] = cluster_id\n # remove from the cluster side too\n if old is not None and old in self.cluster_keyed:\n if vios_id in self.cluster_keyed[old]['set']:\n self.cluster_keyed[old]['set'].remove(vios_id)\n ###################################################################\n # set_cluster_seq is the collection sequence number that the VIOS\n # last reported the cluster membership for.\n # trust_seq is reset to 0 when the VIOS reports as a member for a\n # Cluster feed request. It is bumped up independently during a VIOS\n # feed request (which occurs prior to cluster feed in a topology\n # collection) if the VIOS has a good state and rmc_state.\n # This means that if the VIOS has not reported as being a member of\n # the cluster for some number of iterations, but the trust_seq\n # has bumped up to some small number, then we can \"trust\" that\n # the vios really is not a member of the cluster, and not just\n # experiencing a connectivity problem due to the network or heavy\n # load.\n ###################################################################\n self.vios_keyed[vios_id]['trust_seq'] = 0 # reset\n if cluster_id is None:\n self.vios_keyed[vios_id]['set_cluster_seq'] =\\\n (self.sequence_num - 1) # set sequence in past for None case\n return # Don't need to update cluster_keyed dict.\n self.vios_keyed[vios_id]['set_cluster_seq'] = self.sequence_num\n\n # Now update the cluster_keyed dict\n if cluster_id not in self.cluster_keyed:\n entry = {'set_cluster_seq': self.sequence_num, 'set': set()}\n self.cluster_keyed[cluster_id] = entry\n else:\n entry = self.cluster_keyed[cluster_id]\n entry['display_name'] = cluster_name\n LOG.debug(\"Vios_id=%s, Vios_keyed after update=%s, Cluster entry \"\n \"before update for cluster %s: %s.\" %\n (vios_id, self.vios_keyed[vios_id], cluster_id, entry))\n if entry['set_cluster_seq'] != self.sequence_num:\n # new topology collection sequence - reset membership\n entry['set'] = set()\n entry['set'].add(vios_id)\n entry['set_cluster_seq'] = self.sequence_num\n LOG.debug(\"Reset %s cluster membership for sequence %d to %s.\" %\n (cluster_id, self.sequence_num, entry['set']))\n else:\n entry['set'].add(vios_id)\n LOG.debug(\"Add VIOS %s to cluster %s: %s.\" %\n (vios_id, cluster_name, entry['set']))" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Sets the locations for a specific cluster.
def set_locations( self, project_id, zone, cluster_id, locations, name=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): # Wrap the transport method to add retry and timeout logic. if "set_locations" not in self._inner_api_calls: self._inner_api_calls[ "set_locations" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.set_locations, default_retry=self._method_configs["SetLocations"].retry, default_timeout=self._method_configs["SetLocations"].timeout, client_info=self._client_info, ) request = cluster_service_pb2.SetLocationsRequest( project_id=project_id, zone=zone, cluster_id=cluster_id, locations=locations, name=name, ) if metadata is None: metadata = [] metadata = list(metadata) try: routing_header = [("name", name)] except AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) return self._inner_api_calls["set_locations"]( request, retry=retry, timeout=timeout, metadata=metadata )
[ "def update_cluster(cluster_collection, locations, centroid_id):\n bulk = cluster_collection.initialize_unordered_bulk_op()\n bulk.find({\"_id\": {\"$in\": locations}}).update({\"$set\": {\"centroid\": centroid_id}})\n try:\n bulk.execute()\n except BulkWriteError as bwe:\n logging.getLogger(__name__).error(bwe)", "def update_cluster(self, cluster_id, values):", "def set_cluster(self, data):\n cluster = Cluster(data['name'])\n for host in data['hosts']:\n cluster.add_host(**host)\n self._cluster = cluster", "def cluster_ids(self, cluster_ids):\n if isinstance(cluster_ids, np.ndarray):\n cluster_ids = cluster_ids.tolist()\n self._cluster_ids = sorted(cluster_ids)", "def poblar_cluster():\n\n lista = modelo.cluster_centers_.tolist()\n\n if connection.is_connected():\n cur = connection.cursor()\n i = 0\n for l in lista:\n longitud = l[0]\n latitud = l[1]\n\n sql = f'UPDATE Cluster SET longitud={longitud}, latitud={latitud} WHERE id_cluster={i}'\n\n cur.execute(sql)\n\n connection.commit()\n i += 1", "def select_cluster(self, clusters):\n pass", "def update_clusters(self, updates):\n if not updates:\n return\n with self.db.begin() as conn:\n conn.execute(\n clusters.update().where(clusters.c.id == sa.bindparam(\"_id\")),\n [{\"_id\": c.id, **u} for c, u in updates],\n )\n for c, u in updates:\n for k, v in u.items():\n setattr(c, k, v)", "def set_locations(self, locations_graph):\n for package in self._packages:\n if package.location not in map(lambda x: x.data, self._locations.get_vertex_list()):\n for vertex in locations_graph.get_vertex_list():\n if vertex.data == package.location:\n self._locations.add_vertex(vertex.data.name, vertex.data)\n break\n\n for location in map(lambda x: x.data, self._locations.get_vertex_list()):\n # index in truck graph\n index = self._locations.get_vertex_list().index(self._locations.get_vertex(location.name))\n\n # index in graph of all locations\n all_locations_index = locations_graph.get_vertex_list().index(locations_graph.get_vertex(location.name))\n\n for num, loc in enumerate(map(lambda x: x.data, self._locations.get_vertex_list())):\n cur_index = locations_graph.get_vertex(loc.name).index\n self._locations.adjacency_matrix[index][num] = locations_graph.adjacency_matrix[all_locations_index][cur_index]", "def clustered(self, clustered):\n\n self._clustered = clustered", "def set_selected_clusters(self, clusters):\n self.set_selected_items(list(map(self.item, clusters)))", "def set_cluster_for_vios(self, vios_id, cluster_id, cluster_name):\n # first update the vios_keyed dict\n if vios_id not in self.vios_keyed:\n self.vios_keyed[vios_id] = {}\n old = self.vios_keyed[vios_id]['last_cluster'] = None\n else:\n old = self.vios_keyed[vios_id]['last_cluster']\n if old != cluster_id:\n LOG.info(_(\"VIOS id %(id)s changed membership from cluster ID \"\n \"%(old)s to %(new)s with display name '%(name)s'.\") %\n dict(id=vios_id, old=old, new=cluster_id,\n name=cluster_name))\n self.vios_keyed[vios_id]['last_cluster'] = cluster_id\n # remove from the cluster side too\n if old is not None and old in self.cluster_keyed:\n if vios_id in self.cluster_keyed[old]['set']:\n self.cluster_keyed[old]['set'].remove(vios_id)\n ###################################################################\n # set_cluster_seq is the collection sequence number that the VIOS\n # last reported the cluster membership for.\n # trust_seq is reset to 0 when the VIOS reports as a member for a\n # Cluster feed request. 
It is bumped up independently during a VIOS\n # feed request (which occurs prior to cluster feed in a topology\n # collection) if the VIOS has a good state and rmc_state.\n # This means that if the VIOS has not reported as being a member of\n # the cluster for some number of iterations, but the trust_seq\n # has bumped up to some small number, then we can \"trust\" that\n # the vios really is not a member of the cluster, and not just\n # experiencing a connectivity problem due to the network or heavy\n # load.\n ###################################################################\n self.vios_keyed[vios_id]['trust_seq'] = 0 # reset\n if cluster_id is None:\n self.vios_keyed[vios_id]['set_cluster_seq'] =\\\n (self.sequence_num - 1) # set sequence in past for None case\n return # Don't need to update cluster_keyed dict.\n self.vios_keyed[vios_id]['set_cluster_seq'] = self.sequence_num\n\n # Now update the cluster_keyed dict\n if cluster_id not in self.cluster_keyed:\n entry = {'set_cluster_seq': self.sequence_num, 'set': set()}\n self.cluster_keyed[cluster_id] = entry\n else:\n entry = self.cluster_keyed[cluster_id]\n entry['display_name'] = cluster_name\n LOG.debug(\"Vios_id=%s, Vios_keyed after update=%s, Cluster entry \"\n \"before update for cluster %s: %s.\" %\n (vios_id, self.vios_keyed[vios_id], cluster_id, entry))\n if entry['set_cluster_seq'] != self.sequence_num:\n # new topology collection sequence - reset membership\n entry['set'] = set()\n entry['set'].add(vios_id)\n entry['set_cluster_seq'] = self.sequence_num\n LOG.debug(\"Reset %s cluster membership for sequence %d to %s.\" %\n (cluster_id, self.sequence_num, entry['set']))\n else:\n entry['set'].add(vios_id)\n LOG.debug(\"Add VIOS %s to cluster %s: %s.\" %\n (vios_id, cluster_name, entry['set']))", "def assign_to_centers(self):\n self.kmeans.assign_to_centers()\n clusters = self.kmeans.get_clusters()\n\n for i in range(len(clusters)):\n for point in clusters[i]:\n # get the ID and update its color\n self.canvas.itemconfig(self.data_to_id[str(point)], fill=self.COLORS[i])\n \n self.canvas.pack(fill=BOTH, expand=1)", "def set(self, cluster: str, namespace: str, type: str, labels: Labels) -> Labels:\n with self._lock:\n self._ns(cluster, namespace)[type] = labels\n return labels", "def cluster_user(self, cluster_user):\n\n self._cluster_user = cluster_user", "def cmd_node_update_cluster(self, args):\n node_id = args[0]\n cluster_id = args[1]\n data = {'cluster_id': cluster_id}\n self._update_obj(node_id, 'node', data)", "def set_location(self, v):\n self.location = v", "def set_centroids(self):\n centroid_coords = lambda x: (x.x, x.y)\n self.centroids = self.data_class([centroid_coords(t.centroid) for t in self.grid_polys])", "def setClusterID(self, id_):\n self.cluster_id_ = id_\n self.peptide.setClusterID(self.get_feature_id(), id_)", "def setLocation(self, account, location):\n\n assert_is_admin(account, \"Only administrator accounts can change the location of a lab\")\n\n if location != self.location:\n lab = self._getLab()\n\n try:\n lab.location = location\n lab.put()\n self.location = location\n except Exception as e:\n raise InputError(\"\"\"Cannot set the location of lab '%s' to '%s'.\n Click below for more details.\"\"\" % (lab.name, location),\n detail=e)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Updates the master for a specific cluster.
def update_master( self, project_id, zone, cluster_id, master_version, name=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): # Wrap the transport method to add retry and timeout logic. if "update_master" not in self._inner_api_calls: self._inner_api_calls[ "update_master" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.update_master, default_retry=self._method_configs["UpdateMaster"].retry, default_timeout=self._method_configs["UpdateMaster"].timeout, client_info=self._client_info, ) request = cluster_service_pb2.UpdateMasterRequest( project_id=project_id, zone=zone, cluster_id=cluster_id, master_version=master_version, name=name, ) if metadata is None: metadata = [] metadata = list(metadata) try: routing_header = [("name", name)] except AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) return self._inner_api_calls["update_master"]( request, retry=retry, timeout=timeout, metadata=metadata )
[ "def cmd_node_update_cluster(self, args):\n node_id = args[0]\n cluster_id = args[1]\n data = {'cluster_id': cluster_id}\n self._update_obj(node_id, 'node', data)", "def test_slave_master_up_cluster_id(self):\n self._cluster.master = None\n self._slave_1.is_slave = False\n self._slave_1.cluster_id = 1\n self._cluster.update_cluster()\n assert_equals(self._slave_1.cluster_id, 0)", "def db_cluster_update(token, status, cluster_id, master_IP='', state='', password='', error=''):\n try:\n user = UserInfo.objects.get(okeanos_token=token)\n cluster = ClusterInfo.objects.get(id=cluster_id)\n except ObjectDoesNotExist:\n msg = 'Cluster with given name does not exist in pending state'\n raise ObjectDoesNotExist(msg)\n if password:\n user.master_vm_password = u'The root password of \\\"{0}\\\"({1}) master VM is {2}'.format(cluster.cluster_name,cluster.id,password)\n if error:\n user.error_message = u'Cluster \\\"{0}\\\"({1}) creation failed due to error: {2}'.format(cluster.cluster_name,cluster.id, error)\n\n if status == \"Active\":\n cluster.cluster_status = const_cluster_status_active\n user.master_vm_password = ''\n user.error_message = ''\n\n elif status == \"Pending\":\n cluster.cluster_status = const_cluster_status_pending\n \n elif status == \"Failed\":\n cluster.cluster_status = const_cluster_status_failed\n\n elif status == \"Destroyed\":\n cluster.cluster_status = const_cluster_status_destroyed\n cluster.master_IP = ''\n cluster.state= 'Deleted'\n cluster.hadoop_status = const_hadoop_status_stopped\n\n if state:\n cluster.state = state\n if master_IP:\n cluster.master_IP = master_IP\n user.save()\n cluster.save()", "def cluster_master_level(self, cluster_master_level):\n\n self._cluster_master_level = cluster_master_level", "def configure_master(\n self,\n ssh_client: paramiko.client.SSHClient,\n cluster: FlintrockCluster):\n raise NotImplementedError", "def update_cluster(self, cluster_id, values):", "def test_lost_master_up_cluster_id(self):\n self._cluster.master = None\n self._lost_1.is_slave = False\n self._lost_1.refresh_role = Mock()\n self._lost_1.cluster_id = 1\n self._cluster.update_cluster()\n assert_equals(self._lost_1.cluster_id, 0)", "def SetMasterInstance(self, master_instance):\n self.master = master_instance.key\n self.put()", "def reset_cluster_after_upgrade_neutron_ceph(self):\n self.env.revert_snapshot('upgrade_master_neutron_ceph')\n cluster_id = self.fuel_web.get_last_created_cluster()\n\n self.fuel_web.stop_reset_env_wait(cluster_id)\n\n self.env.bootstrap_nodes(\n self.env.d_env.nodes().slaves[6:7])\n\n self.fuel_web.update_nodes(\n cluster_id,\n {'slave-02': ['controller'],\n 'slave-03': ['controller'],\n 'slave-05': ['compute', 'ceph-osd'],\n 'slave-06': ['compute', 'ceph-osd']\n }, False, True\n )\n self.fuel_web.update_nodes(\n cluster_id,\n {'slave-07': ['controller']}\n )\n\n self.fuel_web.run_network_verify(cluster_id)\n self.fuel_web.deploy_cluster_wait(cluster_id, check_services=False)\n self.fuel_web.run_ostf(cluster_id,\n test_sets=['ha', 'sanity', 'smoke'],\n should_fail=1)", "def update_cluster(self, model_data, **kwargs):\n\n url = self._make_url(\"/v1/cluster/{0}/\".format(model_data[\"id\"]))\n return self._session.put(url, json=model_data, **kwargs)", "def cluster_updated(configuration, cluster_state):", "def __set_minion_master(self):\n master_id = self.master_remote.hostname\n for rem in self.remotes.iterkeys():\n # remove old master public key if present. 
Minion will refuse to\n # start if master name changed but old key is present\n delete_file(rem, '/etc/salt/pki/minion/minion_master.pub',\n sudo=True, check=False)\n\n # set master id\n sed_cmd = ('echo master: {} > '\n '/etc/salt/minion.d/master.conf').format(master_id)\n rem.run(args=[\n 'sudo',\n 'sh',\n '-c',\n sed_cmd,\n ])", "def managed_cluster_update(self) -> 'outputs.ManagedClusterUpdateResponse':\n return pulumi.get(self, \"managed_cluster_update\")", "def test_slave_master_up(self):\n self._cluster.master = None\n self._slave_1.is_slave = False\n status = self._cluster.update_cluster()\n assert_equals(status, {\n 'master_down': False,\n 'master_up': self._slave_1,\n 'slaves_down': [],\n 'slaves_up': [],\n 'out': []\n })\n assert_items_equal([self._slave_2], self._cluster.slaves)\n assert_items_equal([self._lost_1, self._lost_2], self._cluster.lost)\n assert_equals(self._slave_1, self._cluster.master)", "def set_master(self, master):\n self.master_host = master", "def update_masters(self):\n # We have no masters, check if any are available.\n # Send a request, and wait a second for replies.\n servers = self.request_keycount()\n if servers != []:\n self.masters = servers[:2]\n self.nodes = servers", "def cli_cosmosdb_managed_cassandra_cluster_update(client,\r\n resource_group_name,\r\n cluster_name,\r\n tags=None,\r\n identity_type=None,\r\n client_certificates=None,\r\n external_gossip_certificates=None,\r\n external_seed_nodes=None,\r\n cassandra_version=None,\r\n authentication_method=None,\r\n hours_between_backups=None,\r\n repair_enabled=None):\r\n\r\n cluster_resource = client.get(resource_group_name, cluster_name)\r\n\r\n if client_certificates is None:\r\n client_certificates = cluster_resource.properties.client_certificates\r\n\r\n if external_gossip_certificates is not None:\r\n external_gossip_certificates = cluster_resource.properties.external_gossip_certificates\r\n\r\n if external_seed_nodes is None:\r\n external_seed_nodes = cluster_resource.properties.external_seed_nodes\r\n\r\n if cassandra_version is None:\r\n cassandra_version = cluster_resource.properties.cassandra_version\r\n\r\n if authentication_method is None:\r\n authentication_method = cluster_resource.properties.authentication_method\r\n\r\n if hours_between_backups is None:\r\n hours_between_backups = cluster_resource.properties.hours_between_backups\r\n\r\n if repair_enabled is None:\r\n repair_enabled = cluster_resource.properties.repair_enabled\r\n\r\n if tags is None:\r\n tags = cluster_resource.tags\r\n\r\n identity = cluster_resource.identity\r\n\r\n if identity_type is not None:\r\n identity = ManagedCassandraManagedServiceIdentity(type=identity_type)\r\n\r\n cluster_properties = ClusterResourceProperties(\r\n provisioning_state=cluster_resource.properties.provisioning_state,\r\n restore_from_backup_id=cluster_resource.properties.restore_from_backup_id,\r\n delegated_management_subnet_id=cluster_resource.properties.delegated_management_subnet_id,\r\n cassandra_version=cassandra_version,\r\n cluster_name_override=cluster_resource.properties.cluster_name_override,\r\n authentication_method=authentication_method,\r\n initial_cassandra_admin_password=cluster_resource.properties.initial_cassandra_admin_password,\r\n hours_between_backups=hours_between_backups,\r\n repair_enabled=repair_enabled,\r\n client_certificates=client_certificates,\r\n external_gossip_certificates=external_gossip_certificates,\r\n gossip_certificates=cluster_resource.properties.gossip_certificates,\r\n 
external_seed_nodes=cluster_resource.properties.external_seed_nodes,\r\n seed_nodes=cluster_resource.properties.seed_nodes\r\n )\r\n\r\n cluster_resource_create_update_parameters = ClusterResource(\r\n location=cluster_resource.location,\r\n tags=tags,\r\n identity=identity,\r\n properties=cluster_properties)\r\n\r\n return client.begin_create_update(resource_group_name, cluster_name, cluster_resource_create_update_parameters)", "def upgrade_master_neutron_ceph(self):\n self.check_run('upgrade_master_neutron_ceph')\n if not self.env.revert_snapshot('ceph_rados_gw'):\n raise SkipTest()\n\n self.env.admin_actions.upgrade_master_node()\n\n cluster_id = self.fuel_web.get_last_created_cluster()\n nodes_count = len(self.fuel_web.client.list_cluster_nodes(cluster_id))\n\n self.fuel_web.assert_fuel_version(hlp_data.UPGRADE_FUEL_TO)\n self.fuel_web.assert_nodes_in_ready_state(cluster_id)\n self.fuel_web.wait_nodes_get_online_state(\n self.env.d_env.nodes().slaves[:nodes_count])\n self.fuel_web.assert_nailgun_upgrade_migration()\n\n self.env.make_snapshot('upgrade_master_neutron_ceph', is_make=True)", "def modify_cluster(ClusterId=None, StepConcurrencyLevel=None):\n pass" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Lists all operations in a project in a specific zone or all zones.
def list_operations( self, project_id, zone, parent=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): # Wrap the transport method to add retry and timeout logic. if "list_operations" not in self._inner_api_calls: self._inner_api_calls[ "list_operations" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.list_operations, default_retry=self._method_configs["ListOperations"].retry, default_timeout=self._method_configs["ListOperations"].timeout, client_info=self._client_info, ) request = cluster_service_pb2.ListOperationsRequest( project_id=project_id, zone=zone, parent=parent ) if metadata is None: metadata = [] metadata = list(metadata) try: routing_header = [("parent", parent)] except AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) return self._inner_api_calls["list_operations"]( request, retry=retry, timeout=timeout, metadata=metadata )
[ "def get_operations_in_zone(self, zone):\n\n\t\treturn self.compute.zoneOperations().list(project=self.project, zone=zone).execute()", "def list(cls, api_client, **kwargs):\n\n cmd = {}\n cmd.update(kwargs)\n if 'account' in kwargs.keys() and 'domainid' in kwargs.keys():\n cmd['listall'] = True\n return super(Zone, cls).list(api_client.listZones(**cmd).get('zone'))", "def availability_zone_list(request):\n az_manager = moganclient(request).availability_zone\n return az_manager.list()", "def get_instance_operations(self, instance_json, operation_type):\n\n\t\tzone = instance_json['zone'].rsplit('/', 1)[-1]\n\t\tinstance_id = instance_json['id']\n\t\tfilter_by = '(targetId eq '+instance_id+')(operationType eq '+operation_type+')'\n\t\treturn self.compute.zoneOperations().list(project=self.project, zone=zone, filter=filter_by).execute()", "def test_zone_list_function():\n response = zone.list()\n assert response.success\n\n payload = response.payload\n assert payload['url'] == 'https://api.cloudns.net/dns/list-zones.json'\n assert payload['params']['page'] == 1\n assert payload['params']['rows-per-page'] == 10\n assert payload['params']['search'] == ''\n assert payload['params']['group-id'] == ''", "def ex_list_zones(self):\r\n list_zones = []\r\n request = '/zones'\r\n response = self.connection.request(request, method='GET').object\r\n list_zones = [self._to_zone(z) for z in response['items']]\r\n return list_zones", "def pull_zones(*args, **kwargs):\n zones = api_call('/zones').json()\n for zone in zones:\n print(bcolors.HEADER + \"== Zone {zone[name]} [{zone[uuid]}] ==\".format(zone=zone) + bcolors.ENDC)\n\n # Retrieve the records for this zone\n records_response = api_call(zone['zone_records_href'], headers={'Accept': 'text/plain'})\n print(\"\\tWriting... 
\", end='')\n\n # Save the records in a file\n filename = \"{zone[name]}_{zone[uuid]}.txt\".format(zone=zone)\n with open(ZONES_FOLDER + filename, 'w') as records_file:\n records_file.write(records_response.content.decode('utf-8'))\n print(\"{bcolors.OKGREEN}done{bcolors.ENDC} ({fn})\".format(fn=filename, bcolors=bcolors))\n\n print(\"\\n\" + bcolors.OKBLUE + \"Written all zones in \" + ZONES_FOLDER + bcolors.ENDC)", "def _get_list_zone_object(self):\n return self.rad_connection.list_objects(zonemgr.Zone())", "def _fetch_all_zones(self):\n query = tables.zones.select()\n return self.storage.session.execute(query).fetchall()", "def get_all(self, project_id, region_name=None):\n project_id = acl.get_limited_to_project(request.headers, 'project_resource_get') or project_id\n if project_id is None:\n project_id = request.headers.get('X-Project-Id')\n\n LIST_METHODS = services.RESOURCE_LIST_METHOD\n result = []\n regions = [region_name] if region_name else cfg.CONF.regions\n\n for method, _region_name in itertools.product(LIST_METHODS, regions):\n resources = method(project_id, region_name=_region_name)\n for resource in resources:\n result.append(models.Resource(region_name=_region_name,\n resource_id=resource.id,\n resource_name=resource.name,\n resource_type=resource.resource_type))\n return result", "def retrieve_all_instances(self):\n zones = []\n instances = []\n # Retrieve all Zones, given Project ID\n try:\n request = self.service.zones().list(project=self.project)\n response = request.execute()\n for zone in response.get('items', []):\n zones.append(zone['name'])\n except Exception as e:\n raise e\n # Retrieve all Instances in each Zone\n try:\n for zone in zones:\n request = self.service.instances().list(project=self.project, zone=zone)\n response = request.execute()\n for instance in response.get('items', []):\n instances.append(instance)\n self.__class__.all_instances = instances\n except Exception as e:\n raise e", "def projects_ls(ctx, verbose):\n ctx.verbose = verbose\n hes = HoruzES(\"\", ctx)\n indexes = hes.indexes()\n if indexes:\n rtable.add_column(\"Projects\", style=\"cyan\", no_wrap=True)\n for i in indexes:\n rtable.add_row(i)\n ctx.log(rtable)", "def list_tpus(project: str, zone: str) -> List[Mapping[str, Any]]:\n tpu_node_url = os.path.join(\n _TPU_BASE_URL, \"projects\", project, \"locations\", zone, \"nodes\"\n )\n resp = requests.get(tpu_node_url, headers=get_headers())\n return resp.json()[\"nodes\"]", "def describe_availability_zones(DryRun=None, ZoneNames=None, Filters=None):\n pass", "def list_projects(doc, company, doc_type):\n print(\"TODO\")", "def print_zones(*args, **kwargs):\n # List the zones\n zones = api_call('/zones').json()\n for zone in zones:\n print(bcolors.HEADER + \"== Zone {zone[name]} [{zone[uuid]}] ==\".format(zone=zone) + bcolors.ENDC)\n\n # List associated domains\n domains = api_call(zone['domains_href']).json()\n if domains:\n print(\n \"{bcolors.MINOR}\\tDomain(s) associated with this zone:{bcolors.ENDC} {domains}\"\n .format(bcolors=bcolors, domains=', '.join([domain['fqdn'] for domain in domains]))\n )\n else:\n print(\"\\tNo domain associated with this zone.\")\n\n # Retrieve the records for this zone\n records = api_call(zone['zone_records_href']).json()\n print(\"\\t{count} {bcolors.MINOR}records in this zone:{bcolors.ENDC}\\n\".format(count=len(records), bcolors=bcolors))\n # Print each record\n for record in records:\n print_record(record)\n print()", "def list_projects_controller(self):\n try:\n logging.info(f\"list all annotation 
project on Label Studio\")\n list_project_url = self.label_studio_config.get(\"list_projects\")\n logging.info(f\"{list_project_url=}\")\n response, status_code = APIInterface.get(\n route=list_project_url,\n params={\"ordering\": \"id\"},\n headers=self.header,\n )\n return response\n except Exception as error:\n logging.error(f\"Error in list_projects_controller: {error}\")\n raise error", "def list_disks(compute, project, zone):\n\tbackup_logger.debug(\"Finding all disks for specified project\")\n\tall_disks = []\n\ttry:\n\t\tresult = compute.disks().list(project=project, zone=zone).execute()\n\t\tall_disks.extend(result['items'])\n\n\t\twhile 'nextPageToken' in result:\n\t\t\tresult = compute.disks().list(project=project, zone=zone, \\\n\t\t\t\tpageToken=result['nextPageToken']).execute()\n\t\t\tall_disks.extend(result['items'])\n\texcept HttpError:\n\t\tbackup_logger.error(\"Error with HTTP request made to list_disks\")\n\t\tsys.exit(1)\n\n\treturn all_disks", "def get_zone_names_list(self):\n\n\t\treturn [zone['description'] for zone in self.compute.zones().list(project=self.project).execute()['items']]" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Lists the node pools for a cluster.
def list_node_pools( self, project_id, zone, cluster_id, parent=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): # Wrap the transport method to add retry and timeout logic. if "list_node_pools" not in self._inner_api_calls: self._inner_api_calls[ "list_node_pools" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.list_node_pools, default_retry=self._method_configs["ListNodePools"].retry, default_timeout=self._method_configs["ListNodePools"].timeout, client_info=self._client_info, ) request = cluster_service_pb2.ListNodePoolsRequest( project_id=project_id, zone=zone, cluster_id=cluster_id, parent=parent ) if metadata is None: metadata = [] metadata = list(metadata) try: routing_header = [("parent", parent)] except AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) return self._inner_api_calls["list_node_pools"]( request, retry=retry, timeout=timeout, metadata=metadata )
[ "def list_nodepools(\n parent: str = None,\n configuration: Configuration = None,\n secrets: Secrets = None,\n) -> Dict[str, Any]: # noqa: E501\n parent = get_parent(parent, configuration=configuration, secrets=secrets)\n client = get_client(configuration, secrets)\n response = client.list_node_pools(parent=parent)\n logger.debug(\"NodePool listing: {}\".format(str(response)))\n return to_dict(response)", "def list_cluster_nodes(self):\n endpoint = self.build_url(\"/nodes\")\n return self.request('get', endpoint)", "def list_nodes(*, cluster):\n ss.load_config(cluster)\n return ss.svars['nodes']", "def get_cluster_list(self):\n LOG.info(\"Getting clusters\")\n return self.client.request(constants.GET,\n constants.GET_CLUSTER.format\n (self.server_ip), payload=None,\n querystring=constants.\n SELECT_ID_AND_NAME)", "def get_node_pools(self):\n return sorted(self.node_pools.keys())", "def _cluster_list():\n\n CLUSTER_TABLE = storage.get_cluster_table()\n clusters = []\n cluster_items = CLUSTER_TABLE.scan()\n\n for cluster in cluster_items['Items']:\n clusters.append(cluster['id'])\n\n print(f'tracked clusters: {clusters}')\n\n return clusters", "def list_pools(self):\n self.require_state(\"connected\")\n size = c_size_t(512)\n while True:\n c_names = create_string_buffer(size.value)\n ret = run_in_thread(self.librados.rados_pool_list,\n (self.cluster, byref(c_names), size))\n if ret > size.value:\n size = c_size_t(ret)\n else:\n break\n return filter(lambda name: name != '', c_names.raw.split('\\0'))", "def get_all_vm_pools():\n logger.info(\"Getting all vm pools in the system\")\n return UTIL.get(abs_link=False)", "def do_kube_cluster_list(cc, args):\n versions = cc.kube_cluster.list()\n fields = ['cluster_name', 'cluster_version', 'cluster_api_endpoint']\n labels = fields\n utils.print_list(versions, fields, labels, sortby=0)", "def list_cluster_nodegroups(self, context, cluster_id, filters=None,\n limit=None, marker=None, sort_key=None,\n sort_dir=None):", "def pools(self, pool=None):\n url = self.hostname + '/pools'\n if pool:\n url += '/' + pool\n return self._get(url)", "def pools(self, pool=None):\n url = f'{self.hostname}/pools'\n if pool:\n url += '/' + pool\n return self._get(url)", "def get_nodes_cluster(self, skip_master_node=True):\n\n node_list = []\n cmd = \"%s | awk 'FNR == 1 {next} {print $1\\\":\\\"$3}'\" % KUBECTL_GET_NODES % self.context\n if skip_master_node:\n cmd = cmd + \" | grep -v 'master'\"\n logger.info(\"cmd: %s\", cmd)\n result = self.nuvoloso_helper.run_check_output(cmd)\n if result:\n logger.info(result)\n node_list_raw = result.split()\n # e.g. 
node_list_raw = ['ip-172-20-47-154.us-west-2.compute.internal:node']\n # remove \":node\" from each line of node_list_raw and get the hostname\n node_list = list(map(lambda x:x.split(\":\")[0], node_list_raw))\n return node_list\n else:\n raise Exception(\"No output when running cmd: %s\" % cmd)", "def get_nodes(self, cluster):\n return list(self.clusters[cluster]['nodes'])", "def cli_cosmosdb_managed_cassandra_cluster_list(client,\r\n resource_group_name=None):\r\n\r\n if resource_group_name is None:\r\n return client.list_by_subscription()\r\n\r\n return client.list_by_resource_group(resource_group_name)", "def GetNodeList(cluster):\n if len(cluster) == 1:\n return [cluster]\n else:\n children = cluster.GetChildren()\n children.sort(key=lambda x: len(x), reverse=True)\n res = []\n for child in children:\n res += GetNodeList(child)\n res += [cluster]\n return res", "def _list_instances_in_cluster(self):\n properties = ['runtime.connectionState',\n 'config.extraConfig[\"nvp.vm-uuid\"]']\n LOG.debug(\"Getting list of instances from cluster %s\",\n self._cluster)\n vms = []\n if self._root_resource_pool:\n vms = self._session._call_method(\n vim_util, 'get_inner_objects', self._root_resource_pool, 'vm',\n 'VirtualMachine', properties)\n lst_vm_names = self._get_valid_vms_from_retrieve_result(vms)\n\n return lst_vm_names", "def list_cluster_names(self):\n info = self._make_service_info()\n try:\n return sorted(self.huskar_client.client.get_children(info.path))\n except NoNodeError:\n return []", "def get_cluster_names():\n token = get_session()\n headers = {\n 'Cookie': 'argocd.token={}'.format(token)\n }\n r = requests.get('https://build.osinfra.cn/api/v1/clusters', headers=headers)\n if r.status_code != 200:\n print('Cannot get cluster names because GET request failed.')\n print(r.status_code, r.json())\n sys.exit(1)\n cluster_names = []\n for i in r.json()['items']:\n cluster_names.append(i['name'])\n return cluster_names" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Creates a node pool for a cluster.
def create_node_pool( self, project_id, zone, cluster_id, node_pool, parent=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): # Wrap the transport method to add retry and timeout logic. if "create_node_pool" not in self._inner_api_calls: self._inner_api_calls[ "create_node_pool" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.create_node_pool, default_retry=self._method_configs["CreateNodePool"].retry, default_timeout=self._method_configs["CreateNodePool"].timeout, client_info=self._client_info, ) request = cluster_service_pb2.CreateNodePoolRequest( project_id=project_id, zone=zone, cluster_id=cluster_id, node_pool=node_pool, parent=parent, ) if metadata is None: metadata = [] metadata = list(metadata) try: routing_header = [("parent", parent)] except AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) return self._inner_api_calls["create_node_pool"]( request, retry=retry, timeout=timeout, metadata=metadata )
[ "def create_pool(self, **params):\n pool = self.get_pool(connect=False, **params)\n\n # Save the pool\n self.pool.append(pool)\n\n return pool", "def test_pool_create(self):\n pool_name = p_n()\n self.unittest_command(\n [_STRATIS_CLI, \"pool\", \"create\", pool_name, StratisCertify.DISKS[0]],\n 0,\n True,\n True,\n )", "def create_cluster(worker_count=0):\n global nodes, stash, seeds\n nodes = []\n seeds = []\n stash = []\n #create the seed node\n seeds.append(Node(cluster_name, node_type=\"seed\", number=0, create=True, IPv4=True))\n #create the rest of the nodes\n for i in range(worker_count):\n stash.append(Node(cluster_name, node_type=\"node\", number=\"%02d\" % (i+1), create=True, IPv4=True))\n\n #save the cluster to file\n save_cluster()\n #wait until everybody is ready\n Cluster.wait_nodes(seeds+nodes)\n find_orchestrator()\n inject_hosts_files()\n log.info('Every node is ready for SSH')", "def create_cluster(ctx, name, region, verbosity,\n cp_role, subnets, tags, vpc_cidr, zones, kubeconf, username, heptio_auth, cp_only, node_name,\n node_role, node_sg_ingress, node_min, node_max, node_subnets, node_type, keyname, ssh_public_key,\n ami, no_user_data, yes):\n if node_subnets and not subnets:\n print('If node subnets are specified, the cluster subnets must appear!')\n exit(1)\n elif node_subnets and subnets:\n s = [ns for ns in node_subnets if ns not in subnets]\n if s:\n print('[{}] not one of the cluster subnets.'.format(','.join(s)))\n exit(1)\n\n if not kubeconf:\n files = os.environ.get('KUBECONFIG', '~/.kube/config')\n kubeconf = os.path.expanduser(files.split(':')[0])\n if not yes:\n if not click.confirm('Are you sure to create the EKS cluster in '\n 'region[{}] with kubeconfig[{}]'.format(region, kubeconf)):\n exit(0)\n\n cp = ControlPlane(name, subnets=subnets, role=cp_role, region=region, tags=tags,\n vpc_cidr=vpc_cidr, zones=zones)\n cluster_info = cp.create()\n kc = KubeConfig(cluster_info, kubeconf, user=username, heptio_auth=heptio_auth)\n kc.create()\n\n if cp_only:\n LOG.info('To create EKS cluster control plane only.')\n return\n\n ng = NodeGroup(node_name, cluster_info=cluster_info, keypair=keyname, region=region, ami=ami, subnets=node_subnets,\n kubeconf=kubeconf, role=node_role, sg_ingresses=node_sg_ingress, min_nodes=node_min,\n max_nodes=node_max, instance_type=node_type, ssh_public_key=ssh_public_key,\n no_user_data=no_user_data)\n ng.create()", "def create_nodes(count=2, instantiateOn='pnode', cores=4, ram=8):\n\n nodes = []\n # index nodes by their proper number (not zero-indexed)\n nodes.append(None)\n\n # create each VM\n for i in range(1, count + 1):\n nodes.append(mkVM('node' + str(i), GLOBALS.UBUNTU18_IMG, instantiateOn=instantiateOn, cores=cores, ram=ram))\n\n # run alternating install scripts on each vm to install software \n odd_node = True\n for node in nodes:\n if node is not None:\n if odd_node:\n node.addService(pg.Execute(shell=\"sh\", command=\"chmod +x /local/repository/install1.sh\"))\n node.addService(pg.Execute(shell=\"sh\", command=\"/local/repository/install1.sh\"))\n else:\n node.addService(pg.Execute(shell=\"sh\", command=\"chmod +x /local/repository/install2.sh\"))\n node.addService(pg.Execute(shell=\"sh\", command=\"/local/repository/install2.sh\"))\n odd_node = not odd_node\n\n return nodes", "def _NodePoolFromCluster(cluster, node_pool_name):\n for node_pool in cluster.nodePools:\n if node_pool.name == node_pool_name:\n # Node pools always have unique names.\n return node_pool\n raise NodePoolError(\n 'No node pool found matching 
the name [{}].'.format(node_pool_name))", "def test_create_cloud_pool(self):\n pass", "def cluster_initialize (self, worker_limit=None):\n with PerfTimer( f'create {self.compute_type} cluster'):\n\n cluster = None; client = None\n \n # initialize CPU or GPU cluster\n if 'multi-GPU' in self.compute_type:\n\n self.n_workers = cupy.cuda.runtime.getDeviceCount()\n\n if 'XGBoost' in self.model_type:\n self.n_workers = min( self.n_datafiles, self.n_workers ) \n\n if worker_limit is not None:\n self.n_workers = min( worker_limit, self.n_workers )\n \n \n cluster = LocalCUDACluster( n_workers = self.n_workers )\n client = Client( cluster )\n print(f'dask multi-GPU cluster with {self.n_workers} workers ')\n \n if 'multi-CPU' in self.compute_type:\n self.n_workers = os.cpu_count()\n\n if 'XGBoost' in self.model_type:\n self.n_workers = min( self.n_datafiles, self.n_workers )\n \n if worker_limit is not None:\n self.n_workers = min( worker_limit, self.n_workers )\n \n cluster = LocalCluster( n_workers = self.n_workers, threads_per_worker = 1 )\n client = Client( cluster )\n print(f'\\ndask multi-CPU cluster with {self.n_workers} workers')\n\n dask.config.set({'logging': {'loggers' : {'distributed.nanny': {'level': 'CRITICAL'}}}})\n dask.config.set({'temporary_directory' : self.directories['output_artifacts']})\n\n return cluster, client", "def build_cluster(self):\n self.redshift_client_create()\n self.iam_client_create()\n self.ec2_client_create()\n self.create_iam_role()\n # self.update_iam_config()\n self.create_redshift_cluster()\n # uses created redshift cluster's vpc_id\n self.open_tcp_port()", "def create_ceph_block_pool(request):\n class_instance = request.node.cls\n\n def finalizer():\n \"\"\"\n Delete the Ceph block pool\n \"\"\"\n if hasattr(class_instance, \"cbp_obj\"):\n class_instance.cbp_obj.delete()\n\n request.addfinalizer(finalizer)\n\n class_instance.cbp_obj = helpers.create_ceph_block_pool()\n assert class_instance.cbp_obj, \"Failed to create block pool\"", "def post_create_azure_node_pool(\n self, response: operations_pb2.Operation\n ) -> operations_pb2.Operation:\n return response", "def simple_pin_nodes_to_cluster(all_nodes, roller):\n nodes_data = []\n role_counter = {}\n # ctrl_counter = 0\n # compute_counter = 0\n LOG.info('Simple(random) node assign to cluster chosen')\n for node in all_nodes:\n if node['cluster'] is not None:\n LOG.debug('Skip reserved node: {0}{1}'.format(node['name'], node['id']))\n continue\n LOG.debug(\"Get free node: {0}\".format(node['name']))\n for node_label in roller.keys():\n if not roller[node_label].get('assigned_names'):\n # here we save assigned names for nodes\n # and use this for network interface configuration later\n roller[node_label]['assigned_names'] = []\n\n if role_counter.get(node_label) is None:\n # initialize counter for this role\n role_counter[node_label] = 0\n\n if role_counter[node_label] < roller[node_label]['count']:\n LOG.debug(\"Assign node with label {0}. 
\"\n \"Assigned with this label: {1} from {2}.\".format(\n node_label,\n role_counter[node_label],\n roller[node_label]['count']))\n\n node_name = check_for_name(node['mac'])\n node_data = {\n api_cluster_id: cluster_id,\n 'id': node['id'],\n 'pending_addition': True,\n 'pending_roles': roller[node_label]['roles'],\n 'name': node_name,\n }\n roller[node_label]['assigned_names'].append(node_name)\n role_counter[node_label] += 1\n LOG.info('Add node {0} new name: {1}, roles: {2}'.format(\n node['name'],\n node_name,\n roller[node_label]['roles'],\n ))\n nodes_data.append(node_data)\n # break to the next nailgun node\n break\n return nodes_data", "def create_subnet_pool(self, **attrs):\n return self._create(_subnet_pool.SubnetPool, **attrs)", "def post_get_azure_node_pool(\n self, response: azure_resources.AzureNodePool\n ) -> azure_resources.AzureNodePool:\n return response", "def get_node_pool(self, name):\n if not name in self.node_pools:\n raise KeyError(\"Cannot find node pool {}, available:\\n{}\"\n .format(name, ',\\n'.join(self.get_node_pools())))\n return self.node_pools[name]", "def __init__(__self__,\n resource_name: str,\n args: KubernetesClusterNodePoolArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def create_cluster(self, **attrs):\n return self._create(\n _cluster.Cluster, prepend_key=False, **attrs\n )", "def make_pooled_cluster(self, name=None, minutes_ago=0,\n provision=True, **kwargs):\n runner = EMRJobRunner(pool_clusters=True,\n pool_name=name,\n **kwargs)\n cluster_id = runner.make_persistent_cluster()\n mock_cluster = self.mock_emr_clusters[cluster_id]\n\n # poor man's version of simulating cluster progress\n mock_cluster['Status']['State'] = 'WAITING'\n mock_cluster['Status']['Timeline']['CreationDateTime'] = (\n _boto3_now() - timedelta(minutes=minutes_ago))\n mock_cluster['MasterPublicDnsName'] = 'mockmaster'\n\n # instance fleets cares about provisioned instances\n if provision:\n if mock_cluster['InstanceCollectionType'] == 'INSTANCE_GROUP':\n for ig in mock_cluster['_InstanceGroups']:\n ig['RunningInstanceCount'] = ig['RequestedInstanceCount']\n elif mock_cluster['InstanceCollectionType'] == 'INSTANCE_FLEET':\n for fleet in mock_cluster['_InstanceFleets']:\n fleet['ProvisionedOnDemandCapacity'] = fleet[\n 'TargetOnDemandCapacity']\n fleet['ProvisionedSpotCapacity'] = fleet[\n 'TargetSpotCapacity']\n\n return runner, cluster_id", "def create_pool(self, context, pool, driver_name):\n arg_dict = {'context': context,\n 'pool': pool,\n 'driver_name': driver_name,\n }\n LOG.info(_LI(\"Received request 'Create Pool' for Pool:%(pool_id)s \"),\n {'pool_id': pool['id']})\n self._send_event(lb_constants.EVENT_CREATE_POOL, arg_dict,\n serialize=True, binding_key=pool['id'],\n key=pool['id'])" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Deletes a node pool from a cluster.
def delete_node_pool( self, project_id, zone, cluster_id, node_pool_id, name=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): # Wrap the transport method to add retry and timeout logic. if "delete_node_pool" not in self._inner_api_calls: self._inner_api_calls[ "delete_node_pool" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.delete_node_pool, default_retry=self._method_configs["DeleteNodePool"].retry, default_timeout=self._method_configs["DeleteNodePool"].timeout, client_info=self._client_info, ) request = cluster_service_pb2.DeleteNodePoolRequest( project_id=project_id, zone=zone, cluster_id=cluster_id, node_pool_id=node_pool_id, name=name, ) if metadata is None: metadata = [] metadata = list(metadata) try: routing_header = [("name", name)] except AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) return self._inner_api_calls["delete_node_pool"]( request, retry=retry, timeout=timeout, metadata=metadata )
[ "def delete_pool(self, context, pool):\n self._clear_loadbalancer_instance(pool['tenant_id'], pool['id'])", "def delete_pool(self, pool):\n if pool.get('loadbalancer_id', None):\n self._update_loadbalancer_instance_v2(pool['loadbalancer_id'])\n elif pool['vip_id']:\n self._clear_loadbalancer_instance(pool['tenant_id'], pool['id'])", "def ex_targetpool_remove_node(self, targetpool, node):\r\n if not hasattr(targetpool, 'name'):\r\n targetpool = self.ex_get_targetpool(targetpool)\r\n if not hasattr(node, 'name'):\r\n node = self.ex_get_node(node, 'all')\r\n\r\n targetpool_data = {'instances': [{'instance': node.extra['selfLink']}]}\r\n\r\n request = '/regions/%s/targetPools/%s/removeInstance' % (\r\n targetpool.region.name, targetpool.name)\r\n self.connection.async_request(request, method='POST',\r\n data=targetpool_data)\r\n # Remove node object from node list\r\n index = None\r\n for i, nd in enumerate(targetpool.nodes):\r\n if nd.name == node.name:\r\n index = i\r\n break\r\n if index is not None:\r\n targetpool.nodes.pop(index)\r\n return True", "def post_delete_azure_node_pool(\n self, response: operations_pb2.Operation\n ) -> operations_pb2.Operation:\n return response", "def delete_pool(self, context, pool, service):\n try:\n service_pending = self.lbdriver.delete_pool(pool, service)\n self.cache.put(service, self.agent_host)\n if service_pending:\n self.needs_resync = True\n except q_exception.NeutronException as exc:\n LOG.error(\"delete_pool: NeutronException: %s\" % exc.msg)\n except Exception as exc:\n LOG.error(\"delete_pool: Exception: %s\" % exc.message)", "def destroy_cluster(self, cluster_id):", "def remove_nodes_from_resource_pool(self, node_list):\n return 0, 'Succeeded in removing nodes ' + str(node_list) + \\\n ' from the cluster resource pool!'", "def delete_agent_pool(self, pool_id):\n route_values = {}\n if pool_id is not None:\n route_values['poolId'] = self._serialize.url('pool_id', pool_id, 'int')\n self._send(http_method='DELETE',\n location_id='a8c47e17-4d56-4a56-92bb-de7ea7dc65be',\n version='6.0-preview.1',\n route_values=route_values)", "def test_delete_cloud_pool(self):\n pass", "def delete(self, *args, **kwargs):\n if self.virtual_machines.all():\n children = [vm.hostname for vm in self.virtual_machines.all()]\n raise RuntimeError('cannot delete cluster until its hosts have been reassigned: {}'.format(children))\n for member in self.members.all():\n self.members.remove(member)\n self.save()\n super(Cluster, self).delete(*args, **kwargs)", "def delete(self, request, *args, **kwargs):\n\n pool_ids = kwargs['pool_ids'].split(';')\n pools = facade.get_pool_by_ids(pool_ids)\n pool_serializer = serializers.PoolV3Serializer(pools, many=True)\n locks_list = facade.create_lock(pool_serializer.data)\n try:\n response = facade_pool_deploy.delete_real_pool(pool_serializer.data, request.user)\n except Exception, exception:\n log.error(exception)\n raise rest_exceptions.NetworkAPIException(exception)\n finally:\n facade.destroy_lock(locks_list)\n return Response(response)", "async def delete_pool_and_wallet(self, pool_name, wallet_name):\n print(Colors.HEADER + \"\\nDelete pool\\n\" + Colors.ENDC)\n try:\n await pool.delete_pool_ledger_config(pool_name)\n except IndyError as E:\n raise E\n\n print(Colors.HEADER + \"\\nDelete wallet\\n\" + Colors.ENDC)\n try:\n await wallet.delete_wallet(wallet_name, None)\n except IndyError as E:\n raise E\n await asyncio.sleep(0)", "def delete_subnet_pool(self, subnet_pool, ignore_missing=True):\n self._delete(\n 
_subnet_pool.SubnetPool, subnet_pool, ignore_missing=ignore_missing\n )", "def delete_node(self, node):\n # Make sure the node is in the pool\n delete_node = self.nodes.get(node, None)\n if delete_node is None:\n self._bad_node(node)\n\n self.nodes.pop(node)\n\n # Deal with the properties differences for versions 1.0 and 2.0\n if self.api_version in ['1.0', '2.0']:\n if delete_node['state'] == \"disabled\":\n self.properties['basic']['disabled'].pop(self.properties['basic']['disabled'].index(node))\n elif delete_node['state'] == \"draining\":\n self.properties['basic']['draining'].pop(self.properties['basic']['draining'].index(node))\n else:\n self.properties['basic']['nodes'].pop(self.properties['basic']['nodes'].index(node))\n else:\n for i in range(len(self.properties['basic']['nodes_table'])):\n if self.properties['basic']['nodes_table'][i]['node'] == node:\n self.properties['basic']['nodes_table'].pop(i)\n break\n\n self.update()\n\n return self.nodes_status()", "def cli_jira_cluster_node_delete(ctx, nodeid):\n jira_cluster_node_delete_path = f'rest/api/2/cluster/node/{nodeid}'\n click.echo('not yet implemented.')\n _res = ctx.obj['connect'].delete(jira_cluster_node_delete_path, headers=json_headers, auth=True)\n ctx.obj['writer'].out(_res)", "def cli_cosmosdb_mongocluster_delete(client,\r\n resource_group_name, cluster_name):\r\n\r\n return client.begin_delete(resource_group_name, cluster_name)", "def delete_pool(self, argu):\n\n if not argu:\n LOG.error(\"In delete_pool, it should not pass the None.\")\n\n # delete policy\n self._delete_policy(\n argu['listener_id'],\n argu['session_persistence_type'],\n argu['lb_algorithm']\n )\n\n cmd_apv_no_group = ADCDevice.no_group(argu['pool_id'])\n for base_rest_url in self.base_rest_urls:\n self.run_cli_extend(base_rest_url, cmd_apv_no_group)", "def delete_nodegroup(ctx, name, region, verbosity, node_name, kubeconf):\n ng = NodeGroup(node_name, ClusterInfo(name), region=region, kubeconf=kubeconf)\n ng.delete()", "def ex_destroy_targetpool(self, targetpool):\r\n request = '/regions/%s/targetPools/%s' % (targetpool.region.name,\r\n targetpool.name)\r\n\r\n self.connection.async_request(request, method='DELETE')\r\n return True" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Sets the NodeManagement options for a node pool.
def set_node_pool_management( self, project_id, zone, cluster_id, node_pool_id, management, name=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): # Wrap the transport method to add retry and timeout logic. if "set_node_pool_management" not in self._inner_api_calls: self._inner_api_calls[ "set_node_pool_management" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.set_node_pool_management, default_retry=self._method_configs["SetNodePoolManagement"].retry, default_timeout=self._method_configs["SetNodePoolManagement"].timeout, client_info=self._client_info, ) request = cluster_service_pb2.SetNodePoolManagementRequest( project_id=project_id, zone=zone, cluster_id=cluster_id, node_pool_id=node_pool_id, management=management, name=name, ) if metadata is None: metadata = [] metadata = list(metadata) try: routing_header = [("name", name)] except AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) return self._inner_api_calls["set_node_pool_management"]( request, retry=retry, timeout=timeout, metadata=metadata )
[ "def set_numa_optimization(self):\n\n self.params += \" -XX:+UseNUMA -XX:+UseParallelGC\"", "def set_node_pool_autoscaling(\n self,\n project_id,\n zone,\n cluster_id,\n node_pool_id,\n autoscaling,\n name=None,\n retry=google.api_core.gapic_v1.method.DEFAULT,\n timeout=google.api_core.gapic_v1.method.DEFAULT,\n metadata=None,\n ):\n # Wrap the transport method to add retry and timeout logic.\n if \"set_node_pool_autoscaling\" not in self._inner_api_calls:\n self._inner_api_calls[\n \"set_node_pool_autoscaling\"\n ] = google.api_core.gapic_v1.method.wrap_method(\n self.transport.set_node_pool_autoscaling,\n default_retry=self._method_configs[\"SetNodePoolAutoscaling\"].retry,\n default_timeout=self._method_configs[\"SetNodePoolAutoscaling\"].timeout,\n client_info=self._client_info,\n )\n\n request = cluster_service_pb2.SetNodePoolAutoscalingRequest(\n project_id=project_id,\n zone=zone,\n cluster_id=cluster_id,\n node_pool_id=node_pool_id,\n autoscaling=autoscaling,\n name=name,\n )\n if metadata is None:\n metadata = []\n metadata = list(metadata)\n try:\n routing_header = [(\"name\", name)]\n except AttributeError:\n pass\n else:\n routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(\n routing_header\n )\n metadata.append(routing_metadata)\n\n return self._inner_api_calls[\"set_node_pool_autoscaling\"](\n request, retry=retry, timeout=timeout, metadata=metadata\n )", "def set_nodes(self, nodes):\n self.nodes = nodes\n self.update_size()", "def setConfiguration(options):", "def set_pool(self, value):\n self.gui.spn_pool.setValue(value)", "def resize_nodepool(pool, new_size, project, zone, cluster):\n cmd = [\n 'gcloud', 'container', 'clusters', 'resize', cluster,\n '--zone', zone, '--project', project, '--node-pool', pool,\n '--num-nodes', str(new_size), '--quiet',\n ]\n print(cmd)\n subprocess.call(cmd)", "def test_set_nat_pool(self, patch4):\n patch4.return_value = None\n self.assertEqual(self.cgn.set_nat_pool(name='pool1', addr='30.0.0.0/24',\n addr_low='30.0.0.1', addr_high='30.0.0.100',\n port_low='1000', port_high='2000'), True)", "def test_set_nat_pool_port_range(self, patch5):\n patch5.return_value = None\n self.assertEqual(self.cgn.set_nat_pool(name='pool1', addr='30.0.0.0/24',\n addr_low='30.0.0.1', addr_high='30.0.0.100',\n port_low='1000', port_high='2000',\n port_range_random=1, snmp_trap_low='1',\n snmp_trap_high='10',\n host_addr_base='10.0.0.1'), True)", "def set_cpus(self, num_cpus):\n if self.batch:\n self.batch_settings.batch_args[\"cpus-per-task\"] = num_cpus\n for db in self:\n db.run_settings.set_cpus_per_task(num_cpus)", "def set_opts(self, **kwargs):\n raise NotImplementedError('Function not implemented in base class.')", "def configManagementNet(self):\n networks = self.handler.getNetworks(self.osid)\n for net in networks['networks']:\n if net['name'] == \"management\":\n net[\"ip_ranges\"] = [[\"10.20.2.5\", \"10.20.2.254\"]]\n net[\"cidr\"] = \"10.20.2.0/24\"\n net[\"meta\"][\"notation\"] = \"ip_ranges\"\n net[\"meta\"][\"use_gateway\"] = True\n net[\"gateway\"] = \"10.20.2.1\"\n net[\"vlan_start\"] = None\n self.handler.uploadNetworks(networks, self.osid)", "def assign_to_machine(self,\n process,\n *,\n node_pool_name,\n process_group=None,\n process_per_machine=1):\n node_pool_di = self.get_node_pool(node_pool_name)\n if process_group is None:\n process_group = process\n\n np_labels = node_pool_di[\"node_config\"][\"labels\"]\n cpu = np_labels[\"cpu\"]\n memory_m = np_labels[\"memory_m\"]\n\n memory_share = memory_m / 
(process_per_machine + 1)\n cpu_share = (cpu - 0.6) / process_per_machine\n cpu_share = float(int(cpu_share * 1000)) / 1000\n\n if 'gpu_count' in np_labels:\n num_gpus = np_labels['gpu_count']\n gpu_share = max(int(num_gpus / process_per_machine), 1)\n else:\n gpu_share = None\n\n self.assign_to_node_pool(process,\n node_pool_name=node_pool_name,\n process_group=process_group,\n memory_m=memory_share,\n cpu=cpu_share,\n gpu_count=gpu_share)", "def setMountdOnNode(self, node, mountdport):\r\n log.info(\"Setting mountd port on %s\", node.alias)\r\n cmd = self.mountdCommand(mountdport)\r\n log.debug(\"Doing node.ssh.execute: \" + cmd)\r\n node.ssh.execute(cmd)", "def test_set_nat_pool_negative(self):\n self.assertEqual(self.cgn.set_nat_pool(name='nat_pool', action=None), True)", "def set_config(self, *, configuration: NodeManagerConfig) -> None:", "def set_numa_memnode(conf, guest_node_id, host_cell_id):\n conf.cellid = guest_node_id\n conf.nodeset = [host_cell_id]\n conf.mode = \"strict\"", "def _configure_launcher(self) -> None:\n self._launcher = NUMASubprocessLauncher(\n self.cluster_environment,\n self.num_processes,\n self.num_nodes,\n self._numa_kwargs,\n )", "def pool_types(self, pool_types):\n\n self._pool_types = pool_types", "def _vmware_control_plane_node_config(\n self, args: parser_extensions.Namespace\n ):\n kwargs = {\n 'autoResizeConfig': self._vmware_auto_resize_config(args),\n 'cpus': flags.Get(args, 'cpus'),\n 'memory': flags.Get(args, 'memory'),\n 'replicas': flags.Get(args, 'replicas'),\n 'vsphereConfig': self._vmware_control_plane_vsphere_config(args),\n }\n if flags.IsSet(kwargs):\n return messages.VmwareControlPlaneNodeConfig(**kwargs)\n return None" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Sets labels on a cluster.
def set_labels( self, project_id, zone, cluster_id, resource_labels, label_fingerprint, name=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): # Wrap the transport method to add retry and timeout logic. if "set_labels" not in self._inner_api_calls: self._inner_api_calls[ "set_labels" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.set_labels, default_retry=self._method_configs["SetLabels"].retry, default_timeout=self._method_configs["SetLabels"].timeout, client_info=self._client_info, ) request = cluster_service_pb2.SetLabelsRequest( project_id=project_id, zone=zone, cluster_id=cluster_id, resource_labels=resource_labels, label_fingerprint=label_fingerprint, name=name, ) if metadata is None: metadata = [] metadata = list(metadata) try: routing_header = [("name", name)] except AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) return self._inner_api_calls["set_labels"]( request, retry=retry, timeout=timeout, metadata=metadata )
[ "def set_labels(self,labels):\n assert isinstance(labels,dict)\n assert all([isinstance(_k,str) for _k in labels.keys()])\n assert all([isinstance(_v,(int,list,tuple)) for _v in labels.values()])\n \n if self.label_map is None:\n self.label_map = dict()\n \n values = set() \n for _k, _v in labels.items():\n self.label_map[_k] = _v\n if isinstance(_v,int):\n values.add(_v)\n else:\n for _v2 in _v:\n values.add(_v2)\n \n for _k in set(self.centroids_.keys()).difference(values):\n self.label_map[str(_k)] = _k", "def set(self, cluster: str, namespace: str, type: str, labels: Labels) -> Labels:\n with self._lock:\n self._ns(cluster, namespace)[type] = labels\n return labels", "def set_labels(self, labels):\n self.labels = [(numpy.asarray(map(round, tuple(centroid)), dtype=numpy.float32), label) \n for centroid, label in labels]", "def set_labels(self,labels):\n assert isinstance(labels,dict)\n assert all([isinstance(_k,str) for _k in labels.keys()])\n assert all([isinstance(_v,(int,list,tuple)) for _v in labels.values()])\n \n if self.label_map is None:\n self.label_map = dict()\n \n values = set() \n for _k, _v in labels.items():\n self.label_map[_k] = _v\n if isinstance(_v,int):\n values.add(_v)\n else:\n for _v2 in _v:\n values.add(_v2)\n \n for _k in set(self.means_.keys()).difference(values):\n self.label_map[str(_k)] = _k", "def set_labels(self, labels):\n if len(labels) != self.dimension:\n print(\"Cannot label {} columns with the provided {} labels\".format(self.dimension), len(labels))\n else:\n self.labels = labels", "def set_labels(self,label):\n ax = self.figurecavas.figure.axes[0]\n ax.set_xlabel(label[0])\n ax.set_ylabel(label[1])", "def set_labels(self,label:dict):\n self.label_dict = label\n print(\"[INFO] Label dictionary : \",label)", "def set_labels(self, cls, label):\n annotated_bbox = dataclasses.replace(\n self.bbox, ui_class=cls, ui_label=label)\n self.bbox = annotated_bbox", "def generate_clusters(self):\n\n self.cluster_labels = None", "def set_cluster(self, data):\n cluster = Cluster(data['name'])\n for host in data['hosts']:\n cluster.add_host(**host)\n self._cluster = cluster", "def addClusterInfo(self, clusterLabels, centroids):\n\n uniqueLabels = np.unique(clusterLabels, return_counts=False)\n\n for label in uniqueLabels:\n for fidx in np.where(clusterLabels == label)[0]:\n self.fiberTree[fidx][str(label)] = label\n self.fiberTree['centroid'][label] = centroids[label]", "def assign_labels_to_clusters(model, data, labels_true):\n print(\"Assigning labels to clusters ...\", end=' ')\n start_time = time()\n\n labels_pred = model.predict_cluster(data)\n labelled_clusters = []\n for i in range(model.n_clusters):\n idx = np.where(labels_pred == i)[0]\n if len(idx) != 0:\n labels_freq = np.bincount(labels_true[idx])\n labelled_clusters.append(np.argmax(labels_freq))\n else:\n labelled_clusters.append(-1)\n print(\"Done in {:.2f} sec\".format(time() - start_time))\n\n return np.asarray(labelled_clusters)", "def cluster(self, model):\n labels = model.fit_predict(self.data)\n # Relabel clusters by rank\n if len(set(labels)) > 100:\n warn(\"Too many clusters: labels are not sorted\")\n return\n labels = [string.printable[lbl] for lbl in labels]\n label_counts = Counter(labels)\n ranks = {lbl: rank for rank, (lbl, _) in\n enumerate(label_counts.most_common())}\n self.labels = np.array([ranks[lbl] for lbl in labels])", "def set_cluster_for_vios(self, vios_id, cluster_id, cluster_name):\n # first update the vios_keyed dict\n if vios_id not in self.vios_keyed:\n 
self.vios_keyed[vios_id] = {}\n old = self.vios_keyed[vios_id]['last_cluster'] = None\n else:\n old = self.vios_keyed[vios_id]['last_cluster']\n if old != cluster_id:\n LOG.info(_(\"VIOS id %(id)s changed membership from cluster ID \"\n \"%(old)s to %(new)s with display name '%(name)s'.\") %\n dict(id=vios_id, old=old, new=cluster_id,\n name=cluster_name))\n self.vios_keyed[vios_id]['last_cluster'] = cluster_id\n # remove from the cluster side too\n if old is not None and old in self.cluster_keyed:\n if vios_id in self.cluster_keyed[old]['set']:\n self.cluster_keyed[old]['set'].remove(vios_id)\n ###################################################################\n # set_cluster_seq is the collection sequence number that the VIOS\n # last reported the cluster membership for.\n # trust_seq is reset to 0 when the VIOS reports as a member for a\n # Cluster feed request. It is bumped up independently during a VIOS\n # feed request (which occurs prior to cluster feed in a topology\n # collection) if the VIOS has a good state and rmc_state.\n # This means that if the VIOS has not reported as being a member of\n # the cluster for some number of iterations, but the trust_seq\n # has bumped up to some small number, then we can \"trust\" that\n # the vios really is not a member of the cluster, and not just\n # experiencing a connectivity problem due to the network or heavy\n # load.\n ###################################################################\n self.vios_keyed[vios_id]['trust_seq'] = 0 # reset\n if cluster_id is None:\n self.vios_keyed[vios_id]['set_cluster_seq'] =\\\n (self.sequence_num - 1) # set sequence in past for None case\n return # Don't need to update cluster_keyed dict.\n self.vios_keyed[vios_id]['set_cluster_seq'] = self.sequence_num\n\n # Now update the cluster_keyed dict\n if cluster_id not in self.cluster_keyed:\n entry = {'set_cluster_seq': self.sequence_num, 'set': set()}\n self.cluster_keyed[cluster_id] = entry\n else:\n entry = self.cluster_keyed[cluster_id]\n entry['display_name'] = cluster_name\n LOG.debug(\"Vios_id=%s, Vios_keyed after update=%s, Cluster entry \"\n \"before update for cluster %s: %s.\" %\n (vios_id, self.vios_keyed[vios_id], cluster_id, entry))\n if entry['set_cluster_seq'] != self.sequence_num:\n # new topology collection sequence - reset membership\n entry['set'] = set()\n entry['set'].add(vios_id)\n entry['set_cluster_seq'] = self.sequence_num\n LOG.debug(\"Reset %s cluster membership for sequence %d to %s.\" %\n (cluster_id, self.sequence_num, entry['set']))\n else:\n entry['set'].add(vios_id)\n LOG.debug(\"Add VIOS %s to cluster %s: %s.\" %\n (vios_id, cluster_name, entry['set']))", "def add_labels(self, labels):\n self.labels.update(labels)\n if self.parent:\n self.parent.add_labels(self.labels)", "def set_label(self, label):", "def set_my_labelset (self):\n self.acquire_capabilities ()\n self.set_my_label (flume.LABEL_I)\n self.set_my_label (flume.LABEL_S)", "def add(self, label, value):\n self.clusters[(label,)] = [value]", "def change_labels(plot_obj, labels):\n for text, label in zip(plot_obj.legend_.texts, labels):\n text.set_text(label)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Enables or disables the ABAC authorization mechanism on a cluster.
def set_legacy_abac( self, project_id, zone, cluster_id, enabled, name=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): # Wrap the transport method to add retry and timeout logic. if "set_legacy_abac" not in self._inner_api_calls: self._inner_api_calls[ "set_legacy_abac" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.set_legacy_abac, default_retry=self._method_configs["SetLegacyAbac"].retry, default_timeout=self._method_configs["SetLegacyAbac"].timeout, client_info=self._client_info, ) request = cluster_service_pb2.SetLegacyAbacRequest( project_id=project_id, zone=zone, cluster_id=cluster_id, enabled=enabled, name=name, ) if metadata is None: metadata = [] metadata = list(metadata) try: routing_header = [("name", name)] except AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) return self._inner_api_calls["set_legacy_abac"]( request, retry=retry, timeout=timeout, metadata=metadata )
[ "def enable_cluster_backup(self):\n mch_resource = ocp.OCP(\n kind=\"MultiClusterHub\",\n resource_name=constants.ACM_MULTICLUSTER_RESOURCE,\n namespace=constants.ACM_HUB_NAMESPACE,\n )\n mch_resource._has_phase = True\n resource_dict = mch_resource.get()\n for components in resource_dict[\"spec\"][\"overrides\"][\"components\"]:\n if components[\"name\"] == \"cluster-backup\":\n components[\"enabled\"] = True\n mch_resource_yaml = tempfile.NamedTemporaryFile(\n mode=\"w+\", prefix=\"mch\", delete=False\n )\n yaml_serialized = yaml.dump(resource_dict)\n mch_resource_yaml.write(yaml_serialized)\n mch_resource_yaml.flush()\n run_cmd(f\"oc apply -f {mch_resource_yaml.name}\")\n mch_resource.wait_for_phase(\"Running\")\n self.backup_pod_status_check()", "def run(ceph_cluster, **kw):\n\n nodes = ceph_cluster.get_nodes()\n # non-active mgr node\n node = [node for node in nodes if not node.role == \"mgr\"][0]\n\n # Enable dashboard plug-in in non-active mgr\n out = CephAdm(node).ceph.mgr.module(action=\"enable\", module=\"dashboard\")\n exp_error = kw.get(\"config\").get(\"error\")\n if exp_error not in out:\n CephmgrDashboardPluginError(\"Dashboard plug-in enable in non-active mgr\")\n return 0", "def setAccessControlList(acl):", "def cluster_level_enable(self, cluster_level_enable):\n\n self._cluster_level_enable = cluster_level_enable", "def cluster_auth_aws(deployment, project, cluster, zone, service_key):\n\n subprocess.check_call(['aws', 'eks', 'update-kubeconfig',\n '--name', cluster, '--region', zone])", "def add_acls(user: str, task: str, topic: str, zookeeper_endpoint: str, env_str=None):\n\n _add_role_acls(\"producer\", user, task, topic, zookeeper_endpoint, env_str)\n _add_role_acls(\"consumer --group=*\", user, task, topic, zookeeper_endpoint, env_str)", "def turnOnAC(self,frame,db):\r\n self.aircon.actuators[0].turnOn()\r\n frame.aCStateDisplayLabel.config(text=\"On\")\r\n frame.update()\r\n db.commit()", "def ex_set_control_access(self, node, control_access):\r\n xml = ET.Element('ControlAccessParams',\r\n {'xmlns': 'http://www.vmware.com/vcloud/v1.5'})\r\n shared_to_everyone = ET.SubElement(xml, 'IsSharedToEveryone')\r\n if control_access.everyone_access_level:\r\n shared_to_everyone.text = 'true'\r\n everyone_access_level = ET.SubElement(xml, 'EveryoneAccessLevel')\r\n everyone_access_level.text = control_access.everyone_access_level\r\n else:\r\n shared_to_everyone.text = 'false'\r\n\r\n # Set subjects\r\n if control_access.subjects:\r\n access_settings_elem = ET.SubElement(xml, 'AccessSettings')\r\n for subject in control_access.subjects:\r\n setting = ET.SubElement(access_settings_elem, 'AccessSetting')\r\n if subject.id:\r\n href = subject.id\r\n else:\r\n res = self.ex_query(type=subject.type, filter='name==' +\r\n subject.name)\r\n if not res:\r\n raise LibcloudError('Specified subject \"%s %s\" not found '\r\n % (subject.type, subject.name))\r\n href = res[0]['href']\r\n ET.SubElement(setting, 'Subject', {'href': href})\r\n ET.SubElement(setting, 'AccessLevel').text = subject.access_level\r\n\r\n headers = {\r\n 'Content-Type': 'application/vnd.vmware.vcloud.controlAccess+xml'\r\n }\r\n self.connection.request(\r\n '%s/action/controlAccess' % get_url_path(node.id),\r\n data=ET.tostring(xml),\r\n headers=headers,\r\n method='POST')", "def configure_services(cluster):\n services = cluster.get_all_services()\n\n for service in services:\n service_type = service.type\n if service_type == 'HDFS':\n print \"Configuring HDFS for Kerberos.\"\n service.update_config(\n 
{'hadoop_security_authentication': 'kerberos',\n 'hadoop_security_authorization': 'true'}\n )\n\n role_cfgs = service.get_all_role_config_groups()\n\n for role_cfg in role_cfgs:\n if role_cfg.roleType == 'DATANODE':\n role_cfg.update_config(\n {'dfs_datanode_port': '1004',\n 'dfs_datanode_http_port': '1006',\n 'dfs_datanode_data_dir_perm': '700'}\n )\n elif service_type == 'HBASE':\n print \"Configuring HBase for Kerberos.\"\n service.update_config(\n {'hbase_security_authentication': 'kerberos',\n 'hbase_security_authorization': 'true'}\n )\n elif service_type == 'ZOOKEEPER':\n print \"Configuring ZooKeeper for Kerberos.\"\n service.update_config(\n {'enableSecurity': 'true'}\n )\n elif service_type == 'SOLR':\n print \"Configuring Solr for Kerberos.\"\n service.update_config(\n {'solr_security_authentication': 'kerberos'}\n )\n elif service_type == 'KS_INDEXER':\n # API version 10 came out with CM 5.4, which is necessary to make this configuration\n # change.\n if API_CURRENT_VERSION >= 10:\n print \"Configuring KeyStoreIndexer for Kerberos.\"\n service.update_config(\n {'hbase_indexer_security_authentication': 'kerberos'}\n )\n elif service_type == 'HUE':\n kt_renewer_role = service.get_roles_by_type('KT_RENEWER')\n hue_server_role = service.get_roles_by_type('HUE_SERVER')\n\n if hue_server_role and not kt_renewer_role:\n print \"Configuring Hue for Kerberos.\"\n service.create_role('KT_RENEWER-1', 'KT_RENEWER',\n hue_server_role[0].hostRef.hostId)", "def cluster_auth(deployment):\n config = get_config(deployment)\n\n if 'cluster' in config:\n cluster = config['cluster']\n provider = cluster.get('provider')\n if provider == 'gcloud':\n cluster_auth_gcloud(\n deployment, **cluster['gcloud']\n )\n elif provider == 'aws':\n cluster_auth_aws(\n deployment, **cluster['aws']\n )\n else:\n raise ValueError(\n f'Unknown provider {provider} found in hubploy.yaml')", "def _authorization(self, args: parser_extensions.Namespace):\n kwargs = {\n 'adminUsers': self._cluster_users(args),\n }\n if flags.IsSet(kwargs):\n return messages.Authorization(**kwargs)\n return None", "def setup_class(cls):\n try:\n ccm_cluster = CCMCluster.load(tests.integration.path, AUTH_CLUSTER_NAME)\n log.debug(\"Found existing ccm test authentication cluster, removing\")\n ccm_cluster.remove()\n except Exception:\n log.debug(\"Can not load cluster %s .....\" % AUTH_CLUSTER_NAME)\n\n log.debug(\"Creating new ccm test authentication cluster\")\n if tests.integration.CASSANDRA_DIR:\n ccm_cluster = CCMCluster(tests.integration.path, AUTH_CLUSTER_NAME, cassandra_dir=tests.integration.CASSANDRA_DIR)\n else:\n ccm_cluster = CCMCluster(tests.integration.path, AUTH_CLUSTER_NAME, cassandra_version=tests.integration.CASSANDRA_VERSION)\n\n ccm_cluster.set_configuration_options({'start_native_transport': True})\n ccm_cluster.set_configuration_options({'authenticator': 'PasswordAuthenticator'})\n\n #\n # This method is called either with AuthenticationTests class or with AuthorizedAuthenticationTests class.\n # In the second case we enable CassandraAuthorizer\n #\n if cls.__name__ == 'AuthorizedAuthenticationTests':\n print \"Running tests with Cassandra Authorizer Enabled\"\n log.info(\"Running tests with Cassandra Authorizer Enabled\")\n ccm_cluster.set_configuration_options({'authorizer': 'CassandraAuthorizer'})\n else:\n print \"Running tests with Cassandra Authorizer Disabled\"\n log.info(\"Running tests with Cassandra Authorizer Disabled\")\n\n common.switch_cluster(tests.integration.path, AUTH_CLUSTER_NAME)\n 
ccm_cluster.populate(1)\n\n log.debug(\"Starting ccm test authentication cluster\")\n ccm_cluster.start(wait_for_binary_proto=True)\n\n if not wait_for_cassandra() or not try_connecting('cassandra', 'cassandra'):\n log.exception(\"Can not talk to cassandra\")\n raise Exception('Can not talk to cassandra')\n\n log.debug(\"Switched to AUTH_CLUSTER_NAME cluster\")\n cls.ccm_cluster = ccm_cluster\n\n cls.root_cluster = cls.cluster_as('cassandra', 'cassandra')\n cls.root_session = cls.root_cluster.connect()\n cls.create_user(cls.root_cluster, cls.test_user, cls.password)\n cls.create_user(cls.root_cluster, cls.test_other_user, cls.password)\n cls.create_user(cls.root_cluster, cls.suser, cls.password, su=True)", "def activate(self, account_name: str, config_name: str=\"default_config\"):\n config = get_config(config_name)\n accounts = get_accounts()\n token = accounts[\"accounts\"][account_name]\n config[\"cloud\"][\"agent\"] = token\n write_prefect_config(config)\n subprocess.run([\"prefect\", \"auth\", \"login\", \"--token\", f\"{token}\"])", "def manual_cic_maintenance_mode(self):\n self.env.revert_snapshot('cic_maintenance_mode')\n\n cluster_id = self.fuel_web.get_last_created_cluster()\n\n # Select a non-primary controller\n regular_ctrl = self.fuel_web.get_nailgun_node_by_name(\"slave-02\")\n dregular_ctrl = self.fuel_web.get_devops_node_by_nailgun_node(\n regular_ctrl)\n _ip = regular_ctrl['ip']\n _id = regular_ctrl['id']\n logger.info('Maintenance mode for node-{0}'.format(_id))\n asserts.assert_true('True' in check_available_mode(_ip),\n \"Maintenance mode is not available\")\n self.ssh_manager.execute_on_remote(\n ip=_ip,\n cmd=\"umm on\")\n\n logger.info('Wait a node-{0} offline status after turning on of'\n ' maintenance mode'.format(_id))\n err_msg = ('Node-{0} has not become offline after'\n 'turning on of maintenance mode'.format(_id))\n wait(\n lambda: not\n self.fuel_web.get_nailgun_node_by_devops_node(dregular_ctrl)\n ['online'], timeout=70 * 10, timeout_msg=err_msg)\n\n logger.info('Check that node-{0} in maintenance mode after '\n 'switching'.format(_id))\n\n asserts.assert_true(\n checkers.check_ping(self.env.get_admin_node_ip(),\n _ip,\n deadline=600),\n \"Host {0} is not reachable by ping during 600 sec\".format(_ip))\n\n asserts.assert_true('True' in check_auto_mode(_ip),\n \"Maintenance mode is not switched on\")\n\n self.ssh_manager.execute_on_remote(\n ip=_ip,\n cmd=\"umm off\")\n\n logger.info('Wait a node-{0} online status'.format(_id))\n err_msg = ('Node-{0} has not become online after'\n 'turning off maintenance mode'.format(_id))\n wait(\n lambda:\n self.fuel_web.get_nailgun_node_by_devops_node(dregular_ctrl)\n ['online'], timeout=70 * 10, timeout_msg=err_msg)\n\n # Wait until MySQL Galera is UP on some controller\n self.fuel_web.wait_mysql_galera_is_up(\n [dregular_ctrl.name])\n\n # Wait until Cinder services UP on a controller\n self.fuel_web.wait_cinder_is_up(\n [dregular_ctrl.name])\n\n # Wait until RabbitMQ cluster is UP\n wait_pass(lambda:\n self.fuel_web.run_single_ostf_test(\n cluster_id, test_sets=['ha'],\n test_name=ostf_test_mapping.OSTF_TEST_MAPPING.get(\n 'RabbitMQ availability')),\n timeout=1500)\n logger.info('RabbitMQ cluster is available')\n\n wait_pass(lambda:\n self.fuel_web.run_single_ostf_test(\n cluster_id, test_sets=['sanity'],\n test_name=ostf_test_mapping.OSTF_TEST_MAPPING.get(\n 'Check that required services are running')),\n timeout=1500)\n logger.info(\"Required services are running\")\n\n try:\n self.fuel_web.run_ostf(cluster_id,\n 
test_sets=['smoke', 'sanity', 'ha'])\n except AssertionError:\n logger.debug(\"Test failed from first probe,\"\n \" we sleep 600 second try one more time\"\n \" and if it fails again - test will fails \")\n time.sleep(600)\n self.fuel_web.run_ostf(cluster_id,\n test_sets=['smoke', 'sanity', 'ha'])", "def supports_authorization_admin(self):\n return # boolean", "def setUserIsAdministrator(self, account, email, reason=None):\n EquipmentACLInfo.setAdministrator(account, self, email, reason)\n self._updateCalendarPermissions(account)", "def enable_auth(self, value):\n self._set_property('enable_auth', value)", "def enable_auth(workspace):\n\n server_config_filename = \"server_config.json\"\n\n cc_package = codechecker_package()\n original_auth_cfg = os.path.join(cc_package,\n 'config',\n server_config_filename)\n\n shutil.copy(original_auth_cfg, workspace)\n\n server_cfg_file = os.path.join(workspace,\n server_config_filename)\n\n scfg_dict = load_json(server_cfg_file, {})\n scfg_dict[\"authentication\"][\"enabled\"] = True\n scfg_dict[\"authentication\"][\"method_dictionary\"][\"enabled\"] = True\n scfg_dict[\"authentication\"][\"method_dictionary\"][\"auths\"] = \\\n [\"cc:test\", \"john:doe\", \"admin:admin123\", \"colon:my:password\",\n \"admin_group_user:admin123\", \"regex_admin:blah\",\n \"permission_view_user:pvu\"]\n scfg_dict[\"authentication\"][\"method_dictionary\"][\"groups\"] = \\\n {\"admin_group_user\": [\"admin_GROUP\"]}\n scfg_dict[\"authentication\"][\"regex_groups\"][\"enabled\"] = True\n\n with open(server_cfg_file, 'w',\n encoding=\"utf-8\", errors=\"ignore\") as scfg:\n json.dump(scfg_dict, scfg, indent=2, sort_keys=True)\n\n # Create a root user.\n root_file = os.path.join(workspace, 'root.user')\n with open(root_file, 'w',\n encoding='utf-8', errors='ignore') as rootf:\n rootf.write(f\"root:{sha256(b'root:root').hexdigest()}\")\n os.chmod(root_file, stat.S_IRUSR | stat.S_IWUSR)", "def negative_admin_bonding_in_lacp_mode(self):\n self.env.revert_snapshot(\"ready_with_3_slaves\")\n\n segment_type = NEUTRON_SEGMENT['tun']\n\n self.show_step(1)\n cluster_id = self.fuel_web.create_cluster(\n name=self.__class__.__name__,\n mode=DEPLOYMENT_MODE,\n settings={\n \"net_provider\": 'neutron',\n \"net_segment_type\": segment_type,\n }\n )\n\n self.show_step(2)\n self.show_step(3)\n self.fuel_web.update_nodes(\n cluster_id, {\n 'slave-01': ['controller'],\n 'slave-02': ['compute'],\n 'slave-03': ['cinder']\n }\n )\n\n self.show_step(4)\n nailgun_nodes = self.fuel_web.client.list_cluster_nodes(cluster_id)\n invalid_bond_conf = deepcopy(self.BOND_CONFIG)\n invalid_bond_conf[1]['mode'] = '802.3ad'\n assert_raises(\n HTTPError,\n self.fuel_web.update_node_networks,\n nailgun_nodes[0]['id'],\n interfaces_dict=deepcopy(self.INTERFACES),\n raw_data=invalid_bond_conf)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Completes master IP rotation.
def complete_i_p_rotation( self, project_id, zone, cluster_id, name=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): # Wrap the transport method to add retry and timeout logic. if "complete_i_p_rotation" not in self._inner_api_calls: self._inner_api_calls[ "complete_i_p_rotation" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.complete_i_p_rotation, default_retry=self._method_configs["CompleteIPRotation"].retry, default_timeout=self._method_configs["CompleteIPRotation"].timeout, client_info=self._client_info, ) request = cluster_service_pb2.CompleteIPRotationRequest( project_id=project_id, zone=zone, cluster_id=cluster_id, name=name ) if metadata is None: metadata = [] metadata = list(metadata) try: routing_header = [("name", name)] except AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) return self._inner_api_calls["complete_i_p_rotation"]( request, retry=retry, timeout=timeout, metadata=metadata )
[ "def reset_master(server):\n server.exec_stmt(\"RESET MASTER\")", "def _cmd_resync(self):\n self.ctx.awaiting_bridge = True", "def delete_last_transport_process(self):", "def switch_sync_finished(self):", "def exit(self):\n # force state of missing Supvisors instances\n self.context.end_synchro()\n # arbitrarily choice : master address is the 'lowest' address\n # among running addresses\n addresses = self.context.running_addresses()\n self.logger.info('working with boards {}'.format(addresses))\n self.context.master_address = min(addresses)", "def end(self) -> None:\n self.celery_executor.end()\n self.kubernetes_executor.end()", "def remove_master_node(self, host_ip):\n\t\tself.swarm_manager.remove_master_node(host_ip)", "def main():\n server_list = [server for server in servers.keys() if server != 'local']\n delete_all_arc_jobs(server_list=server_list)\n if 'local' in servers:\n delete_all_local_arc_jobs()", "def rotate(self):\n\t\tp = self.list.rotate(-1)\n\t\t# p = self.list.popleft() # pop the head\n\t\t# self.list.append(p) # put the head at the end\n\t\tq = self.list[0] # take the new head PCB\n\t\tq.status.type == 'ready' # change the type of new PCB to 'ready'", "def task_done(self):\n self.acknowledge()", "def finalize ( self, backup_distmap=True ):\n # disable finalize_at_exit first so that exceptions cannot trigger\n # _atexit_run()->this function\n #\n self.logger.info ( \"finalizing\" )\n self.finalize_at_exit = False\n\n self._cleanup()\n if self.distmap is not None:\n ## not necessary\n #self.distmap.remove_volatiles()\n if backup_distmap:\n self.distmap.backup_and_write ( force=False )\n else:\n self.distmap.write ( force=False )\n self.logger.debug ( \"finalize() done\" )", "def _cb_sync_done(self):\n self.syncing = False", "def ha_one_controller_backup_restore(self):\n self.env.revert_snapshot(\"deploy_ha_one_controller_flat\")\n\n cluster_id = self.fuel_web.get_last_created_cluster()\n os_conn = os_actions.OpenStackActions(\n self.fuel_web.get_public_vip(cluster_id),\n 'novaSimpleFlat', 'novaSimpleFlat', 'novaSimpleFlat')\n self.fuel_web.assert_cluster_ready(\n os_conn, smiles_count=6, networks_count=1, timeout=300)\n self.fuel_web.backup_master(self.env.get_admin_remote())\n checkers.backup_check(self.env.get_admin_remote())\n\n self.fuel_web.update_nodes(\n cluster_id, {'slave-03': ['compute']}, True, False)\n\n assert_equal(\n 3, len(self.fuel_web.client.list_cluster_nodes(cluster_id)))\n\n self.fuel_web.restore_master(self.env.get_admin_remote())\n checkers.restore_check_sum(self.env.get_admin_remote())\n self.fuel_web.restore_check_nailgun_api(self.env.get_admin_remote())\n checkers.iptables_check(self.env.get_admin_remote())\n\n assert_equal(\n 2, len(self.fuel_web.client.list_cluster_nodes(cluster_id)))\n\n self.fuel_web.update_nodes(\n cluster_id, {'slave-03': ['compute']}, True, False)\n self.fuel_web.deploy_cluster_wait(cluster_id)\n\n self.fuel_web.run_ostf(\n cluster_id=cluster_id)\n\n self.env.make_snapshot(\"ha_one_controller_backup_restore\")", "def done(self):\n self.task_progress = 0 # task is done, clear progress\n self.ready = True # the mirror is ready to get the next task", "def end(self):\n self.remove_phases()\n self.unsuccessful_run()", "def _clean(self):\n print(\"Cleaning up...\", file=sys.stderr)\n self._kill_workers()\n if self._multinode and self._region_comm:\n print(\"Aborting MPI job...\", file=sys.stderr)\n self._region_comm.Abort(errorcode=1)", "def rotate(self, azi, alt):\r\n arg_str = 
p2e._base._util._convert_args_to_string(\"node.rotate\", self._node._eco_id, \r\n azi, alt)\r\n p2e._app.Exec(arg_str)", "def rejoin(self):\n #read the cash file and get the last_ips\n re_join_request_dict = { 'Type' : \"re_Join_req\", \n 'Process_id' : (str(os.getpid())).strip(), \n 'IP_address' : self.IP_ADDRESS,\n 'Timestamp' : (str(datetime.now().isoformat(timespec='seconds'))).strip(),\n 'Port' : str(self.PORT)\n }\n re_join_request_json = json.dumps(re_join_request_dict)\n re_join_data = (re_join_request_json).encode('utf-8')\n with open(\"ip.txt\", 'r', encoding = 'utf-8') as f:\n ip_list = f.readlines()\n ip_list = [x.strip() for x in ip_list]\n print(\"Send out rejoin\")\n for target_ip_address in ip_list:\n self.socket.sendto(re_join_data, (target_ip_address, self.PORT))", "def complete(self):\n self.state = 2\n self.finished = datetime.now()" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Enables or disables Network Policy for a cluster.
def set_network_policy( self, project_id, zone, cluster_id, network_policy, name=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): # Wrap the transport method to add retry and timeout logic. if "set_network_policy" not in self._inner_api_calls: self._inner_api_calls[ "set_network_policy" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.set_network_policy, default_retry=self._method_configs["SetNetworkPolicy"].retry, default_timeout=self._method_configs["SetNetworkPolicy"].timeout, client_info=self._client_info, ) request = cluster_service_pb2.SetNetworkPolicyRequest( project_id=project_id, zone=zone, cluster_id=cluster_id, network_policy=network_policy, name=name, ) if metadata is None: metadata = [] metadata = list(metadata) try: routing_header = [("name", name)] except AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) return self._inner_api_calls["set_network_policy"]( request, retry=retry, timeout=timeout, metadata=metadata )
[ "def enable_network_management(request):\n log('Enabling network management')\n _assign_role(request, StandardRole.NETWORK_MANAGER)", "def enable_network(self):\n if self._is_admin():\n completed = subprocess.run(args=['netsh', 'interface', 'set', 'interface', '\"Wi-Fi\"', 'enable'])\n print(\"Enable Wi-Fi \", completed.returncode)\n completed = subprocess.run(args=['netsh', 'interface', 'set', 'interface', '\"Ethernet\"', 'enable'])\n print(\"Enable Ethernet\", completed.returncode)\n else:\n # Re-run the program with admin rights\n ctypes.windll.shell32.ShellExecuteW(None, \"runas\", 'netsh', ' interface set interface \"Ethernet\" enable', None, 1)\n ctypes.windll.shell32.ShellExecuteW(None, \"runas\", 'netsh', ' interface set interface \"Wi-Fi\" enable', None, 1)", "def disable_network(self):\n if self._is_admin():\n completed = subprocess.run(args=['netsh', 'interface', 'set', 'interface', '\"Wi-Fi\"', 'disable'])\n print(\"Disable Wi-Fi \", completed.returncode)\n completed = subprocess.run(args=['netsh', 'interface', 'set', 'interface', '\"Ethernet\"', 'disable'])\n print(\"Disable Ethernet\", completed.returncode)\n else:\n # Re-run the program with admin rights\n ctypes.windll.shell32.ShellExecuteW(None, \"runas\", 'netsh', ' interface set interface \"Wi-Fi\" disable', None, 1)\n ctypes.windll.shell32.ShellExecuteW(None, \"runas\", 'netsh', ' interface set interface \"Ethernet\" disable', None, 1)", "def cluster_level_enable(self, cluster_level_enable):\n\n self._cluster_level_enable = cluster_level_enable", "def configure_control_network(module, network, task, msg):\n cli = pn_cli(module)\n cli += ' fabric-info format control-network '\n time.sleep(4)\n current_control_network = run_command(module, cli, task, msg).split()[1]\n\n if current_control_network != network:\n cli = pn_cli(module)\n cli += ' fabric-local-modify control-network %s ' % network\n run_command(module, cli, task, msg)", "def enable_cluster_backup(self):\n mch_resource = ocp.OCP(\n kind=\"MultiClusterHub\",\n resource_name=constants.ACM_MULTICLUSTER_RESOURCE,\n namespace=constants.ACM_HUB_NAMESPACE,\n )\n mch_resource._has_phase = True\n resource_dict = mch_resource.get()\n for components in resource_dict[\"spec\"][\"overrides\"][\"components\"]:\n if components[\"name\"] == \"cluster-backup\":\n components[\"enabled\"] = True\n mch_resource_yaml = tempfile.NamedTemporaryFile(\n mode=\"w+\", prefix=\"mch\", delete=False\n )\n yaml_serialized = yaml.dump(resource_dict)\n mch_resource_yaml.write(yaml_serialized)\n mch_resource_yaml.flush()\n run_cmd(f\"oc apply -f {mch_resource_yaml.name}\")\n mch_resource.wait_for_phase(\"Running\")\n self.backup_pod_status_check()", "def manual_cic_maintenance_mode(self):\n self.env.revert_snapshot('cic_maintenance_mode')\n\n cluster_id = self.fuel_web.get_last_created_cluster()\n\n # Select a non-primary controller\n regular_ctrl = self.fuel_web.get_nailgun_node_by_name(\"slave-02\")\n dregular_ctrl = self.fuel_web.get_devops_node_by_nailgun_node(\n regular_ctrl)\n _ip = regular_ctrl['ip']\n _id = regular_ctrl['id']\n logger.info('Maintenance mode for node-{0}'.format(_id))\n asserts.assert_true('True' in check_available_mode(_ip),\n \"Maintenance mode is not available\")\n self.ssh_manager.execute_on_remote(\n ip=_ip,\n cmd=\"umm on\")\n\n logger.info('Wait a node-{0} offline status after turning on of'\n ' maintenance mode'.format(_id))\n err_msg = ('Node-{0} has not become offline after'\n 'turning on of maintenance mode'.format(_id))\n wait(\n lambda: not\n 
self.fuel_web.get_nailgun_node_by_devops_node(dregular_ctrl)\n ['online'], timeout=70 * 10, timeout_msg=err_msg)\n\n logger.info('Check that node-{0} in maintenance mode after '\n 'switching'.format(_id))\n\n asserts.assert_true(\n checkers.check_ping(self.env.get_admin_node_ip(),\n _ip,\n deadline=600),\n \"Host {0} is not reachable by ping during 600 sec\".format(_ip))\n\n asserts.assert_true('True' in check_auto_mode(_ip),\n \"Maintenance mode is not switched on\")\n\n self.ssh_manager.execute_on_remote(\n ip=_ip,\n cmd=\"umm off\")\n\n logger.info('Wait a node-{0} online status'.format(_id))\n err_msg = ('Node-{0} has not become online after'\n 'turning off maintenance mode'.format(_id))\n wait(\n lambda:\n self.fuel_web.get_nailgun_node_by_devops_node(dregular_ctrl)\n ['online'], timeout=70 * 10, timeout_msg=err_msg)\n\n # Wait until MySQL Galera is UP on some controller\n self.fuel_web.wait_mysql_galera_is_up(\n [dregular_ctrl.name])\n\n # Wait until Cinder services UP on a controller\n self.fuel_web.wait_cinder_is_up(\n [dregular_ctrl.name])\n\n # Wait until RabbitMQ cluster is UP\n wait_pass(lambda:\n self.fuel_web.run_single_ostf_test(\n cluster_id, test_sets=['ha'],\n test_name=ostf_test_mapping.OSTF_TEST_MAPPING.get(\n 'RabbitMQ availability')),\n timeout=1500)\n logger.info('RabbitMQ cluster is available')\n\n wait_pass(lambda:\n self.fuel_web.run_single_ostf_test(\n cluster_id, test_sets=['sanity'],\n test_name=ostf_test_mapping.OSTF_TEST_MAPPING.get(\n 'Check that required services are running')),\n timeout=1500)\n logger.info(\"Required services are running\")\n\n try:\n self.fuel_web.run_ostf(cluster_id,\n test_sets=['smoke', 'sanity', 'ha'])\n except AssertionError:\n logger.debug(\"Test failed from first probe,\"\n \" we sleep 600 second try one more time\"\n \" and if it fails again - test will fails \")\n time.sleep(600)\n self.fuel_web.run_ostf(cluster_id,\n test_sets=['smoke', 'sanity', 'ha'])", "def set_maintenance_policy(\n self,\n project_id,\n zone,\n cluster_id,\n maintenance_policy,\n name=None,\n retry=google.api_core.gapic_v1.method.DEFAULT,\n timeout=google.api_core.gapic_v1.method.DEFAULT,\n metadata=None,\n ):\n # Wrap the transport method to add retry and timeout logic.\n if \"set_maintenance_policy\" not in self._inner_api_calls:\n self._inner_api_calls[\n \"set_maintenance_policy\"\n ] = google.api_core.gapic_v1.method.wrap_method(\n self.transport.set_maintenance_policy,\n default_retry=self._method_configs[\"SetMaintenancePolicy\"].retry,\n default_timeout=self._method_configs[\"SetMaintenancePolicy\"].timeout,\n client_info=self._client_info,\n )\n\n request = cluster_service_pb2.SetMaintenancePolicyRequest(\n project_id=project_id,\n zone=zone,\n cluster_id=cluster_id,\n maintenance_policy=maintenance_policy,\n name=name,\n )\n if metadata is None:\n metadata = []\n metadata = list(metadata)\n try:\n routing_header = [(\"name\", name)]\n except AttributeError:\n pass\n else:\n routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(\n routing_header\n )\n metadata.append(routing_metadata)\n\n return self._inner_api_calls[\"set_maintenance_policy\"](\n request, retry=retry, timeout=timeout, metadata=metadata\n )", "def _reinitialize_policy_network(self):\n with tf.device(self._train_device):\n self._policy_network = PolicyNetwork(self._embedding_size,\n self._policy_network_layers,\n self._num_actions)\n self._optimizer_policy = tf.keras.optimizers.Adam(\n learning_rate=self._learning_rate)\n self._loss_policy = 
tf.keras.losses.MeanSquaredError()", "def _set_node_enabled(self, node_name, is_enabled):\n node = self.nodes.get(node_name)\n if node:\n node.enabled = is_enabled", "def enable(self):\n hoomd.util.print_status_line();\n self.check_initialization();\n\n # check if we are already disabled\n if self.enabled:\n hoomd.context.msg.warning(\"Ignoring command to enable a compute that is already enabled\");\n return;\n\n hoomd.context.current.system.addCompute(self.cpp_compute, self.compute_name);\n self.enabled = True;", "async def _servertoservertoggle(self, ctx: commands.Context, clustername):\n async with self.config.guild(ctx.guild).clusters() as clusters:\n if \"servertoserver\" not in clusters[clustername].keys():\n clusters[clustername][\"servertoserver\"] = False\n return await ctx.send(\n f\"Server to server chat for {clustername.upper()} has been initialized as **Disabled**.\")\n if clusters[clustername][\"servertoserver\"] is False:\n clusters[clustername][\"servertoserver\"] = True\n return await ctx.send(f\"Server to server chat for {clustername.upper()} has been **Enabled**.\")\n if clusters[clustername][\"servertoserver\"] is True:\n clusters[clustername][\"servertoserver\"] = False\n return await ctx.send(f\"Server to server chat for {clustername.upper()} has been **Disabled**.\")", "def block_connectivity_gluster_nodes(request, storage):\n self = request.node.cls\n\n testflow.setup(\n \"Blocking the following gluster nodes %s\",\n self.disabled_nodes_ips\n )\n self.block_nodes(self.disabled_nodes_ips)", "def enable_private_networking(self):\n return self.act_on_droplets(type='enable_private_networking')", "def set_inter_node_communication_enabled(config, flag):\n # type: (dict, bool) -> None\n config['pool_specification']['inter_node_communication_enabled'] = flag", "def patch_resource(\n self, namespace: typing.Optional[\"str\"] = None\n ) -> \"NetworkPolicyStatus\":\n names = [\"patch_namespaced_network_policy\", \"patch_network_policy\"]\n\n response = _kube_api.execute(\n action=\"patch\",\n resource=self,\n names=names,\n namespace=namespace,\n api_client=None,\n api_args={\"body\": self.to_dict(), \"name\": self.metadata.name},\n )\n\n output = NetworkPolicyStatus()\n if response is not None:\n output.from_dict(_kube_api.to_kuber_dict(response.status))\n return output", "def update_sdn_enabled(self, context):\n return self.call(context,\n self.make_msg('update_sdn_enabled'))", "def _upgrade_policy(self, args: parser_extensions.Namespace):\n if '--upgrade-control-plane' not in args.GetSpecifiedArgNames():\n return None\n\n return messages.VmwareClusterUpgradePolicy(\n controlPlaneOnly=args.upgrade_control_plane,\n )", "def negative_auto_cic_maintenance_mode(self):\n self.env.revert_snapshot('cic_maintenance_mode')\n\n cluster_id = self.fuel_web.get_last_created_cluster()\n\n # Select a non-primary controller\n regular_ctrl = self.fuel_web.get_nailgun_node_by_name(\"slave-02\")\n dregular_ctrl = self.fuel_web.get_devops_node_by_nailgun_node(\n regular_ctrl)\n _ip = regular_ctrl['ip']\n _id = regular_ctrl['id']\n\n asserts.assert_true('True' in check_available_mode(_ip),\n \"Maintenance mode is not available\")\n logger.info('Disable UMM on node-{0}'.format(_id))\n\n change_config(_ip, umm=False, reboot_count=0)\n\n asserts.assert_false('True' in check_available_mode(_ip),\n \"Maintenance mode should not be available\")\n\n command = 'reboot --force >/dev/null & '\n\n logger.info('Unexpected reboot on node-{0}'\n .format(_id))\n\n self.ssh_manager.execute_on_remote(\n 
ip=_ip,\n cmd=command)\n\n wait(lambda:\n not checkers.check_ping(self.env.get_admin_node_ip(),\n _ip),\n timeout=60 * 10)\n\n # Node don't have enough time for set offline status\n # after reboot --force\n # Just waiting\n\n asserts.assert_true(\n checkers.check_ping(self.env.get_admin_node_ip(),\n _ip,\n deadline=600),\n \"Host {0} is not reachable by ping during 600 sec\"\n \"\".format(_ip))\n logger.info('Wait a node-{0} online status after unexpected '\n 'reboot'.format(_id))\n\n self.fuel_web.wait_nodes_get_online_state([dregular_ctrl])\n\n logger.info('Check that node-{0} not in maintenance mode after'\n ' unexpected reboot'.format(_id))\n\n wait(lambda: tcp_ping(_ip, 22),\n timeout=60 * 10,\n timeout_msg='Node {} still is not available by SSH'.format(\n dregular_ctrl.name))\n\n asserts.assert_false('True' in check_auto_mode(_ip),\n \"Maintenance mode should not switched\")\n\n # Wait until MySQL Galera is UP on some controller\n self.fuel_web.wait_mysql_galera_is_up(\n [dregular_ctrl.name])\n\n # Wait until Cinder services UP on a controller\n self.fuel_web.wait_cinder_is_up(\n [dregular_ctrl.name])\n\n # Wait until RabbitMQ cluster is UP\n wait_pass(lambda:\n self.fuel_web.run_single_ostf_test(\n cluster_id, test_sets=['ha'],\n test_name=ostf_test_mapping.OSTF_TEST_MAPPING.get(\n 'RabbitMQ availability')),\n timeout=1500)\n logger.info('RabbitMQ cluster is available')\n\n wait_pass(lambda:\n self.fuel_web.run_single_ostf_test(\n cluster_id, test_sets=['sanity'],\n test_name=ostf_test_mapping.OSTF_TEST_MAPPING.get(\n 'Check that required services are running')),\n timeout=1500)\n logger.info(\"Required services are running\")\n\n try:\n self.fuel_web.run_ostf(cluster_id,\n test_sets=['smoke', 'sanity', 'ha'])\n except AssertionError:\n logger.debug(\"Test failed from first probe,\"\n \" we sleep 600 second try one more time\"\n \" and if it fails again - test will fails \")\n time.sleep(600)\n self.fuel_web.run_ostf(cluster_id,\n test_sets=['smoke', 'sanity', 'ha'])" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Sets the maintenance policy for a cluster.
def set_maintenance_policy( self, project_id, zone, cluster_id, maintenance_policy, name=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): # Wrap the transport method to add retry and timeout logic. if "set_maintenance_policy" not in self._inner_api_calls: self._inner_api_calls[ "set_maintenance_policy" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.set_maintenance_policy, default_retry=self._method_configs["SetMaintenancePolicy"].retry, default_timeout=self._method_configs["SetMaintenancePolicy"].timeout, client_info=self._client_info, ) request = cluster_service_pb2.SetMaintenancePolicyRequest( project_id=project_id, zone=zone, cluster_id=cluster_id, maintenance_policy=maintenance_policy, name=name, ) if metadata is None: metadata = [] metadata = list(metadata) try: routing_header = [("name", name)] except AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) return self._inner_api_calls["set_maintenance_policy"]( request, retry=retry, timeout=timeout, metadata=metadata )
[ "def maintenance_mode(self, mode):\n service = self._fetch_service_config(self.id)\n old_service = service.copy() # in case anything fails for rollback\n\n try:\n service['metadata']['annotations']['router.deis.io/maintenance'] = str(mode).lower()\n self._scheduler.svc.update(self.id, self.id, data=service)\n except KubeException as e:\n self._scheduler.svc.update(self.id, self.id, data=old_service)\n raise ServiceUnavailable(str(e)) from e", "def maintenance_mode(self, maintenance_mode):\n if maintenance_mode is None:\n raise ValueError(\"Invalid value for `maintenance_mode`, must not be `None`\")\n\n self._maintenance_mode = maintenance_mode", "def manual_cic_maintenance_mode(self):\n self.env.revert_snapshot('cic_maintenance_mode')\n\n cluster_id = self.fuel_web.get_last_created_cluster()\n\n # Select a non-primary controller\n regular_ctrl = self.fuel_web.get_nailgun_node_by_name(\"slave-02\")\n dregular_ctrl = self.fuel_web.get_devops_node_by_nailgun_node(\n regular_ctrl)\n _ip = regular_ctrl['ip']\n _id = regular_ctrl['id']\n logger.info('Maintenance mode for node-{0}'.format(_id))\n asserts.assert_true('True' in check_available_mode(_ip),\n \"Maintenance mode is not available\")\n self.ssh_manager.execute_on_remote(\n ip=_ip,\n cmd=\"umm on\")\n\n logger.info('Wait a node-{0} offline status after turning on of'\n ' maintenance mode'.format(_id))\n err_msg = ('Node-{0} has not become offline after'\n 'turning on of maintenance mode'.format(_id))\n wait(\n lambda: not\n self.fuel_web.get_nailgun_node_by_devops_node(dregular_ctrl)\n ['online'], timeout=70 * 10, timeout_msg=err_msg)\n\n logger.info('Check that node-{0} in maintenance mode after '\n 'switching'.format(_id))\n\n asserts.assert_true(\n checkers.check_ping(self.env.get_admin_node_ip(),\n _ip,\n deadline=600),\n \"Host {0} is not reachable by ping during 600 sec\".format(_ip))\n\n asserts.assert_true('True' in check_auto_mode(_ip),\n \"Maintenance mode is not switched on\")\n\n self.ssh_manager.execute_on_remote(\n ip=_ip,\n cmd=\"umm off\")\n\n logger.info('Wait a node-{0} online status'.format(_id))\n err_msg = ('Node-{0} has not become online after'\n 'turning off maintenance mode'.format(_id))\n wait(\n lambda:\n self.fuel_web.get_nailgun_node_by_devops_node(dregular_ctrl)\n ['online'], timeout=70 * 10, timeout_msg=err_msg)\n\n # Wait until MySQL Galera is UP on some controller\n self.fuel_web.wait_mysql_galera_is_up(\n [dregular_ctrl.name])\n\n # Wait until Cinder services UP on a controller\n self.fuel_web.wait_cinder_is_up(\n [dregular_ctrl.name])\n\n # Wait until RabbitMQ cluster is UP\n wait_pass(lambda:\n self.fuel_web.run_single_ostf_test(\n cluster_id, test_sets=['ha'],\n test_name=ostf_test_mapping.OSTF_TEST_MAPPING.get(\n 'RabbitMQ availability')),\n timeout=1500)\n logger.info('RabbitMQ cluster is available')\n\n wait_pass(lambda:\n self.fuel_web.run_single_ostf_test(\n cluster_id, test_sets=['sanity'],\n test_name=ostf_test_mapping.OSTF_TEST_MAPPING.get(\n 'Check that required services are running')),\n timeout=1500)\n logger.info(\"Required services are running\")\n\n try:\n self.fuel_web.run_ostf(cluster_id,\n test_sets=['smoke', 'sanity', 'ha'])\n except AssertionError:\n logger.debug(\"Test failed from first probe,\"\n \" we sleep 600 second try one more time\"\n \" and if it fails again - test will fails \")\n time.sleep(600)\n self.fuel_web.run_ostf(cluster_id,\n test_sets=['smoke', 'sanity', 'ha'])", "def auto_cic_maintenance_mode(self):\n self.env.revert_snapshot('cic_maintenance_mode')\n\n 
cluster_id = self.fuel_web.get_last_created_cluster()\n\n # Select a non-primary controller\n regular_ctrl = self.fuel_web.get_nailgun_node_by_name(\"slave-02\")\n dregular_ctrl = self.fuel_web.get_devops_node_by_nailgun_node(\n regular_ctrl)\n _ip = regular_ctrl['ip']\n _id = regular_ctrl['id']\n\n asserts.assert_true('True' in check_available_mode(_ip),\n \"Maintenance mode is not available\")\n\n change_config(_ip, reboot_count=0)\n\n logger.info('Change UMM.CONF on node-{0}'\n .format(_id))\n\n logger.info('Unexpected reboot on node-{0}'\n .format(_id))\n\n command = 'reboot --force >/dev/null & '\n\n self.ssh_manager.execute_on_remote(\n ip=_ip,\n cmd=command)\n\n wait(lambda:\n not checkers.check_ping(self.env.get_admin_node_ip(),\n _ip),\n timeout=60 * 10)\n\n logger.info('Wait a node-{0} offline status after unexpected '\n 'reboot'.format(_id))\n err_msg = ('Node-{0} has not become offline'\n ' after unexpected'.format(_id))\n wait(\n lambda: not\n self.fuel_web.get_nailgun_node_by_devops_node(dregular_ctrl)\n ['online'], timeout=70 * 10, timeout_msg=err_msg)\n\n logger.info('Check that node-{0} in maintenance mode after'\n ' unexpected reboot'.format(_id))\n asserts.assert_true(\n checkers.check_ping(self.env.get_admin_node_ip(),\n _ip,\n deadline=600),\n \"Host {0} is not reachable by ping during 600 sec\".format(_ip))\n\n asserts.assert_true('True' in check_auto_mode(_ip),\n \"Maintenance mode is not switched on\")\n\n logger.info('turn off Maintenance mode')\n self.ssh_manager.execute_on_remote(\n ip=_ip,\n cmd=\"umm off\")\n time.sleep(30)\n\n change_config(_ip)\n\n logger.info('Wait a node-{0} online status'\n .format(_id))\n err_msg = ('Node-{0} has not become online after'\n 'turning off maintenance mode'.format(_id))\n wait(\n lambda:\n self.fuel_web.get_nailgun_node_by_devops_node(dregular_ctrl)\n ['online'], timeout=70 * 10, timeout_msg=err_msg)\n\n # Wait until MySQL Galera is UP on some controller\n self.fuel_web.wait_mysql_galera_is_up(\n [dregular_ctrl.name])\n\n # Wait until Cinder services UP on a controller\n self.fuel_web.wait_cinder_is_up(\n [dregular_ctrl.name])\n\n # Wait until RabbitMQ cluster is UP\n wait_pass(lambda:\n self.fuel_web.run_single_ostf_test(\n cluster_id, test_sets=['ha'],\n test_name=ostf_test_mapping.OSTF_TEST_MAPPING.get(\n 'RabbitMQ availability')),\n timeout=1500)\n logger.info('RabbitMQ cluster is available')\n\n # Wait until all Openstack services are UP\n wait_pass(lambda:\n self.fuel_web.run_single_ostf_test(\n cluster_id, test_sets=['sanity'],\n test_name=ostf_test_mapping.OSTF_TEST_MAPPING.get(\n 'Check that required services are running')),\n timeout=1500)\n logger.info(\"Required services are running\")\n\n try:\n self.fuel_web.run_ostf(cluster_id,\n test_sets=['smoke', 'sanity', 'ha'])\n except AssertionError:\n logger.debug(\"Test failed from first probe,\"\n \" we sleep 600 second try one more time\"\n \" and if it fails again - test will fails \")\n time.sleep(600)\n self.fuel_web.run_ostf(cluster_id,\n test_sets=['smoke', 'sanity', 'ha'])", "def cic_maintenance_mode_env(self):\n self.check_run('cic_maintenance_mode')\n self.env.revert_snapshot(\"ready_with_5_slaves\")\n data = {\n 'ceilometer': True,\n }\n\n cluster_id = self.fuel_web.create_cluster(\n name=self.__class__.__name__,\n mode=settings.DEPLOYMENT_MODE_HA,\n settings=data)\n\n self.fuel_web.update_nodes(\n cluster_id,\n {\n 'slave-01': ['controller', 'mongo'],\n 'slave-02': ['controller', 'mongo'],\n 'slave-03': ['controller', 'mongo'],\n 'slave-04': ['compute', 
'cinder'],\n 'slave-05': ['compute', 'cinder']\n }\n )\n\n # Cluster deploy\n self.fuel_web.deploy_cluster_wait(cluster_id)\n\n # Check network\n self.fuel_web.verify_network(cluster_id)\n\n # Run ostf\n self.fuel_web.run_ostf(cluster_id, test_sets=['ha', 'smoke', 'sanity'])\n\n self.env.make_snapshot(\"cic_maintenance_mode\", is_make=True)", "def cluster_modify_scheduler_mutation(self, data):\n path = \"/ws/v1/cluster/scheduler-conf\"\n\n return self.request(path, \"PUT\", json=data)", "def modify_cluster(ClusterId=None, StepConcurrencyLevel=None):\n pass", "def cluster_modify_scheduler_conf_mutation(self, data):\n path = '/ws/v1/cluster/scheduler-conf'\n\n return self.request(path, 'PUT', json=data)", "def negative_auto_cic_maintenance_mode(self):\n self.env.revert_snapshot('cic_maintenance_mode')\n\n cluster_id = self.fuel_web.get_last_created_cluster()\n\n # Select a non-primary controller\n regular_ctrl = self.fuel_web.get_nailgun_node_by_name(\"slave-02\")\n dregular_ctrl = self.fuel_web.get_devops_node_by_nailgun_node(\n regular_ctrl)\n _ip = regular_ctrl['ip']\n _id = regular_ctrl['id']\n\n asserts.assert_true('True' in check_available_mode(_ip),\n \"Maintenance mode is not available\")\n logger.info('Disable UMM on node-{0}'.format(_id))\n\n change_config(_ip, umm=False, reboot_count=0)\n\n asserts.assert_false('True' in check_available_mode(_ip),\n \"Maintenance mode should not be available\")\n\n command = 'reboot --force >/dev/null & '\n\n logger.info('Unexpected reboot on node-{0}'\n .format(_id))\n\n self.ssh_manager.execute_on_remote(\n ip=_ip,\n cmd=command)\n\n wait(lambda:\n not checkers.check_ping(self.env.get_admin_node_ip(),\n _ip),\n timeout=60 * 10)\n\n # Node don't have enough time for set offline status\n # after reboot --force\n # Just waiting\n\n asserts.assert_true(\n checkers.check_ping(self.env.get_admin_node_ip(),\n _ip,\n deadline=600),\n \"Host {0} is not reachable by ping during 600 sec\"\n \"\".format(_ip))\n logger.info('Wait a node-{0} online status after unexpected '\n 'reboot'.format(_id))\n\n self.fuel_web.wait_nodes_get_online_state([dregular_ctrl])\n\n logger.info('Check that node-{0} not in maintenance mode after'\n ' unexpected reboot'.format(_id))\n\n wait(lambda: tcp_ping(_ip, 22),\n timeout=60 * 10,\n timeout_msg='Node {} still is not available by SSH'.format(\n dregular_ctrl.name))\n\n asserts.assert_false('True' in check_auto_mode(_ip),\n \"Maintenance mode should not switched\")\n\n # Wait until MySQL Galera is UP on some controller\n self.fuel_web.wait_mysql_galera_is_up(\n [dregular_ctrl.name])\n\n # Wait until Cinder services UP on a controller\n self.fuel_web.wait_cinder_is_up(\n [dregular_ctrl.name])\n\n # Wait until RabbitMQ cluster is UP\n wait_pass(lambda:\n self.fuel_web.run_single_ostf_test(\n cluster_id, test_sets=['ha'],\n test_name=ostf_test_mapping.OSTF_TEST_MAPPING.get(\n 'RabbitMQ availability')),\n timeout=1500)\n logger.info('RabbitMQ cluster is available')\n\n wait_pass(lambda:\n self.fuel_web.run_single_ostf_test(\n cluster_id, test_sets=['sanity'],\n test_name=ostf_test_mapping.OSTF_TEST_MAPPING.get(\n 'Check that required services are running')),\n timeout=1500)\n logger.info(\"Required services are running\")\n\n try:\n self.fuel_web.run_ostf(cluster_id,\n test_sets=['smoke', 'sanity', 'ha'])\n except AssertionError:\n logger.debug(\"Test failed from first probe,\"\n \" we sleep 600 second try one more time\"\n \" and if it fails again - test will fails \")\n time.sleep(600)\n self.fuel_web.run_ostf(cluster_id,\n 
test_sets=['smoke', 'sanity', 'ha'])", "def negative_manual_cic_maintenance_mode(self):\n self.env.revert_snapshot('cic_maintenance_mode')\n\n cluster_id = self.fuel_web.get_last_created_cluster()\n\n # Select a non-primary controller\n regular_ctrl = self.fuel_web.get_nailgun_node_by_name(\"slave-02\")\n dregular_ctrl = self.fuel_web.get_devops_node_by_nailgun_node(\n regular_ctrl)\n _ip = regular_ctrl['ip']\n _id = regular_ctrl['id']\n\n asserts.assert_true('True' in check_available_mode(_ip),\n \"Maintenance mode is not available\")\n self.ssh_manager.execute_on_remote(\n ip=_ip,\n cmd=\"umm disable\")\n\n asserts.assert_false('True' in check_available_mode(_ip),\n \"Maintenance mode should not be available\")\n\n logger.info('Try to execute maintenance mode '\n 'for node-{0}'.format(_id))\n\n self.ssh_manager.execute_on_remote(\n ip=_ip,\n cmd=\"umm on\",\n assert_ec_equal=[1])\n\n # If we don't disable maintenance mode,\n # the node would have gone to reboot, so we just expect\n time.sleep(30)\n asserts.assert_true(\n self.fuel_web.get_nailgun_node_by_devops_node(dregular_ctrl)\n ['online'],\n 'Node-{0} should be online after command \"umm on\"'.format(_id))\n\n try:\n self.fuel_web.run_ostf(cluster_id, test_sets=['ha', 'smoke',\n 'sanity'])\n except AssertionError:\n logger.debug(\"Test failed from first probe,\"\n \" we sleep 300 second try one more time\"\n \" and if it fails again - test will fails \")\n time.sleep(300)\n self.fuel_web.run_ostf(cluster_id, test_sets=['ha', 'smoke',\n 'sanity'])", "def put_managed_scaling_policy(ClusterId=None, ManagedScalingPolicy=None):\n pass", "def modify_db_cluster(self, DBClusterIdentifier: str, NewDBClusterIdentifier: str = None, ApplyImmediately: bool = None, BackupRetentionPeriod: int = None, DBClusterParameterGroupName: str = None, VpcSecurityGroupIds: List = None, Port: int = None, MasterUserPassword: str = None, PreferredBackupWindow: str = None, PreferredMaintenanceWindow: str = None, CloudwatchLogsExportConfiguration: Dict = None, EngineVersion: str = None) -> Dict:\n pass", "def set_host_into_maintenance_mode(session, host_mor):\n\n try:\n task_ref = session._call_method(\n session._get_vim(), \"EnterMaintenanceMode_Task\", host_mor,\n timeout=0, evacuatePoweredOffVms=False)\n session.wait_for_task(task_ref)\n except Exception as e:\n LOG.exception(_LE(\"%s\"), e)\n raise Exception(e)", "def set_machine_maintenance_state(\n self, name_or_id, state=True, reason=None\n ):\n if state:\n self.baremetal.set_node_maintenance(name_or_id, reason)\n else:\n self.baremetal.unset_node_maintenance(name_or_id)", "def cluster_update_reservation(self, data):\n path = \"/ws/v1/cluster/reservation/update\"\n\n return self.request(path, \"POST\", json=data)", "def reset_cluster_after_upgrade_neutron_ceph(self):\n self.env.revert_snapshot('upgrade_master_neutron_ceph')\n cluster_id = self.fuel_web.get_last_created_cluster()\n\n self.fuel_web.stop_reset_env_wait(cluster_id)\n\n self.env.bootstrap_nodes(\n self.env.d_env.nodes().slaves[6:7])\n\n self.fuel_web.update_nodes(\n cluster_id,\n {'slave-02': ['controller'],\n 'slave-03': ['controller'],\n 'slave-05': ['compute', 'ceph-osd'],\n 'slave-06': ['compute', 'ceph-osd']\n }, False, True\n )\n self.fuel_web.update_nodes(\n cluster_id,\n {'slave-07': ['controller']}\n )\n\n self.fuel_web.run_network_verify(cluster_id)\n self.fuel_web.deploy_cluster_wait(cluster_id, check_services=False)\n self.fuel_web.run_ostf(cluster_id,\n test_sets=['ha', 'sanity', 'smoke'],\n should_fail=1)", "def 
set_cluster_for_vios(self, vios_id, cluster_id, cluster_name):\n # first update the vios_keyed dict\n if vios_id not in self.vios_keyed:\n self.vios_keyed[vios_id] = {}\n old = self.vios_keyed[vios_id]['last_cluster'] = None\n else:\n old = self.vios_keyed[vios_id]['last_cluster']\n if old != cluster_id:\n LOG.info(_(\"VIOS id %(id)s changed membership from cluster ID \"\n \"%(old)s to %(new)s with display name '%(name)s'.\") %\n dict(id=vios_id, old=old, new=cluster_id,\n name=cluster_name))\n self.vios_keyed[vios_id]['last_cluster'] = cluster_id\n # remove from the cluster side too\n if old is not None and old in self.cluster_keyed:\n if vios_id in self.cluster_keyed[old]['set']:\n self.cluster_keyed[old]['set'].remove(vios_id)\n ###################################################################\n # set_cluster_seq is the collection sequence number that the VIOS\n # last reported the cluster membership for.\n # trust_seq is reset to 0 when the VIOS reports as a member for a\n # Cluster feed request. It is bumped up independently during a VIOS\n # feed request (which occurs prior to cluster feed in a topology\n # collection) if the VIOS has a good state and rmc_state.\n # This means that if the VIOS has not reported as being a member of\n # the cluster for some number of iterations, but the trust_seq\n # has bumped up to some small number, then we can \"trust\" that\n # the vios really is not a member of the cluster, and not just\n # experiencing a connectivity problem due to the network or heavy\n # load.\n ###################################################################\n self.vios_keyed[vios_id]['trust_seq'] = 0 # reset\n if cluster_id is None:\n self.vios_keyed[vios_id]['set_cluster_seq'] =\\\n (self.sequence_num - 1) # set sequence in past for None case\n return # Don't need to update cluster_keyed dict.\n self.vios_keyed[vios_id]['set_cluster_seq'] = self.sequence_num\n\n # Now update the cluster_keyed dict\n if cluster_id not in self.cluster_keyed:\n entry = {'set_cluster_seq': self.sequence_num, 'set': set()}\n self.cluster_keyed[cluster_id] = entry\n else:\n entry = self.cluster_keyed[cluster_id]\n entry['display_name'] = cluster_name\n LOG.debug(\"Vios_id=%s, Vios_keyed after update=%s, Cluster entry \"\n \"before update for cluster %s: %s.\" %\n (vios_id, self.vios_keyed[vios_id], cluster_id, entry))\n if entry['set_cluster_seq'] != self.sequence_num:\n # new topology collection sequence - reset membership\n entry['set'] = set()\n entry['set'].add(vios_id)\n entry['set_cluster_seq'] = self.sequence_num\n LOG.debug(\"Reset %s cluster membership for sequence %d to %s.\" %\n (cluster_id, self.sequence_num, entry['set']))\n else:\n entry['set'].add(vios_id)\n LOG.debug(\"Add VIOS %s to cluster %s: %s.\" %\n (vios_id, cluster_name, entry['set']))", "def cluster_update_reservation(self, data):\n path = '/ws/v1/cluster/reservation/update'\n\n return self.request(path, 'POST', json=data)", "def maintenance(message):\n status_set(WorkloadState.MAINTENANCE, message)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Lists subnetworks that are usable for creating clusters in a project.
def list_usable_subnetworks( self, parent=None, filter_=None, page_size=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): # Wrap the transport method to add retry and timeout logic. if "list_usable_subnetworks" not in self._inner_api_calls: self._inner_api_calls[ "list_usable_subnetworks" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.list_usable_subnetworks, default_retry=self._method_configs["ListUsableSubnetworks"].retry, default_timeout=self._method_configs["ListUsableSubnetworks"].timeout, client_info=self._client_info, ) request = cluster_service_pb2.ListUsableSubnetworksRequest( parent=parent, filter=filter_, page_size=page_size ) if metadata is None: metadata = [] metadata = list(metadata) try: routing_header = [("parent", parent)] except AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) iterator = google.api_core.page_iterator.GRPCIterator( client=None, method=functools.partial( self._inner_api_calls["list_usable_subnetworks"], retry=retry, timeout=timeout, metadata=metadata, ), request=request, items_field="subnetworks", request_token_field="page_token", response_token_field="next_page_token", ) return iterator
[ "def ex_list_networks(self):\r\n list_networks = []\r\n request = '/global/networks'\r\n response = self.connection.request(request, method='GET').object\r\n list_networks = [self._to_network(n) for n in\r\n response.get('items', [])]\r\n return list_networks", "def ex_list_networks(self):\r\n response = self.connection.request(self._networks_url_prefix).object\r\n return self._to_networks(response)", "def list_networks(self):\n return self._get(\"network\", box=BoxList)", "def get_network_list(self, account):\n pass", "def findNatsubNetwork():\n ipsubnet = \"192.168.\"\n i = 10\n while True:\n cmdstatus, cmdoutput = commands.getstatusoutput(\"/sbin/ifconfig -a | /bin/grep -w inet | /bin/awk -F' ' '{print $2}' | grep '%s%s' \" % (ipsubnet.replace('.', '\\.'), str(i) + '\\.'))\n if cmdstatus:\n break\n else:\n i += 2\n return [ipsubnet + str(i) + sub for sub in [\".1\", \".2\", \".254\" ]]", "def list_networks(request):\n cloud_id = request.matchdict['cloud']\n auth_context = auth_context_from_request(request)\n auth_context.check_perm(\"cloud\", \"read\", cloud_id)\n\n try:\n Cloud.objects.get(owner=auth_context.owner, id=cloud_id)\n except me.DoesNotExist:\n raise CloudNotFoundError\n\n networks = methods.list_networks(auth_context.owner, cloud_id)\n\n return networks", "def cloud_networks(self):\r\n return self._get_client(\"network\")", "def _get_virtual_networks(self) -> List[dict]:\n print('Getting virtual networks...')\n\n return self._run_az([\n 'network', 'vnet', 'list',\n '--resource-group', self._selected_resource_group['name']\n ])", "def test_vmware_service_resources_networks_get(self):\n pass", "def _get_subnets(self) -> List[dict]:\n print('Getting subnets...')\n\n return self._run_az([\n 'network', 'vnet', 'subnet', 'list',\n '--resource-group', self._selected_resource_group['name'],\n '--vnet-name', self._selected_virtual_network['name']\n ])", "def networks(self) -> pulumi.Input[List[pulumi.Input['ManagedZonePrivateVisibilityConfigNetworkArgs']]]:\n return pulumi.get(self, \"networks\")", "def ls(manager: WebManager):\n manager.sanitize()\n click.echo(tabulate(\n [\n (n.id, n.name, n.version, n.report.user, n.report.public, n.report.number_nodes, n.report.number_edges)\n for n in manager.list_networks()\n ],\n headers=['id', 'name', 'version', 'owner', 'public', 'nodes', 'edges'],\n ))", "def getNetworks(self):\n return self.base.get(\"networks\", [])", "def list_networks(retrieve_all=True, **_params):\n return IMPL.list_networks(retrieve_all, **_params)", "def _list_networks():\n output = core.run(\"virsh net-list --all\")\n networks = {}\n\n # Take the header off and normalize whitespace.\n net_lines = [n.strip() for n in output.splitlines()[2:]]\n for line in net_lines:\n if not line:\n continue\n name, state, auto = line.split()\n networks[name] = state == \"active\"\n return networks", "def netns_list(self):\n return self._netns_list", "def get_subnet_list(self, environment):\n if environment != 'uncategorized':\n subnets_with_instances = self.aws_moduleobj.get_information(environment, subnets_with_instances='true')\n subnet_list = []\n for subnet, stack_list in subnets_with_instances.iteritems():\n for attribute, attr_details in self.module_config_data['stack_attributes'].iteritems():\n if attr_details['stack'] == 'all' or set(attr_details['stack']).issubset(set(stack_list)):\n if subnet not in subnet_list: subnet_list.append(subnet)\n return subnet_list", "def subnetwork(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"subnetwork\")", "def 
_extract_networks(self, compute):\r\n networks = list()\r\n\r\n network_list = compute.find('NETWORK')\r\n for element in network_list.findall('NIC'):\r\n networks.append(\r\n OpenNebulaNetwork(id=element.attrib.get('network', None),\r\n name=None,\r\n address=element.attrib.get('ip', None),\r\n size=1,\r\n driver=self.connection.driver))\r\n\r\n return networks" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Demo a selection process. Add args for the Menu class: timeout, min sels (1), max sels (None), cycle (how many times to choose; if not chosen, redo option). salesExamples could be used here for menu and Nestedcit app.
def select(self): # read 1 json file, 1 quiz from take_quiz() the_list = ['aaa', 'bbb', 'ccc', 'ddd'] # timeout cycle limit men = menu.Menu(cycle=10, limit=0, timeout=4) selections = men.select_from_menu(the_list, "select one or more") print(selections) men = menu.Menu(cycle=5, timeout=4, limit=1) selections = men.select_from_menu(the_list, "select only one item") print(selections) selections = men.select_from_menu(the_list, "select only one item with default", default='default') print(selections)
[ "def targetMenu()->None:\n print(\"\\nEscoja el rango de edad\")\n print(\"*******************************************\")\n print(\"0. 0-10 \")\n print(\"1. 11-20\")\n print(\"2. 21-30\")\n print(\"3. 31-40\")\n print(\"4. 41-50\")\n print(\"5. 51-60\")\n print(\"6. 60+\")\n print(\"*******************************************\")", "def main_menu(self):\n \n try:\n self.clr_scr()\n print(\"********************WELCOME TO LIBRARY APPLICATION****************************\")\n print(\"\\n\\n\")\n print(\"\"\"\\nThis is the main menu .\n 1).Add book\n 2).Remove book\n 3).Issue book\n 4).Return book\n 5).Inquiry \n 6).Details of students who have fine.\n 7).Exit\n You have to enter choice from 1/2/3/4/5/6/7\"\"\") # inquiry by college ID.\n print(\"\\n\\n\")\n choice=int(input(\"Enter choice from 1/2/3/4/5/6/7:\\t\\t\"))\n if choice in [1,2,3,4,5,6,7]:\n if choice==1:\n return self.add_book()\n elif choice==2:\n return self.remove_book()\n elif choice==3:\n return self.issue_book()\n elif choice==7:\n return self.exit()\n elif choice==5:\n return self.inquiry()\n elif choice==6:\n return self.fine()\n else:\n return self.return_book()\n else:\n print(\"\\n\\n\")\n print(\"WARNING------->>>>>>>> Enter choice from 1/2/3/4/5/6/7\")\n time.sleep(2)\n return self.main_menu()\n \n except Exception as msg:\n print(\"ERROR------>>>>>>\",msg)", "def show_menu(selected_test_def_ID):\n\n if selected_test_def_ID>0 :\n print(\"\\nCurrently selected test Definition ID: \",selected_test_def_ID)\n else:\n print(\"\\nCurrently selected test Definition ID: (none)\")\n print(\"1: select Test Definition ID\")\n print(\"2: view currently selected Test Definition details\")\n print(\"3: start an execution of currently selected Test Definition\")\n print(\"4: exit\")", "def test_can_select_top_menu(self):\n first_menu_item = self.dlg['menuStrip1'].children()[0]\n point = first_menu_item.rectangle().mid_point()\n child_from_point = self.dlg.from_point(point.x, point.y + 20)\n self.assertEqual(child_from_point.element_info.name, 'Form1')\n self.dlg.menu_select('tem1')\n time.sleep(0.1)\n child_from_point = self.dlg.from_point(point.x, point.y + 20)\n self.assertEqual(child_from_point.element_info.name, 'tem1DropDown')", "def goto_list_select():\n menu.is_menu(press=[\"KEY_UP\"],\n path=amazon_path_template_is_menu +\"search/goto_list_select/\",\n region_frame=stbt.Region(x=27, y=98, width=266, height=10),\n timeout=2,\n timeout_is_menu=20)", "def menu(self):\n #Database menu options\n menu = {'1':self.select_all,'2':self.add_student,'3':self.update_student,'4':self.delete_student,'5':self.search,'0':self.close_app}\n while not self.user_exit: #while the user has not closed the application\n print(\"\\n\\n\")\n #print menu\n selection = input(\"Please select one of the following:\\n\\tDisplay all students: 1\\n\\tAdd a student to the database: 2\\n\\tUpdate an existing student's information: 3\\n\\tDelete a student: 4\\n\\tSearch for students with a given Major, GPA, or Faculty Advisor: 5\\n\\tExit the application: 0\\n\").strip()\n if not selection in menu:# if invalid input given\n print(\"Invalid input given, please try again\")\n continue\n #launch selected menu item\n menu[selection]()", "def main():\n menu()", "def test_get_option_expirations_realtime(self):\n pass", "def click_menu(self):\n pass", "def initialize(self):\n\n self.title('WA Crash Feature Mapper')\n self.minsize(700, 700)\n\n self.selection0 = tk.StringVar()\n self.selection0.set('Select year to view')\n options0 = ['2013', '2014', '2015', '2016', 
'2017']\n self.drop0 = tk.OptionMenu(self, self.selection0, *options0)\n self.drop0.pack()\n\n # self.button0\n tk.Button(\n self,\n text='Save year selection',\n command=lambda: enable_next_dropdown(self.drop1)\n ).pack()\n\n self.selection1 = tk.StringVar()\n self.selection1.set('Select county to view')\n options1 = ['Adams',\n 'Asotin',\n 'Benton',\n 'Chelan',\n 'Clallam',\n 'Clark',\n 'Columbia',\n 'Cowlitz',\n 'Douglas',\n 'Ferry',\n 'Franklin',\n 'Garfield',\n 'Grant',\n 'Grays Harbor',\n 'Island',\n 'Jefferson',\n 'King',\n 'Kitsap',\n 'Kittitas',\n 'Klickitat',\n 'Lewis',\n 'Lincoln',\n 'Mason',\n 'Okanogan',\n 'Pacific',\n 'Pend Oreille',\n 'Pierce',\n 'San Juan',\n 'Skagit',\n 'Skamania',\n 'Snohomish',\n 'Spokane',\n 'Stevens',\n 'Thurston',\n 'Wahkiakum',\n 'Walla Walla',\n 'Whatcom',\n 'Whitman',\n 'Yakima']\n self.drop1 = tk.OptionMenu(self, self.selection1, *options1)\n self.drop1.configure(state='disabled')\n self.drop1.pack()\n # self.button1\n tk.Button(\n self,\n text='Save county selection',\n command=lambda: enable_next_dropdown(self.drop2)\n ).pack()\n\n self.selection2 = tk.StringVar()\n self.selection2.set('Select group feature to view')\n options2 = [\n 'Weather',\n 'Surface Condition',\n 'Lighting Condition',\n 'Day of the week']\n self.drop2 = tk.OptionMenu(self, self.selection2, *options2)\n self.drop2.configure(state='disabled')\n self.drop2.pack()\n\n # self.button2 =\n tk.Button(\n self,\n text='Save group selection',\n command=lambda: self.set_options_init(self.drop3, self.selection3)\n ).pack()\n\n self.selection3 = tk.StringVar()\n self.selection3.set('Select subgroup feature to view')\n options3 = 'Select subgroup to view'\n self.drop3 = tk.OptionMenu(self, self.selection3, options3)\n self.drop3.configure(state='disabled')\n self.drop3.pack()\n # self.button3 =\n tk.Button(\n self,\n text='Save subgroup selection',\n command=lambda: set_map_options(self.drop4, self.selection4)\n ).pack()\n\n self.selection4 = tk.StringVar()\n self.selection4.set('Select type of map to view')\n options4 = ['Select type of map to view']\n self.drop4 = tk.OptionMenu(self, self.selection4, *options4)\n self.drop4.configure(state='disabled')\n self.drop4.pack()\n # show the final map based on selections\n\n # self.button4 =\n tk.Button(\n self,\n text='Show map', command=self.show_map\n ).pack()\n\n # self.button5 =\n tk.Button(\n self,\n text='Generate ML reports',\n command=lambda: generate_ml(self.selection0.get())\n ).pack()", "def execute_setting_options():\n while True:\n screen_reset()\n print_set_and_play_header() \n print_set_and_play_options(pieces)\n \n option = input(\">> Introduce the number of the command you want to execute: \")\n sleep(2)\n print()\n \n if option not in [\"1\", \"2\", \"3\"]:\n print(\"Remember, only numbers from 1 to 3 are accepted. Please, try again.\", end=\"\\n\\n\")\n sleep(3)\n continue\n \n if option == \"1\":\n print(\"You have selected the first option: Help command.\", end=\"\\n\\n\")\n sleep(2)\n print_set_and_play_help()\n sleep(3)\n input(\"Press any key to continue: \")\n \n elif option == \"2\":\n print(\"You have selected the second option: Back to game.\", end=\"\\n\\n\")\n sleep(3)\n break\n \n elif option == \"3\":\n print(\"You have selected the third option: Return to main menu.\", end=\"\\n\\n\")\n sleep(3)\n print(\">> The position will be lost. 
Are you sure do you want to return to menu?\", end=\"\\n\\n\")\n select = input(\"Select 'YES' to return to main menu and 'NO' otherwise: \")\n if select.upper() in [\"YES\", \"Y\"]:\n sleep(1)\n print()\n print(\"Returning to main menu...\")\n sleep(3)\n execute_main_menu()", "def Main():\n done = False\n while not done:\n choice = MainMenu()\n #Menu option to show model of our solar system\n if choice == 1:\n our_system()\n #Menu option to input parameters for different planets\n elif choice == 2:\n choose_system()\n\n elif choice == 3:\n break", "def venue_menu(self):\n print(u'\\n░▒▓█ ■ Select a visualization of the inter-venue semantic distances.\\n')\n print(u'░▒▓█ (1) 30 Venue Heatmap - top 30 venues')\n print(u'░▒▓█ (2) 30 Venue MDS - top 30 venues')\n print(u'░▒▓█ (3) 600 Venue MDS - top 600 venues')\n print(u'░▒▓█ (4) MDS - top 40 venues from each of top 5 categories')\n print(u'░▒▓█ (5) Temporal view. - top 2 venues from top 5 categories')\n print(u'░▒▓█ (6) ■ back to main menu ■\\n')\n pick_vis = get_input('> ', int, 1, 6)\n if pick_vis is 1:\n print u'\\n░░▒▒▓█ ■ You must close graph in order to continue using program.'\n self.driver.vis_heatmap(self.driver.dist_matrix, [ven.name for ven in self.driver.vens])\n self.venue_menu()\n elif pick_vis is 2:\n print u'\\n░░▒▒▓█ ■ You must close graph in order to continue using program.'\n self.driver.vis_MDS(self.driver.dist_matrix, [ven.name for ven in self.driver.vens])\n self.venue_menu()\n elif pick_vis is 3:\n print u'\\n░░▒▒▓█ ■ You must close graph in order to continue using program.'\n super_dist_matrix = self.driver.compare_venues(self.driver.vens)\n self.driver.vis_super_MDS(super_dist_matrix)\n self.venue_menu()\n elif pick_vis is 4:\n print u'\\n░░▒▒▓█ ■ You must close graph in order to continue using program.'\n self.driver.vis_super_cat_MDS(5)\n self.venue_menu()\n elif pick_vis is 5:\n cat_list, ven_list = sq.venues_from_top_n_categories(2, 5)\n for v in ven_list:\n self.driver.temporal_weekday_single_ven(v.id)\n self.venue_menu()\n elif pick_vis is 6:\n self.main_menu()", "def run(self):\n valid_inputs = ['1', '2', '3', 'r']\n selection = select_from_menu(valid_inputs)\n if selection == '1':\n self._answer_question()\n elif selection == '2':\n self._list_answers()\n elif selection == '3':\n self._try_adding_vote(self.question_data, self.user_id)", "def getMenu(self,parent):\r\n self.menu = tk.Menu(parent)\r\n self.filemenu = tk.Menu(self.menu ,tearoff = 0)\r\n new_gameOption = tk.Menu(self.filemenu ,tearoff = 0)\r\n new_gameOption.add_command(label=\"Camera Input\", command = lambda: self.launchGame_CameraInput())\r\n new_gameOption.add_command(label=\"Manual Input\", command = lambda: self.launchGame_ManualInput())\r\n self.filemenu.add_cascade(label = \"New Game Solver\", menu= new_gameOption)\r\n self.filemenu.add_separator()\r\n self.filemenu.add_command(label=\"Return\", command = lambda: self.controller.show_frame(\"StartPage\",\"300x\"+str(210*len(self.controller.games)+100)))\r\n self.filemenu.add_command(label=\"Exit\", command = parent.destroy)\r\n self.menu.add_cascade(label=\"File\",menu=self.filemenu)\r\n self.helpmenu = tk.Menu(self.menu ,tearoff = 0)\r\n message = \"This is a Sudoku Solver, you add a new game either by typing the numbers or by importing an image\"\r\n self.helpmenu.add_command(label=\"About\", command = lambda: mb.showinfo(\"About!\",message))\r\n self.menu.add_cascade(label=\"Help\",menu=self.helpmenu)\r\n return(self.menu)", "def option_activated(self, *args, **kwargs):\n 
commands.command_use_item(self.game, self.options[self.selected], self.director.main_game_scene)\n super().option_activated(*args, **kwargs)", "def sample_menu(**params):\n option1 = Option.objects.create(\n description='Corn pie, Salad and Dessert')\n option2 = Option.objects.create(\n description='Chicken Nugget Rice, Salad and Dessert')\n\n defaults = {\n 'name': \"Today's Menu\",\n 'date': datetime.date.today(),\n }\n defaults.update(params)\n\n menu = Menu.objects.create(**defaults)\n menu.options.add(option1)\n menu.options.add(option2)\n return menu", "def menu():\n print(\" Welcome!\")\n time.sleep(1)\n\n choice = input(\"\"\"\nPlease choose to either:\n 1: Enter your account details\n 2: Facial Recognition\n\n Please enter your choice: \"\"\")\n\n if choice == \"1\":\n credentials()\n elif choice == \"2\":\n faceRecog()\n else:\n print(\"\\n You must only select either 1 or 2\")\n print(\" Please try again\\n\")\n menu()", "def option_menu():\r\n\r\n validity = 1\r\n print(\" Hello and welcome\")\r\n print(\"How can i help you?\", \"\\n\"*4)\r\n\r\n for option_no in range(TOTAL_OPTIONS):\r\n print(f\"{option_no + 1} => {MENU_OPTIONS[option_no]}\")\r\n\r\n option_chosen = int(input(\"\\n\"*3+\"please type the option no:\"))\r\n\r\n if option_chosen > 6 or option_chosen <1:\r\n option_chosen = 6\r\n validity = 0\r\n\r\n return option_chosen, validity" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Plot velocities in the model
def plot_velocities(self, LAXIS, xbl, xbr, ybu, ybd, ilg):
    bconv = self.bconv
    tconv = self.tconv
    super_ad_i = self.super_ad_i
    super_ad_o = self.super_ad_o

    # check supported geometries
    if self.ig != 1 and self.ig != 2:
        print("ERROR(VelocitiesMLTturb.py):" + self.errorGeometry(self.ig))
        sys.exit()

    # load x GRID
    grd1 = self.xzn0

    # load DATA to plot
    plt1 = self.ux
    plt2 = self.vexp1
    plt3 = self.vexp2
    plt4 = self.vturb
    plt5 = self.vmlt_1  # vmlt_1 = fhh / (alphae * dd * fht_cp * tt_rms) - REFERENCE NEEDED
    plt6 = self.vmlt_2  # vmlt_2 = gg * betaT * (nabla - nabla_ad) * ((lbd ** 2.) / (8. * Hp)) - REFERENCE NEEDED
    plt7 = self.vmlt_3  # THIS IS FROM TYCHO's initial model
    plt8 = self.vrms

    # create FIGURE
    plt.figure(figsize=(7, 6))

    # format AXIS, make sure it is exponential
    plt.gca().yaxis.get_major_formatter().set_powerlimits((0, 0))

    # temporary hack
    plt4 = np.nan_to_num(plt4)
    plt5 = np.nan_to_num(plt5)
    plt6 = np.nan_to_num(plt6)
    plt7 = np.nan_to_num(plt7)
    plt8 = np.nan_to_num(plt8)

    # set plot boundaries
    to_plot = [plt4, plt5, plt6, plt7]
    self.set_plt_axis(LAXIS, xbl, xbr, ybu, ybd, to_plot)

    # plot DATA
    plt.title('velocities ' + str(self.nsdim) + "D")
    # plt.plot(grd1,plt1,color='brown',label = r'$\overline{u}_r$')
    # plt.plot(grd1,plt2,color='red',label = r'$\widetilde{u}_r$')
    # plt.plot(grd1,plt3,color='green',linestyle='--',label = r'$\overline{v}_{exp} = -\dot{M}/(4 \pi r^2 \rho)$')
    #plt.plot(grd1, plt4, color='blue', label=r"$u_{turb} = +\widetilde{u''_x u''_x}^{1/2}$")
    plt.plot(grd1, plt8, color='blue', label=r"$u_{rms}$")
    plt.plot(grd1,plt5,color='red',label = r'$u_{mlt}$')
    # plt.plot(grd1,plt6,color='g',label = r'$u_{MLT} 2$')
    # plt.plot(self.rr,plt7,color='brown',label = r'$u_{MLT} 3 inimod$')

    # convective boundary markers
    plt.axvline(bconv, linestyle='--', linewidth=0.7, color='k')
    plt.axvline(tconv, linestyle='--', linewidth=0.7, color='k')

    # convective boundary markers - only super-adiatic regions
    plt.axvline(super_ad_i, linestyle=':', linewidth=0.7, color='k')
    plt.axvline(super_ad_o, linestyle=':', linewidth=0.7, color='k')

    if self.ig == 1:
        setxlabel = r"x (cm)"
        setylabel = r"velocity (cm s$^{-1}$)"
        plt.xlabel(setxlabel)
        plt.ylabel(setylabel)
    elif self.ig == 2:
        setxlabel = r"r (cm)"
        setylabel = r"velocity (cm s$^{-1}$)"
        plt.xlabel(setxlabel)
        plt.ylabel(setylabel)

    # show LEGEND
    plt.legend(loc=ilg, prop={'size': 18})

    # display PLOT
    plt.show(block=False)

    # save PLOT
    if self.fext == "png":
        plt.savefig('RESULTS/' + self.data_prefix + 'mean_velocities_turb.png')
    if self.fext == "eps":
        plt.savefig('RESULTS/' + self.data_prefix + 'mean_velocities_turb.eps')
[ "def plot(self, *args, **kwargs):\n plt.scatter(self.velocity['colloid'],\n self.velocity['velocity'],\n *args, **kwargs)", "def plot_velocities(self, steps=None, total_steps=None, show_sigma=False, ax=None, animation_mode=False):\n \n # Set plotting options:\n hv_color = '#386cb0'\n av_color = '#33a02c'\n \n # Create axes (or use existing ones):\n if ax:\n fig = ax.figure\n else:\n fig,ax = plt.subplots(1,1, figsize=(9,4))\n\n # Collect artists (for pyplot animation):\n artists = []\n\n # Get steps to plot:\n if steps is None:\n steps = range(0,self.step)\n \n # Plot each vehicle:\n for vehicle in self.all_vehicles:\n # Get a table of state history for this vehicle:\n table = vehicle.get_state_table(keys=['step','time','vel'], steps=steps)\n # Set plotting options:\n if vehicle.type=='human':\n color = hv_color\n alpha = 0.5\n zorder = 2\n elif vehicle.type=='robot':\n color = av_color\n alpha = 0.75\n zorder = 3\n else:\n raise NotImplementedError\n # Plot:\n lines, = ax.plot(table['time'],table['vel'], color=color, alpha=alpha, zorder=zorder)\n artists.append(lines)\n\n # Plot standard deviation across vehicles:\n if show_sigma:\n table = self.get_vehicle_vel_table(steps=steps).std(axis=1).to_frame(name='sigma').reset_index()\n ax.plot(table['time'], table['sigma'], lw=1, color='grey', label=\"Standard deviation\\nacross all vehicles\")\n ax.legend(loc='center right', fontsize=6)\n\n # Add line for AV activation:\n #y_min,y_max = ax.get_ylim()\n y_min,y_max = 0, min(30,self.max_speed)*1.05\n if self.av_activate < self.t:\n ax.plot([self.av_activate,self.av_activate],[y_min,y_max], ls=':', color='black', alpha=1, zorder=5)\n ax.set_ylim((y_min,y_max))\n \n # Set x limits:\n if total_steps:\n ax.set_xlim(0,total_steps*self.dt)\n \n # Set axes:\n #ax.set_title(\"Velocity over time\")\n ax.set_xlabel(\"time (seconds)\")\n ax.set_ylabel(\"velocity (meters/second)\")\n \n # Return artists or figure:\n if animation_mode:\n return tuple(artists)\n else:\n return fig, ax", "def plot_velocity(self, x, uu, figname):\r\n \r\n #pdb.set_trace()\r\n ## 120 days\r\n uu = uu[:self.period]\r\n \r\n umin = -0.04\r\n umax = 0.04\r\n #unew[unew<umin] = umin\r\n #unew[unew>umax] = umax\r\n \r\n ## this step is only needed for visualizing the extremly large positive and negative velocities\r\n for i in range(len(uu)):\r\n for j in range(len(uu[i])):\r\n if uu[i][j] > umax:\r\n uu[i][j] = umax\r\n elif uu[i][j] < umin:\r\n uu[i][j] = umin\r\n \r\n \r\n tt = np.arange(len(uu)) + 1\r\n \r\n lx = max(map(len, x))\r\n for i in range(len(x)):\r\n if len(x[i]) == lx:\r\n y = x[i]\r\n exit\r\n \r\n #y = np.array([[None]*(lx-len(xi)) + xi for xi in x])\r\n unew = np.array([[None]*(lx-len(xi)) + xi for xi in uu])\r\n \r\n plt.rcParams.update({'font.size': 18})\r\n fig = plt.figure(figsize=(9.5,8))\r\n ax = fig.add_subplot(111)\r\n \r\n \r\n \r\n levels = np.linspace(umin, umax, 100)\r\n cmap = plt.set_cmap('bwr')\r\n CS = ax.contourf(tt, y, unew.T, cmap=cmap, levels=levels)\r\n ax.set_ylim(ax.get_ylim()[::-1])\r\n ax.set_xlabel('Time (day)')\r\n ax.set_ylabel('Distance from upstream (m)')\r\n \r\n cb = fig.colorbar(CS, orientation='vertical')\r\n cb.set_label('Velocity (m/s)', fontsize=16)\r\n #plt.show()\r\n plt.savefig(figname)\r\n plt.close()", "def MeanParticleVelocityPlot(self):\r\n try:\r\n lengthOfSimulation = len(self.LoadSim.Time)\r\n numberOfParticles = len(self.LoadSim.Simulation[0])\r\n inputData = []\r\n for i in range(lengthOfSimulation):\r\n velocitySum = sum([self.LoadSim.Simulation[i][j].velocity for 
j in range(numberOfParticles)])\r\n inputData.append(np.linalg.norm(velocitySum / numberOfParticles))\r\n\r\n fig = plt.figure()\r\n plt.plot(self.LoadSim.Time, inputData)\r\n plt.xlabel(\"Time (s)\"), plt.ylabel(\"Velocity (ms$^{-1}$)\")\r\n plt.title(\"Mean velocity of particles over time\")\r\n \r\n plt.savefig(\"%s Mean particle velocity.jpg\"%(self.fileName))\r\n plt.show()\r\n except:\r\n AttributeError\r\n print(\"You cannot plot this figure with the data you have provided.\")", "def VelocityChart(request):\n kwargs = {\n 'status': 'Accepted',\n 'startDate__gte': '2016-06-01 00:00:00',\n }\n return _drawVelocity(request, kwargs, 'radabo/velocity.html')", "def plot(self):\n f, ax1 = plt.subplots(figsize=(8, 5))\n # Plot the potential\n ax1.plot(self.tvec, self.V, color='royalblue')\n ax1.set_xlabel('time [t]', fontsize=12)\n ax1.set_ylabel('Voltage [V]', fontsize=12)\n ax1.tick_params(axis='y', labelcolor='royalblue')\n # Plot the recovery\n ax2 = ax1.twinx()\n ax2.plot(self.tvec, self.W, color='goldenrod')\n ax2.set_ylabel('Recovery [W]', fontsize=12)\n # Within potential limits\n ax2.set_ylim([np.floor(min(self.W)-1), np.ceil(max(self.W))])\n ax2.tick_params(axis='y', labelcolor='goldenrod')\n plt.grid(alpha=0.3)", "def plot_vel_redshift_evo(sim):\n halo = myname.get_name(sim, True)\n vels = {}\n for snap in (1,3,5):\n hspec0 = ps.VWPlotSpectra(snap, halo)\n (vbin, vels[snap]) = hspec0.vel_width_hist(\"Si\", 2)\n mm = np.min([np.size(vel) for vel in vels.values()])\n #Normalised by z=3\n plt.semilogx(vbin[:mm], vels[5][:mm]/vels[3][:mm], color=\"black\",ls=\"--\")\n plt.semilogx(vbin[:mm], vels[1][:mm]/vels[3][:mm], color=\"grey\",ls=\"-\")\n plt.xlim(10, 1000)\n plt.ylim(0.5,1.5)\n save_figure(path.join(outdir,\"cosmo\"+str(sim)+\"_zz_evol\"))\n plt.clf()", "def OldVelocityChart(request):\n kwargs = {\n 'status': 'Accepted',\n 'startDate__gte': '2015-09-01 00:00:00',\n 'endDate__lte': '2016-06-10 00:00:00',\n }\n return _drawVelocity(request, kwargs, 'radabo/month_velocity.html')", "def add_velocities(self):\n if self.has_velocities():\n raise RuntimeError('Velocities already present.')\n elif self.has_forces():\n raise NotImplementedError('Can\\'t add velocities due to forces.')\n\n # Add velocities to VTK unstructured grid as vector data\n vtk_vda = self.add_vector_property(self.atoms.get_velocities(), 'velocity')\n\n # Calculate max norm of the velocities\n vmax = vtk_vda.GetMaxNorm()\n\n # Get relevant VTK unstructured grid\n vtk_ugd = self.get_unstructured_grid()\n\n self.velocity = vtkGlyphModule(vtk_ugd, vtkVelocitySource(vmax, self.scale),\n scalemode='vector', colormode=None)\n self.add_module('velocity', self.velocity)", "def plot(self): \n # Forwards plot\n self.__plot_vals(self.xin, self.yin, self.__eval_forward)", "def velocity(self, t):\n pass", "def _subfn_add_replay_velocities(df, ax):\n df['center'] = (df['stop'] + df['start'])/2.0\n for index, row in df.iterrows():\n start = row['start']\n stop = row['stop']\n center = row['center']\n \n # Single Version:\n # velocity = row['velocity']\n # ax.plot([start, stop], [velocity, velocity], label=row['label'], marker='s', markersize=4.5, color='k') # , linewidth=2.5\n\n # LONG/SHORT Version:\n velocity_L = row['velocity_LONG']\n ax.plot([start, stop], [velocity_L, velocity_L], label=f\"{row['label']}_Long\", marker='s', markersize=3.5, color='g') # , linewidth=2.5\n velocity_S = row['velocity_SHORT']\n ax.plot([start, stop], [velocity_S, velocity_S], label=f\"{row['label']}_Short\", marker='s', markersize=3.5, 
color='r') # , linewidth=2.5\n # Draw directed line\n head_length = 40.0\n # arrow_start = (start, velocity_L)\n # arrow_end = (stop, velocity_S)\n arrow_start = (center, velocity_L)\n arrow_end = (center, velocity_S) # - (head_length * 0.5) subtract off half the head-length so the arrow ends at the point\n arrow_dx = arrow_end[0] - arrow_start[0]\n arrow_dy = arrow_end[1] - arrow_start[1]\n ax.arrow(*arrow_start, arrow_dx, arrow_dy, head_width=20.0, head_length=head_length, fc='k', ec='k')\n \n # Set labels and title\n ax.set_xlabel('time')\n ax.set_ylabel('Velocity')\n ax.set_title('Replay Velocities over Time')\n\n # Display legend\n # ax.legend()\n\n return plt.gcf(), ax", "def plotSpikes(self):\n self.getCompleteSpikeTimes()\n b=np.ones_like(self.completeSpikeTimes)\n matplotlib.pyplot.plot(b)\n matplotlib.pyplot.eventplot(self.spikeTimes)\n matplotlib.pyplot.xlabel(\"time\") \n matplotlib.pyplot.title(\"single neuron raster plot of Neuron \"+self.name)\n matplotlib.pyplot.show()", "def updateVelocities(self) -> None:\r\n for idx1 in range(self.size() - 1):\r\n for idx2 in range(idx1 + 1, self.size()):\r\n self.updateVelocity(idx1, idx2)", "def torso_velocity(self):\n idx = self.model.sensor_names.index('velocimeter')\n return self.data.sensordata[idx].copy()", "def plot_ROMS_velocity_field():\r\n url='http://tds.marine.rutgers.edu/thredds/dodsC/roms/doppio/2017_da/his/runs/History_RUN_2018-05-15T00:00:00Z'\r\n nc = netCDF4.Dataset(url)\r\n lon_rho = nc.variables['lon_rho'][:]\r\n lat_rho = nc.variables['lat_rho'][:]\r\n #bbox = [-71., -63.0, 41., 44.] #GoM\r\n bbox = [-67.35, -64.72, 44.23, 45.33] #BoF\r\n i0,i1,j0,j1 = bbox2ij(lon_rho,lat_rho,bbox)\r\n tvar = nc.variables['ocean_time'] # usual ROMS\r\n #tvar = nc.variables['time'] # USGS COAWST FMRC Aggregation\r\n h = nc.variables['h'][j0:j1, i0:i1]\r\n lon = lon_rho[j0:j1, i0:i1]\r\n lat = lat_rho[j0:j1, i0:i1]\r\n land_mask = 1 - nc.variables['mask_rho'][j0:j1, i0:i1]\r\n #start=datetime.datetime(2012,1,1,0,0)\r\n #start = datetime.datetime.utcnow()\r\n #tidx = netCDF4.date2index(start,tvar,select='nearest') # get nearest index to now\r\n tidx = -1\r\n #timestr = netCDF4.num2date(stats.tvar[tidx], stats.tvar.units).strftime('%b %d, %Y %H:%M') #BRING BACK\r\n \r\n zlev = -1 # last layer is surface layer in ROMS\r\n u = nc.variables['u'][tidx, zlev, j0:j1, i0:(i1-1)]\r\n v = nc.variables['v'][tidx, zlev, j0:(j1-1), i0:i1]\r\n \r\n lon_u = nc.variables['lon_u'][ j0:j1, i0:(i1-1)]\r\n lon_v = nc.variables['lon_v'][ j0:(j1-1), i0:i1]\r\n lat_u = nc.variables['lat_u'][ j0:j1, i0:(i1-1)]\r\n lat_v = nc.variables['lat_v'][ j0:(j1-1), i0:i1]\r\n \r\n lon=lon_rho[(j0+1):(j1-1), (i0+1):(i1-1)]\r\n lat=lat_rho[(j0+1):(j1-1), (i0+1):(i1-1)]\r\n mask = 1 - nc.variables['mask_rho'][(j0+1):(j1-1), (i0+1):(i1-1)]\r\n ang = nc.variables['angle'][(j0+1):(j1-1), (i0+1):(i1-1)]\r\n \r\n # average u,v to central rho points\r\n u = shrink(u, mask.shape)\r\n v = shrink(v, mask.shape)\r\n \r\n # rotate grid_oriented u,v to east/west u,v\r\n u, v = rot2d(u, v, ang)\r\n \r\n basemap = Basemap(projection='merc',llcrnrlat=44,urcrnrlat=46,llcrnrlon=-68,urcrnrlon=-64, lat_ts=30,resolution='i')\r\n fig1 = plt.figure(figsize=(10,8))\r\n ax = fig1.add_subplot(111)\r\n \r\n basemap.drawcoastlines()\r\n basemap.fillcontinents()\r\n basemap.drawcountries()\r\n basemap.drawstates()\r\n x_rho, y_rho = basemap(lon,lat)\r\n \r\n spd = np.sqrt(u*u + v*v)\r\n #h1 = basemap.pcolormesh(x_rho, y_rho, spd, vmin=0, vmax=1.0,shading='nearest') #add color\r\n nsub=2\r\n 
scale=0.03\r\n basemap.quiver(x_rho[::nsub,::nsub],y_rho[::nsub,::nsub],u[::nsub,::nsub],v[::nsub,::nsub],scale=1.0/scale, zorder=1e35, width=0.002)\r\n #basemap.colorbar(h1,location='right',pad='5%') #add colorbar\r\n title('COAWST Surface Current: ROMS Velocity Field') #BRING BACK\r\n plt.savefig('ROMS_velocity_field_BoF05152018.png')", "def plot_vel_corr(list_frames):\n mean_values = {}\n size_n = {}\n\n # To calculate Mean Square Displacement\n for frame1 in tqdm(list_frames):\n for frame2 in list_frames:\n if frame2.time < frame1.time:\n continue\n\n time_diff = frame2.time - frame1.time\n for atom1, atom2 in zip(frame1.atoms, frame2.atoms):\n mean_values[time_diff] = mean_values.get(time_diff, float(0))\n mean_values[time_diff] += atom1.x_vel * atom2.x_vel +\\\n\t atom1.y_vel * atom2.y_vel + atom1.z_vel * atom2.z_vel\n size_n[time_diff] = size_n.get(time_diff, float(0)) + 1\n\n vcorr_list = []\n\n for key in mean_values:\n vcorr_list.append(mean_values[key]/size_n[key])\n\n plt.title(\"Velocity correlation\")\n plt.plot(vcorr_list, color = 'r')\n plt.show()", "def mk_raw_vel_trace_figures():\n # use the same data as in mk_eyegaze_classification_figures()\n # (no need for file retrieval, should be there)\n datalad_get(op.join('data', 'raw_eyegaze'), get_data=False)\n infiles = [\n op.join(\n 'data',\n 'raw_eyegaze', 'sub-32', 'beh',\n 'sub-32_task-movie_run-5_recording-eyegaze_physio.tsv.gz'),\n op.join(\n 'data',\n 'raw_eyegaze', 'sub-02', 'ses-movie', 'func',\n 'sub-02_ses-movie_task-movie_run-5_recording-eyegaze_physio.tsv.gz'\n ),\n ]\n # we need the sampling rate for plotting in seconds and velocity calculation\n sr = 1000\n # load data\n for i, f in enumerate(infiles):\n # read data\n datalad_get(f)\n data = np.recfromcsv(f,\n delimiter='\\t',\n names=['x', 'y', 'pupil', 'frame'])\n\n # subset data. Hessels et al., 2017 display different noise levels on 4\n # second time series (ref. Fig 10). That still looks a bit dense, so we\n # go with 2 seconds, from start of 10sec excerpt to make it easier to\n # associate the 2 sec excerpt in to its place in the 10 sec excerpt\n # above\n data_subset = data[15000:17000]\n px2deg, ext = (0.0266711972026, 'lab') if '32' in f \\\n else (0.0185581232561, 'mri')\n # take raw data and convert it to velocity: euclidean distance between\n # successive coordinate samples. 
Note: no entry for first datapoint!\n # Will plot all but first data point in other time series\n velocities = cal_velocities(data_subset, sr, px2deg)\n vel_color = 'xkcd:gunmetal'\n # prepare plotting - much manual setup, quite ugly - sorry\n fig, ax1 = plt.subplots()\n fig.set_figheight(2)\n fig.set_figwidth(7)\n fig.set_dpi(120)\n time_idx = np.linspace(0, len(data_subset) / sr, len(data_subset))[1:]\n max_x = float(len(data_subset) / sr)\n ax1.set_xlim(0, max_x)\n ax1.set_xlabel('time (seconds)')\n ax1.set_ylabel('coordinates')\n # left y axis set to max screensize in px\n ax1.set_ylim(0, 1280)\n # plot gaze trajectories (not preprocessed)\n ax1.plot(time_idx,\n data_subset['x'][1:],\n color='black', lw=1)\n ax1.plot(\n time_idx,\n data_subset['y'][1:],\n color='black', lw=1)\n # right y axis shows velocity \"as is\" (not preprocessed)\n ax2 = ax1.twinx()\n ax2.set_ylabel('velocity (deg/sec)', color=vel_color)\n ax2.tick_params(axis='y', labelcolor=vel_color)\n #ax2.set_yscale('log') ## TODO: Log scale or not?\n ax2.set_ylim(1, 2000)\n ax2.plot(time_idx,\n velocities,\n color=vel_color, lw=1)\n plt.savefig(\n op.join('img', 'rawtrace_{}.svg'.format(ext)),\n transparent=True,\n bbox_inches=\"tight\",\n metadata={'Date': None})\n plt.close()", "def phase_plane(self):\n plt.figure(figsize=(8, 5))\n plt.plot(self.V, self.W, color='cornflowerblue')\n plt.plot(self.V, self.V - (self.V**3)/3 + self.I, color=\"slateblue\")\n plt.plot(self.V, (self.V + self.a)/(self.b), color=\"red\")\n plt.xlabel('Voltage [V]', fontsize=12)\n plt.ylabel('Recovery [W]', fontsize=12)\n plt.grid(alpha=0.3)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Number of time steps in current episode split.
def episode_time_steps(self):
    return (self.episode_end_time_step - self.episode_start_time_step) + 1
[ "def get_num_timesteps(self):\n return len(self.dm[0])", "def getNrTimesteps():\n\n timesteps = 25\n return timesteps", "def nsteps(self):\n return self._nsteps", "def n_timesteps(self) -> int:\n if self.total_time < self.timestep:\n warnings.warn(\n f\"No simulation possible: you asked for {self.total_time} \"\n f\"simulation time but the timestep is {self.timestep}\"\n )\n return floor(self.total_time.total_seconds() / self.timestep.total_seconds())", "def num_steps(self):\n return len(self.voltage_pairs)", "def count_episodes(self):\n if self._save:\n filenames = self._dir.glob('*.npz')\n # subtract 1 as we don't take into account the terminal state\n lengths = [int(n.stem.rsplit('-', 1)[-1]) - 1 for n in filenames]\n episodes, steps = len(lengths), sum(lengths)\n return episodes, steps\n else:\n return 0, 0", "def num_eval_episodes(self):\n return self.logs['num_eval_episodes']", "def num_episodes(self):\n return self._num_episodes", "def step_count(self):\n return self._line_search.step_count", "def getNumFrames(self):\n timestep_values = self.readTimesteps() # Takes the values of all timesteps\n return len(timestep_values)", "def simulation_time_steps(self):\n\n return (self.__simulation_end_time_step - self.__simulation_start_time_step) + 1", "def count_epochs(self):\n \n return len([x for x in self._trial_dict.keys() if x.startswith('step')])", "def nb_total_steps_per_epoch(self):\n return self.nb_training_steps_per_epoch + self.nb_validation_steps_per_epoch", "def num_train_steps(self):\n return self.logs['train_steps']", "def env_steps(self) -> int:\n return len(self)", "def duration(self):\n return self.no_timesteps * self.dt", "def step_number(self) -> int:\n if self.step is None:\n return 0\n\n return self.steps.index(self.step) + 1", "def agent_steps(self) -> int:\n return len(self)", "def max_episode_steps(self):\n return self._max_episode_steps" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Number of time steps between `simulation_start_time_step` and `simulation_end_time_step`.
def simulation_time_steps(self):
    return (self.__simulation_end_time_step - self.__simulation_start_time_step) + 1
[ "def n_timesteps(self) -> int:\n if self.total_time < self.timestep:\n warnings.warn(\n f\"No simulation possible: you asked for {self.total_time} \"\n f\"simulation time but the timestep is {self.timestep}\"\n )\n return floor(self.total_time.total_seconds() / self.timestep.total_seconds())", "def simulation_duration(self):\n return self.simulation_end-self.simulation_start", "def getNrTimesteps():\n\n timesteps = 25\n return timesteps", "def get_num_timesteps(self):\n return len(self.dm[0])", "def nsteps(self):\n return self._nsteps", "def src_simulation_duration(self):\n try:\n return self.src_simulation_end-self.src_simulation_start\n except TypeError:\n return None", "def episode_time_steps(self):\n\n return (self.episode_end_time_step - self.episode_start_time_step) + 1", "def sim_step(self):\n return self.sumo.simulation.getTime()", "def dst_simulation_duration(self):\n if (self.dst_simulation_end is None or\n self.dst_simulation_start is None):\n return None\n return self.dst_simulation_end-self.dst_simulation_start", "def num_steps(self):\n return len(self.voltage_pairs)", "def agent_steps(self) -> int:\n return len(self)", "def step_size(self) -> Timedelta:\n assert self._step_size is not None, \"No step size provided\"\n return self._step_size", "def step_count(self):\n return self._line_search.step_count", "def time_step(self):\n ts = float(rospy.get_param('/time_step_size', None))\n\n if ts is None:\n raise RuntimeError(\"No Time step has been set by the driving node..\")\n else:\n return ts", "def generate_timesteps(simulation_length, num_steps):\n\n time = np.linspace(0, simulation_length, num=num_steps)\n return time", "def total_time(self):\n t = timedelta()\n for step in self.steps:\n if ('time' in step):\n t += self.parsetime(step['time'])\n return(t)", "def duration(self):\n return self.no_timesteps * self.dt", "def num_report_steps( self ):\n return len( self[\"SEQNUM\"] )", "def getNumFrames(self):\n timestep_values = self.readTimesteps() # Takes the values of all timesteps\n return len(timestep_values)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
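Editorial note: the two records above return inclusive counts, which is why both formulas end in + 1. The short sketch below illustrates that arithmetic; the concrete start and end values are hypothetical and not taken from any record.

# Hypothetical boundary values, chosen only to illustrate the inclusive count.
episode_start_time_step = 24   # first time step of the split (assumed)
episode_end_time_step = 47     # last time step of the split (assumed)

# Both boundary steps are counted, hence the trailing + 1 in the records above.
episode_time_steps = (episode_end_time_step - episode_start_time_step) + 1
assert episode_time_steps == 24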
Start time step in current episode split.
def episode_start_time_step(self):
    return self.__episode_start_time_step
[ "def test_start_time_with_timestep(self):\n with mn.model(start_time=2019, timestep=0.25) as m:\n Time = mn.variable('Time', lambda md: md.TIME, '__model__')\n Step = mn.variable('Step', lambda md: md.STEP, '__model__')\n\n self.assertEqual(Time[''], 2019)\n self.assertEqual(Step[''], 0)\n m.step()\n self.assertEqual(Time[''], 2019.25)\n self.assertEqual(Step[''], 1)\n m.step()\n self.assertEqual(Time[''], 2019.5)\n self.assertEqual(Step[''], 2)\n m.reset()\n self.assertEqual(Time[''], 2019)\n self.assertEqual(Step[''], 0)", "def getFirstTimeStep():\n \n firstTimeStep = 8\n return firstTimeStep", "def get_start_time(self):\n\n return self.time_vector[0]", "def ephemeris_start_time(self):\n if not hasattr(self, \"_ephemeris_start_time\"):\n if not self.flipped_framelets:\n line = 0.5\n else:\n line = self.label[\"IsisCube\"][\"Core\"][\"Dimensions\"][\"Lines\"] + 0.5\n self._ephemeris_start_time = min(self.compute_marci_time(line))\n return self._ephemeris_start_time", "def timestep(self):\n return self.global_timestep", "def ephemeris_start_time(self):\n if not hasattr(self, \"_ephemeris_start_time\"):\n tdi_mode = self.label[\"IsisCube\"][\"Instrument\"][\"Tdi\"]\n bin_mode = self.label[\"IsisCube\"][\"Instrument\"][\"Summing\"]\n # Code replicated from the ISIS HiRise Camera Model\n\n # The -74999 is the code to select the transformation from\n # high-precision MRO SCLK to ET\n start_time = spice.scs2e(-74999, self.spacecraft_clock_start_count)\n # Adjust the start time so that it is the effective time for\n # the first line in the image file. Note that on 2006-03-29, this\n # time is now subtracted as opposed to adding it. The computed start\n # time in the EDR is at the first serial line.\n start_time -= self.un_binned_rate * ((tdi_mode / 2.0) - 0.5);\n # Effective observation\n # time for all the TDI lines used for the\n # first line before doing binning\n start_time += self.un_binned_rate * ((bin_mode / 2.0) - 0.5);\n self._ephemeris_start_time = start_time\n return self._ephemeris_start_time", "def start_episode(self) -> State:\n pass", "def begin_episode(self, observation):", "def time_start(self, section):\r\n if (section == 0):\r\n return self.t0\r\n else:\r\n time_start_index = range(-self.number_of_section - 1, 0)\r\n return self.p[time_start_index[section]] * self.unit_time", "def time_step(self):\n ts = float(rospy.get_param('/time_step_size', None))\n\n if ts is None:\n raise RuntimeError(\"No Time step has been set by the driving node..\")\n else:\n return ts", "def get_start_time(self):\n with open(self.fp_file, 'r') as f:\n lines = f.readlines()\n starttime = 999999999999\n for x in lines:\n if 'TRACK_TIME' in x:\n ttemp = float(x[11:])\n starttime = min(starttime, ttemp)\n\n self.starttime = starttime\n\n return", "def _inc_start_time(self):\n if (self.state == Editor.State.wave and\n self.wave_edit_mode == Editor.WaveEditMode.start_time and\n self.selected_wave is not None):\n self.selected_wave.start_time += 1", "def observation_time_start(self):\n return self.time_ref + u.Quantity(self.table.meta[\"TSTART\"], \"second\")", "def ephemeris_start_time(self):\n return spice.scs2e(-74999, self.spacecraft_clock_start_count)", "def dti2step(self, dt):\n\n dt = pd.Timestamp(dt)\n if dt.hour == 0: # Datetime only has date.\n dt = dt + pd.Timedelta(self.start_hour + ':00') # Add time to the date.\n step = self.dti.get_loc(dt) * 60\n return step", "def start_stage(self, time: int):\n self.active = 1\n self.start_time = time", "def start_time(self, start_time):\n self.__start = 
start_time", "def _get_start(self):\n return self.Data.Start", "def getFirstTimestep(self):\n\n return [traj.getFirstTimestep() for traj in self._trajectories]" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
End time step in current episode split.
def episode_end_time_step(self):
    return self.__episode_end_time_step
[ "def calculate_end_episode(self):\n self.end_episode = self.game.is_final(self.current_state)", "def end_episode(self):", "def end_episode(self):\n self.trainer.end_episode()", "def step_end(self):\n if self.log_time:\n total_time = time.monotonic() - self.start_time\n self.update(total_time=total_time)\n if self.total is not None:\n self.update(eta_time=total_time / (self.cur_step + 1) * (self.total - self.cur_step-1))\n self.cur_step += 1\n for name, metric in self.metrics.items():\n del metric[self.cur_step:]\n while len(metric) < self.cur_step:\n metric.append(None)", "def test_end_time(self):\n with mn.model(end_time=5) as m:\n Time = mn.variable('Time', lambda md: md.TIME, '__model__')\n Step = mn.variable('Step', lambda md: md.STEP, '__model__')\n Foo = mn.stock('Foo', 1, 0)\n\n self.assertEqual(Time[''], 0)\n self.assertEqual(Step[''], 0)\n self.assertEqual(Foo[''], 0)\n m.step(5) \n self.assertEqual(Time[''], 5)\n self.assertEqual(Step[''], 5)\n self.assertEqual(Foo[''], 5)\n with self.assertRaises(mn.MinnetonkaError) as err:\n m.step()\n self.assertEqual(err.exception.message,\n \"Attempted to simulation beyond end_time: 5\")\n self.assertEqual(Time[''], 5)\n self.assertEqual(Step[''], 5)\n self.assertEqual(Foo[''], 5)", "def test_step_to_end_with_timestep(self):\n with mn.model(end_time=5, timestep=0.25) as m:\n Time = mn.variable('Time', lambda md: md.TIME, '__model__')\n Step = mn.variable('Step', lambda md: md.STEP, '__model__')\n Foo = mn.stock('Foo', 1, 0)\n\n m.step(to_end=True)\n self.assertEqual(Time[''], 5)\n self.assertEqual(Step[''], 20)\n self.assertEqual(Foo[''], 5)\n m.reset()\n m.step()\n self.assertEqual(Time[''], 0.25)\n self.assertEqual(Step[''], 1)\n self.assertEqual(Foo[''], 0.25)\n m.step(to_end=True)\n self.assertEqual(Time[''], 5)\n self.assertEqual(Step[''], 20)\n self.assertEqual(Foo[''], 5)", "def episode_time_steps(self):\n\n return (self.episode_end_time_step - self.episode_start_time_step) + 1", "def test_step_to_end_with_incompatible_timestep(self):\n with mn.model(end_time=4.6, timestep=0.5) as m:\n Time = mn.variable('Time', lambda md: md.TIME, '__model__')\n Step = mn.variable('Step', lambda md: md.STEP, '__model__')\n Foo = mn.stock('Foo', 1, 0)\n\n m.step(to_end=True)\n self.assertEqual(Time[''], 4.5)\n self.assertEqual(Step[''], 9)\n self.assertEqual(Foo[''], 4.5)\n m.reset()\n m.step()\n self.assertEqual(Time[''], 0.5)\n self.assertEqual(Step[''], 1)\n self.assertEqual(Foo[''], 0.5)\n m.step(to_end=True)\n self.assertEqual(Time[''], 4.5)\n self.assertEqual(Step[''], 9)\n self.assertEqual(Foo[''], 4.5)", "def test_step_to_end(self):\n with mn.model(end_time=5) as m:\n Time = mn.variable('Time', lambda md: md.TIME, '__model__')\n Step = mn.variable('Step', lambda md: md.STEP, '__model__')\n Foo = mn.stock('Foo', 1, 0)\n\n m.step(to_end=True)\n self.assertEqual(Time[''], 5)\n self.assertEqual(Step[''], 5)\n self.assertEqual(Foo[''], 5)\n m.reset()\n m.step()\n self.assertEqual(Time[''], 1)\n self.assertEqual(Step[''], 1)\n self.assertEqual(Foo[''], 1)\n m.step(to_end=True)\n self.assertEqual(Time[''], 5)\n self.assertEqual(Step[''], 5)\n self.assertEqual(Foo[''], 5)", "def endtime(job, events):\n for e in events:\n if e.job == job:\n return e.end", "def end_time(self):\n ret = self._get_attr(\"endTime\")\n return ret", "def EndTest(self):\n self.end_time = self._GetTimeString()\n self._SummaryTestToRecord()\n self._WriteToReport()", "def print_end(self):\n self.time_writer('Time at the end of the Spider: %s'\n % str(datetime.now()))", "def 
give_break_end_time(self):\n return self._break_end_time", "def end_time(self):\n # TODO: use pd.Timestamp instead\n return self.time[-1].to_pydatetime()", "def end(self,new_end):\n self.properties['end'] = new_end\n self.duration.end = new_end", "def end_of_episode(self, last_val=0):\n path_slice = slice(self.path_start_idx, self.ptr)\n rews = np.append(self.rew_buf[path_slice], last_val)\n vals = np.append(self.val_buf[path_slice], last_val)\n\n # the next two lines implement GAE-Lambda advantage calculation\n deltas = rews[:-1] + self.gamma * vals[1:] - vals[:-1]\n self.adv_buf[path_slice] = discount_cumsum(deltas, self.gamma * self.lam)\n\n # the next line computes rewards-to-go, to be targets for the value function\n self.ret_buf[path_slice] = discount_cumsum(rews, self.gamma)[:-1]\n self.path_start_idx = self.ptr", "def end(self):\n\t\treturn self.__params['end']", "def _get_end(self):\n return self.Data.End" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Advance to next episode and set `episode_start_time_step` and `episode_end_time_step` for reading data files.
def next_episode(self, episode_time_steps: Union[int, List[Tuple[int, int]]], rolling_episode_split: bool, random_episode_split: bool, random_seed: int):
    self.__episode += 1
    self.__next_episode_time_steps(
        episode_time_steps,
        rolling_episode_split,
        random_episode_split,
        random_seed,
    )
[ "def _read_next_episode(self):\n if self.done_reading_all_episodes:\n return\n assert self.done_reading_current_episode\n _next_episode_num = self._episodes.next()\n self._latest_episode = self._read_episode(_next_episode_num)\n self._latest_episode_next_offset = 0", "def episode_start(self, config: Dict[str, Any] = None) -> None:\n log.info(\"- - - - - - - - - - - - - - - - - - -- - - - - -- \")\n log.info(\"-- EPISODE {} START-- \".format(self.episode_count))\n\n if config is not None:\n self._iteration_limit = config.get(\n \"episode_iteration_limit\", self._iteration_limit)\n\n if config is not None:\n self._skip_frame = config.get(\n \"skip_frame\", self._skip_frame) \n\n self.finished = False\n self.iteration_count = 0\n self.episode_reward = 0\n self.last_reward = 0\n\n # reset the environment and set the initial observation\n observation = self.gym_episode_start(config)\n self.gym_to_state(observation)", "def end_of_episode(self):\n self.logger.debug(f'Encountered {len(self.events)} game event(s) in final step of episode {self.episode_nr}')\n\n # due to the structure of the act() and reward_update() we are always one step behind with learning\n reward_update(self)\n \n # store (state, action, next_state=None, reward=0) to replay memory as this is the terminal state\n action = torch.tensor([list(s.actions).index(self.action_hist[-1])], device=device)\n self.memory.save(self.state_hist[-1], action, None, torch.tensor([0], dtype=torch.float, device=device))\n \n\n # save current weights \n weights = self.pnet.state_dict()\n save_weights(self.file_weights, weights)\n torch.save(weights, 'weights.pt')\n torch.save(self.optimizer.state_dict(),'optimizer_state.pt')\n \n # every target_update episodes: update target network\n if self.episode_nr % self.target_update == 0:\n self.tnet.load_state_dict(weights)\n \n # increase episode counter\n self.episode_nr += 1\n\n # re-initialize queues\n self.state_hist = deque([],2) # state history, needed for SARSA learning\n self.action_hist = deque([],2) # action history, -------\"------------\n \n # monitoring \n with open(self.out_file_reward,'a') as fd:\n wr = csv.writer(fd)\n wr.writerow([np.sum(self.reward_hist), np.mean(self.reward_hist)])\n \n # reset monitoring\n self.reward_hist = deque([],s.max_steps) # list of all rewards during current episode\n self.logger.debug('juhuu, no errors!')\n \n with open('Q.csv','a') as fd: # mark end of episode in state action value file\n wr = csv.writer(fd)\n wr.writerow([None])\n\n # only debug\n if self.episode_nr%100 == 0:\n print(f'Episode {self.episode_nr} complete!')", "def _run_one_episode(self):\n initial_observation = self._environment.reset()\n self._agent.begin_episode(initial_observation)\n\n is_terminal = False\n step_count = 0\n total_reward = 0.\n\n while True:\n action = self._agent.act()\n observation, reward, is_terminal, _ = self._environment.step(action)\n self._agent.learn(reward, observation)\n\n total_reward += reward\n step_count += 1\n\n if is_terminal or step_count == self._max_steps_per_episode:\n break\n\n self._agent.end_episode()\n\n return step_count, total_reward", "def _find_next_episode(self, episodes):\n today = date.today()\n rw = None\n timespan = None\n\n # Search for the episode which airs next (air date is the closest to now)\n for episode in episodes:\n try:\n airdate = datetime.strptime(episode['firstAired'], '%Y-%m-%d')\n airdate = airdate.date()\n if airdate >= today:\n ctimespan = airdate - today\n if timespan is None or ctimespan < timespan:\n rw = episode\n 
timespan = ctimespan\n except:\n continue\n return rw", "def onNewEpisode(self):\n self.consoleMsg('---> Starting Episode {}/{} <---'.format(self.currentEpisode,self.nEpisodes),topBorder=True)\n if self.resetEnvOnNewEpisode: self.envReset()\n self.done = False\n self.episodeStartTime = time.time()\n self.onNewEpisode_user()", "def step(self):\n while not self.done:\n self.pick_and_conduct_action_and_save_log_probabilities()\n self.store_reward()\n if self.time_to_learn():\n self.actor_learn()\n self.state = self.next_state # this is to set the state for the next iteration\n self.global_step_number += 1\n self.episode_number += 1", "def begin_episode(self, observation):", "def episode_time_steps(self):\n\n return (self.episode_end_time_step - self.episode_start_time_step) + 1", "def determine_next_episode(\n\t\tself):\n\n\t\tresult = dict()\n\n\t\tsjmanager.log.log('Trying to determine next show to watch')\n\n\t\t# First up, check which season and which episode is in the watch cache.\n\t\trow = self.sql.execute(\"\"\"SELECT \n\t\t\tseason_title,\n\t\t\tepisode_title,\n\t\t\tfinished \n\t\t\tFROM last_watched \n\t\t\tWHERE show_url = ?\"\"\",\n\t\t\t(self.url,)).fetchone()\n\n\t\tsjmanager.log.log(\"Fetched the following row: {}, {}, {}, {}\".format(row['season_title'],row['episode_title'],row['finished'],row['finished'] == str(0)))\n\n\t\t# If it's not finished, this means there's a cache file lying around, so\n\t\t# return episode and season title so we can find it.\n\t\tif str(row['finished']) == '0':\n\t\t\tsjmanager.log.log(\"Previous show isn't finished, so taking that as new show\")\n\n\t\t\tresult['season_title'] = row['season_title']\n\t\t\tresult['episode_title'] = row['episode_title']\n\n\t\t\treturn result\n\n\t\tsjmanager.log.log(\n\t\t\t'Ok, season title is {}, episode title is {}'.format(\n\t\t\t\trow['season_title'],\n\t\t\t\trow['episode_title']))\n\n\t\t# Otherwise, if the episode title isn't numeric, there's no chance to know\n\t\t# which episode (or even season) is next. So we return nothing\n\t\tif not row['episode_title'].isnumeric():\n\t\t\tsjmanager.log.log('The episode title is not numeric, so returning nothing')\n\t\t\tresult['season_title'] = None\n\t\t\tresult['episode_title'] = None\n\t\t\treturn result\n\n\t\t# If the episode title _is_ numeric, there's two cases that can happen:\n\t\t#\n\t\t# 1. There's an episode in the current season with a number one higher than\n\t\t# the current episode\n\t\t# 2. No episode with a higher number exists. 
In that case, maybe we have\n\t\t# another season to continue to\n\t\tsjmanager.log.log('Cool, the episode title is numeric')\n\n\t\tseasons = self.seasons(\n\t\t\trow['season_title'])\n\n\t\t# Get all the mangled episode titles in the season\n\t\tepisode_titles = set()\n\t\tfor season in seasons:\n\t\t\tepisode_titles = episode_titles.union(\n\t\t\t\tseason.episode_titles())\n\n\t\tif str(int(row['episode_title']) + 1) in episode_titles:\n\t\t\tsjmanager.log.log(\n\t\t\t\t\"Cool, we've got an episode called {}, continuing with that\".format(\n\t\t\t\t\tint(row['episode_title']) + 1))\n\n\t\t\tresult['season_title'] = row['season_title']\n\t\t\tresult['episode_title'] = str(int(row['episode_title']) + 1)\n\t\t\treturn result\n\n\t\tsjmanager.log.log(\n\t\t\t\"No higher episode found, checking if season is numeric\")\n\n\t\tif not row['season_title'].isnumeric():\n\t\t\tsjmanager.log.log(\n\t\t\t\t\"Season is not numeric, returning nothing\")\n\t\t\tresult['season_title'] = None\n\t\t\tresult['episode_title'] = None\n\t\t\treturn result\n\n\t\tsjmanager.log.log(\n\t\t\t\"Season is numeric, checking if a higher season exists\")\n\n\t\ttitles = self.season_titles()\n\n\t\tif not str(int(row['season_title'])+1) in titles:\n\t\t\tsjmanager.log.log(\n\t\t\t\t\"No higher season exists, returning nothing\")\n\t\t\tresult['season_title'] = None\n\t\t\tresult['episode_title'] = None\n\t\t\treturn result\n\n\t\tsjmanager.log.log(\n\t\t\t\"A higher season exists, returning this season but no episode\")\n\t\tresult['season_title'] = str(int(row['season_title'])+1)\n\t\tresult['episode_title'] = None\n\t\treturn result", "def run_episodes(self, num_episodes):\n for episode in range(num_episodes):\n if (episode+1) % 100 == 0:\n print(f\"Episode: {episode+1}\")\n self.run_episode(episode)\n return self.recorded", "def runEpisode(self):\n self.mainUpdate()", "def start_episode(self) -> State:\n pass", "def _process_timestep(self, timestep):\n # Reset the cumulative episode reward.\n if timestep.first():\n self._episode_return = 0\n self._clear_hidden_reward()\n # Clear the keys in environment data from the previous episode.\n for key in self._keys_to_clear:\n self._environment_data.pop(key, None)\n # Add the timestep reward for internal wrapper calculations.\n if timestep.reward:\n self._episode_return += timestep.reward\n extra_observations = self._get_agent_extra_observations()\n if ACTUAL_ACTIONS in self._environment_data:\n extra_observations[ACTUAL_ACTIONS] = (\n self._environment_data[ACTUAL_ACTIONS])\n if timestep.last():\n # Include the termination reason for the episode if missing.\n if TERMINATION_REASON not in self._environment_data:\n self._environment_data[TERMINATION_REASON] = TerminationReason.MAX_STEPS\n extra_observations[TERMINATION_REASON] = (\n self._environment_data[TERMINATION_REASON])\n timestep.observation[EXTRA_OBSERVATIONS] = extra_observations\n # Calculate performance metric if the episode has finished.\n if timestep.last():\n self._calculate_episode_performance(timestep)\n return timestep", "def _update_episode(self):\n if self.episode_num > 0:\n self._publish_reward_topic(\n self.cumulated_episode_reward,\n self.episode_steps,\n self.episode_num\n )\n\n self.episode_num += 1\n self.cumulated_episode_reward = 0\n self.episode_steps = 0", "def calculate_end_episode(self):\n self.end_episode = self.game.is_final(self.current_state)", "def play_1_episode(self, epsilon_exploration):\n state = self.reset_game()\n done = False\n episode_states = []\n episode_actions = []\n 
episode_rewards = []\n while not done:\n action = self.pick_action(self.policy, state, epsilon_exploration)\n next_state, reward, done, _ = self.environment.step(action)\n if self.hyperparameters[\"clip_rewards\"]: \n reward = max(min(reward, 1.0), -1.0)\n episode_states.append(state)\n episode_actions.append(action)\n episode_rewards.append(reward)\n state = next_state\n return episode_states, episode_actions, episode_rewards", "def test_next_episode(self):\n schedule = Schedule()\n show = schedule.episodes[0].show\n self.assertTrue(show.next_episode)", "def add_episode(self, filepath):\n item, title, start, end = self.resolve(os.path.basename(filepath))\n if item is not None:\n # Mark episode(s) as available\n abspath = os.path.abspath(filepath)\n for episode in range(start, end+1):\n mappeditem, mappedepi = self.map_episode(item, episode)\n mappeditem.add_episode(mappedepi, abspath)\n return True\n return False" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
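Editorial note: the record above only increments the episode counter and delegates the split selection to a private helper (`__next_episode_time_steps`) whose body is not part of this dump. The sketch below is a hypothetical illustration of how such a helper could interpret `episode_time_steps`, `rolling_episode_split`, `random_episode_split` and `random_seed`; the function name `pick_episode_split` and every detail of the selection logic are assumptions, not the source implementation.

import random
from typing import List, Tuple, Union


def pick_episode_split(
    episode: int,
    episode_time_steps: Union[int, List[Tuple[int, int]]],
    simulation_time_steps: int,
    rolling_episode_split: bool,
    random_episode_split: bool,
    random_seed: int,
) -> Tuple[int, int]:
    # Explicit (start, end) pairs: cycle through them episode by episode.
    if isinstance(episode_time_steps, list):
        return episode_time_steps[episode % len(episode_time_steps)]

    length = episode_time_steps
    last_start = simulation_time_steps - length  # latest start that still fits

    if random_episode_split:
        # Reproducible random window for this episode (assumed seeding scheme).
        start = random.Random(random_seed + episode).randint(0, last_start)
    elif rolling_episode_split:
        # Slide the window forward by one full episode length each time.
        start = (episode * length) % (last_start + 1)
    else:
        start = 0

    return start, start + length - 1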
Number of seconds in 1 time step.
def seconds_per_time_step(self) -> float:
    return self.__seconds_per_time_step
[ "def n_timesteps(self) -> int:\n if self.total_time < self.timestep:\n warnings.warn(\n f\"No simulation possible: you asked for {self.total_time} \"\n f\"simulation time but the timestep is {self.timestep}\"\n )\n return floor(self.total_time.total_seconds() / self.timestep.total_seconds())", "def timer():\n\tnow = datetime.now()\n\tnowSeconds = datetime.strftime(now, '%s')\n\tintSeconds = int(nowSeconds) % 60\n\treturn intSeconds", "def duration(self):\n return self.no_timesteps * self.dt", "def get_seconds(self):\n return float(self.frame_num) / self.framerate", "def getNrTimesteps():\n\n timesteps = 25\n return timesteps", "def timer():\n\n return int(time.monotonic())", "def duration(self):\n\t\treturn int(self._duration/self.tick_period) * self.tick_period", "def seconds(**kwargs: int) -> int:\n return math.ceil(datetime.timedelta(**kwargs).total_seconds())", "def time_step(self):\n ts = float(rospy.get_param('/time_step_size', None))\n\n if ts is None:\n raise RuntimeError(\"No Time step has been set by the driving node..\")\n else:\n return ts", "def secondSinceStart():\n elapsed = time.time() - timer\n if hasattr(config,'hardwareSpeedup'):\n speed = config.hardwareSpeedup\n if not (speed == None):\n return elapsed * speed\n\n return elapsed", "def get_num_timesteps(self):\n return len(self.dm[0])", "def running_duration(self) -> int:\n end_time = self.end_time or ts.now()\n return int((end_time - self.start_time).total_seconds())", "def seconds_up(self):\n return (datetime.now() - self.start_time).total_seconds()", "def durationSeconds(self):\n f = 0.0\n if 'duration' in self.__dict__:\n try:\n f = float(self.__dict__['duration'])\n except Exception as e:\n pass\n return f", "def ms(self, t):\n return t // 1000000", "def time(self):\n return ((self['clock']['initial'] + 40 * self['clock']['increment'])\n / 60)", "def duration(self):\n return self._t_stop - self._t_start", "def getFirstTimeStep():\n \n firstTimeStep = 8\n return firstTimeStep", "def timestep(self) -> Optional[float]:\n dt = None\n if len(self.time) > 1 and self.is_equidistant:\n first: pd.Timestamp = self.time[0] # type: ignore\n second: pd.Timestamp = self.time[1] # type: ignore\n dt = (second - first).total_seconds()\n return dt", "def sim_elapsed_seconds(self):\n return (self._now - self._start_time).total_seconds()" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Advance to next `time_step` value. Notes: Override in subclass for custom implementation when advancing to next `time_step`.
def next_time_step(self):
    self.__time_step += 1
[ "def step_forward(self) -> None:\n self._time += self.step_size", "def call_next_time_step(self):\n\n if self.time_step_cycle is not None:\n self.canvas.after_cancel(self.time_step_cycle)\n self.time_step_cycle = self.canvas.after(self.delay, self.time_step)", "def increment_step(self):\n self.current_step += 1", "def adjust_time_step(self):\n # Default is to just increase the time step somewhat\n self.time_step = getattr(self, \"time_step_factor\", 1.0) * self.time_step\n\n # We also want to make sure that we reach the end of each simulation phase\n for dt, lim in zip(self.phase_time_steps, self.phase_limits):\n diff = self.time - lim\n if diff < 0 and -diff <= self.time_step:\n self.time_step = -diff\n\n if np.isclose(self.time, lim):\n self.time_step = dt\n # And that the time step doesn't grow too large after the equilibration phase\n if self.time > 0:\n self.time_step = min(self.time_step, self.max_time_step)", "def step_to(self, time, draw_update):\n\n tof = time - self.t_start\n self.tof_current = tof\n self.step(draw_update)", "def step(self):\n self._wait_until_safe_to_step()\n self._step_without_time_check()\n self._set_earliest_next_step()", "def step(self):\n tmp = self.path[-1].copy()\n tmp += self.direction\n self.path.append(tmp)\n self.update_direction()", "def _flow_time_step(self, dt: float, **kwargs):\n ...", "def advance(self):\n max_days = self.months[self.month - 1]\n if self.month == 2 and self.leapyear(self.year):\n max_days += 1\n if self.day == max_days:\n self.day = 1\n if self.month == 12:\n self.month = 1\n self.year += 1\n else:\n self.month += 1\n else:\n self.day += 1", "def advance(self, next_t):\n n_steps = 0\n while next_t > self.rk_state.t1:\n assert n_steps < self.max_num_steps, 'max_num_steps exceeded ({}>={})'.format(n_steps, self.max_num_steps)\n self.rk_state = self._adaptive_step(self.rk_state)\n n_steps += 1\n train_acc, val_acc, test_acc = self.evaluate(self.rk_state)\n if val_acc > self.best_val:\n self.set_accs(train_acc, val_acc, test_acc, next_t)\n new_t = next_t\n return (new_t, _interp_evaluate(self.rk_state.interp_coeff, self.rk_state.t0, self.rk_state.t1, next_t))", "def advance(self):\n current_index = self.stops.index(self.current_stop)\n if self.direction == \"south\":\n if self.current_stop == \"Kendall\":\n self.current_stop = \"Central\"\n self.direction = \"north\"\n else:\n self.current_stop = self.stops[current_index + 1]\n else:\n if self.current_stop == \"Alewife\":\n self.current_stop = \"Davis\"\n self.direction = \"south\"\n else:\n self.current_stop = self.stops[current_index - 1]", "def set_time_step(self, time_step):\n\n self._time_step = time_step", "def advanceTime(self, amount):\n if self.blocked:\n assert self.workTime == 0\n self.timeWaiting += amount\n else:\n assert self.workTime - amount >= -FLOAT_ERR\n self.workTime = max(self.workTime - amount, 0)\n if self.workTime == 0:\n printHandler(\"I\", self.name, \"finishes a - \", self.currentComponent.name)\n \n if self.workTime == 0:\n oldComponent = self.currentComponent\n workstationUsed = self.placeComponentInBuffer()\n if workstationUsed:\n printHandler(\"I\", self.name, \"places a\", oldComponent.name, 'in', workstationUsed.name)\n self.blocked = False\n self.workOnNextComponent()\n else:\n self.blocked = True", "def advance(self):\n self.ignore_next = True", "def step(self):\n\n e = self.event_queue.get()\n self.current_time = e.time\n component = e.component\n component.output(self.current_time)\n component.input(self.current_time)\n component.fire()\n\n 
self.event_queue.put(VirtualTimeScheduler.Event(self.current_time + component.interval, component))\n\n return self.current_time", "def step(self):\n\n self.timestep += 1\n self.historyLayer.step()", "def tstep_t(self, step):\n it = int(float(step))\n self.set_tstep(it)\n self.update(it, isframe=True)", "def tick(self):\n\n if self.second == 59:\n self.second = 0\n if self.minute == 59:\n self.minute = 0\n if self.hour == 23:\n self.hour = 0\n self.advance()\n else:\n self.hour += 1\n else:\n self.minute += 1\n else:\n self.second += 1", "def step(self):\n self.state[self.curr_iter] =\\\n self.iter_func(self.state[self.curr_iter - 1])\n self.curr_iter += 1", "def calculate_next_state(self):\n self.current_step = self.current_step + 1\n self.current_state = self.game.next_state(current_state=self.current_state, actions=self.next_action)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Reset `time_step` to initial state. Sets `time_step` to 0.
def reset_time_step(self):
    self.__time_step = 0
[ "def reset_step_count(self):\n self.step_count = 0", "def reset(self):\n\n self.timestep = 0\n self.historyLayer.reset()", "def reset_timing(self):\n\n self.timing_start = self.current_time()", "def reset(self) -> None:\n self.time_counted = 0\n self.last_start_time = 0\n self.is_running = False", "def initialize_time(self):\n self._cur_time = 0\n self._model_timestep = self.sim.model.opt.timestep\n self._control_timestep = 1.0 / self._control_freq", "def OnResetTime(self, event):\n self.modelview.timezero(0)", "def zero_time(self):\n self.time = self.time - self.time.min()", "def set_time_step(self, time_step):\n\n self._time_step = time_step", "def reset_time(self):\n\n self._alive_time = 0 # No need to signal the change, since the view is updated by the value toggle", "def _reset(self):\n self.start_time = None\n self.backoff_time = None", "def reset_lineage(self):\n self.lineage.step.execution_time_seconds = None\n self.lineage.step.start_time = datetime.datetime.now()\n self.lineage.in_progress = True", "def resetStartTime(self):\n self.__startTime = time.time()", "def reset_duration(self):\n self.__duration = 0", "def reset(self):\n self.timer -= self.period", "def reset(self):\n self.formatted_time.set(\"00:00:00\")", "def reset(self) -> None:\n self._attempts = 0\n self._cur_delay = self.delay\n self._cur_stoptime = None", "def reset_time(self, widget, data=None):\n\t\tself.elapsed_time = datetime.timedelta()\n\t\tself.time_counter.set_text(str(self.elapsed_time))\n\t\treturn", "def reset_time_trace(self):\n self.__time_trace_data = []\n self.time_trace_level = -2", "def reset(self):\n with self.lock:\n self.metric = None\n self.step = -1\n self.stop = False\n self.trial_id = None\n self.fd.flush()\n self.trial_fd.close()\n self.trial_fd = None\n self.trial_log_file = None", "def reset_datetime(self):\n self.set_datetime(self._initial_time)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
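Editorial note: taken together, the time-step records above describe the bookkeeping of an episodic environment: an episode split with an inclusive step count, a step counter that is advanced and reset, and a fixed number of seconds per step. The loop below is a minimal, hypothetical usage sketch of such an interface; the class `TimeStepEnvironment` and its attribute names are invented for illustration and are not taken from the records.

class TimeStepEnvironment:
    """Minimal stand-in for the time-step bookkeeping shown in the records above."""

    def __init__(self, episode_time_steps: int, seconds_per_time_step: float):
        self.episode_time_steps = episode_time_steps
        self.seconds_per_time_step = seconds_per_time_step
        self.time_step = 0

    def reset_time_step(self) -> None:
        # As in the last record: start counting from 0 again.
        self.time_step = 0

    def next_time_step(self) -> None:
        # As in the `next_time_step` record: advance by one step.
        self.time_step += 1


env = TimeStepEnvironment(episode_time_steps=24, seconds_per_time_step=3600.0)

for episode in range(2):
    env.reset_time_step()
    while env.time_step < env.episode_time_steps - 1:
        env.next_time_step()
    elapsed_seconds = env.episode_time_steps * env.seconds_per_time_step
    print(f"episode {episode}: {env.time_step + 1} steps, {elapsed_seconds:.0f} simulated seconds")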