| query (stringlengths 9 to 9.05k) | document (stringlengths 10 to 222k) | negatives (sequencelengths 19 to 20) | metadata (dict) |
---|---|---|---|
Sets the errors of this MigrateListingResponse. | def errors(self, errors):
self._errors = errors | [
"def errors(self, errors):\n \n self._errors = errors",
"def errors(self, errors):\n self._errors = errors",
"def validation_errors(self, validation_errors):\n\n self._validation_errors = validation_errors",
"def validation_errors(self, validation_errors):\n self._validation_errors = validation_errors",
"def errors(self) -> Sequence['outputs.BatchAIErrorResponse']:\n return pulumi.get(self, \"errors\")",
"def getErrorsList(self):\n return self.__errors",
"def import_errors(self, import_errors):\n\n self._import_errors = import_errors",
"def add_errors_to_response(self):\n\n if (self.eventlist.count_errors() == 0):\n return\n rstr = str(self.eventlist)\n if (self.responsedata == None):\n self.responsedata = rstr\n else:\n self.responsedata.append(rstr)",
"def get_template_errors(self):\n if not hasattr(self, u'_errors'):\n self._errors = MutliErrorDict()\n return self._errors",
"def errors(self):\n return tuple(self._errors)",
"def validation_errors(self):\n return self._validation_errors",
"def check_set_errors(self):\n raise NotImplementedError(\"Implement it in a subclass.\")",
"def SetErrorAtIndex(self, index):\n self.rows[index].error = True",
"def mark_error(self):\r\n self.status = ERROR",
"def import_errors(self):\n return self._import_errors",
"def artifact_import_errors(self, artifact_import_errors):\n\n self._artifact_import_errors = artifact_import_errors",
"def errors(self):\n self.addedPrograms()\n self.addedLibraries()\n self.addedVariables()\n self.modifiedVariables()\n return self._errors",
"def mark_current_record_as_error(self, list_error: list[InvalidCellError]) -> None:\n raise NotImplementedError",
"def __set_errors_json(self, error_count_by_operation, errors_by_operation):\n message = \"{0} error/s reported.\".format(error_count_by_operation)\n log_file_path = self.logger.file_logger.log_file_path\n message += \" The latest {0} error/s are shared in detail. To view all errors, review this log file on the machine: {1}\".format(len(errors_by_operation), log_file_path) if error_count_by_operation > 0 else \"\"\n return {\n \"code\": Constants.PatchOperationTopLevelErrorCode.SUCCESS if error_count_by_operation == 0 else Constants.PatchOperationTopLevelErrorCode.ERROR,\n \"details\": errors_by_operation,\n \"message\": message\n }"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
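The metadata block above marks the row's training objective as a (query, document, negatives) triplet. As a minimal illustrative sketch, assuming each row is available as a plain Python dict keyed by the column names from the header (the abbreviated row below is hypothetical, not a verbatim record), the triplet objective expands into one (anchor, positive, negative) example per negative:

```python
# Minimal sketch: expand one row into (anchor, positive, negative) triplets.
# The row below is abbreviated and purely illustrative.
row = {
    "query": "Sets the errors of this MigrateListingResponse.",
    "document": "def errors(self, errors):\n    self._errors = errors",
    "negatives": [
        "def validation_errors(self, validation_errors):\n    self._validation_errors = validation_errors",
        "def getErrorsList(self):\n    return self.__errors",
    ],
    "metadata": {"objective": {"paired": [], "self": [],
                               "triplet": [["query", "document", "negatives"]]}},
}

# One training triplet per negative: the query is the anchor, the document is the
# positive, and each negative code snippet serves as a hard negative.
triplets = [(row["query"], row["document"], neg) for neg in row["negatives"]]
print(len(triplets))   # 2
print(triplets[0][0])  # Sets the errors of this MigrateListingResponse.
```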
Sets the inventory_item_group_key of this MigrateListingResponse. | def inventory_item_group_key(self, inventory_item_group_key):
self._inventory_item_group_key = inventory_item_group_key | [
"def item_group_id(self, item_group_id):\n\n self._item_group_id = item_group_id",
"def group_id(self, group_id):\n \n self._group_id = group_id",
"def group_id(self, group_id):\n\n self._group_id = group_id",
"def inventory_id(self, inventory_id):\n\n self._inventory_id = inventory_id",
"def group_id(self, group_id):\n self._group_id = group_id",
"def item_group_href(self, item_group_href):\n\n self._item_group_href = item_group_href",
"def group_identifier(self, group_identifier):\n\n self._group_identifier = group_identifier",
"def group(self, group):\n \n self._group = group",
"def set_group(self, group):\n self._group = group",
"def group(self, group):\n\n self._group = group",
"def item_group_type(self, item_group_type):\n\n self._item_group_type = item_group_type",
"def group(self, group):\n self._group = group",
"def group_name(self, group_name):\n\n self._group_name = group_name",
"def ipgroup(self, ipgroup):\n self._ipgroup = ipgroup",
"def modify(self, key: int, data: Dict[str, Any]) -> APIResponse:\n return self._put(\"detail\", {\"group_pk\": key}, data)",
"def im_group_ids(self, im_group_ids):\n\n self._im_group_ids = im_group_ids",
"def group_name(self, group_name):\n self._group_name = group_name",
"def group_ids(self, group_ids):\n\n self._group_ids = group_ids",
"def shift_group(self, shift_group):\n\n self._shift_group = shift_group"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Sets the inventory_items of this MigrateListingResponse. | def inventory_items(self, inventory_items):
self._inventory_items = inventory_items | [
"def inventory(self, inventory):\n\n self._inventory = inventory",
"def put_inventory(self, InstanceId: str, Items: List) -> Dict:\n pass",
"def setInventory(self, inv):\n self.inventory = inv\n\n # Make the slotsize accessible in the middle and foreground methods\n self.slotSize = 90\n # Loop the rows and columns and create an empty inventory grid\n for y in range(4):\n for x in range(4):\n self.itemSlots[y * 4 + x].setItem(self.inventory.getItem(y * 4 + x))\n\n for i, itemstack in enumerate(self.inventory.getEquipped()):\n self.itemSlots[16 + i].setItem(itemstack)",
"def inventory_id(self, inventory_id):\n\n self._inventory_id = inventory_id",
"def items(self, items: List[InlineResponse200Items]):\n if items is None:\n raise ValueError(\"Invalid value for `items`, must not be `None`\") # noqa: E501\n\n self._items = items",
"def prepare_inventory(self):\n pass",
"def add_inventory(self, current_inventory):\n for item in self.inventory:\n current_inventory.append(item)\n # remove supplies from the tile\n self.inventory = []",
"def clean_up_inventory(self):\n self.inventory = [i for i in self.inventory if i.quantity != 0]",
"def inventory(self):\n return self._inventory",
"def inventory_id(self, inventory_id):\n if inventory_id is None:\n raise ValueError(\"Invalid value for `inventory_id`, must not be `None`\") # noqa: E501\n\n self._inventory_id = inventory_id",
"def inventory_level(self, inventory_level):\n\n self._inventory_level = inventory_level",
"def populate_initial_inventory(self):\r\n\r\n weapons_file = open('initial-inventory.json', \"r\")\r\n json_data = json.loads(weapons_file.read())\r\n weapons_file.close()\r\n\r\n weapons = json_data['weapons']\r\n for weapon in weapons:\r\n requests.post(\"http://\" + self.ip_address + \":3000/Weapons\", data=weapon)",
"def profile_inventory(self, profile_inventory):\n\n self._profile_inventory = profile_inventory",
"def set_ingredients(self, new_ingredients):\n self.ingredients = new_ingredients",
"def updated_items(self, updated_items):\n\n self._updated_items = updated_items",
"def setitems(self, items):\n self.clear()\n # FIXME: this allows you to pass in an OrderedDict as well :-)\n self.update(items)",
"def inventories(self, inventories):\n self._inventories = inventories",
"def setitems(self, items):\r\n self.clear()\r\n # FIXME: this allows you to pass in an OrderedDict as well :-)\r\n self.update(items)",
"def update_inventory(self, inventory, generation):\n self._update_generation(generation, 'update_inventory')\n if self.has_inventory_changed(inventory):\n LOG.debug('Updating inventory in ProviderTree for provider %s '\n 'with inventory: %s', self.uuid, inventory)\n self.inventory = copy.deepcopy(inventory)\n return True\n LOG.debug('Inventory has not changed in ProviderTree for provider: %s',\n self.uuid)\n return False"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Sets the listing_id of this MigrateListingResponse. | def listing_id(self, listing_id):
self._listing_id = listing_id | [
"def listing_id(self, listing_id):\n self._listing_id = listing_id",
"def listing_id(self):\n return self._listing_id",
"def delete_listing(request, listing_id):\n listing = get_object_or_404(Listing, pk=listing_id)\n\n listing.delete()\n messages.success(\n request,\n 'Your listing has been removed from the database.')\n\n return redirect(reverse('addlisting'))",
"def listing_visibility(self, listing_visibility):\n\n self._listing_visibility = listing_visibility",
"def listing_details(listing_id, lang=None, context=None):\n settings = get_settings(context=context)\n base_url = settings.get('mls_site', None)\n api_key = settings.get('mls_key', None)\n debug = api.env.debug_mode\n config = get_configs(context=context, merged=True)\n params = {}\n if config.get('show_unverified', False):\n params['apiowner'] = settings.get('agency_id')\n params['show_unverified'] = True\n if config.get('show_unverified_only', False):\n params['show_unverified_only'] = True\n resource = ListingResource(base_url, api_key=api_key, debug=debug)\n try:\n listing = resource.get(listing_id, lang=lang, params=params)\n except MLSError as e:\n logger.warn(e)\n return None\n listing = listing.get('listing', None)\n if listing is not None:\n agent = copy.deepcopy(listing.get('contact', {}).get('agent'))\n listing['original_agent'] = agent\n return listing",
"def update(self, amz_listing):\n amz_listing.sku = self.asin\n amz_listing.title = self.title\n amz_listing.brand = self.brand\n amz_listing.model = self.model\n amz_listing.upc = self.upc\n amz_listing.quantity = self.quantity\n amz_listing.url = self.url\n amz_listing.salesrank = self.salesrank\n amz_listing.offers = self.offers\n amz_listing.hasprime = self.prime\n\n # Only update price if price information is provided\n if self._tag.xpath('.//Offers'):\n amz_listing.price = self.price",
"def view_and_edit_listing(request, listing_id):\n categories = Category.objects.all()\n listing = get_object_or_404(Listing, pk=listing_id)\n\n if request.method == 'POST':\n editform = AddListingForm(\n request.POST,\n request.FILES,\n instance=listing)\n if editform.is_valid():\n listing.save()\n messages.success(\n request,\n 'Thank you. Your listing has been updated')\n return redirect(reverse('addlisting'))\n else:\n editform = AddListingForm(instance=listing)\n\n context = {\n 'editform': editform,\n 'listing': listing,\n 'categories': categories\n }\n return render(request, 'editlisting.html', context)",
"def listing_show(listing_id):\n\n listing = Listing.query.get_or_404(listing_id)\n return (jsonify(listing=listing.serialize(isDetailed=True)), 200)",
"def vault_id(self, vault_id):\n self._vault_id = vault_id",
"def set_shopping_list(self, shopping_list_id):\n self._shopping_list = shopping_list_id",
"def direction_id(self, direction_id):\n\n self._direction_id = direction_id",
"def list_id(self, list_id):\n if (self.local_vars_configuration.client_side_validation and\n list_id is not None and list_id < 1): # noqa: E501\n raise ValueError(\"Invalid value for `list_id`, must be a value greater than or equal to `1`\") # noqa: E501\n\n self._list_id = list_id",
"def extract_listing_data(listing):\n\n # get the data dict from the listing\n listing_data = listing.data\n # get the SystemNames for all of this listing's data tuples\n listing_data_keys = list(listing_data.keys())\n # instantiate a Listing object with the RETS PK as the system_id value\n listing_object = models.Listing(system_id=listing.resource_key)\n # get all the fields from the Listing model\n listing_object_fields = models.Listing._meta.get_fields()\n # iterate over each Listing model field\n for field in listing_object_fields:\n # Skip relation fields\n if not field.is_relation:\n try:\n # assign the field's db_column to a variable\n field_db_column = field.db_column\n except AttributeError:\n # if no db_column, skip it\n continue\n if field_db_column in listing_data_keys:\n # if we find the db_column in the SystemNames list, update the new\n # Listing object accordingly\n if listing_data[field_db_column]:\n if type(listing_data[field_db_column]) in [dt.datetime,\n dt.date]:\n attr = listing_data[field_db_column].replace(\n tzinfo=pytz.UTC)\n setattr(listing_object, field.name, attr)\n else:\n setattr(listing_object, field.name,\n normalize_none(listing_data[field_db_column]))\n return listing_object",
"def set_lernort_id(self, value):\n self._lernort_id = value",
"def sportsbook_id(self, sportsbook_id):\n\n self._sportsbook_id = sportsbook_id",
"def deleteListing(id):\n try:\n # Call delete_one() on listings collection\n db.listings.delete_one({\"_id\": id})\n return redirect(url_for(\"main.landingPage\"))\n except (ValueError):\n # Return custom 500 error page, set status code to 500\n return render_template(\"500.html\"), 500",
"def setID(self, idf):\n self.id = idf",
"def set_lernart_id(self, value):\n self._lernart_id = value",
"def listing_create():\n listing_data = request.json.get(\"listing\")\n form = ListingCreateForm(data=listing_data)\n\n if form.validate():\n listing = Listing.create(form)\n db.session.commit()\n # TODO: reevaluate error with a try and except later\n return (jsonify(listing=listing.serialize(isDetailed=True)), 201)\n else:\n errors = []\n for field in form:\n for error in field.errors:\n errors.append(error)\n return (jsonify(errors=errors), 400)"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Sets the marketplace_id of this MigrateListingResponse. | def marketplace_id(self, marketplace_id):
self._marketplace_id = marketplace_id | [
"def set_AWSMarketplaceId(self, value):\n super(ListOrdersInputSet, self)._set_input('AWSMarketplaceId', value)",
"def set_AWSMarketplaceId(self, value):\n super(GetReportRequestListInputSet, self)._set_input('AWSMarketplaceId', value)",
"def registration_marketplace_id(self, registration_marketplace_id):\n\n self._registration_marketplace_id = registration_marketplace_id",
"def marketplace_id(self) -> Any:\n return pulumi.get(self, \"marketplace_id\")",
"def listing_id(self, listing_id):\n self._listing_id = listing_id",
"def listing_id(self, listing_id):\n\n self._listing_id = listing_id",
"def place_id(self):\n return self._place_id",
"def place_id(self):\n return self.place['id']",
"def trade_id(self, trade_id):\n\n self._trade_id = trade_id",
"def feed_id(self, feed_id):\n\n self._feed_id = feed_id",
"def sportsbook_id(self, sportsbook_id):\n\n self._sportsbook_id = sportsbook_id",
"def checkout_id(self, checkout_id):\n\n self._checkout_id = checkout_id",
"def sales_point_id(self, sales_point_id):\n\n self._sales_point_id = sales_point_id",
"def set_id(self, car_id):\n self.id = car_id",
"def brand_id(self, brand_id):\n\n self._brand_id = brand_id",
"def market_id(self) -> int:\n return self.order.market_id",
"def get_marketplace(self, marketplace_id):\n return MarketplaceResource(self._config).get(marketplace_id)",
"def team_id(self, team_id):\n\n self._team_id = team_id",
"def branding_theme_id(self, branding_theme_id):\n\n self._branding_theme_id = branding_theme_id"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Sets the status_code of this MigrateListingResponse. | def status_code(self, status_code):
self._status_code = status_code | [
"def status_code(self, status_code):\n self._status_code = status_code",
"def set_status_code(self, status_code):\n if (status_code is not None) and (type(status_code) is int):\n self.status_code = status_code\n else:\n self.status_code = 500",
"def set_status(self, code):\n self.status = code\n self.status_message = messages.Response.REASON.get(code, \"Unknown\")",
"def set_status(self, status):\n self.response_dict(status=status)\n self.response.set_status(code=status)",
"def http_status_code(self, http_status_code):\n\n self._http_status_code = http_status_code",
"def status_code(self) -> int:\n return self._response.status_code",
"def status429(self, status429):\n \n self._status429 = status429",
"def status(self, status):\n\n self._status = status",
"def status200(self, status200):\n \n self._status200 = status200",
"def status(self, status):\n self._status = status",
"def set_status(self, status: HTTPProxyStatus) -> None:\n self._status = status\n self.update_actor_details(status=self._status)",
"def set_status( code ):",
"def statusChanged(self, statusCode):\n self.factory.status = statusCode",
"def status(self, status):\n self.__status = status",
"def status_code(self) -> int:\n return self._error.response.status_code",
"def status(self, value):\r\n if isinstance(value, (int, long)):\r\n if 100 <= value <= 999:\r\n st = _RESPONSE_STATUSES.get(value, '')\r\n if st:\r\n self._status = '%d %s' % (value, st)\r\n else:\r\n self._status = str(value)\r\n else:\r\n raise ValueError('Bad response code: %d' % value)\r\n elif isinstance(value, basestring):\r\n if isinstance(value, unicode):\r\n value = value.encode('utf-8')\r\n if _RE_RESPONSE_STATUS.match(value):\r\n self._status = value\r\n else:\r\n raise ValueError('Bad response code: %s' % value)\r\n else:\r\n raise TypeError('Bad type of response code.')",
"def status(self, value):\n if isinstance(value, (long, int)):\n if 100 <= value <= 900:\n status = _RESPONSE_STATUSES.get(value, '')\n if status:\n self._status = '%d %s' % (value, status)\n else:\n self._status = str(value)\n else:\n raise ValueError('Bad response code: %d' % value)\n elif isinstance(value, basestring):\n if isinstance(value, unicode):\n value = value.encode('utf-8')\n if _RE_RESPONSE_STATUS.match(value):\n self._status = value\n else:\n raise ValueError('Bad response code: %d' % value)\n else:\n raise TypeError('Bad type of response code.')",
"def set_custom_status(self, status: Any):\n self._custom_status = status",
"def get_status_code(self, response):\n if hasattr(response, 'status_int'):\n return response.status_int\n return response.status"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Sets the warnings of this MigrateListingResponse. | def warnings(self, warnings):
self._warnings = warnings | [
"def warnings(self, warnings):\n if warnings is None:\n raise ValueError(\"Invalid value for `warnings`, must not be `None`\") # noqa: E501\n\n self._warnings = warnings",
"def flag_warnings(self):\n if not self._warning_status:\n self._warning_status = True",
"def warning(self, warning):\n\n self._warning = warning",
"def finalise_warnings(self):\n # Set the visibility of the warning header\n if not self._warning_status:\n self._warnings.add_css_classes(\"hide\")\n # Add an non-visible section that the publisher can\n # read to determine if there were problems\n s = self.add_section(name=\"status\",css_classes=(\"hide\",))\n s.add(\"Status: %s\" % ('OK' if not self._warning_status\n else 'WARNINGS',))",
"def allow_warnings(self, allow_warnings):\n self._allow_warnings = allow_warnings",
"def allow_warnings(self, allow_warnings):\n\n self._allow_warnings = allow_warnings",
"def _log_warnings_postprocess(response):\n # NOTE: .initial_metadata() will block.\n for key, value in response.initial_metadata():\n if key == \"x-wf-warnings\":\n warnings.warn(value)\n\n return response",
"def init_warnings(self):\n self._warning_status = False\n self._warnings = self.add_section()\n self._warnings.add(\n self.warning(\"There are issues with one or more \"\n \"lanes or samples\",size=50))",
"def warnings(self) -> List[Error]:",
"def warning_count(self, warning_count):\n\n self._warning_count = warning_count",
"def warning(self, warning):\n pass",
"def _make_SMART_warnings(self):\n if smartctl_type[self.interface] == 'scsi':\n return\n for attr in self.attributes:\n if attr is not None:\n if attr.when_failed == 'In_the_past':\n self.messages.append(\"\".join(\n [attr.name, \" failed in the past with value \",\n attr.worst, \". [Threshold: \", attr.thresh, ']']))\n if not self.assessment == 'FAIL':\n self.assessment = 'WARN'\n elif attr.when_failed == 'FAILING_NOW':\n self.assessment = 'FAIL'\n self.messages.append(\"\".join(\n [attr.name, \" is failing now with value \",\n attr.value, \". [Threshold: \", attr.thresh, ']']))\n elif not attr.when_failed == '-':\n self.messages.append(\"\".join(\n [attr.name, \" says it failed '\", attr.when_failed,\n \"'. [V=\", attr.value, \",W=\", attr.worst, \",T=\",\n attr.thresh, ']']))\n if not self.assessment == 'FAIL':\n self.assessment = 'WARN'",
"def warnings(self):\n\n return [item for item in self._content_parser.entries if item.kind == 'WARNING']",
"def _warn(self, warning=None):\r\n debug.err('Warning: %s' % warning)\r\n\r\n if core.FW_conf['settings'].TestRun.ExecutionMode == 'Leader' and warning != None:\r\n executeInFollower(\"self.warn('%s')\" % (warning,))\r\n\r\n if type(warning) != types.ListType:\r\n warning = [warning]\r\n\r\n self.result.addStepWarning(warning)",
"def warning(self, msg, transfers):\n self.validation_exceptions.extend(self._create_exceptions(msg, transfers, ValidationType.WARNING))",
"def warning_spoilers(self, warning_spoilers):\n\n self._warning_spoilers = warning_spoilers",
"def set_warning(cls):\n cls.lvl = logging.WARNING\n cls.log.setLevel(cls.lvl)",
"def warn(self, warning=None):\r\n\r\n if self.getName() != 'Main':\r\n warning = self.getName() + ': ' + warning\r\n\r\n debug.err('Warning: %s' % warning)\r\n if type(warning) != types.ListType:\r\n warning = [warning]\r\n\r\n if self.result:\r\n self.result.addStepWarning(warning)",
"def allow_warnings(self):\n return self._allow_warnings"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Return 'WHERE' clause that implements kwds_filter constraints. | def _build_where_clause(**kwds_filter):
clause = []
params = []
items = kwds_filter.items()
items = sorted(items, key=lambda x: x[0]) # Ordered by key.
for key, val in items:
if _is_nsiterable(val):
clause.append(key + ' IN (%s)' % (', '.join('?' * len(val))))
for x in val:
params.append(x)
else:
clause.append(key + '=?')
params.append(val)
clause = ' AND '.join(clause) if clause else ''
return clause, params | [
"def create_where():\n conjunction = Forward().setResultsName(\"conjunction\")\n nested = Group(Suppress(\"(\") + conjunction + Suppress(\")\")).setResultsName(\n \"conjunction\"\n )\n\n maybe_nested = nested | constraint\n inverted = Group(not_ + maybe_nested).setResultsName(\"not\")\n full_constraint = maybe_nested | inverted\n conjunction <<= full_constraint + OneOrMore(and_or + full_constraint)\n return upkey(\"where\") + Group(conjunction | full_constraint).setResultsName(\"where\")",
"def _build_where_clause(**kwds_filter):\n clause = []\n params = []\n items = kwds_filter.items()\n items = sorted(items, key=lambda x: x[0]) # Ordered by key.\n for key, val in items:\n if nonstringiter(val):\n clause.append(key + ' IN (%s)' % (', '.join('?' * len(val))))\n for x in val:\n params.append(x)\n else:\n clause.append(key + '=?')\n params.append(val)\n\n clause = ' AND '.join(clause) if clause else ''\n return clause, params",
"def where_clause(self, kwargs):\n where_clause = super(AnalyticalTools, self).where_clause(kwargs)\n if '_id' in kwargs:\n where_clause &= Expression(AnalyticalTools.id, OP.EQ, kwargs['_id'])\n if 'name' in kwargs:\n name_oper = OP.EQ\n if 'name_operator' in kwargs:\n name_oper = getattr(OP, kwargs['name_operator'])\n where_clause &= Expression(AnalyticalTools.name, name_oper, kwargs['name'])\n if 'encoding' in kwargs:\n where_clause &= Expression(AnalyticalTools.encoding, OP.EQ, kwargs['encoding'])\n return where_clause",
"def get_where_clause(self, params: Dict) -> str:\n return ''",
"def get_where_clause(self, feature, params=()):\n where_clause = []\n for pk in self.pk_cols:\n params += (feature[pk],)\n where_clause.append(pk + \" = (?)\")\n\n where_clause = \" WHERE \" + \" AND \".join(where_clause)\n return where_clause, params",
"def where_clause(self):\n where_clause = \"\\nWHERE \"\n if not self.index_column:\n # If no index_column, there is no where clause. The whole\n # source table is dumped.\n return \"\"\n\n if self.new_index_value is None:\n # Either the table is empty or the index_column is all NULL\n return \"\"\n\n if self.lookback_index_value and not self.full_refresh:\n # This should always happen except on the initial load or if a\n # full_refresh is explicitly requested. The lookback_index_value should\n # be None in the full_refresh case but this adds an extra guard.\n where_clause += \"{} > '{}' AND \".format(\n self.index_column, self.lookback_index_value\n )\n elif self.old_index_value and not self.full_refresh:\n # This should only happen if lookback value doesn't load for some reason.\n\n where_clause += \"{} > '{}' AND \".format(\n self.index_column, self.old_index_value\n )\n\n # Note that we include the new_index_value as an upper bound even in\n # the initial load and full_refresh cases. This ensures that the value\n # recorded in the index table correctly aligns with the data pulled.\n where_clause += \"{} <= '{}'\".format(self.index_column, self.new_index_value)\n return where_clause",
"def where(self, where_expr, *args, **kwargs):\n if where_expr is not None and args:\n if kwargs.get(\"or_filters\", False):\n self.query[\"where\"] = self.or_(where_expr, *args)\n else:\n self.query[\"where\"] = self.and_(where_expr, *args)\n else:\n self.query[\"where\"] = where_expr if where_expr is not None else \"\"",
"def _make_sql_where_clause(**kwargs):\n logger = logging.getLogger(\"imars_etl.{}\".format(\n __name__,\n )\n )\n result = []\n for key in kwargs:\n val = kwargs[key]\n if key in VALID_FILE_TABLE_COLNAMES and key != \"filepath\":\n result.append('{}=\"{}\"'.format(key, val))\n result = ' AND '.join(result)\n logger.debug(result)\n return result",
"def Where(self, *args):\n self.where_conds = reduce(operator.and_, args)\n return self",
"def _getSQLWhere(self, inputTable, queryMeta):\n\t\tsqlPars = {}\n\t\tinputPars = dict((p.name, p.value) for p in inputTable.iterParams())\n\t\treturn base.joinOperatorExpr(\"AND\",\n\t\t\t[cd.asSQL(inputPars, sqlPars, queryMeta)\n\t\t\t\tfor cd in self.condDescs]), sqlPars",
"def where(self, cond):\n return self.filter(lambda x: _(x).contains(cond))",
"def params_where(params, where):\n if where:\n params['where'] = ' AND '.join(where)",
"def where(condition):\r\n return ('', []) if condition.clause == '' else (f'WHERE {condition.clause}', list(condition.params))",
"def where(self, **kwargs):\n\n condition = []\n for k, v in kwargs.items():\n index = k.rfind('_')\n field = k[:index]\n opt = opt_map[k[index + 1:]]\n v = add_quotes(v)\n if opt == 'IN':\n if not isinstance(v, tuple):\n raise RuntimeError(\"Opt(IN) value must be tuple!\")\n v = \"({})\".format(\", \".join([add_quotes(x) for x in v]))\n elif opt == 'BETWEEN':\n if not isinstance(v, tuple):\n raise RuntimeError(\"Opt(BETWEEN) value must be tuple!\")\n v = \"{} AND {}\".format(add_quotes(v[0]), add_quotes(v[-1]))\n condition.append(\"{} {} {}\".format(field, opt, str(v)))\n self._condition = ' and '.join(condition)\n return self",
"def to_sql_where_constraint(data: dict, like_columns: list) -> str:\n if len(data) == 0:\n return \"\"\n where = \" WHERE {}\"\n filters = []\n for k, v in data.items():\n if k in like_columns:\n filters.append(\"{0}::text ILIKE %({0})s\".format(k))\n data[k] = '%{}%'.format(v)\n else:\n filters.append(\"{0} = %({0})s\".format(k))\n return where.format(\" AND \".join(filters))",
"def get_where_expr(self):\n return self._where_expr",
"def where(self, table, what='*', order=None, group=None, limit=None, \r\n offset=None, _test=False, **kwargs):\r\n where = []\r\n for k, v in kwargs.iteritems():\r\n where.append(k + ' = ' + sqlquote(v))\r\n return self.select(table, what=what, order=order, \r\n group=group, limit=limit, offset=offset, _test=_test, \r\n where=SQLQuery.join(where, ' AND '))",
"def _extract_where(self, query) :\n\t\tquery = copy.copy(query)\n\t\t\n\t\t# discard the insert information\n\t\tif self.n.sparql.insert in query :\n\t\t\tdel query[self.n.sparql.insert]\n\t\t\n\t\t# discard the delete information\n\t\tif self.n.sparql.delete in query :\n\t\t\tdel query[self.n.sparql.delete]\n\t\t\n\t\t# build the where clause with outlined variables\n\t\treturn self.python_to_SPARQL_long(query)",
"def filter_conditions():\r\n # Not a one-liner. You're welcome. :-)\r\n self._validate_filters(filters)\r\n joins, wheres, join_params, where_params = [], [], [], []\r\n for n, (k, v) in enumerate(filters.iteritems()):\r\n joins.append(\r\n 'LEFT JOIN tidings_watchfilter f{n} '\r\n 'ON f{n}.watch_id=w.id '\r\n 'AND f{n}.name=%s'.format(n=n))\r\n join_params.append(k)\r\n wheres.append('(f{n}.value=%s '\r\n 'OR f{n}.value IS NULL)'.format(n=n))\r\n where_params.append(hash_to_unsigned(v))\r\n return joins, wheres, join_params + where_params"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
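The document above builds a parameterized WHERE clause in which scalar filters become `col=?` terms and iterable filters become `col IN (?, ?, ...)` terms. The sketch below shows how such a helper plugs into `sqlite3`; the helper is restated in simplified form, and the original's `_is_nsiterable` test is replaced with a plain `isinstance` check, so string handling differs from the record's code.

```python
import sqlite3

def build_where_clause(**kwds_filter):
    """Simplified restatement: scalars become 'col=?', iterables become 'col IN (...)'."""
    clause, params = [], []
    for key, val in sorted(kwds_filter.items()):   # ordered by key, as in the original
        if isinstance(val, (list, tuple, set)):    # simplified non-string-iterable test
            clause.append('%s IN (%s)' % (key, ', '.join('?' * len(val))))
            params.extend(val)
        else:
            clause.append(key + '=?')
            params.append(val)
    return (' AND '.join(clause) if clause else ''), params

conn = sqlite3.connect(':memory:')
conn.execute('CREATE TABLE t (region TEXT, year INTEGER)')
conn.executemany('INSERT INTO t VALUES (?, ?)',
                 [('north', 2020), ('south', 2021), ('north', 2022)])

where, params = build_where_clause(region='north', year=[2020, 2022])
print(where)    # region=? AND year IN (?, ?)
print(conn.execute('SELECT * FROM t WHERE ' + where, params).fetchall())
# [('north', 2020), ('north', 2022)]
```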
Normalize value for use as SQLite column name. | def _normalize_column(column):
if not isinstance(column, str):
msg = "expected column of type 'str', got {0!r} instead"
raise TypeError(msg.format(column.__class__.__name__))
column = column.strip()
column = column.replace('"', '""') # Escape quotes.
if column == '':
column = '_empty_'
return '"' + column + '"' | [
"def normalize_tablename(x):\n return normalize_name_generic(x, remove_leading_nums=True)",
"def _normalize_name(name):\n if name:\n return name",
"def __normalize_name(self):\n self.normalized_name = normalizeSimplified(self.name)",
"def normalize(value):\n if value:\n return value.replace('.', '').strip(',:/; ')",
"def sanitize_column_name(name):\n answer = name\n for i, j in ((\" \", \"_\"), (\"-\", \"_\"), (\"/\", \"_\"), (\"&\", \"and\")):\n answer = answer.replace(i, j)\n return \"\".join(\n [\n char\n for char in answer\n if char.isalpha() or char.isdigit() or char == \"_\"\n ]\n )",
"def normalize(self, value):\n raise NotImplementedError",
"def clean_column_name(self, column_name):\n column_name = column_name.lower().strip().replace(\"\\n\", \"\")\n replace_columns = {old.lower(): new.lower()\n for old, new in self.replace_columns}\n column_name = str(replace_columns.get(column_name, column_name).strip())\n replace = [\n (\"%\", \"percent\"),\n (\"&\", \"and\"),\n (\"\\xb0\", \"degrees\"),\n (\"^\", \"_power_\"),\n (\"<\", \"_lthn_\"),\n (\">\", \"_gthn_\"),\n ]\n replace += [(x, '') for x in (\")\", \"?\", \"#\", \";\" \"\\n\", \"\\r\", '\"', \"'\")]\n replace += [(x, '_') for x in (\" \", \"(\", \"/\", \".\", \"+\", \"-\", \"*\", \":\", \"[\", \"]\")]\n\n column_name = reduce(lambda x, y: x.replace(*y), replace, column_name)\n\n while \"__\" in column_name:\n column_name = column_name.replace(\"__\", \"_\")\n column_name = column_name.lstrip(\"0123456789_\").rstrip(\"_\")\n replace_dict = {\n \"group\": \"grp\",\n \"order\": \"ordered\",\n \"check\": \"checked\",\n \"references\": \"refs\",\n \"long\": \"lon\",\n \"column\": \"columns\",\n \"cursor\": \"cursors\",\n \"delete\": \"deleted\",\n \"insert\": \"inserted\",\n \"join\": \"joins\",\n \"select\": \"selects\",\n \"table\": \"tables\",\n \"update\": \"updates\",\n \"date\": \"record_date\",\n \"index\": \"indices\"\n }\n for x in (\")\", \"\\n\", \"\\r\", '\"', \"'\"):\n replace_dict[x] = ''\n for x in (\" \", \"(\", \"/\", \".\", \"-\"):\n replace_dict[x] = '_'\n if column_name in replace_dict:\n column_name = replace_dict[column_name]\n return column_name",
"def _normalize_name(self, name: str) -> str:\n return name.replace(\"@\", \"\").replace(\"/\", \"-\").replace(\"_\", \"-\")",
"def safe_column_name(string):\n string = unidecode(string.replace(' ', '_').lower())\n return re.sub(r'[^0-9a-z_]','', string)",
"def normalize_header(name, existing=[]):\n name = re.sub('\\W+', '', name, flags=re.UNICODE).lower()\n # TODO handle multiple columns with the same name.\n return name",
"def _validate_column_name(col_name : str) -> str:\n\n if col_name[0].isdigit():\n return f'\"{col_name}\"'\n return col_name",
"def _valid_column(column_name):\n return str(column_name)",
"def standardize_column_names(self, df):\n df.columns = [c.replace(\" \",\"_\").lower() for c in df.columns]\n return df",
"def normalize(value):\n if value is None:\n return None\n if isinstance(value, str):\n value = value.strip()\n if value == '':\n return None\n return value",
"def db_to_ascii(field):\n field = field.replace('_','-')\n return field",
"def _sanitize_field_name(self, field_name):\n field_name = field_name.replace(self._field_prefix, '')\n return field_name.replace('.', '_')",
"def _normalize(self, metric_name, submit_method, prefix):\n metric_prefix = \"mongodb.\" if not prefix else \"mongodb.{0}.\".format(prefix)\n metric_suffix = \"ps\" if submit_method == RATE else \"\"\n\n # Replace case-sensitive metric name characters\n for pattern, repl in self.CASE_SENSITIVE_METRIC_NAME_SUFFIXES.iteritems():\n metric_name = re.compile(pattern).sub(repl, metric_name)\n\n # Normalize, and wrap\n return u\"{metric_prefix}{normalized_metric_name}{metric_suffix}\".format(\n normalized_metric_name=self.normalize(metric_name.lower()),\n metric_prefix=metric_prefix, metric_suffix=metric_suffix\n )",
"def clean_column(self, colname):\n assert is_string_dtype(self.df[colname]), 'Column is not string type. Please assert ' \\\n 'that the given column is string type'\n\n new_df = self.df\n\n new_df[colname] = new_df[colname].apply(lambda x: x.strip().lower())\n new_df[colname] = new_df[colname].replace('', 'unassociated')\n logging.info('Column cleaned successfully.')\n\n return Fermi_Dataset(new_df)",
"def clean_stata_varname(column_name: str) -> str:\n cleaned = varname_strip(column_name)\n letter_start = varname_valid_start(cleaned)\n truncated = varname_truncate(letter_start)\n return truncated"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
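The helper above strips the name, escapes embedded double quotes, and wraps the result in double quotes so it can be interpolated safely as a SQLite identifier. A short usage sketch follows; the function is restated (without the leading underscore) only so the snippet runs on its own.

```python
# Usage sketch for a column-quoting helper like the one in the record above.
def normalize_column(column):
    if not isinstance(column, str):
        raise TypeError("expected column of type 'str', got %r instead"
                        % column.__class__.__name__)
    column = column.strip()
    column = column.replace('"', '""')  # Escape embedded double quotes.
    if column == '':
        column = '_empty_'
    return '"' + column + '"'

print(normalize_column('  first name '))  # "first name"
print(normalize_column('size "large"'))   # "size ""large"""
print(normalize_column(''))               # "_empty_"
```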
Alternate constructor to load an existing collection of records into a temporary SQLite database. Loads data (an iterable of lists, tuples, or dicts) into a temporary table | def from_records(cls, data, columns=None):
temptable = TemporarySqliteTable(data, columns)
return cls(temptable.connection, temptable.name) | [
"def from_records(cls, data, columns=None):\n connection, table = _load_temp_sqlite_table(columns, data)\n return cls(connection, table)",
"def load_data(cursor, table, *args, **kwds):\n try:\n records, = args\n columns = None\n except ValueError:\n columns, records = args\n\n default = kwds.pop('default', '')\n if kwds:\n msg = 'load_data() got unexpected keyword argument {0!r}'\n raise TypeError(msg.format(next(iter(kwds.keys()))))\n\n records = iter(records)\n first_record = next(records, None)\n if columns:\n if first_record:\n records = chain([first_record], records)\n else:\n if not first_record:\n return # <- EXIT! (No table created.)\n try: # Try mapping.\n columns = list(first_record.keys())\n records = chain([first_record], records)\n except AttributeError:\n try: # Try namedtuple.\n columns = first_record._fields\n records = chain([first_record], records)\n except AttributeError:\n columns = first_record # Use first row as column names.\n\n if not isinstance(columns, Iterable) or isinstance(columns, str):\n msg = 'expected iterable of strings, got {0!r}'\n raise TypeError(msg.format(columns))\n columns = list(columns) # Make sure columns is a sequence.\n\n if isinstance(first_record, Mapping):\n records = ([rec.get(c, '') for c in columns] for rec in records)\n\n with savepoint(cursor):\n if table_exists(cursor, table):\n alter_table(cursor, table, columns, default=default)\n else:\n create_table(cursor, table, columns, default=default)\n insert_records(cursor, table, columns, records)",
"def load(self):\n cursor = self.connection.cursor()\n for row in self.data:\n sql = self.createInsertStatement(row)\n cursor.execute(sql, row)\n self.connection.commit()\n cursor.close()\n del cursor",
"def do_import(f, db, table, drop=False, create=True, progress=None):\n cur = db.cursor()\n\n if drop:\n create = True # this makes no sense otherwise\n try:\n cur.execute(\"DROP TABLE {0}\".format(table))\n except sqlite3.OperationalError, e:\n pass # no such table, ignore\n pass\n\n csvr = csv.DictReader(f)\n # DictReader will read the list of field names from the first line\n columns = [ \"{0} VARCHAR(1024)\".format(c) for c in csvr.fieldnames ]\n # TODO: Different database drivers use different syntax\n qmarks = [ \"?\" for c in csvr.fieldnames ]\n insert = \"INSERT INTO {0} VALUES ({1})\".format(table, ', '.join(qmarks))\n\n if create:\n query = \"CREATE TABLE {0} ({1})\".format(table, ', '.join(columns))\n cur.execute(query)\n pass\n\n count = 0\n for row in csvr:\n count += 1\n if progress is not None and (count % 10000) == 0:\n progress(count)\n pass\n values = []\n for f in csvr.fieldnames:\n values.append(row[f])\n pass\n cur.execute(insert, values)\n pass\n db.commit()\n if progress is not None:\n progress(count)\n pass",
"def loadToSqlite(self, data):\n conn = sqlite3.connect('myDb.db')\n c = conn.cursor()\n #Delete table if already exists\n c.execute('''DROP TABLE IF EXISTS orders''')\n #Create table\n c.execute('''CREATE TABLE orders (MATERIAL text, COORDER text, DOC_NUMBER text, S_ORD_ITEM text,\n REFER_DOC text, REFER_ITM text)''')\n for tup in data:\n c.execute('INSERT INTO orders VALUES (?,?,?,?,?,?)', tup)\n conn.commit()\n conn.close()",
"def load_records():\n\n with open('seed_data/records.csv', 'rb') as csvfile:\n data = csv.reader(csvfile)\n for row in data:\n record_id, user_id, common_name, date_time, latitude, longitude, notes, seen, num_birds = row\n\n record = Record(record_id=record_id, user_id=user_id, common_name=common_name,\n date_time=date_time, latitude=latitude, longitude=longitude, \n notes=notes, seen=seen, num_birds=num_birds)\n\n db.session.add(record)\n\n db.session.commit()",
"def __init__(self, entries):\n # objects representing database records\n self.entries = entries",
"def load_data(self, objs, *args, **kwds):\n if isinstance(objs, string_types):\n obj_list = glob(objs) # Get shell-style wildcard matches.\n if not obj_list:\n __tracebackhide__ = True\n raise FileNotFoundError('no files matching {0!r}'.format(objs))\n elif not isinstance(objs, list) \\\n or isinstance(objs[0], (list, tuple, dict)): # Not a list or is a\n obj_list = [objs] # reader-like list.\n else:\n obj_list = objs\n\n cursor = self._connection.cursor()\n with savepoint(cursor):\n table = self._table or new_table_name(cursor)\n for obj in obj_list:\n if ((\n isinstance(obj, string_types)\n and obj.lower().endswith('.csv')\n ) or (\n isinstance(obj, file_types)\n and getattr(obj, 'name', '').lower().endswith('.csv')\n )\n ):\n load_csv(cursor, table, obj, *args, **kwds)\n else:\n reader = get_reader(obj, *args, **kwds)\n load_data(cursor, table, reader)\n\n self._append_obj_string(obj)\n\n if not self._table and table_exists(cursor, table):\n self._table = table",
"def csv_to_sql():\n print \"\\nLoad to SQLite database...\"\n db = sqlite3.connect(FILENAME[:-4]+\".db\")\n cur = db.cursor()\n\n # Loads each csv into the specified table\n load_table(cur, sql_schema.nodes, 'nodes.csv')\n load_table(cur, sql_schema.nodes_tags, 'nodes_tags.csv')\n load_table(cur, sql_schema.ways, 'ways.csv')\n load_table(cur, sql_schema.ways_tags, 'ways_tags.csv')\n load_table(cur, sql_schema.ways_nodes, 'ways_nodes.csv')\n\n db.commit() # Commits these changes to the database\n print 'Done!'",
"def _load_fixture(filename):\n\n # Read the binary data into text\n with open(filename, 'rb') as stream:\n content = stream.read().decode('utf-8')\n\n # Decode the data as JSON\n data = json.loads(content)\n\n # Instantiate a session.\n session = Session()\n\n # Iterate through the entries to add them one by one.\n for item in data:\n # Resolve model from the table reference.\n table = Base.metadata.tables[item['model'].split('.')[-1]]\n\n # Add the primary key.\n item['fields']['id'] = item['pk']\n\n # Add a new row.\n session.connection().execute(table.insert().values(**item['fields']))\n\n # Commit the session to the database.\n session.commit()",
"def load_fixtures(self, dbname, table, data):\n db = self.databases[dbname]['db']\n db.execute('BEGIN')\n for row in data:\n columns = row.keys()\n q = db.Insert(table, cols=columns)\n db.execute(q, row)\n db.execute('COMMIT')",
"def setup_sample_data(no_of_records):\n rows_in_database = [{'id': counter, 'name': get_random_string(string.ascii_lowercase, 20), 'dt': '2017-05-03'}\n for counter in range(0, no_of_records)]\n return rows_in_database",
"def connect_db_and_load_data(cls):\n db.connect()\n db.create_tables([Product], safe=True)\n load_data(transform_data('./inventory.csv'))",
"def load_csv(db: Session, file_name: str):\n\n with open(file_name, \"r\") as f:\n csv_reader = csv.DictReader(f)\n\n for row in csv_reader:\n db_record = Songdb(\n track_number=row[\"track_number\"],\n album=row[\"album\"],\n acousticness=row[\"acousticness\"],\n danceability=row[\"danceability\"],\n energy=row[\"energy\"],\n instrumentalness=row[\"instrumentalness\"],\n speechiness=row[\"speechiness\"],\n tempo=row[\"tempo\"],\n popularity=row[\"popularity\"],\n liveness=row[\"liveness\"],\n loudness=row[\"loudness\"],\n valence=row[\"valence\"],\n uri=row[\"uri\"],\n name=row[\"name\"],\n id=row[\"id\"]\n )\n db.add(db_record)\n db.commit()\n return",
"def dbLoad():\n conn = sqlite3.connect(SQLFILE)\n with conn:\n r = shapefile.Reader(ZIPCODES).shapeRecords()\n print(\"shapes to process %s\" % len(r))\n for d in r:\n print(\"shape record length %s\" % len(d.record))\n [print(\"%s %s\" % (x, y)) for x, y in enumerate(d.record)]\n bbox = d.shape.bbox\n points = (\n d.shape.points\n ) # TODO: load into collections.OrderedDict and pickle\n latMax = d.shape.bbox[3]\n latMin = d.shape.bbox[1]\n lonMax = d.shape.bbox[2]\n lonMin = d.shape.bbox[0]\n insert = INSERT % (\n d.record[D.STATEFP],\n d.record[D.CD115FP],\n d.record[D.GEOID],\n d.record[D.NAMELSAD],\n d.record[D.LSAD],\n d.record[D.CDSESSN],\n d.record[D.MTFCC],\n d.record[D.FUNCSTAT],\n d.record[D.ALAND],\n d.record[D.AWATER],\n d.record[D.INTPTLAT],\n d.record[D.INTPTLON],\n bbox,\n points,\n latMax,\n latMin,\n lonMax,\n lonMin,\n )\n conn.execute(insert)",
"def load_expenditures():\n\n Expenditure.query.delete()\n\n with open(expenditure_file) as f:\n for _ in range(1):\n next(f)\n \n for row in f:\n row = row.rstrip()\n expenditure_data = row.split(\",\")\n print(expenditure_data)\n\n id = expenditure_data[0]\n category_id = expenditure_data[1]\n price = expenditure_data[2]\n date_of_expenditure = expenditure_data[3]\n expenditure_userid = expenditure_data[4]\n where_bought = expenditure_data[5]\n description = expenditure_data[6]\n\n expenditure = Expenditure(\n id = id,\n category_id = category_id,\n price = price,\n date_of_expenditure = get_datetime(date_of_expenditure),\n expenditure_userid = expenditure_userid,\n where_bought = where_bought,\n description = description\n )\n\n db.session.add(expenditure)\n\n db.session.commit()",
"def __init__(self):\n # create the db in-memory\n self.con = sqlite3.connect(\":memory:\")\n self.cur = self.con.cursor()\n self.cur.execute(\n \"\"\"CREATE TABLE orders (id integer PRIMARY KEY, isBid integer, price real, qty real)\"\"\"\n )\n self.con.commit()",
"def from_csv(self, path):\n for model, table in [(self.Dataset, 'dataset'),\n (self.Datarun, 'datarun'),\n (self.Hyperpartition, 'hyperpartition'),\n (self.Classifier, 'classifier')]:\n df = pd.read_csv(os.path.join(path, '%ss.csv' % table))\n\n # parse datetime columns. This is necessary because SQLAlchemy can't\n # interpret strings as datetimes on its own.\n # yes, this is the easiest way to do it\n for c in inspect(model).attrs:\n if type(c) != ColumnProperty:\n continue\n col = c.columns[0]\n if type(col.type) == DateTime:\n df[c.key] = pd.to_datetime(df[c.key],\n infer_datetime_format=True)\n\n for _, r in df.iterrows():\n # replace NaN and NaT with None\n for k, v in list(r.iteritems()):\n if pd.isnull(v):\n r[k] = None\n\n # insert the row into the database\n create_func = getattr(self, 'create_%s' % table)\n create_func(**r)",
"def _construct_database(self, corpus_path):\n database = Database()\n # construct the database\n for words, poss in self.IO.read_sentences(corpus_path):\n triples = self.exactor.extract(words, poss)\n for triple in triples:\n database.insert(triple)\n\n return database"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
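The constructor above delegates to a `TemporarySqliteTable` helper that is internal to its project, so its exact API is not shown here. As a rough, self-contained sketch of the same idea using only the standard library (the name `load_temp_table` and its behavior are assumptions for illustration), an iterable of dicts or rows can be loaded into an in-memory temporary table like this:

```python
import sqlite3

def load_temp_table(records, columns=None, table='tmp_records'):
    """Sketch: load an iterable of dicts or sequences into a temporary SQLite table."""
    records = list(records)
    if columns is None:
        first = records[0]
        if isinstance(first, dict):
            columns = list(first.keys())
        else:
            columns = list(first)      # first row holds the header
            records = records[1:]
    conn = sqlite3.connect(':memory:')
    col_defs = ', '.join('"%s"' % c for c in columns)
    conn.execute('CREATE TEMP TABLE "%s" (%s)' % (table, col_defs))
    placeholders = ', '.join('?' * len(columns))
    rows = ([r.get(c, '') for c in columns] if isinstance(r, dict) else list(r)
            for r in records)
    conn.executemany('INSERT INTO "%s" VALUES (%s)' % (table, placeholders), rows)
    return conn, table

conn, table = load_temp_table([{'A': 'x', 'B': 1}, {'A': 'y', 'B': 2}])
print(conn.execute('SELECT * FROM "%s"' % table).fetchall())  # [('x', 1), ('y', 2)]
```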
Parse the auditbeat log file to generate the audit event model and, optionally, write it to the result file | def parse(self, output=True):
if not self.type == LogType.audit:
log.error("LogParser doesn't support nonetype yet.")
return
stashes = list()
with open(self.path_log, 'r') as f:
for line in f.readlines():
event: Dict = json.loads(line)
keys = event.keys()
# drop irrelevant keys of dict
for key in DROPS:
if key in event.keys():
event.pop(key)
# retrieve json info
timestamp, process, file = None, None, None
if "@timestamp" in event.keys():
timestamp = event["@timestamp"]
if "process" in event.keys():
process = event["process"]
if "file" in event.keys():
file = event["file"]
try:
audit:Dict = event["auditd"]
except KeyError:
raise KeyError(f"line: {line} does not have audit field, parse failed.")
# recontruct audit unit
paths, session = None, None
if "paths" in audit.keys():
paths = audit["paths"]
if "session" in audit.keys():
session = audit["session"]
try:
msg_type, result, sequence, data = \
audit["message_type"],audit["result"], audit["sequence"], audit["data"]
except KeyError:
raise KeyError(f"Audit {audit} does not have certain keys, parse failed.")
auditd = Auditd(paths, msg_type, sequence, result, data, session)
beat_state = BeatState(timestamp, process, file, auditd)
# # TODO: the current code is to add dict format data
# self.events.append(beat_state)
stashes.append(beat_state)
return stashes | [
"def parse_file(self):\n with open(self.file_name, 'r', errors='ignore') as log_file:\n for line in log_file:\n self.process_line(line)",
"def __parse(self):\n lines = self.file.readlines()\n name_idx = 2\n name_idx_found = False\n pathre = re.compile(r\"^[A-Z]:[\\\\/]\\w+\")\n for i in range(0, len(lines)):\n line = lines[i]\n if line.strip() != \"\": # check if line isn't empty\n if pathre.match(line):\n self.path = line.strip()\n continue\n tokens = line.split()\n time_str = tokens[0] + \" \" + tokens[1]\n try:\n time = datetime.strptime(time_str, \"%m/%d/%y %H:%M:%S\")\n except ValueError:\n raise LogParseError('Invalid log format. Date must be first \\\n token for each log event.') \n if not name_idx_found:\n name_idx = tokens.index('Monitoring')\n name_idx_found = True\n name = \"\"\n if tokens[name_idx].strip() == 'Monitoring':\n name = tokens[name_idx].lower() + \" \" + tokens[name_idx + 1].lower()\n duration = 0.0\n else:\n name = tokens[name_idx].lower()\n duration = tokens[name_idx + 1]\n self.events[name] = Event(time, name, duration)\n self.start = self.events['monitoring started']\n self.end = self.events['monitoring stopped']",
"def read_auditlog(file: str):\n with open(file, \"r\") as fp:\n for line in fp:\n yield Auditlog.parse(json.loads(line))",
"def events(self) -> Generator[dict, None, None]:\n\n for audit_file, audit_type in self.identified_files.items():\n temp_file_path = f\"{self.tempdir.name}/{audit_file}\"\n\n if audit_type == \"stateagentinspector\":\n yield from self.parse_agent_events(temp_file_path)\n\n # If we have atleast the hits.json file, we can make alert nodes\n if self.alert_files[\"hits.json\"]:\n yield from self.parse_alert_files(self.tempdir.name)\n\n self.tempdir.cleanup()",
"def parse(self):\n line_no = 1\n for line in self.log_file:\n match = self.log_regex.match(line)\n if match:\n self.handle_match(match)\n line_no += 1",
"def generate_audit(self, output_path):\n \n with open(output_path, 'wb') as csvfile:\n csvwriter = csv.writer(csvfile, delimiter=',')\n csvwriter.writerow([entry[0] for entry in self.fields])\n for trade, bo_error in self.errors.items():\n values = self.get_values(trade)\n csvwriter.writerow(values)\n \n print('Output written to %s' % output_path)",
"def parse(self):\n with open(self.log, 'r', errors='ignore') as f:\n log_lines = f.readlines()\n\n # iterate each line in the log file\n i = 0\n while i < len(log_lines):\n j = i + 1\n line = self.preprocess_line(log_lines[i])\n match = self.get_match(line)\n # if match is true then we found a line containing a filepath,\n # a line number, and a warning/error\n if match and not self.is_suppressed(line):\n filepath = match.group(1) or 'warning'\n linenum = match.group(2)\n warning = match.group(4)\n if not filepath in self.out:\n self.out[filepath] = set()\n if linenum:\n warning = f'{warning} line ({linenum})'\n self.out[filepath].add(warning)\n # in this case, the filepath might contain several errors on separate lines,\n # so we need to iterate next lines until we find a line\n # that matches the regex defined in this class\n while j < len(log_lines):\n next_line = self.preprocess_line(log_lines[j])\n match = self.get_match(next_line)\n if match:\n break\n if next_line:\n self.out[filepath].add(self.preprocess_line(next_line))\n j += 1\n i = j\n return self.out",
"def log_parser(log):\n # substitute for GREP -- finds 'eventtype' field.\n # required as this file has a different number of fields per line\n logname = copy.copy(log)\n log = open(log, \"r\").readlines()\n pic = filter(lambda s: 'Picture' in s, log)\n vid = filter(lambda s: 'Video' in s, log)\n\n # write out files from stringio blobs into numpy genfromtxt\n pic = np.genfromtxt(io.StringIO(''.join(pic)), delimiter='\\t', \n dtype=[('subject', '|S64'), \n ('trial', 'i32'),\n ('eventtype', '|S64'),\n ('code', '|S64'),\n ('time', 'i32'),\n ('ttime', 'i32'),\n ('uncertainty1', 'i32'),\n ('duration', 'i32'),\n ('uncertainty2', 'i32'),\n ('reqtime', 'i32'),\n ('reqduration', 'i32'),\n ('stimtype', '|S64'),\n ('pairindex', 'i32')])\n\n vid = np.genfromtxt(io.StringIO(''.join(vid)), delimiter='\\t',\n dtype=[('subject', '|S64'), \n ('trial', 'i32'),\n ('eventtype', '|S64'),\n ('code', '|S64'),\n ('time', 'i32'),\n ('ttime', 'i32'),\n ('uncertainty1', 'i32')])\n\n # ensure our inputs contain a 'MRI_start' string.\n if pic[0][3] != 'MRI_start':\n logger.error('log {} does not contain an MRI_start entry!'.format(logname))\n raise ValueError\n else:\n # this is the start of the fMRI run, all times are relative to this.\n mri_start = pic[0][7]\n return pic, vid, mri_start",
"def process_logfile(self):\n with open(self.log_file, 'r') as f:\n for line in f:\n if line.strip() != '':\n data = [x.strip() for x in line.split(':')]\n self.log_data.update({ data[0] : data[1] })",
"def events(self):\n for line_num, line in enumerate(self.file_handler):\n if not line:\n break\n # process line input to dictionary\n data = json.loads(line)\n # add id information\n data['id'] = line_num\n # update timestamp history\n timestamp = self._get_timestamp(data)\n self.last_two_timestamps = [self.last_two_timestamps[-1], timestamp]\n self.event_timestamps[line_num] = timestamp\n\n self.alarms.append(0) # add field for alarms\n self.users.append(data['user']) # add field for user\n self.anomalies.append(data.get('is_anomaly', 0)) # add field for anomalies\n if 'is_anomaly' in data:\n del data['is_anomaly'] # remove anomaly information from data for contestants\n\n # return line id and serialized JSON as string representing one event\n str_dump = json.dumps(data)\n logger.info(self._get_inner_time() + ' > ' + str_dump)\n yield line_num, str_dump",
"def parseLog(file, actionArray):\n \ttimestamp = 0\n \tstop = False\n \twith open(file) as f:\n \t nLines = 0\n \t for line in f:\n \t for action in actionArray:\n \t pattern = action['pattern']\n \t timeFunc = action['tfunc']\n \t actionFunc = action['func']\n \t userData = None\n \t if action.has_key('userdata'):\n \t \tuserData = action['userdata']\n \t m = pattern.match(line)\n \t if m and actionFunc:\n \t timestamp = timeFunc(m)\n \t if not actionFunc(timestamp, m, {'userData':userData, 'lineNo':nLines}):\n \t action['func'] = None\n \t break\n \t nLines+=1\n \t # for line\n \t # print 'parsed '+str(nLines)+' lines'",
"def test_create_audit_sink(self):\n pass",
"def generate_audit(self, output_path):\n\n audit_columns = (\n 'TradeNumber', 'ConnectedRef', 'StartDate', 'EndDate',\n 'AgeingFromStart', 'Error', 'Portfolio', 'SecurityType',\n 'SecurityName', 'G1Borrower', 'G1Lender', 'Acquirer',\n 'Counterparty', 'Quantity', 'VAT', 'G1Fee', 'FAFee'\n )\n audit_row = namedtuple('AuditRow', ','.join(audit_columns))\n acm_time = acm.Time()\n\n writer = csv.writer(open(output_path, \"wb\"), dialect='excel', delimiter=',')\n writer.writerow(audit_columns)\n\n for trade, bo_error in self.errors.items():\n data = {}\n data['TradeNumber'] = trade.Oid()\n data['ConnectedRef'] = trade.ConnectedTrade().Oid()\n data['StartDate'] = trade.Instrument().StartDate()\n data['EndDate'] = trade.Instrument().EndDate()\n data['AgeingFromStart'] = acm_time.DateDifference(acm_time.DateToday(), data['StartDate'])\n data['Error'] = bo_error\n data['Portfolio'] = trade.Portfolio().Name()\n data['SecurityType'] = trade.Instrument().Security().InsType()\n data['SecurityName'] = trade.Instrument().Security().Name()\n data['G1Borrower'] = trade.AdditionalInfo().SL_G1Counterparty1()\n data['G1Lender'] = trade.AdditionalInfo().SL_G1Counterparty2()\n data['Acquirer'] = trade.Acquirer().Name()\n data['Counterparty'] = trade.Counterparty().Name()\n data['Quantity'] = round(trade.FaceValue())\n data['VAT'] = trade.Instrument().AdditionalInfo().SL_VAT()\n data['G1Fee'] = trade.AdditionalInfo().SL_G1Fee2()\n data['FAFee'] = trade.Instrument().Legs()[0].FixedRate()\n\n writer.writerow(audit_row(**data))\n\n print \"Output written to {0}\".format(output_path)",
"def generate_ev_file(id_test):\n print(\"generate_ev_file\")\n \n ev_output_file_name=id_test+\".ev\"\n ev_input_file_name=id_test+\"_events.csv\"\n f_output = io.open(INPUT_PARSER_RESULTS_DIR+ev_output_file_name, \"w\",newline='\\n')\n f_input = io.open(AGRODEVS_INPUT_DIR+ev_input_file_name, \"r\")\n \n input_reader = csv.reader(f_input, delimiter=',')\n field_names_list = next(input_reader)\n if (field_names_list[0]!=\"campaign\"):\n print(\"First field of events file input should be 'campaing' but is:\"+field_names_list[0])\n print(\"Cannot generate event file\")\n return\n else:\n print(field_names_list)\n for line in input_reader:\n #generate timestamp for campaign\n #campania = int(int(ms)/100)+int(ss)*10+int(mm)*600+int(hh)*36000\n campaign = int(line[0])\n ms = (campaign*100)%1000\n ss = ((campaign*100)//1000)%60\n mm = ((campaign*100)//60000)%60\n hh = ((campaign*100)//360000)\n timeFormat = \"{:0>2d}\"\n msFormat = \"{:0>3d}\"\n timestamp_begin_event = str(timeFormat.format(hh))+\":\"+ \\\n str(timeFormat.format(mm))+\":\"+ \\\n str(timeFormat.format(ss))+\":\"+ \\\n str(msFormat.format(ms))\n timestamp_end_event = str(timeFormat.format(hh))+\":\"+ \\\n str(timeFormat.format(mm))+\":\"+ \\\n str(timeFormat.format(ss))+\":\"+ \\\n str(msFormat.format(ms+1))\n \n print(\"timestamp generated: \"+timestamp_begin_event)\n \n #generate events\n #begin events\n \n \n port_idx =0\n for event_port in line[1:]:\n port_idx=port_idx+1\n #print(\"processing port: \"+str(field_names_list[port_idx]))\n begin_event=CELL_DEVS_EXTERNAL_EVENT_BEGIN+ \\\n field_names_list[port_idx]+ \\\n \" \"+str(line[port_idx])\n \n f_output.write(timestamp_begin_event+\" \"+begin_event+\"\\n\")\n \n #end events\n port_idx=0\n for event_port in line[1:]:\n port_idx=port_idx+1\n #print(\"processing port: \"+str(field_names_list[port_idx]))\n end_event=CELL_DEVS_EXTERNAL_EVENT_ENDS+ \\\n field_names_list[port_idx]+ \\\n \" \"+str(line[port_idx])\n f_output.write(timestamp_end_event+\" \"+end_event+\"\\n\")\n \n \n \n f_input.close()\n f_output.close()",
"def test_read_audit_sink(self):\n pass",
"def run_audit(self):\n for each_description in self.descriptor_files:\n schema = StateSchema(**each_description)\n\n metadata = schema.metadata\n descriptors = schema.descriptors\n\n Logger.log_info(\n f'Auditing {metadata.archive} in {metadata.stateLegalName} from {metadata.repoName}.'\n )\n\n try:\n # Construct paths\n archive_path = (\n self.mggg_states_dir + \"/\" + metadata.repoName + \"/\" + metadata.archive\n )\n file_path = self.expand_zipfile(archive_path) + metadata.fileName\n\n # Find column names\n total_population_col = descriptors.totalPopulation\n county_fips_col = descriptors.countyFIPS\n\n # Import and read shapefiles\n # DEPRECATED to make stuff more abstractable\n # if county_fips_col:\n # shapefile = gdutils.extract.read_file(\n # file_path, column=county_fips_col\n # )\n # else:\n shapefile = gdutils.extract.read_file(file_path)\n\n\n # Hard checks\n # TODO: get these to automatically load\n self.errors += checks.TotalPopulationCheck(schema, shapefile, self.scratch_dir).audit()\n self.errors += checks.CountyTotalPopulationCheck(schema, shapefile, self.scratch_dir).audit()\n self.errors += checks.FromGraphGerrychainCheck(schema, shapefile, self.scratch_dir).audit()\n\n # Soft checks\n checks.DataExistenceCheck(schema, shapefile, self.scratch_dir).audit()\n\n except KeyboardInterrupt:\n Logger.log_info(\n f'Captured KeyboardInterrupt! Skipping {metadata[\"archive\"]} in {each_description[\"metadata\"][\"stateLegalName\"]} from {each_description[\"metadata\"][\"repoName\"]}!'\n )\n pass",
"def get_mxml_events_data(self, filename,start_timeformat, end_timeformat):\n temp_data = list()\n tree = ET.parse(filename)\n root = tree.getroot()\n process = root.find('Process')\n procInstas = process.findall('ProcessInstance')\n i = 0\n for procIns in procInstas:\n sup.print_progress(((i / (len(procInstas) - 1)) * 100), 'Reading log traces ')\n caseid = procIns.get('id')\n complete_timestamp = ''\n auditTrail = procIns.findall('AuditTrailEntry')\n for trail in auditTrail:\n task = ''\n user = ''\n event_type = ''\n type_task = ''\n timestamp = ''\n attributes = trail.find('Data').findall('Attribute')\n for attr in attributes:\n if (attr.get('name') == 'concept:name'):\n task = attr.text\n if (attr.get('name') == 'lifecycle:transition'):\n event_type = attr.text\n if (attr.get('name') == 'org:resource'):\n user = attr.text\n if (attr.get('name') == 'type_task'):\n type_task = attr.text\n work_flow_ele = trail.find('WorkflowModelElement').text\n event_type = trail.find('EventType').text\n timestamp = trail.find('Timestamp').text\n originator = trail.find('Originator').text\n timestamp = datetime.datetime.strptime(trail.find('Timestamp').text[:-6], start_timeformat)\n temp_data.append(\n dict(caseid=caseid, task=task, event_type=event_type, user=user, start_timestamp=timestamp,\n end_timestamp=timestamp))\n\n i += 1\n raw_data = temp_data\n temp_data = self.reorder_mxml(temp_data)\n sup.print_done_task()\n return temp_data, raw_data",
"def manipulate_timestamp_exchange_logs(self, file_path, logger):\n f = io.open(file_path, \"r\", encoding=\"utf-8\")\n\n first_line = f.readline()\n d = json.loads(first_line)\n latest_event = datetime.strptime(d[\"CreationTime\"],\"%Y-%m-%dT%H:%M:%S\")\n\n now = datetime.now()\n now = now.strftime(\"%Y-%m-%dT%H:%M:%S\")\n now = datetime.strptime(now,\"%Y-%m-%dT%H:%M:%S\")\n\n difference = now - latest_event\n f.close()\n\n for line in fileinput.input(file_path, inplace=True):\n d = json.loads(line)\n original_time = datetime.strptime(d[\"CreationTime\"],\"%Y-%m-%dT%H:%M:%S\")\n new_time = (difference + original_time)\n\n original_time = original_time.strftime(\"%Y-%m-%dT%H:%M:%S\")\n new_time = new_time.strftime(\"%Y-%m-%dT%H:%M:%S\")\n print (line.replace(original_time, new_time),end ='')",
"def processEventLog(log):\n pass"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Initialise clusters by assigning the vectors to the K bins in alternating (round-robin) order. | def alternating_bins_initialisation(self, pixel_data, a=None, b=None):
if not a or not b:
a = 0
b = len(pixel_data)
clusters = defaultdict(list)
for i in range(a, b): # selecting sevens as data set
clusters[i % self.K].append(pixel_data[i])
return clusters | [
"def __initCluster(self):\n data_size, cluster_center = self.data_size, self.cluster_center\n self.cluster_temp = np.zeros(data_size, dtype=int)\n self.cluster_upper_bound = np.full(len(cluster_center), float('inf'), dtype=float)\n for center in cluster_center:\n self.cluster_temp[center] = center",
"def initClusters(self):\n if len(self.labelList) != len(self.pointList):\n \traise ValueError(\"Label List and Point List not the same length!\")\n for i in range(len(self.labelList)):\n self.centroids[self.labelList[i]] = self.pointList[i]\n self.pointcounts[self.labelList[i]] = 1",
"def populate_grid(self):\n from cemc_cpp_code import hoshen_kopelman\n self.bins[:, :, :] = 0\n for atom in self.atoms:\n if atom.symbol in self.track_elements:\n n = self.get_bin(atom.index)\n self.bins[n[0], n[1], n[2]] += 1\n\n # Run the Hoshen-Kopelman algorithm to label the \n # bins into clusters\n self.clusters = hoshen_kopelman(self.bins)",
"def initClusters(self):\n print 'Initializing Cluster Centers'\n numFeatureAddedForCluster = [0]*self.k\n # initialize numclass of -out of-k cluster to be the center of the full sketches\n if self.numclass <= self.k:\n for fidx in self.fullIndex:\n fclass = self.classid[fidx]\n if fclass < self.k:\n # add each full sketch to the corresponding cluster, then divide\n self.clusterCenters[fclass] = map(operator.add,\n self.clusterCenters[fclass],\n self.features[fidx])\n numFeatureAddedForCluster[fclass] += 1\n\n for clusterCenterIdx in range(self.numclass):\n self.clusterCenters[clusterCenterIdx] = [cfloat/numFeatureAddedForCluster[clusterCenterIdx] for cfloat in self.clusterCenters[clusterCenterIdx]]\n\n # for the remaining cluster centers, randomly select from the non-selected features\n numClustSelected = self.numclass\n while numClustSelected < self.k:\n featIdx = randint(0, len(self.features))\n if not self.isFull[featIdx]:\n self.clusterCenters[numClustSelected] = self.features[featIdx]\n numClustSelected += 1",
"def create_clusters(self):\n ex = 0\n print 'Iter - Purity Gini Index'\n while ex < self.MAX_ITERATION:\n new_clusters = np.zeros(self.centroids.shape)\n distances = euclidean_distances(self.vectors, self.centroids).argmin(axis=1)\n for i in range(self.K):\n indexes = np.argwhere(distances == i)\n data = self.vectors[indexes.transpose()[0]]\n if data.shape[0] > 1:\n new_clusters[i] = (np.sum(data, axis=0) / data.shape[0])\n else:\n new_clusters[i] = np.sum(data, axis=0)\n print ex, '----', self.cal_purity()\n ex += 1\n if np.allclose(self.centroids, new_clusters, atol=self.TOLERANCE):\n break\n self.centroids = new_clusters",
"def _initial_clusters(self):\n clusters = []\n for i in range(self.point_count):\n clusters.append(self._create_cluster_from_index(i))\n return clusters",
"def update_cluster_assignment(self):\n for i in range(self.vectors_num):\n self.update_vec_cluster_assignment(i)",
"def build_clusters(self):\n clusters = dict()\n remaining_bs = copy.copy(self.bs_list)\n curr_cluster = set()\n\n while len(remaining_bs) > 0:\n # start new cluster with random remaining cell\n if len(curr_cluster) == 0:\n bs = self.rng.choice(remaining_bs)\n curr_cluster.add(bs)\n remaining_bs.remove(bs)\n\n # add closest cell to cluster\n else:\n center_x = np.mean([bs.pos.x for bs in curr_cluster])\n center_y = np.mean([bs.pos.y for bs in curr_cluster])\n center = Point(center_x, center_y)\n closest_bs = min(remaining_bs, key=lambda x: center.distance(x.pos))\n # add to cluster and remove from remaining cells\n curr_cluster.add(closest_bs)\n remaining_bs.remove(closest_bs)\n\n # if cluster is full, save and reset\n if len(curr_cluster) == self.cluster_size:\n for bs in curr_cluster:\n clusters[bs] = curr_cluster\n curr_cluster = set()\n\n # add remaining cells in curr cluster, even if it's not full\n for bs in curr_cluster:\n clusters[bs] = curr_cluster\n\n return clusters",
"def initialize(self):\n self.SIZE = self.vectors.shape[0]\n # todo can use max distance to allocation farthest apart points\n self.centroids = self.vectors[[random.randint(1, self.SIZE) for x in range(self.K)], :]",
"def atlas_clusters():\n pass",
"def initialize(img):\n w, h, _ = img.shape\n for c in current_cluster_centers:\n x = np.random.randint(w)\n y = np.random.randint(h)\n c[:] = img[x, y]",
"def initialize_dom(img: np.ndarray):\n\n channels = img.shape[2]\n\n for cluster in range(numclusters):\n for channel in range(channels):\n cmin = np.amin(img[:,:,channel]) # channel's min\n cmax = np.amax(img[:,:,channel]) # channel's max\n current_cluster_centers[cluster, 0, channel] = np.random.uniform(cmin, cmax)\n\n print(\"Current clusters:\\n\", current_cluster_centers)",
"def compute_clusters(self, documents):\n ###TODO\n for d in range(0, len(documents)):\n maxi = 999999999\n for cid in range(0, len(self.means)):\n dist = self.distance(documents[d], self.means[cid], self.norms[cid])\n if dist < maxi:\n maxi = dist\n clust = cid \n self.cluster[d] = clust",
"def generate_clusters(indices, nspikes=100):\n # 2 different clusters, with 3 spikes in cluster 1\n clusters = np.zeros(nspikes, dtype=np.int32)\n clusters[indices] = 1\n return clusters",
"def _cluster_into_bins(eval_data, ref_data, num_clusters):\r\n\r\n cluster_data = np.vstack([eval_data, ref_data])\r\n kmeans = sklearn.cluster.MiniBatchKMeans(n_clusters=num_clusters, n_init=10)\r\n labels = kmeans.fit(cluster_data).labels_\r\n\r\n eval_labels = labels[:len(eval_data)]\r\n ref_labels = labels[len(eval_data):]\r\n\r\n eval_bins = np.histogram(eval_labels, bins=num_clusters,\r\n range=[0, num_clusters], density=True)[0]\r\n ref_bins = np.histogram(ref_labels, bins=num_clusters,\r\n range=[0, num_clusters], density=True)[0]\r\n return eval_bins, ref_bins",
"def clusters_allocate_cells(self):\n for cluster in self.clusters:\n cluster.cells[:] = []\n for cell in self.block_proc:\n wdists = []\n for cluster in self.clusters:\n s = cluster.size\n d = ( (cell.x-cluster.x)**2 + (cell.y-cluster.y)**2 +\n (cell.z-cluster.z)**2 )\n d = numpy.sqrt(d)\n c = self.c\n # TODO: choose a better distance function below\n r = d*(c+(1-c)*numpy.exp(-s/d))\n r = numpy.clip(r,0,r)\n wdists.append(r)\n self.clusters[numpy.argmin(wdists)].cells.append(cell)",
"def assign_data2clusters(X, C):\n pass",
"def __init__(self):\n ## self.clusters[cluster] = list of coordinates\n self.clusters = {}\n ## self.centroids[cluster] = centroid\n self.centroids = {}",
"def random_cluster_assignments(self):\n for i in range(self.vectors_num):\n self.cluster_assignment[i] = random.randrange(0, self.cluster_num)"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Get the codebook vectors. | def get_cb_vectors(self):
return self.cb_vectors | [
"def get_word_vectors(self):\n return self.word_vectors",
"def vectors(self):\n return {key:self[key] for key in self._vectors.keys()}",
"def get_vectors(model, corpus_size, vectors_size, vectors_type):\r\n vectors = np.zeros((corpus_size, vectors_size))\r\n for i in range(0, corpus_size):\r\n prefix = vectors_type + '_' + str(i)\r\n vectors[i] = model.docvecs[prefix]\r\n return vectors",
"def test_get_vectors(self):\n text_1, vector_1, tag_1 = \"foo\", \"foo_1\", \"SPORT\" \n text_2, vector_2, tag_2 = \"bar\", \"bar_1\", \"BUSINESS\"\n\n self.c.add_text(text_1)\n self.c.add_text(text_2)\n\n self.c.add_vector(vector_1, text_1, tag_1) \n self.c.add_vector(vector_2, text_2, tag_2) \n\n names = [name for name, _ in self.c.get_vectors()] \n self.assertEquals(set(names), set(self.c.get_vectors_name())) # 1\n\n tags = [vector.tag for _, vector in self.c.get_vectors()] \n self.assertEquals(set([tag_1, tag_2]), set(tags)) # 2",
"def vectors_train(self):\n return self._vectors(train=True)",
"def boxVectors(self):\n return self.box_vectors",
"def get_vector_ids(self):\n pass",
"def getVectors(self):\n vectors = dict()\n i = 0\n N = len(self.db.invertedIndex)\n for w, (idf, docs) in self.db.invertedIndex.items():\n for doc, tf in docs.items():\n try:\n vectors[doc][i] = tf * idf\n except KeyError as k:\n vectors[doc] = {i: tf * idf}\n i += 1\n i = 0;\n return vectors",
"def get_base_vectors(self):\n\n return self.__base_vectors",
"def word_vec_list(sem_model, relevant_words):\n word_list = []\n vector_list = []\n for word in relevant_words:\n sem = retrieve_vector(sem_model, word)\n if sem is not None:\n #print(\"word:\", word, sem)\n vector_list.append(sem)\n word_list.append(word)\n return vector_list, word_list",
"def getVector(text):\n url = cfg.use_vectoriser\n res = requests.post(url, json={'text': text, 'access_key': cfg.vectoriser_access_key})\n res_dictionary = res.json()\n return res_dictionary['vectors']",
"def getVectors(location='.', file='OUTCAR'):\n f = open('%s/%s' % (location, file), 'r')\n vectors = []\n while True:\n nextLine = f.readline()\n if not nextLine:\n if not vectors:\n vectors = [[[0, 0, 0], [0, 0, 0], [0, 0, 0]]]\n break\n if 'VOLUME and BASIS-vectors are now :' in nextLine:\n f.readline() # dashed line\n f.readline() # cutoff energy\n f.readline() # volume line\n f.readline() # direct lattice vectors\n a = [float(x) for x in f.readline().split()[:3]]\n b = [float(x) for x in f.readline().split()[:3]]\n c = [float(x) for x in f.readline().split()[:3]]\n vectors.append([a, b, c])\n return vectors",
"def vectors_test(self):\n return self._vectors(train=False, test_validation=True)",
"def word_vec_list(sem_model, relevant_words):\n word_list = []\n vector_list = []\n for word in relevant_words:\n sem = retrieve_vector(sem_model, word)\n if sem is not None:\n vector_list.append(sem)\n word_list.append(word)\n return vector_list, word_list",
"def basis_vectors(self):\n return self._basis_vectors",
"def basisVectors(self):\n return self.__basisVectors",
"def infer_vectors(self, reports, labels):\n logger.info('Inferring vectors from Doc2Vec model')\n tagged_docs = self.tag_dataset(reports, labels)\n vecs = [self.model.infer_vector(tag.words) for tag in tagged_docs]\n vecs = np.array(vecs)\n return vecs",
"def get_vecs(self):\n return self.get_training_cluster_vecs()",
"def load_vector_dictionary():\n return read_word2vecs_from_file(VECTOR_FILE)"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Extracts features from the final codebook vectors using the L2 norm. The data is passed in as an argument and the function produces len(data) feature vectors such that f(x_i) = [a_1 ... a_K] and a_j = ||x_i - c_j||, where c_j is the j-th codebook vector. | def extract_features(self, data):
# TODO: Should feature extraction be done on the testing data? In the lecture notes
# TODO: it is not done with the training data, but with the test data.
# TODO: Maybe we should use the validate data when we do cross-validation.
features = np.zeros([len(data)*self.K]).reshape(len(data), self.K)
for i in range(len(data)):
for j in range(self.K):
features[i][j] = np.linalg.norm(data[i] - self.cb_vectors[j])
return features | [
"def process_and_get_features(self, data):\n vector_data = self.get_tokenized_index_vectors(data)\n features = self.process_example_vectors(vector_data)\n return features",
"def GenerateFeatures(data, fs, window_length_s, window_shift_s):\n window_length = window_length_s * fs\n window_shift = window_shift_s * fs\n labels, subjects, features = [], [], []\n for subject, activity, df in data:\n for i in range(0, len(df) - window_length, window_shift):\n window = df[i: i + window_length]\n accx = window.accx.values\n accy = window.accy.values\n accz = window.accz.values\n features.append(Featurize(accx, accy, accz, fs=fs))\n labels.append(activity)\n subjects.append(subject)\n labels = np.array(labels)\n subjects = np.array(subjects)\n features = np.array(features)\n return labels, subjects, features",
"def _make_features(self, data):\n return np.transpose(self.colspec.get_cols(data))",
"def define_features_vectorizer(columns, training_data, testing_data = None, ngramrange=(1,1)):\n #intialise Countvectorizer and fit transform to data\n vectorizer = CountVectorizer(ngram_range = ngramrange)\n vectorizer.fit_transform(training_data[columns].values)\n \n #build matrixes for training_features and testing_features\n training_features=vectorizer.transform(training_data[columns].values)\n\n if testing_data is not None:\n testing_features=vectorizer.transform(testing_data[columns].values)\n else:\n testing_features = None\n \n \n return vectorizer, training_features, testing_features",
"def create_vectorized_features(data_dir, feature_version=2):\n extractor = PEFeatureExtractor(feature_version)\n\n print(\"Vectorizing training set\")\n X_path = os.path.join(data_dir, \"X_train.dat\")\n y_path = os.path.join(data_dir, \"y_train.dat\")\n raw_feature_paths = [os.path.join(data_dir, \"train_features_{}.jsonl\".format(i)) for i in range(6)]\n nrows = sum([1 for fp in raw_feature_paths for line in open(fp)])\n vectorize_subset(X_path, y_path, raw_feature_paths, extractor, nrows)\n\n print(\"Vectorizing test set\")\n X_path = os.path.join(data_dir, \"X_test.dat\")\n y_path = os.path.join(data_dir, \"y_test.dat\")\n raw_feature_paths = [os.path.join(data_dir, \"test_features.jsonl\")]\n nrows = sum([1 for fp in raw_feature_paths for line in open(fp)])\n vectorize_subset(X_path, y_path, raw_feature_paths, extractor, nrows)",
"def features(data):\n\n return data[:,1:]",
"def _featurize(self, predictions: SequenceSample) -> List[np.ndarray]:\n feature_vectors: List[np.ndarray] = []\n source = predictions.origin_words\n\n char_nn_scores = self.char_nn_lm_score(predictions.paths)\n word_nn_scores = self.word_nn_lm_score(predictions.paths)\n\n for i, (score, hypothesis) in enumerate(zip(predictions.scores, predictions.paths)):\n obss = list(zip(hypothesis, source))\n length = len(source)\n feature_vector = np.array([\n 1.,\n length,\n self.language_model.score(hypothesis) / length,\n char_nn_scores[i],\n word_nn_scores[i],\n score / length,\n sum(w in self.language_model for w in hypothesis) / length,\n sum(h[:self.prefix_size] == s[:self.prefix_size] for h, s in obss) / length,\n sum(h[-self.suffix_size:] == s[-self.prefix_size:] for h, s in obss) / length,\n self.language_model.score(hypothesis) * score / length,\n np.mean([editdistance.eval(h, s) for h, s in obss]),\n np.mean([float(obs in self.train_set_uniq) for obs in obss]),\n np.mean([self.train_counter.get(obs, self.discount) for obs in obss]),\n ])\n feature_vectors.append(feature_vector)\n return feature_vectors",
"def extract_features(data_dir,mode='train'):\n files = get_files(data_dir)\n t0 = time.time()\n features = list()\n labels = list()\n for f in files:\n freq = get_frequencies(f)\n if mode=='train':\n sents = corpus_reader(f)\n labels.extend(d2l(sents,f,freq))\n elif mode=='decode':\n sents = corpus_reader(f,tag='pos')\n else:\n print('Invalid mode!')\n break\n features.extend(d2f(sents,f,freq)) \n dt = time.time() - t0\n print('Total feature extraction time: %d seconds' % dt)\n return features,labels",
"def images_to_feature_vectors(images, bbox_size=None, train=False):\n # If no bounding box size is supplied then compute a suitable\n # bounding box by examining sizes of the supplied images.\n if bbox_size is None:\n bbox_size = get_bounding_box_size(images)\n\n bbox_h, bbox_w = bbox_size\n nfeatures = bbox_h * bbox_w\n fvectors = np.empty((len(images), nfeatures))\n\n for i, image in enumerate(images):\n padded_image = np.ones(bbox_size) * 255\n h, w = image.shape\n h = min(h, bbox_h)\n w = min(w, bbox_w)\n\n \"\"\"Here I've centred the characters, as I believe the covariance\n matricies will more easily pick up distinct features of characters when\n they are centrally aligned (instead of an L being in the same position\n as the right hand side of an M, it'd be in the middle, where there'd be\n a clearer distinction as the middle of an M doesn't usually extend a\n full character height, whereas an L will).\n \"\"\"\n h_start = round((bbox_h/2)-(h/2))\n w_start = round((bbox_w/2)-(w/2))\n padded_image[h_start:h_start+h, w_start:w_start+w] = image[0:h, 0:w]\n\n #----------Denoising\n #Simple thresholding\n threshold = lambda image: np.where(image > 127, 255, 0)\n\n #By histographical analysis, I'm fairly certain x is 90 for page 2. \n #Using this denoising improves page 2 significantly, but only that page.\n threshold2 = lambda image: np.where(image > 255-90, 255, image)\n\n #This method \"stretches\" all the values away from 128, which I thought\n # may be a marginally better approach than hard thresholding as it'd\n # preserve some of the \"confidence\" inherently expressed in the greyness\n # of each pixel.\n def stretch(image, factor=5):\n image = np.round((image-128)*factor + 128)\n image = np.where(image > 255, 255, image)\n image = np.where(image < 0, 0, image)\n return image\n\n #I tried median sizes 2, 3, & 4. I found size 3 works best.\n median = lambda image: scipy.ndimage.median_filter(padded_image, size=3)\n\n #I found that if the median kernel is shaped vertically, it performs\n # better. I suspect this is due to the fact that a lot of characters are\n # composed of vertical lines.\n median2 = lambda image: scipy.ndimage.median_filter(image, size=(3,2))\n\n #I decided to try using a diamond shaped vertical footprint to squeeze\n # some extra % out, as the font doesn't tend to have square corners.\n # This brought a minor improvement over a simple kernel of size (3,2).\n padded_image = scipy.ndimage.median_filter(padded_image, \n footprint=np.array([[0,1,0],[1,1,1],[1,1,1],[0,1,0]]))\n\n #Reshaping to a column vector.\n fvectors[i, :] = padded_image.reshape(1, nfeatures)\n\n return fvectors",
"def create_feature_matrix(data,model):\n # data will be X_train and X_test\n temp = []\n for items in data:\n temp.append(list(items.split()))\n\n feature_matrix = []\n for sentences in temp:\n feature_matrix.append(pmeanFT.get_sentence_embedding(sentences, model,meanlist))\n \n return feature_matrix",
"def get_tokenized_index_vectors(self, data):\n vec_data = []\n for example in data:\n word = [self.ROOT] + [self.token2idx[w] if w in self.token2idx.keys() else self.UNK for w in\n example['lemma']]\n pos = [self.P_ROOT] + [self.token2idx[POS + p] if POS + p in self.token2idx.keys() else self.P_UNK for p in\n example['c_pos']]\n head = [-1] + example['head']\n label = [-1] + [self.token2idx[LAB + ll] if LAB + ll in self.token2idx.keys() else -1 for ll in\n example['label']]\n entry = dict()\n entry['word'] = word\n entry['pos'] = pos\n entry['head'] = head\n entry['label'] = label\n vec_data.append(entry)\n return vec_data",
"def vec_train(self, data):\n\n # Din kode her\n\n # Tips: Bruk fit_transform() for å spare kjøretid.\n\n vec = self.vectorizer.fit_transform(data).toarray()\n vec_tfidf = self.tfidf.fit_transform(vec)\n\n return vec, vec_tfidf",
"def gen_features(self, X):",
"def get_all_features(train_data, test_data):\n #train_wc_matrix, test_wc_matrix = get_word_count_features(train_data, test_data)\n train_idf_matrix, test_idf_matrix = get_idf_features(train_data, test_data)\n train_ngram_matrix, test_ngram_matrix = get_ngram_features(train_data, test_data)\n # train_liwc_matrix, test_liwc_matrix = get_liwc_features(train_data, test_data)\n return sparse.hstack([train_idf_matrix, train_ngram_matrix]), \\\n sparse.hstack([test_idf_matrix, test_ngram_matrix])",
"def vectorize(data,word2vec,story_maxlen,question_maxlen):\n res = dict()\n res['story'] = []\n res['question'] = []\n res['answer'] = []\n for key in ['story','question']:\n for example in data[key]:\n entry = []\n for word in example:\n entry.append(word2vec[word])\n res[key].append(entry)\n res[key] = np.array(res[key])\n for ans in data['answer']:\n res['answer'].append(word2vec[ans])\n res['answer'] = np.array(res['answer'])\n res['story'] = pad_sequences(res['story'],story_maxlen)\n # try:\n # res['story'] = pad_sequences(res['story'],story_maxlen)\n # except:\n # print res['story']\n # exit()\n res['question'] = pad_sequences(res['question'],question_maxlen)\n return res",
"def create_feature_vector(features, length):\n START_IDX = 0\n END_IDX = 1\n\n output_vector = np.zeros(length)\n\n # negative strand\n for loc in features[-1]:\n output_vector[loc[START_IDX]:loc[END_IDX]] = 1 \n\n # positive strand\n for loc in features[1]:\n output_vector[loc[START_IDX]:loc[END_IDX]] = 2\n\n return output_vector",
"def generate_feature_vector(self, test_document, n):\n m = len(self.bag_of_features)\n feature_vector = np.zeros(m)\n for feature, col in self.bag_of_features.items():\n if feature in test_document.tfs['all'].keys():\n tf = test_document.tfs['all'][feature]\n df = self.df_term[feature]\n tf_idf = calculate_tf_idf(tf=tf, df=df, doc_num=n)\n feature_vector[col] = tf_idf\n\n np.linalg.norm(feature_vector, axis=0)\n test_document.feature_vector = feature_vector\n return feature_vector",
"def extract_features(data):\n features = [feature for feature, _ in data]\n labels = [label for _, label in data]\n return features, labels",
"def featurize(data, w2i):\n\tX = np.zeros(shape=(len(data), len(w2i)))\n\tY = np.zeros(shape=(len(data), 2))\n\tfor cnt, review in enumerate(data):\n\t\ttdlist = [0]*len(w2i)\n\t\tfor word in review['text'].split():\n\t\t\tif word in w2i:\n\t\t\t\ttdlist[w2i[word]] = 1\n\t\tX[cnt,:] = tdlist\n\n\t\tpos = review['polarity'] == \"pos\"\n\t\tY[cnt,:] = [int(pos), int(not pos)]\n\n\treturn X, Y"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Sets the node_b of this NetflowFilters. | def node_b(self, node_b):
self._node_b = node_b | [
"def setB(self, b):\n self.b = b",
"def b(self, b):\n\n self._b = b",
"def __init__(self, bnodes=None):\n self.bnodes = bnodes if bnodes else []\n self._initialize()",
"def set_bias_for_node(node: Node, value: np.ndarray):\n bias = get_bias_for_node(node)\n if bias is None:\n raise Exception('Can\\'t set bias for node {} because node does not have a bias'.format(node.name))\n set_node_value(bias, value)",
"def set_a_b(self, a, b)->None:\r\n self.a = a\r\n self.b = b",
"def _onSetParameterBIgnoreBounds(self, value):\n self._parameters['b'] = value\n self._logger.info(\"Parameter 'b' of function '{}' changed to {}\".format(self._function, value))\n self.functionChanged.emit(self._dim, self._function, self._parameters.copy())",
"def node_a(self, node_a):\n\n self._node_a = node_a",
"def bnet_membership(self, bnet_membership):\n\n self._bnet_membership = bnet_membership",
"def nbf(self, nbf):\n\n self._nbf = nbf",
"def _onSetParameterB(self, value):\n self._parameters['b'] = min(max(value, self._parameters['lower']), self._parameters['upper']) # Limit at upper and lower\n self._logger.info(\"Parameter ba' of function '{}' changed to {}\".format(self._function, value))\n self.functionChanged.emit(self._dim, self._function, self._parameters.copy())",
"def trend_value_b(self, trend_value_b):\n\n self._trend_value_b = trend_value_b",
"def SetNode(self, node):\n self.node = node.Parameters[\"Image\"].binding",
"def set_node(self, node):\n self.__node = node",
"def node_config(self, node_config):\n\n self._node_config = node_config",
"def update_bnodes(VO, tlnum, delta):\n bnodes = VO.bnodes\n for i in xrange(tlnum, len(bnodes)+1):\n bnodes[i-1] += delta",
"def b_mode(self, b_mode):\n self._b_mode = b_mode",
"def set_node(self, n, value):\n node = self.get_node(n)\n if node:\n node.value = value",
"def content_b(self, content_b):\n\n self._content_b = content_b",
"def bpointer(self, nid):\r\n if nid is not None:\r\n self._bpointer = nid\r\n else:\r\n # print(\"WARNNING: the bpointer of node %s \" \\\r\n # \"is set to None\" % self._identifier)\r\n self._bpointer = None"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Sets the qos_type of this NetflowFilters. | def qos_type(self, qos_type):
self._qos_type = qos_type | [
"def qos(self, qos: int):\n if qos is not None and qos > 2: # noqa: E501\n raise ValueError(\"Invalid value for `qos`, must be a value less than or equal to `2`\") # noqa: E501\n if qos is not None and qos < 0: # noqa: E501\n raise ValueError(\"Invalid value for `qos`, must be a value greater than or equal to `0`\") # noqa: E501\n\n self._qos = qos",
"def _set_qos(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=qos.qos, is_container='container', presence=False, yang_name=\"qos\", rest_name=\"qos\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Quality of Service (QoS)', u'callpoint': u'int_ve_qos_conf_cp_worker', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-qos-mls', defining_module='brocade-qos-mls', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"qos must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=qos.qos, is_container='container', presence=False, yang_name=\"qos\", rest_name=\"qos\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Quality of Service (QoS)', u'callpoint': u'int_ve_qos_conf_cp_worker', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-qos-mls', defining_module='brocade-qos-mls', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__qos = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_qos(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_qos_openconfig_qos__qos, is_container='container', yang_name=\"qos\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"qos must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_qos_openconfig_qos__qos, is_container='container', yang_name=\"qos\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__qos = t\n if hasattr(self, '_set'):\n self._set()",
"def add_qos(self, qos):\n \n qos_id = qos[\"ovsdb:qos-entries\"][0][\"qos-id\"]\n self.qos_dict[qos_id] = qos",
"def _set_qos(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_qos_openconfig_qos_elements__qos, is_container='container', yang_name=\"qos\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"qos must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_qos_openconfig_qos_elements__qos, is_container='container', yang_name=\"qos\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__qos = t\n if hasattr(self, '_set'):\n self._set()",
"def set_qos_stat_type(self, iface, ptype):\n pytest.skip(\"Method is not supported by Iperf TG\")",
"def qos(self) -> int:\n return self._qos",
"def associate_qos(self, qos_id, vol_type_id):\n url = \"qos-specs/%s/associate\" % qos_id\n url += \"?vol_type_id=%s\" % vol_type_id\n resp, body = self.get(url)\n self.validate_response(schema.associate_qos, resp, body)\n return rest_client.ResponseBody(resp, body)",
"def disassociate_qos(self, qos_id, vol_type_id):\n url = \"qos-specs/%s/disassociate\" % qos_id\n url += \"?vol_type_id=%s\" % vol_type_id\n resp, body = self.get(url)\n self.validate_response(schema.disassociate_qos, resp, body)\n return rest_client.ResponseBody(resp, body)",
"def setQType(self, q):\n self.qType = q",
"def set_qos_key(self, qos_id, **kwargs):\n put_body = json.dumps({\"qos_specs\": kwargs})\n resp, body = self.put('qos-specs/%s' % qos_id, put_body)\n body = json.loads(body)\n self.validate_response(schema.set_qos_key, resp, body)\n return rest_client.ResponseBody(resp, body)",
"def qos(self):\n if self == SubscribeResult.qos0:\n rv = 0\n elif self == SubscribeResult.qos1:\n rv = 1\n elif self == SubscribeResult.qos2:\n rv = 2\n else:\n raise TypeError()\n\n return rv",
"def topic_type(self, topic_type):\n\n self._topic_type = topic_type",
"def os_type(self, os_type):\n\n self._os_type = os_type",
"def os_type(self, os_type):\n self._os_type = os_type",
"def modify_qos_policy(self, group_name):\n\n vol_qos_el = NaElement('volume-qos-attributes')\n vol_qos_el.child_add(NaElement('policy-group-name', group_name))\n vol_attrs_set_el = NaElement('volume-attributes')\n vol_attrs_set_el.child_add(vol_qos_el)\n attributes_el = NaElement('attributes')\n attributes_el.child_add(vol_attrs_set_el)\n self.modify(attributes_el)",
"def queue_type(self, queue_type):\n self._queue_type = queue_type",
"def mode_type(self, mode_type):\n\n self._mode_type = mode_type",
"def setType(self, filterType):\n self.stub.SetType(filter_pb2.FilterSetTypeRequest(filter=self.data, type=filterType),\n timeout=Cuebot.Timeout)"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Sets the device_interfaces of this NetflowFilters. | def device_interfaces(self, device_interfaces):
self._device_interfaces = device_interfaces | [
"def network_interfaces(self, network_interfaces):\n\n self._network_interfaces = network_interfaces",
"def serial_interfaces(self, serial_interfaces):\n self._serial_interfaces = serial_interfaces",
"def ethernet_interfaces(self, ethernet_interfaces):\n self._ethernet_interfaces = ethernet_interfaces",
"def netflow_devices(self, netflow_devices):\n\n self._netflow_devices = netflow_devices",
"def ifaces(self, ifaces):\n \n self._ifaces = ifaces",
"def update_interfaces_config(self):\n\n for i in self._nodes.items():\n node = i[1]\n devices = node[\"devices\"]\n all_devices = devices[\"other_devices\"]\n all_devices.update(devices[\"dpdk_devices\"])\n all_devices.update(devices[\"kernel_devices\"])\n\n current_ifcs = {}\n interfaces = {}\n if \"interfaces\" in node:\n current_ifcs = node[\"interfaces\"]\n if current_ifcs:\n for ifc in current_ifcs.values():\n dvid = ifc[\"pci_address\"]\n if dvid in all_devices:\n VppPCIUtil.vpp_create_interface(\n interfaces, dvid, all_devices[dvid]\n )\n node[\"interfaces\"] = interfaces\n\n self.updateconfig()",
"def _set_interfaces(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_interfaces_openconfig_interfaces__interfaces, is_container='container', yang_name=\"interfaces\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/interfaces', defining_module='openconfig-interfaces', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"interfaces must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_interfaces_openconfig_interfaces__interfaces, is_container='container', yang_name=\"interfaces\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/interfaces', defining_module='openconfig-interfaces', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__interfaces = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_interfaces(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_interfaces_openconfig_qos_interfaces__qos_interfaces, is_container='container', yang_name=\"interfaces\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"interfaces must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_interfaces_openconfig_qos_interfaces__qos_interfaces, is_container='container', yang_name=\"interfaces\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__interfaces = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_interfaces(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_interfaces_openconfig_qos__qos_interfaces, is_container='container', yang_name=\"interfaces\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"interfaces must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_interfaces_openconfig_qos__qos_interfaces, is_container='container', yang_name=\"interfaces\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__interfaces = t\n if hasattr(self, '_set'):\n self._set()",
"def devices(self, devices):\n\n self._devices = devices",
"def update_interfaces(self, interfaces):\n for i in interfaces:\n self.update_interface(i)",
"def _set_interfaces(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_interfaces_openconfig_mpls__mpls_signaling_protocols_segment_routing_interfaces, is_container='container', yang_name=\"interfaces\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/mpls', defining_module='openconfig-mpls', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"interfaces must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_interfaces_openconfig_mpls__mpls_signaling_protocols_segment_routing_interfaces, is_container='container', yang_name=\"interfaces\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/mpls', defining_module='openconfig-mpls', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__interfaces = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_interface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=YANGListType(\"name\",yc_interface_openconfig_interfaces__interfaces_interface, yang_name=\"interface\", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='name', extensions=None), is_container='list', yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/interfaces', defining_module='openconfig-interfaces', yang_type='list', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"interface must be of a type compatible with list\"\"\",\n 'defined-type': \"list\",\n 'generated-type': \"\"\"YANGDynClass(base=YANGListType(\"name\",yc_interface_openconfig_interfaces__interfaces_interface, yang_name=\"interface\", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='name', extensions=None), is_container='list', yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/interfaces', defining_module='openconfig-interfaces', yang_type='list', is_config=True)\"\"\",\n })\n\n self.__interface = t\n if hasattr(self, '_set'):\n self._set()",
"def get_interfaces(self, **kwargs):\n url = 'ipam/interfaces'\n args = ('tenant_id', 'network_id', 'device_id')\n params = {}\n for key, value in kwargs.iteritems():\n if key in args:\n params[key] = value\n res = json.loads(self.get(url, params=params))\n return res['interfaces']",
"def _set_interfaces(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_interfaces_openconfig_mpls_igp__mpls_signaling_protocols_segment_routing_interfaces, is_container='container', yang_name=\"interfaces\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/mpls', defining_module='openconfig-mpls', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"interfaces must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_interfaces_openconfig_mpls_igp__mpls_signaling_protocols_segment_routing_interfaces, is_container='container', yang_name=\"interfaces\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/mpls', defining_module='openconfig-mpls', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__interfaces = t\n if hasattr(self, '_set'):\n self._set()",
"def set_interface(ftdi, interface):\n return _ftdi1.set_interface(ftdi, interface)",
"def set_logical_device_configuration(device, logical_devices):\n context.context().set_logical_device_configuration(device, logical_devices)",
"def _set_interface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=YANGListType(\"interface_id\",yc_interface_openconfig_qos_interfaces__qos_interfaces_interface, yang_name=\"interface\", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='interface-id', extensions=None), is_container='list', yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='list', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"interface must be of a type compatible with list\"\"\",\n 'defined-type': \"list\",\n 'generated-type': \"\"\"YANGDynClass(base=YANGListType(\"interface_id\",yc_interface_openconfig_qos_interfaces__qos_interfaces_interface, yang_name=\"interface\", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='interface-id', extensions=None), is_container='list', yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='list', is_config=True)\"\"\",\n })\n\n self.__interface = t\n if hasattr(self, '_set'):\n self._set()",
"def device_groups(self, device_groups):\n\n self._device_groups = device_groups"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Sets the ip_version of this NetflowFilters. | def ip_version(self, ip_version):
self._ip_version = ip_version | [
"def ip_version(self, ip_version):\n self._ip_version = ip_version",
"def ip_count(self, ip_count):\n\n self._ip_count = ip_count",
"def vip(self, vip):\n\n self._vip = vip",
"def cavity_filter_version(self, cavity_filter_version):\n\n self._cavity_filter_version = cavity_filter_version",
"def ip_version(self):\n return self._ip_version",
"def protocol_version(self, protocol_version):\n\n self._protocol_version = protocol_version",
"def setIp(self, new_ip):\n self.ip=new_ip",
"def vip_address(self, vip_address):\n self._vip_address = vip_address",
"def ip(self, ip):\n self._ip = ip",
"def ip(self, ip):\n\n self._ip = ip",
"def flow_encoding_version(self, flow_encoding_version):\n\n self._flow_encoding_version = flow_encoding_version",
"def ip(self, ip):\n self._ip = ip\n return self",
"def set_ip(self, party_ip) -> None:\n\n self._ip = party_ip",
"def version(self, version):\n \n self._version = version",
"def source_version(self, source_version):\n\n self._source_version = source_version",
"def node_version(self, node_version):\n\n self._node_version = node_version",
"def ip(self, ip: str):\n\n self._ip = ip",
"def IpVersion(self):\n if self.force_auto_sync:\n self.get('IpVersion')\n return self._IpVersion",
"def ip_address(self, ip_address):\n\n self._ip_address = ip_address"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Sets the netflow_devices of this NetflowFilters. | def netflow_devices(self, netflow_devices):
self._netflow_devices = netflow_devices | [
"def netflow_filters(self, netflow_filters):\n\n self._netflow_filters = netflow_filters",
"def devices(self, devices):\n\n self._devices = devices",
"def device_groups(self, device_groups):\n\n self._device_groups = device_groups",
"def ext_devices(self, ext_devices):\n self._ext_devices = ext_devices",
"def set_visible_devices(devices, device_type=None):\n context.context().set_visible_devices(devices, device_type)",
"def set_active_devices(devices: List[str]) -> None:\n\n devices_to_use = set([int(gpu) for gpu in devices])\n\n physical_devices = [\n physical_device\n for i, physical_device in enumerate(\n tf.config.experimental.list_physical_devices(\"GPU\")\n )\n if i in devices_to_use\n ]\n\n tf.config.experimental.set_visible_devices(physical_devices, \"GPU\")\n for physical_device in physical_devices:\n tf.config.experimental.set_memory_growth(physical_device, True)\n\n return tf.distribute.MirroredStrategy()",
"def set_logical_device_configuration(device, logical_devices):\n context.context().set_logical_device_configuration(device, logical_devices)",
"def device_interfaces(self, device_interfaces):\n\n self._device_interfaces = device_interfaces",
"def devicenodes(self, devicenodes):\n\n self._devicenodes = devicenodes",
"def set_frequency_for_devices(self, devices, freq, exact=False):\n for device in devices:\n self.set_max_frequency(device, freq, exact)\n self.set_min_frequency(device, freq, exact)",
"def select_devices(self, devices):\n self.dm.selected_devices = devices",
"def set_device(self, device):",
"def dev_set(self, dev: base.DataType) -> None:\n self.dev = dev",
"def _set_device(self):\n self.device = torch.device(self.args.device)",
"def configure_devices(self, ports):\n\n new_devices = []\n \n # for each port create a new Device and start the underlying thread\n for p in ports:\n # print info\n # print('DeviceManager[' + time.strftime(\"%d-%m-%Y %H:%M:%S\") +\\\n # ']: new device connected (port ' + str(p) + ')')\n \n new_device = Device(p, self.tqdm_position, self.tqdm_pos_lock)\n self.configured_devices[new_device.id] = new_device\n new_devices.append(new_device)\n new_device.start()\n\n return new_devices",
"def _set_device(self):\n if(self.enable_gpu):\n device_name = tf.test.gpu_device_name()\n if device_name != '/device:GPU:0':\n raise SystemError('GPU device not found')\n if(self.verbose):\n print('Found GPU at: {}'.format(device_name))\n self.device_name = \"/gpu:0\"\n else:\n self.device_name = \"/cpu:0\"",
"def setFilters(self, filters):\n self.__filters = filters",
"def set_device(self, device):\n self.device = device",
"def configure_devices(self):\n # maybe change this back\n return self.config.configure_devices(self.debug)"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Sets the top of this NetflowFilters. | def top(self, top):
self._top = top | [
"def set_top(self, top: float) -> None:\n self._selected_top = top",
"def bb_top(self, bb_top: float):\n\n self._bb_top = bb_top",
"def bevel_top(self, bevel_top):\n self._bevel_top = bevel_top",
"def top(self):\n # Sets our Z value to one.\n self.setZValue(1)\n # Set every colliding items Z value to 0\n for sibling in self.collidingItems():\n sibling.setZValue(0)",
"def top_type(self, top_type):\n\n self._top_type = top_type",
"def topn(self, topn):\n self._topn = topn",
"def GripperTop(self, attop=True):\r\n \r\n return self.SetFlag(self.optionGripperTop, attop)",
"def top_bar(self, top_bar):\n\n self._top_bar = top_bar",
"def setTopLimit(self, newTopLimit):\n self.topLimit = newTopLimit",
"def OnWindowSetTop(self, Top=sentinel):",
"def transition_to_top(self):\n self.move_to_top_side()",
"def setTopTweets(self, topTweets):\n self.topTweets = topTweets\n return self",
"def setTopP(self, value):\n return self._set(topP=value)",
"def Top(self):\r\n\r\n self.dock_direction = AUI_DOCK_TOP\r\n return self",
"def scroll_top(self):\n self.scroll = 0",
"def page_top(self):\n self._pos = 0\n self._display()",
"def Top(*args, **kwargs):\n return _core_.SizerFlags_Top(*args, **kwargs)",
"def top_attire_color(self, top_attire_color):\n\n self._top_attire_color = top_attire_color",
"def top_threshold_expression(self, top_threshold_expression):\n\n self._top_threshold_expression = top_threshold_expression"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Sets the app_type of this NetflowFilters. | def app_type(self, app_type):
self._app_type = app_type | [
"def app_type(self, app_type):\n self._app_type = app_type",
"def application_type(self, application_type: str):\n allowed_values = [\"transfer_of_whole\", \"creation_of_new_charge\"] # noqa: E501\n if application_type not in allowed_values:\n raise ValueError(\n \"Invalid value for `application_type` ({0}), must be one of {1}\"\n .format(application_type, allowed_values)\n )\n\n self._application_type = application_type",
"def app_type(self):\n return self._app_type",
"def feed_type(self, feed_type):\n\n self._feed_type = feed_type",
"def _app_type(self):\n return self._event['app_type']",
"def application_type(self) -> str:\n return self._application_type",
"def set_type(self, type):\n self.type = type",
"def item_type(self, item_type):\n\n self._item_type = item_type",
"def application_type(self) -> str:\n return pulumi.get(self, \"application_type\")",
"def setFilter(self, type: int, filter: int) -> None:\n ...",
"def activity_type(self, activity_type):\n\n self._activity_type = activity_type",
"def application_type(self) -> Optional[str]:\n return pulumi.get(self, \"application_type\")",
"def set_instance_type(self, instance_type):\n self._instance_type = instance_type",
"def request_type(self, request_type):\n\n self._request_type = request_type",
"def site_type(self, site_type):\n self._site_type = site_type",
"def request_type(self, request_type):\n self._request_type = request_type",
"def set_type(self, type: int):\r\n self.type = type\r\n self.canvas.itemconfig(self.item, image=self._get_image())",
"def set_application(self, app):\n \n self.app = app",
"def item_group_type(self, item_group_type):\n\n self._item_group_type = item_group_type"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Sets the nbar_application_names of this NetflowFilters. | def nbar_application_names(self, nbar_application_names):
self._nbar_application_names = nbar_application_names | [
"def application_name(self, application_name):\n\n self._application_name = application_name",
"def set_app_name(self, app_name):\n pass",
"def setNameFilters(self, filters):\n if self._completer:\n self._completer.model().setNameFilters(filters)",
"def set_filters(self, filters):\n self.widget.setNameFilters(filters)",
"def roma_app_name(self, roma_app_name):\n self._roma_app_name = roma_app_name",
"def application_tags(self, application_tags):\n\n self._application_tags = application_tags",
"def set_name(self, application_name):\r\n self._name = application_name",
"def applications(self, applications):\n\n self._applications = applications",
"def set_application_attributes(self, application):\n application.setOrganizationName(self.application_admin.get_organization_name())\n application.setOrganizationDomain(self.application_admin.get_organization_domain())\n application.setApplicationName(self.application_admin.get_name())\n application.setWindowIcon(self.application_admin.get_icon())",
"def app_name(self, app_name):\n self._app_name = app_name",
"def set_applications(self, applications):\n self.apps = applications\n self.settings.set_strv(OPTION_FOLDER_APPS, self.apps)",
"def app_name(self, value):\n self._app_name = value",
"def config_bucket_names(self, config_bucket_names: ConfigNodePropertyArray):\n\n self._config_bucket_names = config_bucket_names",
"def app_name(self, app_name):\n\n self._app_name = app_name",
"def set_band_names(self, band_names, imagename=None):\n self.set_option_for_imagename('band_names', imagename, band_names)",
"def set_application(self, app_name, app):\n app = app if isinstance(app, list) else [app]\n logging.debug(\"setting application: %s, app: \\n%s\",\n app_name,\n pprint(app))\n self._config['Application'][app_name] = app",
"def SetAppName(*args, **kwargs):\n return _core_.PyApp_SetAppName(*args, **kwargs)",
"def app_list(self, app_list):\n self._app_list = app_list",
"def category_names(self, category_names):\n\n self._category_names = category_names"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Sets the node_a of this NetflowFilters. | def node_a(self, node_a):
self._node_a = node_a | [
"def setA(self, a):\n self.a = a",
"def setNodesAttribute(self, idx_node: torch.Tensor, idx_attribute: torch.Tensor, value: float):\n self.node_attribute_list[idx_node][0][idx_attribute] = value",
"def set_node(self, node):\n self.__node = node",
"def SetNode(self, node):\n self.node = node.Parameters[\"Image\"].binding",
"def __call__(self, node_A):\n new_node = Op.__call__(self)\n new_node.inputs = [node_A]\n new_node.name = \"Zeroslike(%s)\" % node_A.name\n return new_node",
"def __call__(self, node_A):\r\n new_node = Op.__call__(self)\r\n new_node.inputs = [node_A]\r\n new_node.name = \"Zeroslike(%s)\" % node_A.name\r\n return new_node",
"def __call__(self, node_A):\r\n new_node = Op.__call__(self)\r\n new_node.inputs = [node_A]\r\n new_node.name = \"Oneslike[%s]\" % node_A.name\r\n return new_node",
"def setNodesAttributes(self, idx_node: torch.Tensor, values: torch.Tensor):\n self.node_attribute_list[idx_node][0] = values",
"def node_b(self, node_b):\n\n self._node_b = node_b",
"def nodes(self, nodes):\n self._nodes = nodes",
"def nodes(self, nodes):\n\n self._nodes = nodes",
"def node_address(self, node_address):\n\n self._node_address = node_address",
"def node_config(self, node_config):\n\n self._node_config = node_config",
"def node_type(self, node_type):\n\n self._node_type = node_type",
"def change_atrrs_of_node(self, node_number, dict_with_attrs):\n nx.set_node_attributes(self, {node_number: dict_with_attrs})",
"def __init__(self, a_Nnodes):\n self.reset(a_Nnodes)",
"def set_node(self, n, value):\n node = self.get_node(n)\n if node:\n node.value = value",
"def node_data(self, node_data):\n\n self._node_data = node_data",
"def set_attribute(self, node, name, value):\r\n return self._send({'name': 'setAttribute', 'args': [node, name, value]})"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Sets the conversation of this NetflowFilters. | def conversation(self, conversation):
self._conversation = conversation | [
"def conversation(self, conversation):\n \n self._conversation = conversation",
"def set_gift_conversation(self, conversation_string):\r\n self.gift_conversation = conversation_string",
"def update_conversation(self, conversation=None):\n pass",
"def conversation(self):\n return self._conversation",
"def on_invite_conversation(self, conversation):\n pass",
"def update_conversation(self, conversation):\n # StateUpdate.conversation is actually a delta; fields that aren't\n # specified are assumed to be unchanged. Until this class is\n # refactored, hide this by saving and restoring previous values where\n # necessary.\n\n # delivery_medium_option\n new_state = conversation.self_conversation_state\n if len(new_state.delivery_medium_option) == 0:\n old_state = self._conversation.self_conversation_state\n new_state.delivery_medium_option.extend(\n old_state.delivery_medium_option\n )\n\n # latest_read_timestamp\n old_timestamp = self.latest_read_timestamp\n self._conversation = conversation\n if parsers.to_timestamp(self.latest_read_timestamp) == 0:\n self_conversation_state = (\n self._conversation.self_conversation_state\n )\n self_conversation_state.self_read_state.latest_read_timestamp = (\n parsers.to_timestamp(old_timestamp)\n )",
"def chat(self, chat):\n \n self._chat = chat",
"def set_state(self, state):\n state = state.format(relation_name=self.relation_name)\n value = _get_flag_value(state, {\n 'relation': self.relation_name,\n 'conversations': [],\n })\n if self.key not in value['conversations']:\n value['conversations'].append(self.key)\n set_flag(state, value)",
"def update(self, conversation):\n self.content_type = \"application/json\"\n self.method = \"PATCH\"\n entity = Conversation(json.loads(self.send(conversation).content))\n self._initialize_collection_properties(entity)\n return entity",
"def create_conversation(self, conversation=None):\n pass",
"def _handle_conversation(self, conversation):\n conv_id = conversation.conversation_id.id\n conv = self._conv_dict.get(conv_id, None)\n if conv is not None:\n conv.update_conversation(conversation)\n else:\n self.add_conversation(conversation)",
"def patch_conversation(request_data: dict, conversation: Conversation):\n bound_logger = logger.bind(conversation_id=conversation.id)\n\n for field in [\"category\"]:\n # Looks a bit awkward but getattr/setattr lets us loop over all the fields as we can't access fields\n # in the object like a dictionary.\n\n if request_data.get(field) and request_data.get(field) != getattr(conversation, field):\n setattr(conversation, field, request_data[field])\n try:\n db.session.commit()\n except SQLAlchemyError:\n db.session.rollback()\n bound_logger.exception(\"Database error occurred while opening conversation\")\n raise InternalServerError(description=\"Database error occurred while opening conversation\")",
"def conversation_participant_name(self, conversation_participant_name):\n\n self._conversation_participant_name = conversation_participant_name",
"def conversation_participant_uuid(self, conversation_participant_uuid):\n\n self._conversation_participant_uuid = conversation_participant_uuid",
"def conversation(self, thread):\r\n assert isinstance(thread, int) and 0 <= thread < len(self._threads), \"Thread {} don't exists at channel {}!\".\\\r\n format(thread, self.name)\r\n return self._threads[thread][\"conversation\"]",
"async def on_send_to_conversation(\n self, claims_identity: ClaimsIdentity, conversation_id: str, activity: Activity,\n ) -> ResourceResponse:\n return await self._process_activity(\n claims_identity, conversation_id, None, activity,\n )",
"def sent(self, sent):\n\n self._sent = sent",
"def netflow_filters(self, netflow_filters):\n\n self._netflow_filters = netflow_filters",
"def initialise_conversation_model(self):\n self.conversation = model.conversation.ConversationSystem()\n #\n # Set all as alive\n for name in 'abcde':\n self.conversation.addKnowledge(['{0}-alive'.format(name)])\n #\n # And set the requires\n self.conversation.convertPresentToRequires('{0}-alive')"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Sets the if_names of this NetflowFilters. | def if_names(self, if_names):
self._if_names = if_names | [
"def set_filters(self, filters):\n self.widget.setNameFilters(filters)",
"def setNameFilters(self, filters):\n if self._completer:\n self._completer.model().setNameFilters(filters)",
"def setFilters(self, filters):\n self.__filters = filters",
"def set_filter_status(self, filters):\n if filters in [0, 1, 2, 3]:\n self.instr.write(\"ILIN {0:d}\".format(filters))\n else:\n raise ValueError('Argument has to be either 0, 1, 2 or 3.')",
"def filters(self, filters):\n self._filters = filters",
"def netflow_filters(self, netflow_filters):\n\n self._netflow_filters = netflow_filters",
"def filters(self, filters):\n\n self._filters = filters",
"def _set_logical_interface_name(self, v, load=False):\n parent = getattr(self, \"_parent\", None)\n if parent is not None and load is False:\n raise AttributeError(\"Cannot set keys directly when\" +\n \" within an instantiated list\")\n\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'(((([0-9]|1[0-6])/([1-9]|[1-9][0-9])(:[1-4])?)|([1-9]|[1-9][0-9]|[1-9][0-9][0-9]|10[0-1][0-9]|102[0-4]))\\\\.([1-9]|[1-9][0-9]+))'}), is_leaf=True, yang_name=\"logical-interface-name\", rest_name=\"logical-interface-name\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Logical interface name'}}, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-mac-address-table', defining_module='brocade-mac-address-table', yang_type='logical-ifname', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"logical_interface_name must be of a type compatible with logical-ifname\"\"\",\n 'defined-type': \"brocade-mac-address-table:logical-ifname\",\n 'generated-type': \"\"\"YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'(((([0-9]|1[0-6])/([1-9]|[1-9][0-9])(:[1-4])?)|([1-9]|[1-9][0-9]|[1-9][0-9][0-9]|10[0-1][0-9]|102[0-4]))\\\\.([1-9]|[1-9][0-9]+))'}), is_leaf=True, yang_name=\"logical-interface-name\", rest_name=\"logical-interface-name\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Logical interface name'}}, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-mac-address-table', defining_module='brocade-mac-address-table', yang_type='logical-ifname', is_config=True)\"\"\",\n })\n\n self.__logical_interface_name = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_interface_name(self, v, load=False):\n parent = getattr(self, \"_parent\", None)\n if parent is not None and load is False:\n raise AttributeError(\"Cannot set keys directly when\" +\n \" within an instantiated list\")\n\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name=\"interface-name\", rest_name=\"interface-name\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='string', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"interface_name must be of a type compatible with string\"\"\",\n 'defined-type': \"string\",\n 'generated-type': \"\"\"YANGDynClass(base=unicode, is_leaf=True, yang_name=\"interface-name\", rest_name=\"interface-name\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='string', is_config=False)\"\"\",\n })\n\n self.__interface_name = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_ifname(self, v, load=False):\n parent = getattr(self, \"_parent\", None)\n if parent is not None and load is False:\n raise AttributeError(\"Cannot set keys directly when\" +\n \" within an instantiated list\")\n\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name=\"ifname\", rest_name=\"ifname\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-opstest', defining_module='brocade-opstest', yang_type='string', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"ifname must be of a type compatible with string\"\"\",\n 'defined-type': \"string\",\n 'generated-type': \"\"\"YANGDynClass(base=unicode, is_leaf=True, yang_name=\"ifname\", rest_name=\"ifname\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-opstest', defining_module='brocade-opstest', yang_type='string', is_config=False)\"\"\",\n })\n\n self.__ifname = t\n if hasattr(self, '_set'):\n self._set()",
"def set_FilterName(self, value):\n super(GetCallbackDataInputSet, self)._set_input('FilterName', value)",
"def filter_files(self, name_filters=None):\r\n if name_filters is None:\r\n name_filters = self.get_conf('name_filters', [])\r\n\r\n if self.filter_on:\r\n self.fsmodel.setNameFilters(name_filters)\r\n else:\r\n self.fsmodel.setNameFilters([])",
"def set_firewall_filter(self, name, **kwargs):\n\n self.fn_checkin(\"Configuring firewall filter\")\n\n #self.fw_filter[name] = {}\n #self.fw_filter[name]['ftype'] = 'filter'\n #this = self.ptr = self.fw_filter[name]\n\n this = utils.update_opts_from_args(kwargs,\n defaults={\n 'count': 1, 'action': 'set', 'index': 1,\n 'if_specific': False, 'fltr_specific': False,\n 'policer': False, 'in_if_specific': False,\n 'ftype': 'filter',\n 'src_addr': None, 'src_port': None,\n 'dst_addr': None, 'dst_port': None,\n })\n\n self.cmd = \"{} firewall\".format(this['action'])\n if this['family']:\n self.cmd += ' family {}'.format(this['family'])\n\n self.cmd_add(\"interface_specific\", 'if_specific', opt='flag')\n\n if this['policer']:\n if name not in self.fw_filter:\n self.fw_filter[name] = {}\n #self.fw_filter[name]['ftype'] = 'filter'\n self.ptr = self.fw_filter[name]\n self._update(this)\n self.cmd_add(\"policer {} filter-specific\".format(name), 'fltr_specific', opt='flag')\n self.cmd_add(\"policer {} if-exceeding bandwidth-limit\".format(name), 'if_ex_bw_lmt')\n self.cmd_add(\"policer {} if-exceeding burst-size-limit\".format(name),\n 'if_ex_burst_lmt')\n self.cmd_add(\"policer {} then\".format(name), 'pol_action')\n else:\n _cmd = '{} {}'.format(self.cmd, this['ftype'])\n for iter_ii in range(1, this['count'] + 1):\n name_tag = name + str(iter_ii)\n if name_tag not in self.fw_filter:\n self.fw_filter[name_tag] = {}\n self.ptr = self.fw_filter[name_tag]\n self._update(this)\n self.cmd_add(\"{} interface-specific\".format(name_tag), 'in_if_specific',\n opt='flag')\n\n self.cmd = _cmd + ' {} term {}'.format(name_tag, this['term'])\n for key in kwargs:\n if key == 'action_list':\n action_tag = \".{}\".format(iter_ii) if key == 'routing-instance' or \\\n key == 'count' else None\n self.cmd_add('then', 'action_list', tag=action_tag)\n continue\n\n self.cmd_add(\"from source-address\", 'src_addr')\n self.cmd_add(\"from source-port\", 'src_port')\n self.cmd_add(\"from destination-port\", 'dst_port')\n self.cmd_add(\"from destination-address\", 'dst_addr')\n\n status = self.config()\n\n return self.fn_checkout(status)",
"def set_filters(self, filters):\n if not isinstance(filters, dict):\n raise Exception(\"filters must be a dict\")\n self.filters = {}\n for key in filters.keys():\n value = filters[key]\n self.add_filter(key,value)",
"def names(self, names):\n\n self._names = names",
"def ifaces(self, ifaces):\n \n self._ifaces = ifaces",
"def set_names(self, *args):\n def setnames(a, b):\n CPX_PROC.chgcolname(self._env._e, self._cplex._lp, a, b,\n self._env._apienc)\n apply_pairs(setnames, self._conv, *args)",
"def tag_names(self, tag_names):\n\n self._tag_names = tag_names",
"def communications_flow_names(self, communications_flow_names):\n\n self._communications_flow_names = communications_flow_names"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Sets the direction of this NetflowFilters. | def direction(self, direction):
self._direction = direction | [
"def set_direction(self, direction: str) -> None:\n self._send_command([{\"code\": DPCODE_FAN_DIRECTION, \"value\": direction}])",
"def set_direction(self, direction: str) -> None:\n self.wink.set_fan_direction(direction)",
"def setDirection( self, direction ):\n self.layout().setDirection(direction)\n self.reset()",
"def set_direction(self, new_dir):\n self.__direction = new_dir",
"def setdirection(self, *args, **kwargs):\n return _coordsys.coordsys_setdirection(self, *args, **kwargs)",
"async def async_set_direction(self, direction: str) -> None:\n if direction == DIRECTION_FORWARD:\n self._device.fan_dir = SENSEME_DIRECTION_FORWARD\n else:\n self._device.fan_dir = SENSEME_DIRECTION_REVERSE",
"def SetDirection(self, _arg: 'itkMatrixD33') -> \"void\":\n return _itkGenerateImageSourcePython.itkGenerateImageSourceICVF23_SetDirection(self, _arg)",
"def change_direction(self):\n if self._direction == 'Right':\n self._direction = 'Left'\n elif self._direction == 'Left':\n self._direction = 'Right'",
"def SetDirection(self, _arg: 'itkMatrixD33') -> \"void\":\n return _itkGenerateImageSourcePython.itkGenerateImageSourceICVF33_SetDirection(self, _arg)",
"def SetDirection(self, _arg: 'itkMatrixD22') -> \"void\":\n return _itkGenerateImageSourcePython.itkGenerateImageSourceICVF32_SetDirection(self, _arg)",
"def direction(self, direction = None):\r\n if direction in [N, S, E, W]:\r\n self._direction = direction\r\n return self._direction",
"def SetDirection(self, _arg: 'itkMatrixD33') -> \"void\":\n return _itkGenerateImageSourcePython.itkGenerateImageSourceVIF3_SetDirection(self, _arg)",
"def SetDirection(self, _arg: 'itkMatrixD33') -> \"void\":\n return _itkGenerateImageSourcePython.itkGenerateImageSourceICVF43_SetDirection(self, _arg)",
"def set_robot_direction(self, direction):\n _North = 0.0\n fdirection = float(direction)\n if fdirection < _North:\n raise ValueError('Direction must be provided as a positive value')\n\n if fdirection >= 360.0:\n fdirection = fdirection % 360.0\n\n self.direction = fdirection\n # raise NotImplementedError",
"def SetDirection(self, _arg: 'itkMatrixD22') -> \"void\":\n return _itkGenerateImageSourcePython.itkGenerateImageSourceICVF22_SetDirection(self, _arg)",
"async def async_set_direction(self, direction: str) -> None:\n await self._client.fan_command(\n key=self._key, direction=_FAN_DIRECTIONS.from_hass(direction)\n )",
"def SetDirection(self, _arg: 'itkMatrixD22') -> \"void\":\n return _itkGenerateImageSourcePython.itkGenerateImageSourceVIF2_SetDirection(self, _arg)",
"def SetDirection(self, _arg: 'itkMatrixD33') -> \"void\":\n return _itkGenerateImageSourcePython.itkGenerateImageSourceIVF23_SetDirection(self, _arg)",
"def SetDirection(self, _arg: 'itkMatrixD33') -> \"void\":\n return _itkGenerateImageSourcePython.itkGenerateImageSourceIVF33_SetDirection(self, _arg)"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
To add parents to database | def add_parent(session, df):
try:
for _, row in df.iterrows():
parent = Parent()
parent.name = row['parent_name']
parent.family = row['family']
session.add(parent)
except Exception as ex:
session.rollback()
raise ex
else:
session.commit() | [
"def add_parents(self, parents):\n if len(parents) > 0:\n self.parents.append(parents)\n else:\n self.is_root = True",
"def add_parents(self, nodes):\n for n in self.node_list(nodes):\n self.add_parent(n)",
"def regenerate_parents( self ):\n ParentsRelation.objects.filter( taxon = self ).delete() \n if self.name != 'root':\n parent = self.parent\n index = 0\n while parent.name != 'root':\n ParentsRelation.objects.create( \n taxon = self,\n parent = Taxa.objects.get( name = parent.name ),\n index = index )\n parent = parent.parent\n index += 1\n ParentsRelation.objects.create( \n taxon = self,\n parent = Taxa.objects.get( name = 'root'),\n index = index )",
"def test_append_children_category(self):\n category = Category(catname='olympic games')\n category1 = Category(catname='Tennis')\n category.parents.append(category1)\n category.save()\n assert category.parents",
"def insert_commit_parents(self, parents, commit_id, sha, repo_id):\n cursor = self._cnx.cursor()\n for parent in parents:\n parent_id = self.select_commit_id(parent.hexsha, repo_id)\n\n if not parent_id:\n self._logger.warning(\"parent commit id not found! SHA parent \" + str(parent.hexsha))\n\n query = \"INSERT IGNORE INTO commit_parent \" \\\n \"VALUES (%s, %s, %s, %s, %s)\"\n\n if parent_id:\n arguments = [repo_id, commit_id, sha, parent_id, parent.hexsha]\n else:\n arguments = [repo_id, commit_id, sha, None, parent.hexsha]\n\n cursor.execute(query, arguments)\n self._cnx.commit()\n\n cursor.close()",
"def add_parent(self, pathway):\n self._parents.append(pathway)",
"def set_parents(self):\n route53 = self.pcf_field.get_particles(flavor=\"route53_record\")\n route53_record_pcf_name = route53.get(\"pcf_name\", self.name)\n ec2_particles = self.pcf_field.get_particles(flavor=\"ec2_instance\")\n\n self.pcf_field.particles[\"route53_record\"][route53_record_pcf_name].parents.update(list(ec2_particles.values()))\n self.pcf_field.link_particles(self.pcf_field.particles)",
"def set_parents_table(self) -> None:\n self.parents[\"A\"] = \"start\"\n self.parents[\"B\"] = \"start\"\n self.parents[\"fin\"] = None",
"def parents(self, parents):\n\n self._parents = parents",
"def update_parents(self):\n for a_parent in self.parents:\n for child in self.children:\n for a_dest in self.children[child]:\n if (a_dest[0] + a_parent.children[self][0][0],\n a_parent.children[self][0][1]) not in a_parent.children[child]:\n a_parent.children[child].append((a_dest[0] + a_parent.children[self][0][0],\n a_parent.children[self][0][1]))\n a_parent.update_parents()",
"def insert_all_commit_parents(self, parents, commit_id, sha, repo_id):\n to_insert = []\n for parent in parents:\n parent_id = self.select_commit_id(parent.hexsha, repo_id)\n\n if not parent_id:\n self._logger.warning(\"parent commit id not found! SHA parent \" + str(parent.hexsha))\n\n if parent_id:\n to_insert.append((repo_id, commit_id, sha, parent_id, parent.hexsha))\n else:\n to_insert.append((repo_id, commit_id, sha, None, parent.hexsha))\n\n if to_insert:\n cursor = self._cnx.cursor()\n query = \"INSERT IGNORE INTO commit_parent(repo_id, commit_id, commit_sha, parent_id, parent_sha) VALUES (%s, %s, %s, %s, %s)\"\n cursor.executemany(query, [i for i in to_insert])\n self._cnx.commit()\n cursor.close()",
"def parents(self, path):\n pass",
"def parenting(self):\n for child in self.childs:\n child.set_parent(self)",
"def set_parents(self, parents: list):\n\n for parent in parents:\n _ = lib.check_data(\"parent\", parent, Individual)\n self._parents = parents",
"def add_primary(self, dataset):\n root = self\n while root.parents:\n if len(root.parents) > 1:\n raise ValueError(\"This dataset has multiple parents - don't \"\n \"know which one to add to\")\n root = root.parents[0]\n root.parents.append(dataset)",
"def parent_ids(self, parent_ids):\n self._parent_ids = parent_ids",
"def parents(rectype, source, include):\n click.echo('Migrating {}s...'.format(rectype))\n with commit():\n import_parents_from_file(source, rectype=rectype, include=include)",
"def add_parent(self, parentobj):\n ## In a perfect world, I would check parentobj's type\n ## with isinstance(), but I'm not ready to take the perf hit\n self.parent = parentobj\n return True",
"def insert(self, parent, name):\n pid = self.db.insert_returning_id('simple', dict(parent=parent, name=name))\n return pid"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Convenience redirect to find the root outcome group for a particular context. Will redirect to the appropriate outcome group's URL. | def redirect_to_root_outcome_group_for_context_global(request_ctx, **request_kwargs):
path = '/v1/global/root_outcome_group'
url = request_ctx.base_api_url + path.format()
response = client.get(request_ctx, url, **request_kwargs)
return response | [
"def redirect_to_root_outcome_group_for_context_accounts(request_ctx, account_id, **request_kwargs):\n\n path = '/v1/accounts/{account_id}/root_outcome_group'\n url = request_ctx.base_api_url + path.format(account_id=account_id)\n response = client.get(request_ctx, url, **request_kwargs)\n\n return response",
"def _redirect(context, request):\n if request.registry.fallback_base_url:\n path = request.path.lstrip(\"/\")\n redirect_url = \"%s/%s\" % (request.registry.fallback_base_url.rstrip(\"/\"), path)\n else:\n redirect_url = \"%s/%s/\" % (\n request.registry.fallback_url.rstrip(\"/\"),\n context.name,\n )\n\n return HTTPFound(location=redirect_url)",
"def launch_redirect(self, context):\n resource_link = URI.from_octets(\n \"resource/%08X/\" %\n context.resource.key()).resolve(context.get_app_root())\n return self.redirect_page(context, resource_link, 303)",
"def api_root():\n res = RedirectResponse(url='/api/ui', status_code=302)\n return res",
"def redirect_catchall():\n return redirect(\"/\")",
"def redirect(request):\n matchdict = request.matchdict.copy()\n url = request.route_url(route_name, traverse=(), **matchdict)\n return HTTPFound(location=url)",
"def base_url():\n return redirect(url_for('game.setup'), code=301)",
"def _set_url_root_path(app: Flask) -> None:\n target_url = app.config['ROOT_REDIRECT_TARGET']\n if target_url is None:\n return\n\n status_code = app.config['ROOT_REDIRECT_STATUS_CODE']\n\n def _redirect():\n return redirect(target_url, status_code)\n\n app.add_url_rule('/', endpoint='root', view_func=_redirect)",
"def redirect(to):\r\n def _redirect(environ, start_response):\r\n args, kwargs = environ['wsgiorg.routing_args']\r\n start_response('301 MOVED PERMANENTLY',\r\n [('Location', to.format(*args, **kwargs))])\r\n return []\r\n return _redirect",
"def redirect(self, location):\n self.redirect_see_other(location)",
"def redirect(self):\n if self.notification.activity.resource is None:\n external_url = self.notification.activity.external_resource_url\n return self.request.RESPONSE.redirect(external_url)\n\n oguid = self.notification.activity.resource.oguid\n\n if oguid.is_on_current_admin_unit:\n try:\n resource = oguid.resolve_object()\n if resource is None:\n raise Unauthorized()\n url = resource.absolute_url()\n except InvalidOguidIntIdPart:\n raise NotFound('Requested object has been deleted')\n\n else:\n admin_unit = ogds_service().fetch_admin_unit(oguid.admin_unit_id)\n url = ResolveOGUIDView.url_for(oguid, admin_unit)\n\n return self.request.RESPONSE.redirect(self.preserve_query_string(url))",
"def redirect_to_default():\n\n log(\"Received GET request for /generate, returning to default page\")\n return redirect(url_for(\"default\"))",
"def _userroot_redirect(request, ajax_data={}):\n user_root = request.url_for(\n \"images-userroot\", map_username=request.user.name)\n return found_redirect(\n request, request.GET.get(\"complete\", user_root), ajax_data)",
"def redirect(target):\n return {\n 'status': '302',\n 'statusDescription': 'Found',\n 'headers': {\n 'location': [{\n 'key': 'Location',\n 'value': target\n }]\n }\n }",
"def _redirect(self, identifier):\n\n return None",
"def result_get():\n return redirect(\"/\")",
"def folder_ad_default(request):\n\n try:\n root_folder = Folder.objects.active().get(parent=None)\n except Folder.DoesNotExist:\n return HttpResponseNotFound(msg.MSG_ROOT_FOLDER_DOESNT_EXIST)\n\n return redirect(reverse('folder-ad-detail', args=(root_folder.pk,)))",
"def show_lightning_round_score_start_redirect():\n return redirect(url_for(\"show_lightning_round_start_three_way_tie\"), 301)",
"def redirect_to(self, route_name, *args, **kwargs):\n self.redirect(self.uri_for(route_name, *args, **kwargs))"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Convenience redirect to find the root outcome group for a particular context. Will redirect to the appropriate outcome group's URL. | def redirect_to_root_outcome_group_for_context_accounts(request_ctx, account_id, **request_kwargs):
path = '/v1/accounts/{account_id}/root_outcome_group'
url = request_ctx.base_api_url + path.format(account_id=account_id)
response = client.get(request_ctx, url, **request_kwargs)
return response | [
"def redirect_to_root_outcome_group_for_context_global(request_ctx, **request_kwargs):\n\n path = '/v1/global/root_outcome_group'\n url = request_ctx.base_api_url + path.format()\n response = client.get(request_ctx, url, **request_kwargs)\n\n return response",
"def _redirect(context, request):\n if request.registry.fallback_base_url:\n path = request.path.lstrip(\"/\")\n redirect_url = \"%s/%s\" % (request.registry.fallback_base_url.rstrip(\"/\"), path)\n else:\n redirect_url = \"%s/%s/\" % (\n request.registry.fallback_url.rstrip(\"/\"),\n context.name,\n )\n\n return HTTPFound(location=redirect_url)",
"def launch_redirect(self, context):\n resource_link = URI.from_octets(\n \"resource/%08X/\" %\n context.resource.key()).resolve(context.get_app_root())\n return self.redirect_page(context, resource_link, 303)",
"def api_root():\n res = RedirectResponse(url='/api/ui', status_code=302)\n return res",
"def redirect_catchall():\n return redirect(\"/\")",
"def redirect(request):\n matchdict = request.matchdict.copy()\n url = request.route_url(route_name, traverse=(), **matchdict)\n return HTTPFound(location=url)",
"def base_url():\n return redirect(url_for('game.setup'), code=301)",
"def _set_url_root_path(app: Flask) -> None:\n target_url = app.config['ROOT_REDIRECT_TARGET']\n if target_url is None:\n return\n\n status_code = app.config['ROOT_REDIRECT_STATUS_CODE']\n\n def _redirect():\n return redirect(target_url, status_code)\n\n app.add_url_rule('/', endpoint='root', view_func=_redirect)",
"def redirect(to):\r\n def _redirect(environ, start_response):\r\n args, kwargs = environ['wsgiorg.routing_args']\r\n start_response('301 MOVED PERMANENTLY',\r\n [('Location', to.format(*args, **kwargs))])\r\n return []\r\n return _redirect",
"def redirect(self, location):\n self.redirect_see_other(location)",
"def redirect(self):\n if self.notification.activity.resource is None:\n external_url = self.notification.activity.external_resource_url\n return self.request.RESPONSE.redirect(external_url)\n\n oguid = self.notification.activity.resource.oguid\n\n if oguid.is_on_current_admin_unit:\n try:\n resource = oguid.resolve_object()\n if resource is None:\n raise Unauthorized()\n url = resource.absolute_url()\n except InvalidOguidIntIdPart:\n raise NotFound('Requested object has been deleted')\n\n else:\n admin_unit = ogds_service().fetch_admin_unit(oguid.admin_unit_id)\n url = ResolveOGUIDView.url_for(oguid, admin_unit)\n\n return self.request.RESPONSE.redirect(self.preserve_query_string(url))",
"def redirect_to_default():\n\n log(\"Received GET request for /generate, returning to default page\")\n return redirect(url_for(\"default\"))",
"def _userroot_redirect(request, ajax_data={}):\n user_root = request.url_for(\n \"images-userroot\", map_username=request.user.name)\n return found_redirect(\n request, request.GET.get(\"complete\", user_root), ajax_data)",
"def redirect(target):\n return {\n 'status': '302',\n 'statusDescription': 'Found',\n 'headers': {\n 'location': [{\n 'key': 'Location',\n 'value': target\n }]\n }\n }",
"def _redirect(self, identifier):\n\n return None",
"def result_get():\n return redirect(\"/\")",
"def folder_ad_default(request):\n\n try:\n root_folder = Folder.objects.active().get(parent=None)\n except Folder.DoesNotExist:\n return HttpResponseNotFound(msg.MSG_ROOT_FOLDER_DOESNT_EXIST)\n\n return redirect(reverse('folder-ad-detail', args=(root_folder.pk,)))",
"def show_lightning_round_score_start_redirect():\n return redirect(url_for(\"show_lightning_round_start_three_way_tie\"), 301)",
"def redirect_to(self, route_name, *args, **kwargs):\n self.redirect(self.uri_for(route_name, *args, **kwargs))"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Modify an existing outcome group. Fields not provided are left as is; unrecognized fields are ignored. When changing the parent outcome group, the new parent group must belong to the same context as this outcome group, and must not be a descendant of this outcome group (i.e. no cycles allowed). | def update_outcome_group_global(request_ctx, id, title=None, description=None, vendor_guid=None, parent_outcome_group_id=None, **request_kwargs):
path = '/v1/global/outcome_groups/{id}'
payload = {
'title' : title,
'description' : description,
'vendor_guid' : vendor_guid,
'parent_outcome_group_id' : parent_outcome_group_id,
}
url = request_ctx.base_api_url + path.format(id=id)
response = client.put(request_ctx, url, payload=payload, **request_kwargs)
return response | [
"def update_outcome_group_accounts(request_ctx, account_id, id, title=None, description=None, vendor_guid=None, parent_outcome_group_id=None, **request_kwargs):\n\n path = '/v1/accounts/{account_id}/outcome_groups/{id}'\n payload = {\n 'title' : title,\n 'description' : description,\n 'vendor_guid' : vendor_guid,\n 'parent_outcome_group_id' : parent_outcome_group_id,\n }\n url = request_ctx.base_api_url + path.format(account_id=account_id, id=id)\n response = client.put(request_ctx, url, payload=payload, **request_kwargs)\n\n return response",
"def update_group():\n _id = request.form['_id']\n name = request.form['name']\n data, code, message = FIELD_SERVICE.update_group(_id, name)\n return __result(data, code, message)",
"def test_modify_group(self):\n response = self.client.modify_group(\"ABC123\")\n self.assertEqual(response[\"method\"], \"POST\")\n self.assertEqual(response[\"uri\"], \"/admin/v1/groups/ABC123\")\n self.assertEqual(util.params_to_dict(response[\"body\"]), {\"account_id\": [self.client.account_id]})",
"def update_group(self, group, **attrs):\n return self._update(_group.Group, group, prepend_key=False, **attrs)",
"def test_patch_project_move_child(self):\n new_category = self.make_project(\n 'NewCategory', PROJECT_TYPE_CATEGORY, self.category\n )\n self.make_assignment(new_category, self.user, self.role_owner)\n url = reverse(\n 'projectroles:api_project_update',\n kwargs={'project': self.category.sodar_uuid},\n )\n patch_data = {'parent': str(new_category.sodar_uuid)}\n response = self.request_knox(url, method='PATCH', data=patch_data)\n self.assertEqual(response.status_code, 400, msg=response.content)",
"def update_group(self, group_name):\n self._runtime_error_if_called_during_showtime('update_group')\n self._current_update_group = group_name",
"def update_thing_group(thingGroupName=None, thingGroupProperties=None, expectedVersion=None):\n pass",
"def grp(self, grpNode):\n\t\tself._grp = grpNode",
"def update_group(self, group_name, new_group_name=None, new_path=None):\r\n params = {'GroupName' : group_name}\r\n if new_group_name:\r\n params['NewGroupName'] = new_group_name\r\n if new_path:\r\n params['NewPath'] = new_path\r\n return self.get_response('UpdateGroup', params)",
"def update_research_group(self, employee_id, new_research_group):\n cursor = self.dbconnect.get_cursor()\n try:\n cursor.execute('UPDATE employee '\n 'SET research_group = %s '\n 'WHERE id=%s;',\n (new_research_group, employee_id))\n self.dbconnect.commit()\n except:\n self.dbconnect.rollback()\n raise",
"def test_update_group(self):\n # Create group\n group_name = data_utils.rand_name('group')\n description = data_utils.rand_name('Description')\n group = self.client.create_group(\n name=group_name, group_type='exclusivity',\n description=description)\n\n self.addCleanup(self.client.delete_group, group['id'])\n\n group_id = group.get('id')\n\n new_desc = data_utils.rand_name('UpdateDescription')\n updated_group = self.client.update_group(\n group_id, new_desc)\n\n self.assertEqual(updated_group['description'], new_desc)",
"def _group_append(groups, id, new_group):\n\n path_inds = []\n _, _, idx = Skeleton._group_parent(groups, id)\n while id is not None:\n path_inds.append(idx)\n id, idx, _ = Skeleton._group_parent(groups, id)\n\n path_inds = list(reversed(path_inds))\n\n if len(path_inds) == 1:\n groups[path_inds[0]]._replace(children=new_group)\n elif len(path_inds) == 2:\n groups[path_inds[0]].children[path_inds[1]]._replace(children=new_group)\n elif len(path_inds) == 3:\n groups[path_inds[0]].children[path_inds[1]].children[path_inds[2]]._replace(children=new_group)\n\n return groups",
"def edit_group_command(self):\n self.switch_frame(\"Edit Group\")\n id = self.parent.get_frame_id(\"Edit Group\")\n self.parent.frames[id].display_group(self.user.active_group)",
"def modify(self, key: int, data: Dict[str, Any]) -> APIResponse:\n return self._put(\"detail\", {\"group_pk\": key}, data)",
"def test_update_group(self):\n pass",
"async def update_group(\n payload: GroupIn,\n group_id: int = Path(..., gt=0),\n _=Security(get_current_access, scopes=[AccessType.admin])\n):\n return await crud.update_entry(groups, payload, group_id)",
"def replace_group(self, group: AOVGroup) -> None:\n idx = self.groups.index(group)\n\n self.groups[idx] = group",
"def request_group_update():\n target_group = Group.query.filter_by(id=request.args['id']).first()\n if target_group is None:\n return group_list(\"Unknown group.\")\n\n return Response(\n render_template(\n 'admin/group/create-update.html',\n csrf_token=(\n get_raw_jwt() or {}).get(\"csrf\"),\n target=\"/admin/group/update\",\n id=target_group.id,\n name=target_group.name,\n meter=target_group.group_meter_id,\n group_production_meter_id_first=target_group.group_production_meter_id_first,\n group_production_meter_id_second=target_group.group_production_meter_id_second),\n mimetype='text/html')",
"def update_skill_group(SkillGroupArn=None, SkillGroupName=None, Description=None):\n pass"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Modify an existing outcome group. Fields not provided are left as is; unrecognized fields are ignored. When changing the parent outcome group, the new parent group must belong to the same context as this outcome group, and must not be a descendant of this outcome group (i.e. no cycles allowed). | def update_outcome_group_accounts(request_ctx, account_id, id, title=None, description=None, vendor_guid=None, parent_outcome_group_id=None, **request_kwargs):
path = '/v1/accounts/{account_id}/outcome_groups/{id}'
payload = {
'title' : title,
'description' : description,
'vendor_guid' : vendor_guid,
'parent_outcome_group_id' : parent_outcome_group_id,
}
url = request_ctx.base_api_url + path.format(account_id=account_id, id=id)
response = client.put(request_ctx, url, payload=payload, **request_kwargs)
return response | [
"def update_outcome_group_global(request_ctx, id, title=None, description=None, vendor_guid=None, parent_outcome_group_id=None, **request_kwargs):\n\n path = '/v1/global/outcome_groups/{id}'\n payload = {\n 'title' : title,\n 'description' : description,\n 'vendor_guid' : vendor_guid,\n 'parent_outcome_group_id' : parent_outcome_group_id,\n }\n url = request_ctx.base_api_url + path.format(id=id)\n response = client.put(request_ctx, url, payload=payload, **request_kwargs)\n\n return response",
"def update_group():\n _id = request.form['_id']\n name = request.form['name']\n data, code, message = FIELD_SERVICE.update_group(_id, name)\n return __result(data, code, message)",
"def test_modify_group(self):\n response = self.client.modify_group(\"ABC123\")\n self.assertEqual(response[\"method\"], \"POST\")\n self.assertEqual(response[\"uri\"], \"/admin/v1/groups/ABC123\")\n self.assertEqual(util.params_to_dict(response[\"body\"]), {\"account_id\": [self.client.account_id]})",
"def update_group(self, group, **attrs):\n return self._update(_group.Group, group, prepend_key=False, **attrs)",
"def test_patch_project_move_child(self):\n new_category = self.make_project(\n 'NewCategory', PROJECT_TYPE_CATEGORY, self.category\n )\n self.make_assignment(new_category, self.user, self.role_owner)\n url = reverse(\n 'projectroles:api_project_update',\n kwargs={'project': self.category.sodar_uuid},\n )\n patch_data = {'parent': str(new_category.sodar_uuid)}\n response = self.request_knox(url, method='PATCH', data=patch_data)\n self.assertEqual(response.status_code, 400, msg=response.content)",
"def update_group(self, group_name):\n self._runtime_error_if_called_during_showtime('update_group')\n self._current_update_group = group_name",
"def update_thing_group(thingGroupName=None, thingGroupProperties=None, expectedVersion=None):\n pass",
"def grp(self, grpNode):\n\t\tself._grp = grpNode",
"def update_group(self, group_name, new_group_name=None, new_path=None):\r\n params = {'GroupName' : group_name}\r\n if new_group_name:\r\n params['NewGroupName'] = new_group_name\r\n if new_path:\r\n params['NewPath'] = new_path\r\n return self.get_response('UpdateGroup', params)",
"def update_research_group(self, employee_id, new_research_group):\n cursor = self.dbconnect.get_cursor()\n try:\n cursor.execute('UPDATE employee '\n 'SET research_group = %s '\n 'WHERE id=%s;',\n (new_research_group, employee_id))\n self.dbconnect.commit()\n except:\n self.dbconnect.rollback()\n raise",
"def test_update_group(self):\n # Create group\n group_name = data_utils.rand_name('group')\n description = data_utils.rand_name('Description')\n group = self.client.create_group(\n name=group_name, group_type='exclusivity',\n description=description)\n\n self.addCleanup(self.client.delete_group, group['id'])\n\n group_id = group.get('id')\n\n new_desc = data_utils.rand_name('UpdateDescription')\n updated_group = self.client.update_group(\n group_id, new_desc)\n\n self.assertEqual(updated_group['description'], new_desc)",
"def _group_append(groups, id, new_group):\n\n path_inds = []\n _, _, idx = Skeleton._group_parent(groups, id)\n while id is not None:\n path_inds.append(idx)\n id, idx, _ = Skeleton._group_parent(groups, id)\n\n path_inds = list(reversed(path_inds))\n\n if len(path_inds) == 1:\n groups[path_inds[0]]._replace(children=new_group)\n elif len(path_inds) == 2:\n groups[path_inds[0]].children[path_inds[1]]._replace(children=new_group)\n elif len(path_inds) == 3:\n groups[path_inds[0]].children[path_inds[1]].children[path_inds[2]]._replace(children=new_group)\n\n return groups",
"def edit_group_command(self):\n self.switch_frame(\"Edit Group\")\n id = self.parent.get_frame_id(\"Edit Group\")\n self.parent.frames[id].display_group(self.user.active_group)",
"def modify(self, key: int, data: Dict[str, Any]) -> APIResponse:\n return self._put(\"detail\", {\"group_pk\": key}, data)",
"def test_update_group(self):\n pass",
"async def update_group(\n payload: GroupIn,\n group_id: int = Path(..., gt=0),\n _=Security(get_current_access, scopes=[AccessType.admin])\n):\n return await crud.update_entry(groups, payload, group_id)",
"def replace_group(self, group: AOVGroup) -> None:\n idx = self.groups.index(group)\n\n self.groups[idx] = group",
"def request_group_update():\n target_group = Group.query.filter_by(id=request.args['id']).first()\n if target_group is None:\n return group_list(\"Unknown group.\")\n\n return Response(\n render_template(\n 'admin/group/create-update.html',\n csrf_token=(\n get_raw_jwt() or {}).get(\"csrf\"),\n target=\"/admin/group/update\",\n id=target_group.id,\n name=target_group.name,\n meter=target_group.group_meter_id,\n group_production_meter_id_first=target_group.group_production_meter_id_first,\n group_production_meter_id_second=target_group.group_production_meter_id_second),\n mimetype='text/html')",
"def update_skill_group(SkillGroupArn=None, SkillGroupName=None, Description=None):\n pass"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Deleting an outcome group deletes descendant outcome groups and outcome links. The linked outcomes themselves are only deleted if all links to the outcome were deleted. Aligned outcomes cannot be deleted; as such, if all remaining links to an aligned outcome are included in this group's descendants, the group deletion will fail. | def delete_outcome_group_global(request_ctx, id, **request_kwargs):
path = '/v1/global/outcome_groups/{id}'
url = request_ctx.base_api_url + path.format(id=id)
response = client.delete(request_ctx, url, **request_kwargs)
return response | [
"def delete_outcome_group_accounts(request_ctx, account_id, id, **request_kwargs):\n\n path = '/v1/accounts/{account_id}/outcome_groups/{id}'\n url = request_ctx.base_api_url + path.format(account_id=account_id, id=id)\n response = client.delete(request_ctx, url, **request_kwargs)\n\n return response",
"def delete_consistencygroup(self, context, group):\n return self.common.delete_consistencygroup(self, context, group)",
"def delete_group(self, group):\n raise NotImplementedError('delete_group')",
"def delete_consistencygroup(self, context, group):\r\n extraSpecs = self._initial_setup(None, group)\r\n\r\n try:\r\n LOG.info(\"Beginning delete consistency group process\")\r\n self.adapter.delete_consistency_group(group,\r\n extraSpecs)\r\n except Exception:\r\n exceptionMessage = (_(\r\n \"Failed to delete consistency group: %(cgName)s.\")\r\n % {'group': group})\r\n raise exception.VolumeBackendAPIException(data=exceptionMessage)\r\n return group",
"def test_groups_group_ref_delete(self):\n pass",
"def delete_targetgroup(self, group_id):\r\n result = False\r\n if self._db(self._db.targetgroup.id==group_id).select():\r\n result = True\r\n self._db(self._db.targetgroup.id==group_id).delete()\r\n self._db.commit()\r\n return result",
"async def relay_delete_group(ctx, id_group: int):\n # Check if group exists\n if not (group := await Group.get_or_none(id=id_group)):\n raise EntityNotFoundException(f\"Group with ID {id_group} does not exist🤖️\")\n\n # Get all channels related to this group\n channels = await Channel.filter(group=id_group)\n # Delete all Relay-webhooks and Channel-entities related to this group\n for channel in channels:\n if webhook := await bot.fetch_webhook(webhook_id=channel.hook):\n await webhook.delete()\n await channel.delete()\n # Delete group\n await group.delete()\n await ctx.send(f\"Group with ID {id_group} was successful deleted🤖️\")",
"def unlink_outcome_global(request_ctx, id, outcome_id, **request_kwargs):\n\n path = '/v1/global/outcome_groups/{id}/outcomes/{outcome_id}'\n url = request_ctx.base_api_url + path.format(id=id, outcome_id=outcome_id)\n response = client.delete(request_ctx, url, **request_kwargs)\n\n return response",
"def delete_intrusion_rule_group_command(client: Client, args: Dict[str, Any]) -> CommandResults:\n rule_group_id = args['rule_group_id']\n delete_related_rules = arg_to_optional_bool(args.get('delete_related_rules'))\n\n raw_response = client.delete_intrusion_rule_group(\n rule_group_id=rule_group_id,\n delete_related_rules=delete_related_rules,\n )\n\n readable_output = get_readable_output(\n response=raw_response,\n header_by_keys=INTRUSION_RULE_GROUP_HEADERS_BY_KEYS,\n title=f'Deleted {INTRUSION_RULE_GROUP_TITLE}',\n )\n\n return CommandResults(\n readable_output=readable_output,\n raw_response=raw_response,\n )",
"def delete_thing_group(thingGroupName=None, expectedVersion=None):\n pass",
"def delete_group(self, group_name):\n\n group_id = self.get_group_id(group_name)\n if group_id:\n role_ids = []\n permission_ids = []\n\n roles = self.request(\"GET\", f\"groups/{group_id}/roles\")\n for role in roles:\n role_ids.append(role[\"_id\"])\n permission_ids.extend(role.get(\"permissions\", []))\n\n self.request(\"DELETE\", f\"groups/{group_id}\")\n for role_id in role_ids:\n self.request(\"DELETE\", f\"roles/{role_id}\")\n for permission_id in permission_ids:\n self.request(\"DELETE\", f\"permissions/{permission_id}\")",
"def delete(self, consistencygroup, force=False):\n body = {'consistencygroup': {'force': force}}\n self.run_hooks('modify_body_for_action', body, 'consistencygroup')\n url = '/consistencygroups/%s/delete' % base.getid(consistencygroup)\n resp, body = self.api.client.post(url, body=body)\n return common_base.TupleWithMeta((resp, body), resp)",
"def test_delete_group(self):\n pass",
"def test_delete_groups(self):\n pass",
"def purge_group_id(self, group_id, group_type):\n\n # Get the proper URL for the group based on type, then collect the\n # group to get the list of objects inside\n group_url = URL_MAP[group_type]\n group = self.req(f\"{group_url}/{group_id}\")\n\n # Delete the group first and print a status message\n self.req(f\"{group_url}/{group_id}\", method=\"delete\")\n print(f\"Deleted {group_type} named {group['name']} with ID {group_id}\")\n\n # Iterate over each object, find the proper URL, and delete the object\n for obj in group[\"objects\"]:\n obj_url = URL_MAP[obj[\"type\"]]\n self.req(f\"{obj_url}/{obj['id']}\", method=\"delete\")\n print(\n f\"Deleted {obj['type']} named {obj['name']} with ID {obj['id']}\"\n )",
"def delete_group(self, group_id):\n url = self.groups_url + \"/%s\" % group_id\n return requests.delete(url, headers=self.headers)",
"def delete_intrusion_rule_group(\n self,\n rule_group_id: str,\n delete_related_rules: bool = None,\n ) -> Dict[str, Any]:\n params = assign_params(\n cascadeDeleteOrphanedRules=delete_related_rules\n )\n\n return self._http_request(\n method='DELETE',\n url_suffix=f'object/intrusionrulegroups/{rule_group_id}',\n params=params,\n )",
"def do_del_group(dbsync, group):\n pass",
"def do_delete_group(self, line):\n words = collections.deque(line.split())\n try:\n action_profile_name = self.get_next_token(words, \"action profile name\")\n group_handle = self.get_handle(words, \"group handle\")\n if self._thrift_client.delete_group(action_profile_name, group_handle) != 0:\n print \"Group deleted\"\n else:\n print >> sys.stderr, \"Invalid group handle %d\" % group_handle\n except Exception as e:\n self.usage(e, \"delete_group\")"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Deleting an outcome group deletes descendant outcome groups and outcome links. The linked outcomes themselves are only deleted if all links to the outcome were deleted. Aligned outcomes cannot be deleted; as such, if all remaining links to an aligned outcome are included in this group's descendants, the group deletion will fail. | def delete_outcome_group_accounts(request_ctx, account_id, id, **request_kwargs):
path = '/v1/accounts/{account_id}/outcome_groups/{id}'
url = request_ctx.base_api_url + path.format(account_id=account_id, id=id)
response = client.delete(request_ctx, url, **request_kwargs)
return response | [
"def delete_outcome_group_global(request_ctx, id, **request_kwargs):\n\n path = '/v1/global/outcome_groups/{id}'\n url = request_ctx.base_api_url + path.format(id=id)\n response = client.delete(request_ctx, url, **request_kwargs)\n\n return response",
"def delete_consistencygroup(self, context, group):\n return self.common.delete_consistencygroup(self, context, group)",
"def delete_group(self, group):\n raise NotImplementedError('delete_group')",
"def delete_consistencygroup(self, context, group):\r\n extraSpecs = self._initial_setup(None, group)\r\n\r\n try:\r\n LOG.info(\"Beginning delete consistency group process\")\r\n self.adapter.delete_consistency_group(group,\r\n extraSpecs)\r\n except Exception:\r\n exceptionMessage = (_(\r\n \"Failed to delete consistency group: %(cgName)s.\")\r\n % {'group': group})\r\n raise exception.VolumeBackendAPIException(data=exceptionMessage)\r\n return group",
"def test_groups_group_ref_delete(self):\n pass",
"def delete_targetgroup(self, group_id):\r\n result = False\r\n if self._db(self._db.targetgroup.id==group_id).select():\r\n result = True\r\n self._db(self._db.targetgroup.id==group_id).delete()\r\n self._db.commit()\r\n return result",
"async def relay_delete_group(ctx, id_group: int):\n # Check if group exists\n if not (group := await Group.get_or_none(id=id_group)):\n raise EntityNotFoundException(f\"Group with ID {id_group} does not exist🤖️\")\n\n # Get all channels related to this group\n channels = await Channel.filter(group=id_group)\n # Delete all Relay-webhooks and Channel-entities related to this group\n for channel in channels:\n if webhook := await bot.fetch_webhook(webhook_id=channel.hook):\n await webhook.delete()\n await channel.delete()\n # Delete group\n await group.delete()\n await ctx.send(f\"Group with ID {id_group} was successful deleted🤖️\")",
"def unlink_outcome_global(request_ctx, id, outcome_id, **request_kwargs):\n\n path = '/v1/global/outcome_groups/{id}/outcomes/{outcome_id}'\n url = request_ctx.base_api_url + path.format(id=id, outcome_id=outcome_id)\n response = client.delete(request_ctx, url, **request_kwargs)\n\n return response",
"def delete_intrusion_rule_group_command(client: Client, args: Dict[str, Any]) -> CommandResults:\n rule_group_id = args['rule_group_id']\n delete_related_rules = arg_to_optional_bool(args.get('delete_related_rules'))\n\n raw_response = client.delete_intrusion_rule_group(\n rule_group_id=rule_group_id,\n delete_related_rules=delete_related_rules,\n )\n\n readable_output = get_readable_output(\n response=raw_response,\n header_by_keys=INTRUSION_RULE_GROUP_HEADERS_BY_KEYS,\n title=f'Deleted {INTRUSION_RULE_GROUP_TITLE}',\n )\n\n return CommandResults(\n readable_output=readable_output,\n raw_response=raw_response,\n )",
"def delete_thing_group(thingGroupName=None, expectedVersion=None):\n pass",
"def delete_group(self, group_name):\n\n group_id = self.get_group_id(group_name)\n if group_id:\n role_ids = []\n permission_ids = []\n\n roles = self.request(\"GET\", f\"groups/{group_id}/roles\")\n for role in roles:\n role_ids.append(role[\"_id\"])\n permission_ids.extend(role.get(\"permissions\", []))\n\n self.request(\"DELETE\", f\"groups/{group_id}\")\n for role_id in role_ids:\n self.request(\"DELETE\", f\"roles/{role_id}\")\n for permission_id in permission_ids:\n self.request(\"DELETE\", f\"permissions/{permission_id}\")",
"def delete(self, consistencygroup, force=False):\n body = {'consistencygroup': {'force': force}}\n self.run_hooks('modify_body_for_action', body, 'consistencygroup')\n url = '/consistencygroups/%s/delete' % base.getid(consistencygroup)\n resp, body = self.api.client.post(url, body=body)\n return common_base.TupleWithMeta((resp, body), resp)",
"def test_delete_group(self):\n pass",
"def test_delete_groups(self):\n pass",
"def purge_group_id(self, group_id, group_type):\n\n # Get the proper URL for the group based on type, then collect the\n # group to get the list of objects inside\n group_url = URL_MAP[group_type]\n group = self.req(f\"{group_url}/{group_id}\")\n\n # Delete the group first and print a status message\n self.req(f\"{group_url}/{group_id}\", method=\"delete\")\n print(f\"Deleted {group_type} named {group['name']} with ID {group_id}\")\n\n # Iterate over each object, find the proper URL, and delete the object\n for obj in group[\"objects\"]:\n obj_url = URL_MAP[obj[\"type\"]]\n self.req(f\"{obj_url}/{obj['id']}\", method=\"delete\")\n print(\n f\"Deleted {obj['type']} named {obj['name']} with ID {obj['id']}\"\n )",
"def delete_group(self, group_id):\n url = self.groups_url + \"/%s\" % group_id\n return requests.delete(url, headers=self.headers)",
"def delete_intrusion_rule_group(\n self,\n rule_group_id: str,\n delete_related_rules: bool = None,\n ) -> Dict[str, Any]:\n params = assign_params(\n cascadeDeleteOrphanedRules=delete_related_rules\n )\n\n return self._http_request(\n method='DELETE',\n url_suffix=f'object/intrusionrulegroups/{rule_group_id}',\n params=params,\n )",
"def do_del_group(dbsync, group):\n pass",
"def do_delete_group(self, line):\n words = collections.deque(line.split())\n try:\n action_profile_name = self.get_next_token(words, \"action profile name\")\n group_handle = self.get_handle(words, \"group handle\")\n if self._thrift_client.delete_group(action_profile_name, group_handle) != 0:\n print \"Group deleted\"\n else:\n print >> sys.stderr, \"Invalid group handle %d\" % group_handle\n except Exception as e:\n self.usage(e, \"delete_group\")"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Link an outcome into the outcome group. The outcome to link can either be specified by a PUT to the link URL for a specific outcome (the outcome_id in the PUT URLs) or by supplying the information for a new outcome (title, description, ratings, mastery_points) in a POST to the collection. If linking an existing outcome, the outcome_id must identify an outcome available to this context; i.e. an outcome owned by this group's context, an outcome owned by an associated account, or a global outcome. With outcome_id present, any other parameters are ignored. If defining a new outcome, the outcome is created in the outcome group's context using the provided title, description, ratings, and mastery points; the title is required but all other fields are optional. The new outcome is then linked into the outcome group. If ratings are provided when creating a new outcome, an embedded rubric criterion is included in the new outcome. This criterion's mastery_points default to the maximum points in the highest rating if not specified in the mastery_points parameter. Any ratings lacking a description are given a default of "No description". Any ratings lacking a point value are given a default of 0. If no ratings are provided, the mastery_points parameter is ignored. | def create_link_outcome_global(request_ctx, id, outcome_id=None, title=None, display_name=None, description=None, vendor_guid=None, mastery_points=None, ratings_description=None, ratings_points=None, **request_kwargs):
path = '/v1/global/outcome_groups/{id}/outcomes'
payload = {
'outcome_id' : outcome_id,
'title' : title,
'display_name' : display_name,
'description' : description,
'vendor_guid' : vendor_guid,
'mastery_points' : mastery_points,
'ratings[description]' : ratings_description,
'ratings[points]' : ratings_points,
}
url = request_ctx.base_api_url + path.format(id=id)
response = client.post(request_ctx, url, payload=payload, **request_kwargs)
return response | [
"def create_link_outcome_global_outcome_id(request_ctx, id, outcome_id=None, title=None, display_name=None, description=None, vendor_guid=None, mastery_points=None, ratings_description=None, ratings_points=None, **request_kwargs):\n\n path = '/v1/global/outcome_groups/{id}/outcomes/{outcome_id}'\n payload = {\n 'title' : title,\n 'display_name' : display_name,\n 'description' : description,\n 'vendor_guid' : vendor_guid,\n 'mastery_points' : mastery_points,\n 'ratings[description]' : ratings_description,\n 'ratings[points]' : ratings_points,\n }\n url = request_ctx.base_api_url + path.format(id=id, outcome_id=outcome_id)\n response = client.put(request_ctx, url, payload=payload, **request_kwargs)\n\n return response",
"def add_outcome(self, outcome):\n if outcome.name in self.outcomes:\n raise KeyError(\n \"Option '{}' already contains an outcome '{}'.\".format(\n self.name,\n outcome.name\n )\n )\n\n self.outcomes[outcome.name] = outcome",
"def outcome(self, outcome):\n self._outcome = outcome",
"def __create_violation_link(self, agreement_id, violation, extras):\n LOG.debug('Violation instance created for agreement {}'.format(agreement_id))\n\n now_iso = arrow.utcnow().isoformat()\n id = '/violation_link/' + str(uuid.uuid4())\n myrulesengine = rulesengine.RulesEngine()\n agreement = myrulesengine._registry.get_resource(agreement_id, None)\n agreement.identifier = agreement_id\n\n res = core_model.Link(id, occi_violation.VIOLATION_LINK, [], agreement, violation)\n res.attributes = {'occi.core.source': agreement_id,\n 'occi.core.target': violation.identifier}\n\n res.provider = extras[\"security\"].items()[0][0]\n res.customer = extras[\"customer\"]\n res.source = agreement\n res.target = violation.identifier\n\n # Updating agreement resource with new link\n agreement.links.append(res)\n myrulesengine._registry.resources.__setitem__(agreement_id, agreement)\n\n LOG.debug('Inserting violation link with ID: {}'.format(id))\n myrulesengine._registry.resources.__setitem__(id, res)\n return res",
"def unlink_outcome_global(request_ctx, id, outcome_id, **request_kwargs):\n\n path = '/v1/global/outcome_groups/{id}/outcomes/{outcome_id}'\n url = request_ctx.base_api_url + path.format(id=id, outcome_id=outcome_id)\n response = client.delete(request_ctx, url, **request_kwargs)\n\n return response",
"def link_and_answer(self, url, answer):\n self.__out__(self.__call_core__('g_answer'))\n self.__out__('<i><p>' + answer + '</p></i>')\n link = '<a href=\"' + str(url) + '\" target=\"_blank\">link</a>'\n subject = '<b>'+url.split('/')[-1].split('.')[0].replace(\"-\",\" \")+'</b>'\n self.__out__(self.__call_core__('g_link').format(link=link, subject=subject))",
"def add_outcome(self, name):\n self.outcomes.append(name)",
"def link_amenity_to_a_place(place_id, amenity_id):\n place = storage.get('Place', place_id)\n if not place:\n abort(404)\n amenity = storage.get('Amenity', amenity_id)\n if not amenity:\n abort(404)\n if amenity in place.amenities:\n return amenity.to_dict()\n place.amenities.append(amenity)\n storage.save()\n return amenity.to_dict(), 201",
"def outcome(self, message: Message):\n self.log_data(\"Outcome: \" + str(message.get_payload()))\n self.outcome_history.append(message.get_payload()[\"outcome\"])\n self.total_reward += message.get_payload()[\"reward\"]\n self.last_reward = message.get_payload()[\"reward\"]\n self.log_data(\"Agent (eatherley) Total Reward now: \" + str(self.total_reward))\n if self.last_reward == 1 or self.last_reward == 0:\n self.enemy_defections += 1",
"def link_amenity_place(place_id, amenity_id):\n place = storage.get(\"Place\", place_id)\n if not place:\n abort(404)\n\n amenity = storage.get(\"Amenity\", amenity_id)\n if not amenity:\n abort(404)\n\n if getenv('HBNB_TYPE_STORAGE') == 'db':\n if amenity in place.amenities:\n return make_response(jsonify(amenity.to_dict()), 200)\n place.amenities.append(amenity)\n else:\n if amenity_id in place.amenity_ids:\n return make_response(jsonify(amenity.to_dict()), 200)\n place.amenity_ids.append(amenity_id)\n\n storage.save()\n return make_response(jsonify(amenity.to_dict()), 201)",
"def update_outcome_group_global(request_ctx, id, title=None, description=None, vendor_guid=None, parent_outcome_group_id=None, **request_kwargs):\n\n path = '/v1/global/outcome_groups/{id}'\n payload = {\n 'title' : title,\n 'description' : description,\n 'vendor_guid' : vendor_guid,\n 'parent_outcome_group_id' : parent_outcome_group_id,\n }\n url = request_ctx.base_api_url + path.format(id=id)\n response = client.put(request_ctx, url, payload=payload, **request_kwargs)\n\n return response",
"def create_outcome_ln(patient, outcome, value, fp):\n if not pd.isna(value):\n if value == 0:\n eval_ln = CEvaluationLink(CPredicateNode(outcome), CListLink(patient, CConceptNode(\"negative\")), stv=CStv(1.0, 1.0))\n else:\n eval_ln = CEvaluationLink(CPredicateNode(outcome), CListLink(patient, CConceptNode(\"positive\")), stv=CStv(1.0, 1.0))\n\n fp.write(eval_ln.recursive_print() + \"\\n\")",
"def link_amenity_to_a_place(place_id, amenity_id):\n place_obj = storage.get(Place, place_id)\n amenity_obj = storage.get(Amenity, amenity_id)\n if place_obj and amenity_obj:\n if amenity_obj not in place_obj.amenities:\n place_obj.amenities.append(amenity_obj)\n storage.save()\n return jsonify(amenity_obj.to_dict()), 201\n else:\n return jsonify(amenity_obj.to_dict()), 200\n else:\n abort(404)",
"def add(self, workflow_ID=None, parentobj_ID=None, **kwargs):\n\n uri = kwargs.get('uri')\n uid = kwargs.get('uid')\n desc = kwargs.get('desc')\n name = kwargs.get('name')\n source = kwargs.get('source')\n\n if (self.debug):\n print('MPO.ADD', workflow_ID, parentobj_ID, name, desc,uri,uid,source,kwargs, file=sys.stderr)\n\n if uid:\n payload={\"name\":name,\"description\":desc,\"source_uid\":source,\"uid\":uid}\n elif uri:\n payload={\"name\":name,\"description\":desc,\"source_uid\":source,\"uri\":uri}\n else:\n return {\"name\":name,\"description\":desc,\"source_uid\":source,\"message\":\"Must provide either uri or uid.\", 'uid':-1, \"status\":-1}\n\n return self.post(self.DATAOBJECT_RT,workflow_ID,[parentobj_ID],data=payload,**kwargs)",
"def create_link(self, word, meaning):\n print(str(self.unique_id) + \" learned \" +\n str(word) + \" for \" + str(meaning))\n self.meaning2word[meaning] = word\n self.word2meaning[word] = meaning\n self.wordsuccess[word] = []\n\n if meaning not in self.model.vocabulary:\n self.model.vocabulary[meaning] = {}\n\n # If word not in vocabulary, add it\n if word not in self.model.vocabulary[meaning]:\n self.model.vocabulary[meaning][word] = [self.unique_id]\n # Else append this agent to its users\n else:\n self.model.vocabulary[meaning][word].append(self.unique_id)",
"def add_link_to_bundle(request, bundle_id):\n\n # ensure bundle exists\n bundle = get_object_or_404(Bundle, id=bundle_id)\n\n # get/create link for given url\n url = request.data.get('url', None)\n\n # validate url is a url\n v = URLValidator()\n\n try:\n v(url)\n except ValidationError as exc:\n # the user must be joking\n return Response({'error': True, 'msg': 'Invalid URL'}, status=400)\n\n # assert that \"comfort_level\" is specified.\n # this is validated outside of the `Link` fields handled by\n # DRF serializer validation.\n comfort_level = int(request.data.get('comfort_level', None))\n if comfort_level not in [i[0] for i in COMFORT_LEVELS]:\n return Response({'error': True,\n 'msg': 'Please specify a reader comfort level'\n })\n\n url = urltools.normalize(url)\n\n try:\n # fetch existing link\n link = Link.objects.get(url=url)\n except Link.DoesNotExist:\n # create a new link\n link_serializer = LinkSerializer(data=request.data)\n link_serializer.is_valid(raise_exception=True)\n link = link_serializer.save()\n\n # add link to bundle\n if not BundleLink.objects.filter(bundle=bundle, link=link).exists():\n # call alchemy util to fetch concepts for URL\n concepts = bundles.alchemy_utils.get_concepts(url)\n this_bundle = BundleLink.objects.create(bundle=bundle,\n link=link,\n comfort_level=comfort_level,\n curator_id=1)\n for concept in concepts:\n this_bundle.tags.add(concept)\n\n return Response('', status=201)",
"def link_clip_to_episode(clip, epi, action=None):\r\n error_catch(clip, epi, action=action)\r\n if action == \"break\":\r\n epi[\"associated_clips\"].remove(clip[\"id\"])\r\n clip[\"from_episode\"][\"id\"] = None\r\n clip[\"from_episode\"][\"youtube_id\"] = None\r\n clip[\"from_episode\"][\"published_at\"] = None\r\n epi[\"curation\"] = refresh_curation_data(epi)\r\n else:\r\n epi[\"associated_clips\"].append(clip[\"id\"])\r\n clip[\"from_episode\"][\"id\"] = epi[\"id\"]\r\n clip[\"from_episode\"][\"youtube_id\"] = epi[\"youtube_id\"]\r\n clip[\"from_episode\"][\"published_at\"] = epi[\"published_at\"]\r\n epi[\"curation\"] = refresh_curation_data(epi)\r\n return clip, epi",
"def create_link(\n integration: Integration,\n installation: IntegrationInstallation,\n event: GroupEvent,\n response: Response,\n) -> None:\n external_issue = ExternalIssue.objects.create(\n organization_id=event.group.project.organization_id,\n integration_id=integration.id,\n key=response[\"key\"],\n title=event.title,\n description=installation.get_group_description(event.group, event),\n metadata=response.get(\"metadata\"),\n )\n GroupLink.objects.create(\n group_id=event.group.id,\n project_id=event.group.project_id,\n linked_type=GroupLink.LinkedType.issue,\n linked_id=external_issue.id,\n relationship=GroupLink.Relationship.references,\n data={\"provider\": integration.provider},\n )",
"def make_link(image, target, title='', window_target='', fragment=''):\n link = dom.createElement('link')\n if not isinstance(target, basestring):\n build_reference(context, target, link)\n else:\n link.setAttribute('url', target)\n if fragment:\n link.setAttribute('anchor', fragment)\n if title:\n link.setAttribute('title', title)\n if window_target:\n link.setAttribute('target', window_target)\n parent = image.parentNode\n parent.replaceChild(link, image)\n link.appendChild(image)\n return link"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Link an outcome into the outcome group. The outcome to link can either be specified by a PUT to the link URL for a specific outcome (the outcome_id in the PUT URLs) or by supplying the information for a new outcome (title, description, ratings, mastery_points) in a POST to the collection. If linking an existing outcome, the outcome_id must identify an outcome available to this context; i.e. an outcome owned by this group's context, an outcome owned by an associated account, or a global outcome. With outcome_id present, any other parameters are ignored. If defining a new outcome, the outcome is created in the outcome group's context using the provided title, description, ratings, and mastery points; the title is required but all other fields are optional. The new outcome is then linked into the outcome group. If ratings are provided when creating a new outcome, an embedded rubric criterion is included in the new outcome. This criterion's mastery_points default to the maximum points in the highest rating if not specified in the mastery_points parameter. Any ratings lacking a description are given a default of "No description". Any ratings lacking a point value are given a default of 0. If no ratings are provided, the mastery_points parameter is ignored. | def create_link_outcome_global_outcome_id(request_ctx, id, outcome_id=None, title=None, display_name=None, description=None, vendor_guid=None, mastery_points=None, ratings_description=None, ratings_points=None, **request_kwargs):
path = '/v1/global/outcome_groups/{id}/outcomes/{outcome_id}'
payload = {
'title' : title,
'display_name' : display_name,
'description' : description,
'vendor_guid' : vendor_guid,
'mastery_points' : mastery_points,
'ratings[description]' : ratings_description,
'ratings[points]' : ratings_points,
}
url = request_ctx.base_api_url + path.format(id=id, outcome_id=outcome_id)
response = client.put(request_ctx, url, payload=payload, **request_kwargs)
return response | [
"def create_link_outcome_global(request_ctx, id, outcome_id=None, title=None, display_name=None, description=None, vendor_guid=None, mastery_points=None, ratings_description=None, ratings_points=None, **request_kwargs):\n\n path = '/v1/global/outcome_groups/{id}/outcomes'\n payload = {\n 'outcome_id' : outcome_id,\n 'title' : title,\n 'display_name' : display_name,\n 'description' : description,\n 'vendor_guid' : vendor_guid,\n 'mastery_points' : mastery_points,\n 'ratings[description]' : ratings_description,\n 'ratings[points]' : ratings_points,\n }\n url = request_ctx.base_api_url + path.format(id=id)\n response = client.post(request_ctx, url, payload=payload, **request_kwargs)\n\n return response",
"def add_outcome(self, outcome):\n if outcome.name in self.outcomes:\n raise KeyError(\n \"Option '{}' already contains an outcome '{}'.\".format(\n self.name,\n outcome.name\n )\n )\n\n self.outcomes[outcome.name] = outcome",
"def outcome(self, outcome):\n self._outcome = outcome",
"def __create_violation_link(self, agreement_id, violation, extras):\n LOG.debug('Violation instance created for agreement {}'.format(agreement_id))\n\n now_iso = arrow.utcnow().isoformat()\n id = '/violation_link/' + str(uuid.uuid4())\n myrulesengine = rulesengine.RulesEngine()\n agreement = myrulesengine._registry.get_resource(agreement_id, None)\n agreement.identifier = agreement_id\n\n res = core_model.Link(id, occi_violation.VIOLATION_LINK, [], agreement, violation)\n res.attributes = {'occi.core.source': agreement_id,\n 'occi.core.target': violation.identifier}\n\n res.provider = extras[\"security\"].items()[0][0]\n res.customer = extras[\"customer\"]\n res.source = agreement\n res.target = violation.identifier\n\n # Updating agreement resource with new link\n agreement.links.append(res)\n myrulesengine._registry.resources.__setitem__(agreement_id, agreement)\n\n LOG.debug('Inserting violation link with ID: {}'.format(id))\n myrulesengine._registry.resources.__setitem__(id, res)\n return res",
"def unlink_outcome_global(request_ctx, id, outcome_id, **request_kwargs):\n\n path = '/v1/global/outcome_groups/{id}/outcomes/{outcome_id}'\n url = request_ctx.base_api_url + path.format(id=id, outcome_id=outcome_id)\n response = client.delete(request_ctx, url, **request_kwargs)\n\n return response",
"def link_and_answer(self, url, answer):\n self.__out__(self.__call_core__('g_answer'))\n self.__out__('<i><p>' + answer + '</p></i>')\n link = '<a href=\"' + str(url) + '\" target=\"_blank\">link</a>'\n subject = '<b>'+url.split('/')[-1].split('.')[0].replace(\"-\",\" \")+'</b>'\n self.__out__(self.__call_core__('g_link').format(link=link, subject=subject))",
"def add_outcome(self, name):\n self.outcomes.append(name)",
"def link_amenity_to_a_place(place_id, amenity_id):\n place = storage.get('Place', place_id)\n if not place:\n abort(404)\n amenity = storage.get('Amenity', amenity_id)\n if not amenity:\n abort(404)\n if amenity in place.amenities:\n return amenity.to_dict()\n place.amenities.append(amenity)\n storage.save()\n return amenity.to_dict(), 201",
"def outcome(self, message: Message):\n self.log_data(\"Outcome: \" + str(message.get_payload()))\n self.outcome_history.append(message.get_payload()[\"outcome\"])\n self.total_reward += message.get_payload()[\"reward\"]\n self.last_reward = message.get_payload()[\"reward\"]\n self.log_data(\"Agent (eatherley) Total Reward now: \" + str(self.total_reward))\n if self.last_reward == 1 or self.last_reward == 0:\n self.enemy_defections += 1",
"def link_amenity_place(place_id, amenity_id):\n place = storage.get(\"Place\", place_id)\n if not place:\n abort(404)\n\n amenity = storage.get(\"Amenity\", amenity_id)\n if not amenity:\n abort(404)\n\n if getenv('HBNB_TYPE_STORAGE') == 'db':\n if amenity in place.amenities:\n return make_response(jsonify(amenity.to_dict()), 200)\n place.amenities.append(amenity)\n else:\n if amenity_id in place.amenity_ids:\n return make_response(jsonify(amenity.to_dict()), 200)\n place.amenity_ids.append(amenity_id)\n\n storage.save()\n return make_response(jsonify(amenity.to_dict()), 201)",
"def update_outcome_group_global(request_ctx, id, title=None, description=None, vendor_guid=None, parent_outcome_group_id=None, **request_kwargs):\n\n path = '/v1/global/outcome_groups/{id}'\n payload = {\n 'title' : title,\n 'description' : description,\n 'vendor_guid' : vendor_guid,\n 'parent_outcome_group_id' : parent_outcome_group_id,\n }\n url = request_ctx.base_api_url + path.format(id=id)\n response = client.put(request_ctx, url, payload=payload, **request_kwargs)\n\n return response",
"def create_outcome_ln(patient, outcome, value, fp):\n if not pd.isna(value):\n if value == 0:\n eval_ln = CEvaluationLink(CPredicateNode(outcome), CListLink(patient, CConceptNode(\"negative\")), stv=CStv(1.0, 1.0))\n else:\n eval_ln = CEvaluationLink(CPredicateNode(outcome), CListLink(patient, CConceptNode(\"positive\")), stv=CStv(1.0, 1.0))\n\n fp.write(eval_ln.recursive_print() + \"\\n\")",
"def link_amenity_to_a_place(place_id, amenity_id):\n place_obj = storage.get(Place, place_id)\n amenity_obj = storage.get(Amenity, amenity_id)\n if place_obj and amenity_obj:\n if amenity_obj not in place_obj.amenities:\n place_obj.amenities.append(amenity_obj)\n storage.save()\n return jsonify(amenity_obj.to_dict()), 201\n else:\n return jsonify(amenity_obj.to_dict()), 200\n else:\n abort(404)",
"def add(self, workflow_ID=None, parentobj_ID=None, **kwargs):\n\n uri = kwargs.get('uri')\n uid = kwargs.get('uid')\n desc = kwargs.get('desc')\n name = kwargs.get('name')\n source = kwargs.get('source')\n\n if (self.debug):\n print('MPO.ADD', workflow_ID, parentobj_ID, name, desc,uri,uid,source,kwargs, file=sys.stderr)\n\n if uid:\n payload={\"name\":name,\"description\":desc,\"source_uid\":source,\"uid\":uid}\n elif uri:\n payload={\"name\":name,\"description\":desc,\"source_uid\":source,\"uri\":uri}\n else:\n return {\"name\":name,\"description\":desc,\"source_uid\":source,\"message\":\"Must provide either uri or uid.\", 'uid':-1, \"status\":-1}\n\n return self.post(self.DATAOBJECT_RT,workflow_ID,[parentobj_ID],data=payload,**kwargs)",
"def create_link(self, word, meaning):\n print(str(self.unique_id) + \" learned \" +\n str(word) + \" for \" + str(meaning))\n self.meaning2word[meaning] = word\n self.word2meaning[word] = meaning\n self.wordsuccess[word] = []\n\n if meaning not in self.model.vocabulary:\n self.model.vocabulary[meaning] = {}\n\n # If word not in vocabulary, add it\n if word not in self.model.vocabulary[meaning]:\n self.model.vocabulary[meaning][word] = [self.unique_id]\n # Else append this agent to its users\n else:\n self.model.vocabulary[meaning][word].append(self.unique_id)",
"def add_link_to_bundle(request, bundle_id):\n\n # ensure bundle exists\n bundle = get_object_or_404(Bundle, id=bundle_id)\n\n # get/create link for given url\n url = request.data.get('url', None)\n\n # validate url is a url\n v = URLValidator()\n\n try:\n v(url)\n except ValidationError as exc:\n # the user must be joking\n return Response({'error': True, 'msg': 'Invalid URL'}, status=400)\n\n # assert that \"comfort_level\" is specified.\n # this is validated outside of the `Link` fields handled by\n # DRF serializer validation.\n comfort_level = int(request.data.get('comfort_level', None))\n if comfort_level not in [i[0] for i in COMFORT_LEVELS]:\n return Response({'error': True,\n 'msg': 'Please specify a reader comfort level'\n })\n\n url = urltools.normalize(url)\n\n try:\n # fetch existing link\n link = Link.objects.get(url=url)\n except Link.DoesNotExist:\n # create a new link\n link_serializer = LinkSerializer(data=request.data)\n link_serializer.is_valid(raise_exception=True)\n link = link_serializer.save()\n\n # add link to bundle\n if not BundleLink.objects.filter(bundle=bundle, link=link).exists():\n # call alchemy util to fetch concepts for URL\n concepts = bundles.alchemy_utils.get_concepts(url)\n this_bundle = BundleLink.objects.create(bundle=bundle,\n link=link,\n comfort_level=comfort_level,\n curator_id=1)\n for concept in concepts:\n this_bundle.tags.add(concept)\n\n return Response('', status=201)",
"def link_clip_to_episode(clip, epi, action=None):\r\n error_catch(clip, epi, action=action)\r\n if action == \"break\":\r\n epi[\"associated_clips\"].remove(clip[\"id\"])\r\n clip[\"from_episode\"][\"id\"] = None\r\n clip[\"from_episode\"][\"youtube_id\"] = None\r\n clip[\"from_episode\"][\"published_at\"] = None\r\n epi[\"curation\"] = refresh_curation_data(epi)\r\n else:\r\n epi[\"associated_clips\"].append(clip[\"id\"])\r\n clip[\"from_episode\"][\"id\"] = epi[\"id\"]\r\n clip[\"from_episode\"][\"youtube_id\"] = epi[\"youtube_id\"]\r\n clip[\"from_episode\"][\"published_at\"] = epi[\"published_at\"]\r\n epi[\"curation\"] = refresh_curation_data(epi)\r\n return clip, epi",
"def create_link(\n integration: Integration,\n installation: IntegrationInstallation,\n event: GroupEvent,\n response: Response,\n) -> None:\n external_issue = ExternalIssue.objects.create(\n organization_id=event.group.project.organization_id,\n integration_id=integration.id,\n key=response[\"key\"],\n title=event.title,\n description=installation.get_group_description(event.group, event),\n metadata=response.get(\"metadata\"),\n )\n GroupLink.objects.create(\n group_id=event.group.id,\n project_id=event.group.project_id,\n linked_type=GroupLink.LinkedType.issue,\n linked_id=external_issue.id,\n relationship=GroupLink.Relationship.references,\n data={\"provider\": integration.provider},\n )",
"def make_link(image, target, title='', window_target='', fragment=''):\n link = dom.createElement('link')\n if not isinstance(target, basestring):\n build_reference(context, target, link)\n else:\n link.setAttribute('url', target)\n if fragment:\n link.setAttribute('anchor', fragment)\n if title:\n link.setAttribute('title', title)\n if window_target:\n link.setAttribute('target', window_target)\n parent = image.parentNode\n parent.replaceChild(link, image)\n link.appendChild(image)\n return link"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
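The row above documents linking an existing outcome, or creating a new one, under a global outcome group via a PUT to the link URL. As a minimal sketch of that same call made with the requests library, the snippet below reuses only the path and payload keys shown in the function above; the host, access token, IDs, and field values are assumed placeholders, and the Bearer-token header reflects common Canvas-style auth rather than anything stated in the row itself.

import requests

# Assumed placeholders; none of these values come from the row above.
BASE = "https://canvas.example.edu/api"
HEADERS = {"Authorization": "Bearer <ACCESS_TOKEN>"}

group_id, outcome_id = 42, 137  # hypothetical global group and outcome IDs

# Same endpoint and field names as create_link_outcome_global_outcome_id.
url = f"{BASE}/v1/global/outcome_groups/{group_id}/outcomes/{outcome_id}"
payload = {
    "title": "Essay structure",
    "display_name": "Structure",
    "description": "Organizes ideas into a coherent argument.",
    "vendor_guid": "vendor-guid-101",
    "mastery_points": 3,
    "ratings[description]": "Meets expectations",
    "ratings[points]": 3,
}

resp = requests.put(url, data=payload, headers=HEADERS)
print(resp.status_code)

Per the description, when outcome_id identifies an existing outcome the extra fields are simply ignored, so the same payload shape works for both the link and create cases.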
Unlinking an outcome only deletes the outcome itself if this was the last link to the outcome in any group in any context. Aligned outcomes cannot be deleted; as such, if this is the last link to an aligned outcome, the unlinking will fail. | def unlink_outcome_global(request_ctx, id, outcome_id, **request_kwargs):
path = '/v1/global/outcome_groups/{id}/outcomes/{outcome_id}'
url = request_ctx.base_api_url + path.format(id=id, outcome_id=outcome_id)
response = client.delete(request_ctx, url, **request_kwargs)
return response | [
"def unlink(self, link_id):",
"def unlink_completely(self):\n self.last_changed = self.nodenet.current_step\n self.nodenet.get_nodespace(self.parent_nodespace).contents_last_changed = self.nodenet.current_step\n\n links_to_delete = set()\n for gate_name_candidate in self.get_gate_types():\n for link_candidate in self.get_gate(gate_name_candidate).get_links():\n links_to_delete.add(link_candidate)\n for slot_name_candidate in self.get_slot_types():\n for link_candidate in self.get_slot(slot_name_candidate).get_links():\n links_to_delete.add(link_candidate)\n for link in links_to_delete:\n link.target_node.last_changed = self.nodenet.current_step\n self.nodenet.get_nodespace(link.target_node.parent_nodespace).contents_last_changed = self.nodenet.current_step\n link.remove()",
"def remove_outcome(self, outcome_name):\n if outcome_name not in self.outcomes:\n raise KeyError(\n \"Option '{}' doesn't contain any outcome '{}'.\".format(\n self.name,\n outcome_name\n )\n )\n\n del self.outcomes[outcome_name]",
"def remove_link():",
"def delete_link(self, word):\n meaning = self.word2meaning[word]\n print(str(self.unique_id) + \" forgot \" +\n str(word) + \" for \" + str(meaning))\n del self.word2meaning[word]\n del self.meaning2word[meaning]\n del self.wordsuccess[word]\n\n # If the agent was the only one using the word, delete the word\n if len(self.model.vocabulary[meaning][word]) == 1:\n del self.model.vocabulary[meaning][word]\n # Else simply remove the agent\n else:\n self.model.vocabulary[meaning][word].remove(self.unique_id)",
"def __delete_violation_link(self, agreement_id, violation, violation_link, extras):\n myrulesengine = rulesengine.RulesEngine()\n myrulesengine._registry.delete_resource(violation_link.identifier, extras)\n\n agreement = myrulesengine._registry.get_resource(agreement_id, None)\n agreement.identifier = agreement_id\n if violation_link in agreement.links:\n agreement.links.remove(violation_link)",
"def unlink(self):\n if self.resource is None:\n self.resource = self.client.get_resource(self.href)\n self.client.post_linked_resource(\n self.resource, RelationType.UNLINK_FROM_TEMPLATE,\n EntityType.ROLE.value, None)",
"def delete_link(self, link):",
"def unlink(self):\n album_id = self.albums_map[self.artist][self.c_album][1]\n # clear entry in self.albums_map[artist]\n self.albums_map[self.artist].pop(self.c_album)\n # remove Albums recording only if no more references to the album exist\n still_present = False\n for item in self.albums_map[self.artist].values():\n if item[1] == album_id:\n still_present = True\n if not still_present:\n dmla.unlink_album(self.a_album)\n self.modified = True\n self.refresh_screen(self.artists_list.currentIndex(),\n self.albums_list.currentIndex(), modifyoff=False)",
"def test_delete_outcome_via_participant(self):\n outcomes, p = self._create_outcomes()\n\n # Delete participant\n db.session.delete(p)\n db.session.commit()\n\n # Check that outcomes have been deleted\n oc = ['Deceased', 'Alive']\n o1 = Outcome.query.filter_by(vital_status=oc[0]).one_or_none()\n o2 = Outcome.query.filter_by(vital_status=oc[1]).one_or_none()\n self.assertIs(o1, None)\n self.assertIs(o2, None)",
"def delete_link(self, light_name):\n self.get_links().pop(light_name)",
"def unlink(self):\n link = self.link\n if link is None:\n raise ValueError(\"Interface is not linked\")\n if link.is_ptp or link.is_lag:\n link.delete()\n else:\n raise ValueError(\"Cannot unlink non p-t-p link\")",
"def remove_outcome(self, label):\n try:\n self._outcomes.remove(label)\n self._transition_sequence.remove(label)\n except ValueError:\n pass",
"def _remove_relations(self):\n self.tree = etree.parse(self.output_file)\n\n for tlink in self.tree.xpath(\"//TLINK\"):\n tlink.getparent().remove(tlink)",
"def unlink(self):\n self._linked = False\n self.is_dirty = False\n return self",
"def unlink_pivot(remote, pivot_id):\n cmd = mmapi.StoredCommands()\n cmd_key = cmd.AppendSceneCommand_UnlinkPivot(pivot_id)\n remote.runCommand(cmd)",
"async def unlink(self, ctx: MyContext):\n query = \"SELECT * FROM wormhole_channel WHERE channelID = ?\"\n wh_channel = self.bot.db_query(\n query, (ctx.channel.id,), astuple=True, fetchone=True\n )\n # comes as: (name, channelID, guildID, type, webhookID, webhookTOKEN)\n if len(wh_channel) == 0:\n await ctx.send(await self.bot._(ctx.guild.id, \"wormhole.error.not-linked\"))\n return\n query = \"DELETE FROM wormhole_channel WHERE channelID = ? AND name = ?\"\n async with ClientSession() as session:\n webhook = discord.Webhook.partial(\n wh_channel[4], wh_channel[5], session=session\n )\n await webhook.delete()\n self.bot.db_query(query, (wh_channel[0], ctx.channel.id))\n await ctx.send(\n await self.bot._(ctx.guild.id, \"wormhole.success.channel-unlinked\")\n )",
"def unlink(node):\n meta.removeMetaData(node, className=LINK_METACLASS)",
"def _lnk_delete_link(self, link_name):\n translated_name = '/' + self._trajectory_name + '/' + link_name.replace('.','/')\n link = self._hdf5file.get_node(where=translated_name)\n link._f_remove()"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
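Since a DELETE on this link URL can legitimately fail when it would orphan an aligned outcome, a caller sketch is worth spelling out. The snippet below is a plain-requests rendering of the same DELETE the wrapper above issues; host, token, and IDs are assumed placeholders, and the Bearer-token header is an assumption based on usual Canvas auth.

import requests

BASE = "https://canvas.example.edu/api"               # assumed host
HEADERS = {"Authorization": "Bearer <ACCESS_TOKEN>"}  # assumed auth scheme

group_id, outcome_id = 42, 137  # hypothetical IDs

url = f"{BASE}/v1/global/outcome_groups/{group_id}/outcomes/{outcome_id}"
resp = requests.delete(url, headers=HEADERS)

if resp.ok:
    print("outcome link removed")
else:
    # The last link to an aligned outcome cannot be removed, so surface the
    # error body rather than assuming success.
    print("unlink refused:", resp.status_code, resp.text)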
Creates a new empty subgroup under the outcome group with the given title and description. | def create_subgroup_global(request_ctx, id, title, description=None, vendor_guid=None, **request_kwargs):
path = '/v1/global/outcome_groups/{id}/subgroups'
payload = {
'title' : title,
'description' : description,
'vendor_guid' : vendor_guid,
}
url = request_ctx.base_api_url + path.format(id=id)
response = client.post(request_ctx, url, payload=payload, **request_kwargs)
return response | [
"def create_thing_group(thingGroupName=None, parentGroupName=None, thingGroupProperties=None, tags=None):\n pass",
"def createMainGroup(self):\n\t\tmc.group( n = self.grp.name, em = True )",
"def with_group(title: str) -> Generator[None, None, None]:\n if os.environ.get(\"GITHUB_ACTIONS\", \"false\") != \"true\":\n console.print(\"#\" * 10 + \" [bright_blue]\" + title + \"[/] \" + \"#\" * 10)\n yield\n return\n console.print(f\"::group::[bright_blue]{title}[/]\")\n yield\n console.print(\"::endgroup::\")",
"def _create_question_group(self, q: Question):\n if q.title not in self.question_groups_by_key:\n logger.info(f\"Creating question group: {q.title}\")\n group = QuestionGroup(parent=q)\n self.question_groups_by_key[group.key] = group",
"def test_create_group_no_pattern(self):\n\n return self.client.post('/create_group', \n data={'group_name':\"New Group\",\n 'group_descrip':\"Fun Group\",\n 'hashtag':\"\",\n 'group_image':'static/images/craft_group_default.jpg'},\n follow_redirects=True)\n\n #Group was created without a pattern and redirected to new group page\n self.assertEqual(result.status_code, 200)\n self.assertIn('New Group', result.data)\n self.assertIn('Fun Group', result.data)",
"def create_group(self, groupdata: Dict[str, Any]) -> Group:\n ...",
"def _create_child_group(self, name) -> \"GroupBase\":\n pass",
"def create(self, group_id, group_title, description=None, membership_type=None, open_group=None,\n closed_group=None, hidden_group=None, discussion_styles=None, enable_blog=None, enable_contest=None,\n enable_forum=None, enable_idea=None, enable_qanda=None, enable_tkb=None, all_styles_default=True,\n parent_category_id=None, avatar_image_path=None, full_response=None, return_id=None, return_url=None,\n return_api_url=None, return_http_code=None, return_status=None, return_error_messages=None,\n split_errors=False):\n return structures_module.grouphubs.create(self.khoros_object, group_id, group_title, description,\n membership_type, open_group, closed_group, hidden_group,\n discussion_styles, enable_blog, enable_contest, enable_forum,\n enable_idea, enable_qanda, enable_tkb, all_styles_default,\n parent_category_id, avatar_image_path, full_response,\n return_id, return_url, return_api_url, return_http_code,\n return_status, return_error_messages, split_errors)",
"def create_group_for_new_organization(sender, **kwargs):\n\n created = kwargs.get('created')\n organization = kwargs.get('instance')\n\n if created:\n Group.objects.create(\n title=\"Your First Client\",\n organization=organization,\n description=\"Your organization\",\n )",
"def create(self):\n dto = {\n 'name': self.params['name'],\n 'description': self.params['description'],\n 'variables': self.params.get('variables') or ''\n }\n response = self.client.request('POST', 'inventories',\n self.inventory_id, 'groups', json=dto)\n if response.status_code >= 400:\n self.fail(str(response.text))\n return response.json()",
"def create_group(self):\n sql = open_sql_file('create_group').format(self.group_name)\n self.redshift.query(sql)\n print('Group {} created.'.format(self.group_name))\n\n return",
"def test_create_group(self):\n pass",
"def test_cannot_create_group_with_empty_field(self):\n\n utils.create_user_and_authenticate(self)\n group_fields = ['name', 'description']\n utils.test_cannot_post_with_empty_fields(self, self.url, group_fields)",
"def new_subset_group(self, label=None, subset_state=None, **kwargs):\n from glue.core.subset_group import SubsetGroup\n kwargs.setdefault(\"color\", settings.SUBSET_COLORS[self._sg_count % len(settings.SUBSET_COLORS)])\n self._sg_count += 1\n label = label or 'Subset %i' % self._sg_count\n\n result = SubsetGroup(label=label, subset_state=subset_state, **kwargs)\n self._subset_groups.append(result)\n result.register(self)\n return result",
"def init_group(self, mock_insert):\n mock_insert.return_value = None\n members = [\"me\", \"you\"]\n return Group(\"test_name\", \"test_description\", \"test_type\", members)",
"def createGroup(self, *group):\n if not self.rank:\n logging.info('Creating atom group {}'.format(group))\n\n if not len(group):\n for idSS in self.pargs['idSS']:\n self.lmp.command('group group{} type {}'.format(idSS, idSS))\n else:\n self.lmp.command('group ' + ('{} ' * len(group)).format(*group))",
"def create_group(self, label):\n group = OptionGroup(label)\n self.append(group)\n return group",
"def create_dynamic_group(data=None):\n return connector.SCIMGroup(\n displayName='display_name{0}'.format(uuid.uuid4()))",
"def clone(self, target):\r\n \r\n try:\r\n new_group = None\r\n original_group = self.info\r\n \r\n title = original_group['title']\r\n tags = original_group['tags']\r\n for tag in list(tags):\r\n if tag.startswith(\"source-\") or tag.startswith(\"sourcefolder-\"):\r\n tags.remove(tag)\r\n \r\n original_group['tags'].append(\"source-{0}\".format(original_group['id']))\r\n tags = ','.join(original_group['tags'])\r\n \r\n #Find a unique name for the group\r\n i = 1 \r\n while True:\r\n search_query = 'title:\"{0}\" AND owner:{1}'.format(title, target.users.me.username)\r\n groups = [group for group in target.groups.search(search_query, outside_org=False) if group['title'] == title]\r\n if len(groups) == 0:\r\n break\r\n i += 1\r\n title = \"{0} {1}\".format(original_group['title'], i)\r\n \r\n\r\n\r\n thumbnail = self.thumbnail\r\n if not thumbnail and self.portal_group:\r\n temp_dir = os.path.join(_TEMP_DIR.name, original_group['id'])\r\n if not os.path.exists(temp_dir):\r\n os.makedirs(temp_dir)\r\n thumbnail = self.portal_group.download_thumbnail(temp_dir)\r\n\r\n new_group = target.groups.create(title, tags, original_group['description'], original_group['snippet'],\r\n 'private', thumbnail, True, original_group['sortField'], original_group['sortOrder'], True)\r\n return new_group\r\n except Exception as ex:\r\n raise _ItemCreateException(\"Failed to create group '{0}': {1}\".format(original_group['title'], str(ex)), new_group)"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
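Only the title is required when creating a subgroup, which keeps the call short. The sketch below posts to the same subgroups endpoint as create_subgroup_global using plain requests; the host, token, parent group ID, and field values are assumed placeholders.

import requests

BASE = "https://canvas.example.edu/api"               # assumed host
HEADERS = {"Authorization": "Bearer <ACCESS_TOKEN>"}  # assumed auth scheme

parent_group_id = 42  # hypothetical parent outcome group

resp = requests.post(
    f"{BASE}/v1/global/outcome_groups/{parent_group_id}/subgroups",
    data={
        "title": "Writing outcomes",                       # required
        "description": "Outcomes for written communication",  # optional
        "vendor_guid": "writing-001",                      # optional
    },
    headers=HEADERS,
)
print(resp.status_code)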
Creates a new subgroup of the outcome group with the same title and description as the source group, then creates links in that new subgroup to the same outcomes that are linked in the source group. Recurses on the subgroups of the source group, importing them each in turn into the new subgroup. Allows you to copy organizational structure, but does not create copies of the outcomes themselves, only new links. The source group must be either global, from the same context as this outcome group, or from an associated account. The source group cannot be the root outcome group of its context. | def import_outcome_group_accounts(request_ctx, account_id, id, source_outcome_group_id, **request_kwargs):
path = '/v1/accounts/{account_id}/outcome_groups/{id}/import'
payload = {
'source_outcome_group_id' : source_outcome_group_id,
}
url = request_ctx.base_api_url + path.format(account_id=account_id, id=id)
response = client.post(request_ctx, url, payload=payload, **request_kwargs)
return response | [
"def clone(self, target):\r\n \r\n try:\r\n new_group = None\r\n original_group = self.info\r\n \r\n title = original_group['title']\r\n tags = original_group['tags']\r\n for tag in list(tags):\r\n if tag.startswith(\"source-\") or tag.startswith(\"sourcefolder-\"):\r\n tags.remove(tag)\r\n \r\n original_group['tags'].append(\"source-{0}\".format(original_group['id']))\r\n tags = ','.join(original_group['tags'])\r\n \r\n #Find a unique name for the group\r\n i = 1 \r\n while True:\r\n search_query = 'title:\"{0}\" AND owner:{1}'.format(title, target.users.me.username)\r\n groups = [group for group in target.groups.search(search_query, outside_org=False) if group['title'] == title]\r\n if len(groups) == 0:\r\n break\r\n i += 1\r\n title = \"{0} {1}\".format(original_group['title'], i)\r\n \r\n\r\n\r\n thumbnail = self.thumbnail\r\n if not thumbnail and self.portal_group:\r\n temp_dir = os.path.join(_TEMP_DIR.name, original_group['id'])\r\n if not os.path.exists(temp_dir):\r\n os.makedirs(temp_dir)\r\n thumbnail = self.portal_group.download_thumbnail(temp_dir)\r\n\r\n new_group = target.groups.create(title, tags, original_group['description'], original_group['snippet'],\r\n 'private', thumbnail, True, original_group['sortField'], original_group['sortOrder'], True)\r\n return new_group\r\n except Exception as ex:\r\n raise _ItemCreateException(\"Failed to create group '{0}': {1}\".format(original_group['title'], str(ex)), new_group)",
"def import_outcome_group_courses(request_ctx, course_id, id, source_outcome_group_id, **request_kwargs):\n\n path = '/v1/courses/{course_id}/outcome_groups/{id}/import'\n payload = {\n 'source_outcome_group_id' : source_outcome_group_id,\n }\n url = request_ctx.base_api_url + path.format(course_id=course_id, id=id)\n response = client.post(request_ctx, url, payload=payload, **request_kwargs)\n\n return response",
"def copy_group(self):\n dd = self.destination_directory\n sg = self.source_group\n dg = self.destination_group\n\n data = {\n 'description': sg.description,\n 'name': sg.name,\n 'status': sg.status,\n }\n\n # If this Group already exists, we'll just update it.\n if dg:\n for key, value in data.items():\n setattr(dg, key, value)\n\n while True:\n try:\n dg.save()\n return dg\n except StormpathError as err:\n logger.error('Failed to copy Group: {} into Directory: {} ({})'.format(sg.name.encode('utf-8'), dd.name.encode('utf-8'), err))\n\n # If we get here, it means we need to create the Group from scratch.\n while True:\n try:\n return dd.groups.create(data)\n except StormpathError as err:\n logger.error('Failed to copy Group: {} into Directory: {} ({})'.format(sg.name.encode('utf-8'), dd.name.encode('utf-8'), err))",
"def add_move_group_combining_others(self, new_group_name, existing_group_names=None):\n new_group = xml.dom.minidom.Document().createElement('group')\n new_group.setAttribute(\"name\", new_group_name)\n for existing_group_name in existing_group_names:\n new_group.appendChild(xml.dom.minidom.Document().createElement(f'group name=\"{existing_group_name}\"'))\n new_group.writexml(self.new_robot_srdf, indent=\" \", addindent=\" \", newl=\"\\n\")",
"def migrate(self):\n self.destination_group = self.get_destination_group()\n self.destination_group = self.copy_group()\n self.copy_custom_data()\n\n logger.info('Successfully copied Group: {}'.format(self.destination_group.name.encode('utf-8')))\n return self.destination_group",
"def create_link_outcome_global(request_ctx, id, outcome_id=None, title=None, display_name=None, description=None, vendor_guid=None, mastery_points=None, ratings_description=None, ratings_points=None, **request_kwargs):\n\n path = '/v1/global/outcome_groups/{id}/outcomes'\n payload = {\n 'outcome_id' : outcome_id,\n 'title' : title,\n 'display_name' : display_name,\n 'description' : description,\n 'vendor_guid' : vendor_guid,\n 'mastery_points' : mastery_points,\n 'ratings[description]' : ratings_description,\n 'ratings[points]' : ratings_points,\n }\n url = request_ctx.base_api_url + path.format(id=id)\n response = client.post(request_ctx, url, payload=payload, **request_kwargs)\n\n return response",
"def _merge_groups(self, group, newgroup):\n\n # name\n if group.name != newgroup.name:\n raise errors.AnsibleError(\"Cannot merge group %s with %s\" % (group.name, newgroup.name))\n\n # depth\n group.depth = max([group.depth, newgroup.depth])\n\n # hosts list (host objects are by now already added to self.hosts)\n for host in newgroup.hosts:\n grouphosts = dict([(h.name, h) for h in group.hosts])\n if host.name in grouphosts:\n # same host name but different object, merge\n self._merge_hosts(grouphosts[host.name], host)\n else:\n # new membership, add host to group from self\n # group from self will also be added again to host.groups, but\n # as different object\n group.add_host(self.hosts[host.name])\n # now remove this the old object for group in host.groups\n for hostgroup in [g for g in host.groups]:\n if hostgroup.name == group.name and hostgroup != self.groups[group.name]:\n self.hosts[host.name].groups.remove(hostgroup)\n\n\n # group child membership relation\n for newchild in newgroup.child_groups:\n # dict with existing child groups:\n childgroups = dict([(g.name, g) for g in group.child_groups])\n # check if child of new group is already known as a child\n if newchild.name not in childgroups:\n self.groups[group.name].add_child_group(newchild)\n\n # group parent membership relation\n for newparent in newgroup.parent_groups:\n # dict with existing parent groups:\n parentgroups = dict([(g.name, g) for g in group.parent_groups])\n # check if parent of new group is already known as a parent\n if newparent.name not in parentgroups:\n if newparent.name not in self.groups:\n # group does not exist yet in self, import him\n self.groups[newparent.name] = newparent\n # group now exists but not yet as a parent here\n self.groups[newparent.name].add_child_group(group)\n\n # variables\n group.vars = utils.combine_vars(group.vars, newgroup.vars)",
"def create_group_from_source(self, **attrs):\n return _group.Group.create_from_source(self, **attrs)",
"def create(self, group, parent_group):\n raise NotImplementedError",
"def allocate_groups_by_group(self, target_type, source_type, \n size_probs, rng):\n\n # TODO: factor out\n\n assert source_type in self.groups\n assert target_type not in self.groups\n \n self.init_group_type(target_type)\n\n ids = list(self.groups[source_type].keys())\n rng.shuffle(ids)\n while len(ids) > 0:\n size = int(sample_table(size_probs, rng)[0])\n members = []\n group_id = self.add_group(target_type, members)\n for source_id in ids[:size]:\n for i in self.groups[source_type][source_id]:\n self.I[i].groups[target_type] = group_id\n members.append(self.I[i])\n self.add_individuals_to_group(target_type, group_id, members)\n del ids[:size]",
"def create_consistencygroup_from_src(self, context, group, volumes,\n cgsnapshot=None, snapshots=None,\n source_cg=None, source_vols=None):\n LOG.info(\"create_consistencygroup_from_src\")",
"def test_060_add_group_to_group(self):\n\n testflow.step(\"Adding group %s to group %s\", TEST_GROUP1, TEST_GROUP2)\n assert MANAGE_CLI.run(\n 'groupadd',\n TEST_GROUP1,\n group=TEST_GROUP2,\n )[0], \"Failed to add group to group '%s'\" % TEST_GROUP1",
"def create_linked_duplicates(ob, group_name_linked, context):\n\tbpy.ops.object.select_all(action='DESELECT')\n\tob.select = True\n\tfor obj in ob.users_group[0].objects:\n\t\tbpy.ops.object.select_all(action='DESELECT')\n\t\tobj.select = True\n\t\tbpy.ops.object.duplicate(linked=True)\n\t\tobj.select = False\n\t\tbpy.ops.object.group_link(group=group_name_linked)\n\t\tname = context.active_object.name[:-4] + '_linked'",
"async def mergegroup(self, ctx, original_group_id: int, duplicate_group_id: int):\n original_group = await ex.get_group(original_group_id)\n duplicate_group = await ex.get_group(duplicate_group_id)\n if not duplicate_group:\n return await ctx.send(f\"> {duplicate_group_id} could not find a Group.\")\n if not original_group:\n return await ctx.send(f\"> {original_group} could not find a Group.\")\n # move aliases\n await ex.conn.execute(\"UPDATE groupmembers.aliases SET objectid = $1 WHERE isgroup = $2 AND objectid = $3\", original_group.id, 1, duplicate_group.id)\n for member_id in duplicate_group.members:\n if member_id not in original_group.members:\n # update the member location to the original group\n await ex.conn.execute(\"UPDATE groupmembers.idoltogroup SET groupid = $1 WHERE idolid = $2 AND groupid = $3\", original_group.id, member_id, duplicate_group.id)\n # delete group\n await ex.conn.execute(\"DELETE FROM groupmembers.groups WHERE groupid = $1\", duplicate_group.id)\n # recreate cache\n await ex.create_idol_cache()\n await ex.create_group_cache()\n await ctx.send(f\"> Merged {duplicate_group_id} to {original_group_id}.\")",
"def _copy_groups(self, out):\n if self.parent_table:\n if hasattr(self.parent_table, \"_groups\"):\n out._groups = groups.ColumnGroups(\n out, indices=self.parent_table._groups._indices\n )\n elif hasattr(self, \"_groups\"):\n out._groups = groups.ColumnGroups(out, indices=self._groups._indices)",
"def copy_group(self, group_no):\n max_group_no = max(self.grp.keys())\n new_group_no = max_group_no + 1\n\n self.grp[new_group_no]['atoms'] = self.grp[group_no]['atoms']\n self.grp[new_group_no]['constraints'] = self.grp[group_no]['constraints']\n self.grp[new_group_no]['rotsym'] = self.grp[group_no]['rotsym']\n self.grp[new_group_no]['linear'] = self.grp[group_no]['linear']\n if self.grp[group_no]['volume']: self.grp[new_group_no]['volume'] = self.grp[group_no]['volume']\n if self.grp[group_no]['energy']: self.grp[new_group_no]['energy'] = self.grp[group_no]['energy']\n\n return new_group_no",
"def create_link_outcome_global_outcome_id(request_ctx, id, outcome_id=None, title=None, display_name=None, description=None, vendor_guid=None, mastery_points=None, ratings_description=None, ratings_points=None, **request_kwargs):\n\n path = '/v1/global/outcome_groups/{id}/outcomes/{outcome_id}'\n payload = {\n 'title' : title,\n 'display_name' : display_name,\n 'description' : description,\n 'vendor_guid' : vendor_guid,\n 'mastery_points' : mastery_points,\n 'ratings[description]' : ratings_description,\n 'ratings[points]' : ratings_points,\n }\n url = request_ctx.base_api_url + path.format(id=id, outcome_id=outcome_id)\n response = client.put(request_ctx, url, payload=payload, **request_kwargs)\n\n return response",
"def clone(self):\n return _libsbml.Group_clone(self)",
"def add_group(group):"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
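Because the import copies structure and links but not the outcomes themselves, the request body is a single field naming the source group. The sketch below mirrors the account-scoped import endpoint used by import_outcome_group_accounts with plain requests; the host, token, and all IDs are assumed placeholders.

import requests

BASE = "https://canvas.example.edu/api"               # assumed host
HEADERS = {"Authorization": "Bearer <ACCESS_TOKEN>"}  # assumed auth scheme

account_id, target_group_id = 1, 42  # hypothetical account and destination group
source_group_id = 7                  # hypothetical global or account-level source group

resp = requests.post(
    f"{BASE}/v1/accounts/{account_id}/outcome_groups/{target_group_id}/import",
    data={"source_outcome_group_id": source_group_id},
    headers=HEADERS,
)
print(resp.status_code)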
Creates a new subgroup of the outcome group with the same title and description as the source group, then creates links in that new subgroup to the same outcomes that are linked in the source group. Recurses on the subgroups of the source group, importing them each in turn into the new subgroup. Allows you to copy organizational structure, but does not create copies of the outcomes themselves, only new links. The source group must be either global, from the same context as this outcome group, or from an associated account. The source group cannot be the root outcome group of its context. | def import_outcome_group_courses(request_ctx, course_id, id, source_outcome_group_id, **request_kwargs):
path = '/v1/courses/{course_id}/outcome_groups/{id}/import'
payload = {
'source_outcome_group_id' : source_outcome_group_id,
}
url = request_ctx.base_api_url + path.format(course_id=course_id, id=id)
response = client.post(request_ctx, url, payload=payload, **request_kwargs)
return response | [
"def clone(self, target):\r\n \r\n try:\r\n new_group = None\r\n original_group = self.info\r\n \r\n title = original_group['title']\r\n tags = original_group['tags']\r\n for tag in list(tags):\r\n if tag.startswith(\"source-\") or tag.startswith(\"sourcefolder-\"):\r\n tags.remove(tag)\r\n \r\n original_group['tags'].append(\"source-{0}\".format(original_group['id']))\r\n tags = ','.join(original_group['tags'])\r\n \r\n #Find a unique name for the group\r\n i = 1 \r\n while True:\r\n search_query = 'title:\"{0}\" AND owner:{1}'.format(title, target.users.me.username)\r\n groups = [group for group in target.groups.search(search_query, outside_org=False) if group['title'] == title]\r\n if len(groups) == 0:\r\n break\r\n i += 1\r\n title = \"{0} {1}\".format(original_group['title'], i)\r\n \r\n\r\n\r\n thumbnail = self.thumbnail\r\n if not thumbnail and self.portal_group:\r\n temp_dir = os.path.join(_TEMP_DIR.name, original_group['id'])\r\n if not os.path.exists(temp_dir):\r\n os.makedirs(temp_dir)\r\n thumbnail = self.portal_group.download_thumbnail(temp_dir)\r\n\r\n new_group = target.groups.create(title, tags, original_group['description'], original_group['snippet'],\r\n 'private', thumbnail, True, original_group['sortField'], original_group['sortOrder'], True)\r\n return new_group\r\n except Exception as ex:\r\n raise _ItemCreateException(\"Failed to create group '{0}': {1}\".format(original_group['title'], str(ex)), new_group)",
"def import_outcome_group_accounts(request_ctx, account_id, id, source_outcome_group_id, **request_kwargs):\n\n path = '/v1/accounts/{account_id}/outcome_groups/{id}/import'\n payload = {\n 'source_outcome_group_id' : source_outcome_group_id,\n }\n url = request_ctx.base_api_url + path.format(account_id=account_id, id=id)\n response = client.post(request_ctx, url, payload=payload, **request_kwargs)\n\n return response",
"def copy_group(self):\n dd = self.destination_directory\n sg = self.source_group\n dg = self.destination_group\n\n data = {\n 'description': sg.description,\n 'name': sg.name,\n 'status': sg.status,\n }\n\n # If this Group already exists, we'll just update it.\n if dg:\n for key, value in data.items():\n setattr(dg, key, value)\n\n while True:\n try:\n dg.save()\n return dg\n except StormpathError as err:\n logger.error('Failed to copy Group: {} into Directory: {} ({})'.format(sg.name.encode('utf-8'), dd.name.encode('utf-8'), err))\n\n # If we get here, it means we need to create the Group from scratch.\n while True:\n try:\n return dd.groups.create(data)\n except StormpathError as err:\n logger.error('Failed to copy Group: {} into Directory: {} ({})'.format(sg.name.encode('utf-8'), dd.name.encode('utf-8'), err))",
"def add_move_group_combining_others(self, new_group_name, existing_group_names=None):\n new_group = xml.dom.minidom.Document().createElement('group')\n new_group.setAttribute(\"name\", new_group_name)\n for existing_group_name in existing_group_names:\n new_group.appendChild(xml.dom.minidom.Document().createElement(f'group name=\"{existing_group_name}\"'))\n new_group.writexml(self.new_robot_srdf, indent=\" \", addindent=\" \", newl=\"\\n\")",
"def migrate(self):\n self.destination_group = self.get_destination_group()\n self.destination_group = self.copy_group()\n self.copy_custom_data()\n\n logger.info('Successfully copied Group: {}'.format(self.destination_group.name.encode('utf-8')))\n return self.destination_group",
"def create_link_outcome_global(request_ctx, id, outcome_id=None, title=None, display_name=None, description=None, vendor_guid=None, mastery_points=None, ratings_description=None, ratings_points=None, **request_kwargs):\n\n path = '/v1/global/outcome_groups/{id}/outcomes'\n payload = {\n 'outcome_id' : outcome_id,\n 'title' : title,\n 'display_name' : display_name,\n 'description' : description,\n 'vendor_guid' : vendor_guid,\n 'mastery_points' : mastery_points,\n 'ratings[description]' : ratings_description,\n 'ratings[points]' : ratings_points,\n }\n url = request_ctx.base_api_url + path.format(id=id)\n response = client.post(request_ctx, url, payload=payload, **request_kwargs)\n\n return response",
"def _merge_groups(self, group, newgroup):\n\n # name\n if group.name != newgroup.name:\n raise errors.AnsibleError(\"Cannot merge group %s with %s\" % (group.name, newgroup.name))\n\n # depth\n group.depth = max([group.depth, newgroup.depth])\n\n # hosts list (host objects are by now already added to self.hosts)\n for host in newgroup.hosts:\n grouphosts = dict([(h.name, h) for h in group.hosts])\n if host.name in grouphosts:\n # same host name but different object, merge\n self._merge_hosts(grouphosts[host.name], host)\n else:\n # new membership, add host to group from self\n # group from self will also be added again to host.groups, but\n # as different object\n group.add_host(self.hosts[host.name])\n # now remove this the old object for group in host.groups\n for hostgroup in [g for g in host.groups]:\n if hostgroup.name == group.name and hostgroup != self.groups[group.name]:\n self.hosts[host.name].groups.remove(hostgroup)\n\n\n # group child membership relation\n for newchild in newgroup.child_groups:\n # dict with existing child groups:\n childgroups = dict([(g.name, g) for g in group.child_groups])\n # check if child of new group is already known as a child\n if newchild.name not in childgroups:\n self.groups[group.name].add_child_group(newchild)\n\n # group parent membership relation\n for newparent in newgroup.parent_groups:\n # dict with existing parent groups:\n parentgroups = dict([(g.name, g) for g in group.parent_groups])\n # check if parent of new group is already known as a parent\n if newparent.name not in parentgroups:\n if newparent.name not in self.groups:\n # group does not exist yet in self, import him\n self.groups[newparent.name] = newparent\n # group now exists but not yet as a parent here\n self.groups[newparent.name].add_child_group(group)\n\n # variables\n group.vars = utils.combine_vars(group.vars, newgroup.vars)",
"def create_group_from_source(self, **attrs):\n return _group.Group.create_from_source(self, **attrs)",
"def create(self, group, parent_group):\n raise NotImplementedError",
"def allocate_groups_by_group(self, target_type, source_type, \n size_probs, rng):\n\n # TODO: factor out\n\n assert source_type in self.groups\n assert target_type not in self.groups\n \n self.init_group_type(target_type)\n\n ids = list(self.groups[source_type].keys())\n rng.shuffle(ids)\n while len(ids) > 0:\n size = int(sample_table(size_probs, rng)[0])\n members = []\n group_id = self.add_group(target_type, members)\n for source_id in ids[:size]:\n for i in self.groups[source_type][source_id]:\n self.I[i].groups[target_type] = group_id\n members.append(self.I[i])\n self.add_individuals_to_group(target_type, group_id, members)\n del ids[:size]",
"def create_consistencygroup_from_src(self, context, group, volumes,\n cgsnapshot=None, snapshots=None,\n source_cg=None, source_vols=None):\n LOG.info(\"create_consistencygroup_from_src\")",
"def test_060_add_group_to_group(self):\n\n testflow.step(\"Adding group %s to group %s\", TEST_GROUP1, TEST_GROUP2)\n assert MANAGE_CLI.run(\n 'groupadd',\n TEST_GROUP1,\n group=TEST_GROUP2,\n )[0], \"Failed to add group to group '%s'\" % TEST_GROUP1",
"def create_linked_duplicates(ob, group_name_linked, context):\n\tbpy.ops.object.select_all(action='DESELECT')\n\tob.select = True\n\tfor obj in ob.users_group[0].objects:\n\t\tbpy.ops.object.select_all(action='DESELECT')\n\t\tobj.select = True\n\t\tbpy.ops.object.duplicate(linked=True)\n\t\tobj.select = False\n\t\tbpy.ops.object.group_link(group=group_name_linked)\n\t\tname = context.active_object.name[:-4] + '_linked'",
"async def mergegroup(self, ctx, original_group_id: int, duplicate_group_id: int):\n original_group = await ex.get_group(original_group_id)\n duplicate_group = await ex.get_group(duplicate_group_id)\n if not duplicate_group:\n return await ctx.send(f\"> {duplicate_group_id} could not find a Group.\")\n if not original_group:\n return await ctx.send(f\"> {original_group} could not find a Group.\")\n # move aliases\n await ex.conn.execute(\"UPDATE groupmembers.aliases SET objectid = $1 WHERE isgroup = $2 AND objectid = $3\", original_group.id, 1, duplicate_group.id)\n for member_id in duplicate_group.members:\n if member_id not in original_group.members:\n # update the member location to the original group\n await ex.conn.execute(\"UPDATE groupmembers.idoltogroup SET groupid = $1 WHERE idolid = $2 AND groupid = $3\", original_group.id, member_id, duplicate_group.id)\n # delete group\n await ex.conn.execute(\"DELETE FROM groupmembers.groups WHERE groupid = $1\", duplicate_group.id)\n # recreate cache\n await ex.create_idol_cache()\n await ex.create_group_cache()\n await ctx.send(f\"> Merged {duplicate_group_id} to {original_group_id}.\")",
"def _copy_groups(self, out):\n if self.parent_table:\n if hasattr(self.parent_table, \"_groups\"):\n out._groups = groups.ColumnGroups(\n out, indices=self.parent_table._groups._indices\n )\n elif hasattr(self, \"_groups\"):\n out._groups = groups.ColumnGroups(out, indices=self._groups._indices)",
"def copy_group(self, group_no):\n max_group_no = max(self.grp.keys())\n new_group_no = max_group_no + 1\n\n self.grp[new_group_no]['atoms'] = self.grp[group_no]['atoms']\n self.grp[new_group_no]['constraints'] = self.grp[group_no]['constraints']\n self.grp[new_group_no]['rotsym'] = self.grp[group_no]['rotsym']\n self.grp[new_group_no]['linear'] = self.grp[group_no]['linear']\n if self.grp[group_no]['volume']: self.grp[new_group_no]['volume'] = self.grp[group_no]['volume']\n if self.grp[group_no]['energy']: self.grp[new_group_no]['energy'] = self.grp[group_no]['energy']\n\n return new_group_no",
"def create_link_outcome_global_outcome_id(request_ctx, id, outcome_id=None, title=None, display_name=None, description=None, vendor_guid=None, mastery_points=None, ratings_description=None, ratings_points=None, **request_kwargs):\n\n path = '/v1/global/outcome_groups/{id}/outcomes/{outcome_id}'\n payload = {\n 'title' : title,\n 'display_name' : display_name,\n 'description' : description,\n 'vendor_guid' : vendor_guid,\n 'mastery_points' : mastery_points,\n 'ratings[description]' : ratings_description,\n 'ratings[points]' : ratings_points,\n }\n url = request_ctx.base_api_url + path.format(id=id, outcome_id=outcome_id)\n response = client.put(request_ctx, url, payload=payload, **request_kwargs)\n\n return response",
"def clone(self):\n return _libsbml.Group_clone(self)",
"def add_group(group):"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
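The course-scoped variant differs only in its path, so this sketch focuses on the constraint called out above: the source group must be in scope for the course and must not be the root group of its context. Host, token, and IDs are assumed placeholders.

import requests

BASE = "https://canvas.example.edu/api"               # assumed host
HEADERS = {"Authorization": "Bearer <ACCESS_TOKEN>"}  # assumed auth scheme

course_id, target_group_id = 101, 55  # hypothetical course and destination group
source_group_id = 7                   # hypothetical source group, not a root group

resp = requests.post(
    f"{BASE}/v1/courses/{course_id}/outcome_groups/{target_group_id}/import",
    data={"source_outcome_group_id": source_group_id},
    headers=HEADERS,
)
# A 4xx response here usually means the source group is out of scope for the
# course or is the root group of its context, both of which are disallowed above.
print(resp.status_code, "ok" if resp.ok else resp.text)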
Extract all URLs from the HTML page and follow them for further crawling; if an extracted URL has the form /question/xxx, download it and go straight into the parsing function | def parse(self, response):
# # print(response.text)
all_urls = response.css("a::attr(href)").extract()
all_urls = [parse.urljoin(response.url, url) for url in all_urls]
all_urls = filter(lambda x: True if x.startswith("https") else False, all_urls)
for url in all_urls:
match_obj = re.match("(.*zhihu.com/question/(\d+)(/|$).*)", url)
if match_obj:
            # If a question-related page is matched, download it and hand it to the question parser
request_url = match_obj.group(1)
yield scrapy.Request(request_url, headers=self.headers, callback=self.parse_question)
else:
            # If it is not a question page, just keep following the link
yield scrapy.Request(url, headers=self.headers, callback=self.parse) | [
"def parse(self, response):\n all_urls = response.css(\"a::attr(href)\").extract()\n all_urls = [parse.urljoin(response.url,url) for url in all_urls]\n # all_urls = filter(lambda x:True if x.startswith(\"https\") else False,all_urls)\n for url in all_urls:\n match_obj = re.match(\"(.*zhihu.com/question/(\\d+))(/|$).*\",url)\n if match_obj:\n #如果提取到question相关页面则下载后交由提取函数提取\n request_url = match_obj.group(1)\n yield scrapy.Request(url = request_url,headers = self.header,callback = self.parse_question)\n break\n else:\n #如果不是question页面则进一步跟踪\n # yield scrapy.Request(url,headers = self.header,callback = self.parse)\n pass",
"def parse(self, response):\n all_urls = response.css(\"a::attr(href)\").extract()\n all_urls = [parse.urljoin(response.url,url) for url in all_urls]\n all_urls = filter(lambda x:True if x.startswith(\"https\") else False,all_urls)\n for url in all_urls:\n match_obj = re.match(\"(.*zhihu.com/question/(\\d+))(/|$).*\",url)\n if match_obj:\n # 如果提取到question相关的页面则下载后交由提取函数进行提取\n request_url = match_obj.group(1)\n yield scrapy.Request(request_url,headers=self.headers,callback=self.parse_question)\n else:\n # 如果不是question页面则直接进一步跟踪\n yield scrapy.Request(url,headers=self.headers,callback=self.parse)",
"def parse(self, response):\n all_urls = response.css(\"a::attr(href)\").extract()\n all_urls = [parse.urljoin(response.url, url) for url in all_urls]\n all_urls = filter(lambda x: True if x.startswith(\"https\") else False, all_urls)\n for url in all_urls:\n match_obj = re.match(\"(.*zhihu.com/question/(\\d+))(/|$).*\", url)\n if match_obj:\n # 如果提取到question相关的页面则下载后交由提取函数进行提取\n request_url = match_obj.group(1)\n yield scrapy.Request(request_url, headers=self.headers, callback=self.parse_question)\n\n else:\n #此处可能会导致问题过少而答案过多\n # 如果不是question页面则直接进一步跟踪\n yield scrapy.Request(url, headers=self.headers, callback=self.parse)",
"def process_html(url, content):\n\n returnme = []\n\n domain = url.split('://')[1].split('/')[0]\n soup = BeautifulSoup(content, 'html.parser')\n\n # Remove unnecessary tags which could exist in <ol>\n for s in soup.select('script'):\n s.extract()\n for s in soup.select('noscript'):\n s.extract()\n for s in soup.select('table'):\n s.extract()\n for s in soup.select('figure'):\n s.extract()\n\n if domain == 'www.lifewire.com':\n for s in soup.find_all('div', {'class': 'theme-experttiptip'}):\n s.extract()\n for s in soup.find_all('div', {'class': 'theme-experttipimportant'}):\n s.extract()\n\n # For specific websites, need fine tune the parser to remove (.extract()) some\n # unnecessary tags to clean up the result got from ol.get_text()\n if domain == 'www.wikihow.com':\n for s in soup.select('span'):\n s.extract()\n\n ols = soup.find_all('ol')\n for _, ol in enumerate(ols):\n\n if domain == 'support.google.com':\n for s in ol.find_all('img'):\n # In Google support web, the 'alt' text are duplicated with text ahead\n # But the arrow image should be replaced with its alt, see both example:\n # https://support.google.com/pixelphone/answer/7444033\n if s['alt'].lower().strip() == 'and then':\n s.replace_with('and then')\n else:\n for s in ol.find_all('img'):\n s.replace_with(s['alt'])\n\n if domain in ['steps.app', 'www.techbone.net']:\n # This website has no separater between steps, if call get_text(), the\n # words between steps will mess up.\n instruction_got = ol.get_text('. ', strip=True)\n else:\n # Replace any HTML tag with a space, especially between steps of instruction\n # See https://www.crummy.com/software/BeautifulSoup/bs4/doc/#get-text\n instruction_got = ol.get_text(' ', strip=True)\n\n processed_str = _replace_unicode_with_space(instruction_got)\n # Decide whether the instruction is Android-related by URL/instruction.\n # Sometimes instruction does not contain \"android\" but it's indeed valid, so\n # add url as part of the text.\n if _is_valid(url.split('?')[0], processed_str):\n returnme.append(processed_str)\n\n return returnme",
"def parse(self, response):\n\n # 解析列表页中的所有文章url并交给scrapy下载后并进行解析\n if response.status == 404:\n self.fail_urls.append(response.url)\n self.crawler.stats.inc_value(\"failed_url\")\n post_nodes = response.css(\".xinw958 .xinwim241 a\")\n for post_node in post_nodes:\n ex_image_url = post_node.css(\"img::attr(src)\").extract_first(\"\")\n post_url = post_node.css(\"::attr(href)\").extract_first(\"\")\n image_url = \"http://news.lcudcc.edu.cn\" + ex_image_url\n yield Request(url=parse.urljoin(response.url, post_url), meta={\"front_image_url\":image_url}, callback=self.parse_detail)\n\n\n # 提取下一页并交给scrapy进行下载\n ex_next_url = response.css(\".Next::attr(href)\").extract_first(\"\")\n next_url = \"http://news.lcudcc.edu.cn/zhxw/\" + ex_next_url\n if next_url:\n yield Request(url=next_url, callback=self.parse)\n\n\n\n # content_src = \"http://news.lcudcc.edu.cn/\" + response.css(\".img_vsb_content::attr(src)\").extract_first(\"\")\n # content_vurl = \"http://news.lcudcc.edu.cn/\" + response.css(\".img_vsb_content::attr(vurl)\").extract_first(\"\")\n # content_orisrc = \"http://news.lcudcc.edu.cn/\" + response.css(\".img_vsb_content::attr(orisrc)\").extract_first(\"\")\n # if content_src:\n # yield Request(url=parse.urljoin(response.url, post_url), callback=self.parse)",
"def _extract_html(self, url):\n self.response = requests.get(url, timeout=5)\n self.html = BeautifulSoup(self.response.content, \"lxml\") if self.response.ok else None\n # return self.html",
"def scrape(self, html):\n \n links = []\n \n #check headers for redirects\n redirects = filter(lambda x: x[0] == 'location', self.headers)\n links.extend([x[1] for x in redirects])\n \n #scrape response body\n soup = bs(html)\n metalinks = soup.findAll('meta', attrs={'http-equiv':True})\n for m in metalinks:\n index = str(m).find('url=')\n end = str(m).find('\"',index, len(str(m)))\n if index != -1:\n link = str(m)[index+4:end]\n links.append(link)\n \n attrs = ['background', 'cite', 'codebase', 'href', 'longdesc', 'src']\n \n for a in attrs:\n links.extend(\n map(lambda x: x[a], soup.findAll(**{a:True}))# **{} unzips dictionary to a=True\n )\n \n return links",
"def parse_all_url(r,url):\n try:\n r_page = etree.HTML(r)\n except Exception as e:\n print(r)\n print(str(e))\n return False\n menus_el = r_page.xpath('//div[@class=\"list_nav\"]/ul/li')\n temp = []\n for x in menus_el:\n if x.find(\"a\") != None:\n temp.append({\"url\": list_url + x.find(\"a\").get(\"href\"),\n \"title\": x.find(\"a\").text.replace(\" \", \"\").replace(\"\\n\", \"\")})\n else:\n temp.append(\n {\"url\": url, \"title\": x.text.replace(\" \", \"\").replace(\"\\n\", \"\")})\n new_le = r_page.xpath('//div[@class=\"column_tite\"]/ul/li/a')\n if len(new_le) > 0:\n temp.append({\"url\": list_url + new_le[0].get(\n \"href\"), \"title\": new_le[0].text.replace(\" \", \"\").replace(\"\\n\", \"\")}) # 最新\n return temp",
"def baidu_parse(page):\n result_list = []\n a = []\n '''\n # 匹配中文,数字和英文的形式。。\n xx = u\"[\\u4e00-\\u9fa5a-zA-Z0-9]+\"\n pattern = re.compile(xx)\n '''\n \"\"\"\n class_list = []\n related_list = []\n a = []\n\n \n \n for t in page.xpath('//div[@class=\"opr-recommends-merge-content\"]/div[@class=\"cr-title c-clearfix\"]/span'):\n class1 = t.text\n class_list.append(class1)\n s = 0\n for t in page.xpath('//div[@class=\"opr-recommends-merge-panel opr-recommends-merge-mbGap\"] | //div[@class=\"opr-recommends-merge-panel\"]'):\n for q in t.xpath('.//div[@class=\"c-row c-gap-top\"]/div'):\n for m in q.xpath('./div[@class=\"c-gap-top-small\"]/a'):\n name1 = m.text\n url = m.get('href')\n url = 'http://www.baidu.com'+url\n related_list.append({ 'class1':class_list[s], 'name1':name1, 'url':url})\n s = s +1\n \"\"\"\n for i in range(1,101):\n a.append(['%d' %i])\n #print \"++++++++++++++++++++++++++++++++++\"\n #print type(page)\n for t in page.xpath('//div[@id=\"content_left\"]/div'):\n if t.xpath('./@id') in a:\n title = (''.join(t.xpath('./h3/a//text()')).strip())\n url = t.xpath('./h3/a/@href')[0]\n if url:\n content = ''\n for text in t.xpath('.//div[@class=\"c-abstract\"]//text() | .//div[@class=\"c-span18 c-span-last\"]/p[1]//text()'):\n content = content + text.strip()\n\n #content = ''.join((''.join(t.xpath('.//div[@class=\"c-abstract\"]/text() | .//div[@class=\"c-span18 c-span-last\"]/p[1]//text()')))\n #content = (' '.join(pattern.findall(''.join(t.xpath('.//div[@class=\"c-abstract\"]/text() | .//div[@class=\"c-abstract c-abstract-en\"]/text()')))))\n result_list.append({ 'title':title, 'url':url, 'content':content}) \n else:\n continue\n return result_list",
"def parse(self, response):\n # declare local common list which holds temporary URLs\n localCommonUrlList = list()\n urlList = list()\n\n # get request's root URL\n uri = urlparse(response.url)\n\n # find links within href=\"\"\n try:\n # do not store links which starts with \"#\" or \"mailto\"\n urlList = response.xpath('/html//a[starts-with(@href, \"\") \\\n and not(starts-with(@href, \"#\")) \\\n and not(contains(@href, \"mailto\"))]/@href'\n ).extract()\n # normalize list\n urlList = self.normalizeUrl(urlList, uri)\n # merge list with local common\n localCommonUrlList = list(set(localCommonUrlList + urlList))\n except BaseException:\n pass\n\n # find css links\n try:\n urlList = response.xpath('/html/head/link[contains(@href, \"\")]\\\n /@href').extract()\n # normalize list\n urlList = self.normalizeUrl(urlList, uri)\n # merge with common list\n localCommonUrlList = list(set(localCommonUrlList + urlList))\n except BaseException:\n pass\n\n # find links within src=\"\"\n try:\n urlList = response.xpath('//*[contains(@src, \"\")]/@src').extract()\n # normalize list\n urlList = self.normalizeUrl(urlList, uri)\n # merge with common list\n localCommonUrlList = list(set(localCommonUrlList + urlList))\n except BaseException:\n pass\n\n # parse inline css for URLs\n extractUrlPattern = '([(\"\\'])+(?P<url>[^)\"\\']+)'\n tmpInlineStyleUrls = list()\n try:\n urlList = response.xpath('//*[re:match(@style, \"url\\((.*?)\\)\")]\\\n /@style').extract()\n for link in urlList:\n item = re.search(extractUrlPattern, link)\n if item is not None:\n item = item.group('url')\n tmpInlineStyleUrls.append(item)\n # normalize list\n urlList = self.normalizeUrl(tmpInlineStyleUrls, uri)\n # merge with common list\n localCommonUrlList = list(set(localCommonUrlList + urlList))\n except BaseException:\n pass\n\n # TODO: parse external CSS\n\n if len(localCommonUrlList) > 0:\n # remove duplicates\n localCommonUrlList = list(set(localCommonUrlList))\n # write local list of URLs into a file (in append mode)\n self.writeToDisk(localCommonUrlList, 'a+')\n # for evey found link perform url scraping\n for link in localCommonUrlList:\n yield SplashRequest(link, args={'wait': 10})",
"def parse (self , url , html_cont):\n if url is None or html_cont is None :\n return\n\n soup = BeautifulSoup(html_cont , 'html.parser', from_encoding='utf-8')\n new_urls = self._get_new_urls(url , soup)\n new_data = self._get_new_data(url , soup)\n return new_urls , new_data",
"def fetch_all_urls(page, soup):\n page_urls = []\n try:\n write_content(page, soup.prettify().encode(\"utf-8\"))\n data = soup.findAll('div', attrs={'id': 'bodyContent'})\n for div in data:\n for link in div.findAll('a', {'href': re.compile('^/wiki/')}):\n href = link.get('href')\n # Avoid administrative pages having ':'\n if ':' in href:\n continue\n if \"Clinton_Foundation\" in href:\n print page\n url = Constants.WIKIPEDIA_BASE_URL + href\n # For '#' in url, extract the link up to '#'\n if '#' in url:\n url = url[:url.index('#')]\n page_urls.append(url.encode(\"utf-8\"))\n except:\n print \"Error while fetching page URLs!\"\n print traceback.format_exc()\n return page_urls",
"def parse(self, response):\n # url = response.xpath('//*[@id=\"entry_672279\"]/div[2]/h2/a/@href').extract_first(\"\")\n # url = response.xpath('//div[@id=\"news_list\"]//h2[@class=\"news_entry\"]/a/@href').extract()\n # sel = Selector(text=response.text)\n # urls = response.css('div#news_list h2.news_entry a::attr(href)').extract()\n\n # Start here\n post_nodes = response.css('#news_list div.news_block')\n for post_node in post_nodes:\n image_url = post_node.css('.entry_summary a img::attr(src)').extract_first(\"\")\n post_url = post_node.css('h2.news_entry a::attr(href)').extract_first(\"\")\n # 打开正文页面,继续爬取 -- 需要一次 yield\n yield Request(url=parse.urljoin(response.url, post_url),\n meta={'front_image_url': image_url},\n callback=self.parse_detail)\n\n # 提取下一页并交给 scrapy 进行下载\n next_name = response.css(\"div.pager a:last-child::text\").extract_first(\"\")\n # next_url = response.xpath('//a[contains(text(),\"Next >\")]/@href').extract_first(\"\")\n if next_name == \"Next >\":\n next_url = response.css(\"div.pager a:last-child::attr(href)\").extract_first(\"\")\n yield Request(url=parse.urljoin(response.url, next_url), callback=self.parse)",
"def _extract_url(self, content, url_level):\r\n tags = content.find_all('a')\r\n\r\n #iterate over all urls of current page\r\n for tag in tags:\r\n url = tag.get('href')\r\n \r\n # if url contains base url and url is not parsed already then parse the url\r\n if url.startswith(self.url) and url not in self.urls:\r\n self._parse_url(url, url_level)",
"def get_urls():\n page_urls = ['http://www.mzitu.com/page/{cnt}'.format(cnt=cnt)\n for cnt in range(1, 193)]\n print(\"Please wait for second ...\")\n img_urls = []\n for page_url in page_urls:\n try:\n bs = BeautifulSoup(\n requests.get(page_url, headers=HEADERS, timeout=60, proxies=proxies).text,\n 'lxml').find('ul', id=\"pins\")\n result = re.findall(r\"(?<=href=)\\S+\", str(bs)) # 匹配所有 urls\n img_url = [url.replace('\"', \"\") for url in result]\n img_urls.extend(img_url)\n except Exception as e:\n print(\"http请求异常================>\")\n print(e)\n return set(img_urls) # 利用 set 去重 urls",
"def explore(self): \n\n # Use Beautiful Soup to parse the HTML and get the href of a tags and src of img tags:\n page_data = BeautifulSoup(self.response.text, 'html.parser')\n links = [link.get('href') for link in page_data.find_all('a')]\n images = [image.get('src') for image in page_data.find_all('img')]\n\n for link in links:\n # Format the url:\n link = self.format_url(link)\n if link == '':\n continue\n\n # Append each valid link to self.links:\n self.links.append(link)\n\n for image in images:\n # Format the url:\n image = self.format_url(image)\n if image == '':\n continue\n\n # Append each valid image to self.images:\n self.images.append(image)",
"def parse_url(self, url):",
"def getPuzzleUrls(url: str) -> list:\n try:\n response = get(url)\n html_soup = BeautifulSoup(response.text, 'html.parser')\n puzzle_containers = html_soup.find_all('div', class_ = 'result')\n puzzle_urls = [website_url + container.a[\"href\"] for container in puzzle_containers]\n return puzzle_urls\n \n except:\n print(\"getPuzzleUrls: URL error \" + str(url))\n return None",
"def parse(self, response):\n self.parsed_pages.append(response.url)\n internal_links = []\n external_links = []\n\n urls = self.remove_invalid_links(response.xpath('//a/@href').extract())\n\n for url in urls:\n netloc = urlparse(url).netloc\n print(\"url: {} netloc: {}\".format(url, netloc))\n if netloc == '' or netloc in self.allowed_domains:\n internal_links.append(url)\n else:\n external_links.append(url)\n\n item = CrawlitItem()\n item['url'] = response.url\n item['static_content'] = response.xpath(\n '//*[contains(@src, \".\")]').xpath('@src').extract()\n item['internal_links'] = internal_links\n item['external_links'] = external_links\n yield item\n\n for url in internal_links:\n if url[:1] != \"#\" and url not in self.parsed_pages:\n yield response.follow(url, callback=self.parse)"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Parse challenge from a challenge response, cache it, and return it. | def _update_challenge(request: PipelineRequest, challenger: "PipelineResponse") -> HttpChallenge:
challenge = HttpChallenge(
request.http_request.url,
challenger.http_response.headers.get("WWW-Authenticate"),
response_headers=challenger.http_response.headers,
)
ChallengeCache.set_challenge_for_url(request.http_request.url, challenge)
return challenge | [
"def _parse_challenge(cls, response):\n links = _parse_header_links(response)\n try:\n authzr_uri = links['up']['url']\n except KeyError:\n raise errors.ClientError('\"up\" link missing')\n return (\n response.json()\n .addCallback(\n lambda body: messages.ChallengeResource(\n authzr_uri=authzr_uri,\n body=messages.ChallengeBody.from_json(body)))\n )",
"def challenge(self, challenge):\r\n # pylint: disable-msg=R0911,R0912\r\n if not challenge:\r\n logger.debug(\"Empty challenge\")\r\n return Failure(\"bad-challenge\")\r\n\r\n # workaround for some buggy implementations\r\n challenge = challenge.split(b'\\x00')[0]\r\n\r\n if self.response_auth:\r\n return self._final_challenge(challenge)\r\n realms = []\r\n nonce = None\r\n charset = \"iso-8859-1\"\r\n while challenge:\r\n match = PARAM_RE.match(challenge)\r\n if not match:\r\n logger.debug(\"Challenge syntax error: {0!r}\".format(challenge))\r\n return Failure(\"bad-challenge\")\r\n challenge = match.group(\"rest\")\r\n var = match.group(\"var\")\r\n val = match.group(\"val\")\r\n logger.debug(\"{0!r}: {1!r}\".format(var, val))\r\n if var == b\"realm\":\r\n realms.append(_unquote(val))\r\n elif var == b\"nonce\":\r\n if nonce:\r\n logger.debug(\"Duplicate nonce\")\r\n return Failure(\"bad-challenge\")\r\n nonce = _unquote(val)\r\n elif var == b\"qop\":\r\n qopl = _unquote(val).split(b\",\")\r\n if b\"auth\" not in qopl:\r\n logger.debug(\"auth not supported\")\r\n return Failure(\"not-implemented\")\r\n elif var == b\"charset\":\r\n if val != b\"utf-8\":\r\n logger.debug(\"charset given and not utf-8\")\r\n return Failure(\"bad-challenge\")\r\n charset = \"utf-8\"\r\n elif var == b\"algorithm\":\r\n if val != b\"md5-sess\":\r\n logger.debug(\"algorithm given and not md5-sess\")\r\n return Failure(\"bad-challenge\")\r\n if not nonce:\r\n logger.debug(\"nonce not given\")\r\n return Failure(\"bad-challenge\")\r\n return self._make_response(charset, realms, nonce)",
"def create_token(self, response):\n s = ssh.base64url_decode(response)\n\n if s[0] == 'r':\n # this is a version 0 response\n version_1 = False\n if self.lowest_supported_version > 0:\n raise exceptions.ProtocolVersionError(\n \"Client needs to support at least version %d\"\n % self.lowest_supported_version\n )\n r = protocol.Response.deserialize(s)\n if not r.hmac_challenge.verify(self._hmac):\n raise exceptions.InvalidInputException(\n \"Challenge hmac verification failed, not matching secret\"\n )\n challenge = protocol.Challenge.deserialize(r.hmac_challenge.payload)\n elif s[0] == '\\x01':\n # this is a version 1 response\n version_1 = True\n r = msgpack_protocol.Response.deserialize(s)\n challenge = msgpack_protocol.Challenge.deserialize_authenticated(\n r.challenge, self.secret)\n else:\n raise exceptions.ProtocolError(\"invalid first byte of response\")\n\n # verify the integrity of the challenge in the response\n if self.server_name != challenge.server_name:\n s = \"Got challenge with the wrong server_name encoded\"\n raise exceptions.InvalidInputException(s)\n\n key = self.key_provider.get_key(challenge.username)\n\n if challenge.valid_from > self.now_func():\n s = time.strftime(\"%Y-%m-%d %H:%M:%S UTC\",\n time.gmtime(challenge.valid_from))\n raise exceptions.InvalidInputException(\"Response with challenge \"\n \"created as %s too new \"\n % s)\n\n if challenge.valid_to < self.now_func():\n s = time.strftime(\"%Y-%m-%d %H:%M:%S UTC\",\n time.gmtime(challenge.valid_from))\n raise exceptions.InvalidInputException(\"Response with challenge \"\n \"created as %s too old \"\n % s)\n\n if version_1:\n if not key.verify_signature(r.signature, r.challenge):\n raise exceptions.InvalidInputException(\n \"Client did not provide proof that it controls \"\n \"the secret key\")\n else:\n if not key.verify_signature(r.signature, r.hmac_challenge.payload):\n raise exceptions.InvalidInputException(\n \"Client did not provide proof that it controls \"\n \"the secret key\")\n\n expire_time = int(self.now_func()) + self.token_lifetime\n\n return self._make_token(challenge.username, expire_time)",
"def _calcResponse(challenge, nonce, password):\n return sha1(b'%s %s %s' % (challenge, nonce, password)).digest()",
"def solve_challenge():\n\treturn (challenge[0]*challenge[1]-challenge[2]) * challenge[3] - challenge[4]",
"def _parse_challenge(header):\n # type: (str) -> Dict[str, str]\n ret = {}\n if header.startswith(BEARER):\n challenge_params = header[len(BEARER) + 1 :]\n\n matches = re.split(AUTHENTICATION_CHALLENGE_PARAMS_PATTERN, challenge_params)\n _clean(matches)\n ret = {}\n for i in range(0, len(matches), 2):\n ret[matches[i]] = matches[i + 1]\n\n return ret",
"def create_challenge_response(\n self,\n room_code: str,\n challenge_response: ChallengeResponse,\n ) -> GameInfo:\n game = self.read_game(room_code)\n\n if game.challenge is None:\n msg = f\"No challenge exists on game {room_code!r}\"\n raise InvalidMove(msg)\n if game.challenge.state != ChallengeState.AWAITING_RESPONSE:\n state = game.challenge.state.value\n msg = f\"Challenge is in {state!r} state, not 'AWAITING_RESPONSE'\"\n raise InvalidMove(msg)\n\n self.games_table.update_item(\n Key={\"room_code\": room_code},\n UpdateExpression=(\"set challenge.#chalresp=:r, challenge.#chalstate=:s\"),\n ExpressionAttributeValues={\n \":r\": challenge_response.dict(),\n \":s\": ChallengeState.VOTING,\n },\n ExpressionAttributeNames={\n # \"response\" and \"state\" are reserved words\n \"#chalstate\": \"state\",\n \"#chalresp\": \"response\",\n },\n ConditionExpression=Attr(\"challenge\").eq(game.dict()[\"challenge\"]),\n )\n\n return self.read_game(room_code)",
"def challenge(self, challenge):\r\n raise NotImplementedError",
"def read(challenge):\n data = {\n 'id': challenge.id,\n 'name': challenge.name,\n 'value': challenge.value,\n 'description': challenge.description,\n 'category': challenge.category,\n 'hidden': challenge.hidden,\n 'max_attempts': challenge.max_attempts,\n 'type': challenge.type,\n 'type_data': {\n 'id': IntermediateFlagChallenge.id,\n 'name': IntermediateFlagChallenge.name,\n 'templates': IntermediateFlagChallenge.templates,\n 'scripts': IntermediateFlagChallenge.scripts,\n }\n }\n return challenge, data",
"def challenge_response(self, challenge):\r\n key_1 = self.request.headers.get(\"Sec-Websocket-Key1\")\r\n key_2 = self.request.headers.get(\"Sec-Websocket-Key2\")\r\n try:\r\n part_1 = self._calculate_part(key_1)\r\n part_2 = self._calculate_part(key_2)\r\n except ValueError:\r\n raise ValueError(\"Invalid Keys/Challenge\")\r\n return self._generate_challenge_response(part_1, part_2, challenge)",
"def parse(self):\n\n # make sure we clear any old response stored\n self.code = None\n self.text = None\n\n # grab the first line from our relieved data (without line ending)\n index = self.data.find(\"\\r\\n\")\n line = self.data[:index]\n\n # break apart the response code and (optionally) the rest of the line\n match = re.match(r\"(\\d{3})(?: +(.+))?\", line)\n\n # check for match\n if match:\n # store our code and text\n self.code = match.group(1)\n self.text = match.group(2)\n\n # remove our processed line (including line endings)\n self.data = self.data[index+2:]\n\n # we are done\n return",
"def get_meta(challenge):\n host = config.get(challenge, 'host')\n port = config.get(challenge, 'port')\n url = \"http://%(host)s:%(port)s/stats\" % {\n 'host': host,\n 'port': port}\n r = requests.get(url)\n return make_json_response(r.text)",
"def read(challenge):\n\n data = {\n 'id': challenge.id,\n 'name': challenge.name,\n 'value': challenge.value,\n 'description': \"This challenge has not been unlocked yet. You need at least {} points to play.\".format(challenge.unlock_at),\n 'category': challenge.category,\n 'hidden': challenge.hidden,\n 'max_attempts': challenge.max_attempts,\n 'unlock_at': challenge.unlock_at,\n 'locked': True,\n 'type': challenge.type,\n 'type_data': {\n 'id': CTFdLockingChallenge.id,\n 'name': CTFdLockingChallenge.name,\n 'templates': CTFdLockingChallenge.templates,\n 'scripts': CTFdLockingChallenge.scripts,\n },\n }\n\n if session.get('admin') or not locked(challenge):\n data['locked'] = False\n data['description'] = str(challenge.description)\n\n return challenge, data",
"def decode(self, response, request):\n log.debug(\"Decoding authorization.\")\n auth = self._parseAuth(response)\n try:\n self._verifyChallenge(auth[\"challenge\"], request)\n creds = self.buildCredentials(auth, request)\n except KeyError, ke:\n raise LoginFailed(\"{0!r} not in authorization\".format(*ke.args))\n except LoginFailed, lf:\n log.warn(lf)\n raise\n log.debug(\"Decoded credentials: {0}\".format(creds))\n return creds",
"def get(self):\n try:\n imageFilename = random.choice(os.listdir(self.cacheDir))\n imagePath = os.path.join(self.cacheDir, imageFilename)\n with open(imagePath) as imageFile:\n self.image = imageFile.read()\n except IndexError:\n raise GimpCaptchaError(\"CAPTCHA cache dir appears empty: %r\"\n % self.cacheDir)\n except (OSError, IOError):\n raise GimpCaptchaError(\"Could not read Gimp captcha image file: %r\"\n % imageFilename)\n\n self.answer = imageFilename.rsplit(os.path.extsep, 1)[0]\n self.challenge = self.createChallenge(self.answer)\n\n return (self.image, self.challenge)",
"def getChallenge(self, request):\n return self.digest.getChallenge(request.getClientAddress().host)",
"def create_challenge(challenge): # noqa: E501\n if connexion.request.is_json:\n challenge = Challenge.from_dict(connexion.request.get_json()) # noqa: E501\n return 'do some magic!'",
"def parse(self, answer):\n\n body = answer.json()\n log.debug(answer.status_code)\n log.debug(body)\n\n if \"result\" in body and \"challenge\" in body[\"result\"]:\n self.challenge = body[\"result\"][\"challenge\"]\n\n if answer.status_code == 403:\n if body['error_code'] == \"invalid_token\":\n self.session_token = None\n log.warning(\"Invalid token, delete existing token.\")\n raise InvalidToken(body)\n\n elif body['error_code'] == 'auth_required':\n log.debug('Require authentication.')\n raise AuthRequired(body)\n\n log.warning(\"Not authenticated.\")\n raise ApiError(body)\n\n if answer.status_code != 200:\n log.critical(\"Status code not handled.\")\n raise\n\n if not body[\"success\"]:\n log.error(\"Message body not success.\")\n raise\n\n if \"result\" in body:\n return body[\"result\"]\n else:\n return True",
"def decode_response(self, challenge, userresponse):\n if len(userresponse) > 100:\n return 0\n shuzi = (1, 2, 5, 10, 50)\n chongfu = []\n key = {}\n count = 0\n for i in challenge:\n if i in chongfu:\n continue\n else:\n value = shuzi[count % 5] \n chongfu.append(i)\n count += 1\n key.update({i: value})\n res = 0\n for i in userresponse:\n res += key.get(i, 0)\n return res"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
check if the reference folder is in place and all attributes are ready | def check_reference_ready():
# check to see if there is a manifest file in the default reference path
manifest_file = os.path.join(settings.DEFAULT_REFERENCE_PATH, 'manifest.json')
if not os.path.isfile(manifest_file):
_log("manifest.json file cannot be found in the reference folder; simulation will NOT work!")
return
_log("reading manifest.json ..")
# read the manifest file
with open(manifest_file, 'r') as manifest:
data = json.load(manifest)
reference_fasta = os.path.join(settings.DEFAULT_REFERENCE_PATH, data["reference"])
if not os.path.isfile(reference_fasta):
_log("genome reference file (.fasta | .fa) cannot be found in the reference folder; simulation will NOT work!")
return
_log("found all required simulation files in place; simulation is READY!")
settings.REFERENCE_READY = True
settings.INPUT_FILES = {"reference": data['reference'], "targets": 'dummy'} | [
"def _ref_exists(self):\n self._collect_soft_chain()\n\n found_ref = False\n for node in self.nodes:\n if node.id == self.ref:\n self.file = os.path.join(Constants.ALTER_DIR, node.filename)\n found_ref = True\n break\n\n return found_ref",
"def _pre_update(self):\n if utils.createFolderPath(self.temp_path) == False:\n return False\n if utils.createFolderPath(self.publish_path) == False:\n return False\n return True",
"def do_file_check(self):",
"def isFromReferencedFile(self):\n \n pass",
"def copy_file_check(self):\n pass",
"def _retrieveAndCheckFilePaths(self):\n # Verify that the base ontology file exists.\n fpath = self.config.getBaseOntologyPath()\n if not(os.path.isfile(fpath)):\n raise RuntimeError(\n 'The base ontology file could not be found: {0}.'.format(fpath)\n )\n self.base_ont_path = fpath\n\n # Verify that the build directory exists.\n destdir = os.path.dirname(self.getOutputFilePath())\n if not(os.path.isdir(destdir)):\n raise RuntimeError(\n 'The destination directory for the updated base ontology file '\n 'does not exist: {0}.'.format(destdir)\n )",
"def update(self):\r\n self.check_working_dir()",
"def checkRefs(self, export_refs):\r\n return True",
"def exists(self):\r\n return self.filereferencedata != {}",
"def indirect_assets_check(self): #wayne\n import re\n item_prefix = [\"cam\", \"fcs\", \"lkt\", \"grp.stuff\"]\n for obj in self.scene.objects:\n if obj.name.startswith(\"grp.stuff\"):\n pass\n elif obj.name.startswith('grp'):\n proxy = re.search(\"proxy\", obj.name)\n if not proxy:\n try:\n group_name = obj.dupli_group.name\n group = bpy.data.groups[group_name]\n if group.library:\n pass\n else:\n print(\"not linked\")\n if obj.name.startswith(tuple(item_prefix)):\n pass\n else:\n #newcheck\n match_response = False\n check_grp = False\n for grp in bpy.data.groups:\n if grp.name == \"grp.stuff\":\n check_grp = True\n if check_grp:\n for match in bpy.data.groups[\"grp.stuff\"].objects:\n if match.name == obj.name:\n match_response = True\n if match_response:\n pass\n else:\n self.error_log.append(\"%s is not linked\" %(obj.name))\n except:\n pass # print('NO DUPLI GROUP')\n else: \n if obj.library:\n pass # print(obj.library.filepath), print(\"%s is linked\" %obj.name)\n else: \n if obj.name.startswith(tuple(item_prefix)):\n pass\n else:\n match_response = False\n check_grp = False\n for grp in bpy.data.groups:\n if grp.name == \"grp.stuff\":\n check_grp = True\n if check_grp:\n for match in bpy.data.groups[\"grp.stuff\"].objects:\n if match.name == obj.name:\n match_response = True\n if match_response:\n pass\n else:\n self.error_log.append(\"%s is not linked\" %(obj.name))",
"def _check_integrity(self):\n root = self.root\n for scene_name in self.scene_list:\n if not(os.path.isdir(os.path.join(root,scene_name)) and \n os.path.isdir(os.path.join(root,scene_name, images_dir)) and\n os.path.isfile(os.path.join(root,scene_name,annotation_filename))):\n return False\n return True",
"def __checkDestination(self):\n return os.path.exists(self.__targetPath)",
"def verify_attrs(self):\n self.verify_namespace_attrs(self.newlibrary.wrap_namespace)",
"def _check_existence(self):\n\n self.abort = False\n tumor_paths_exist = [os.path.exists(path) \\\n for path in self.tumor_paths]\n tissue_paths_exist = [os.path.exists(path) \\\n for path in self.tissue_paths]\n if not (False in tumor_paths_exist) and \\\n not (False in tissue_paths_exist) and \\\n os.path.exists(self.metadata_path):\n print 'This data seems to have already been ' + \\\n 'processed (possibly with different parameters)'\n\n if not self.overwrite:\n self.abort = True\n else:\n print 'Pre-calculated files are NOW being removed !!!'\n for the_file in os.listdir(self.save_dir):\n file_path = os.path.join(self.save_dir, the_file)\n if file_path != self.tmp_metadata_path:\n try:\n if os.path.isfile(file_path):\n os.remove(file_path)\n elif os.path.isdir(file_path):\n shutil.rmtree(file_path)\n except Exception, err:\n print err\n\n if not self.abort:\n pickle_path = os.path.join(self.save_dir,\n 'metadata.pickle')\n pickle.dump(self.metadata,\n open(pickle_path, 'w'))\n\n os.rename(self.tmp_metadata_path,\n self.metadata_path)",
"def check_data(self):\n\t\t#creating a list of name of all images and their extension\n\t\timg_name, img_ext = self.create_lst_name_ext(self.image_dir)\n\t\t#creating a list of name of all xmls and their extension\n\t\txml_name, xml_ext = self.create_lst_name_ext(self.xml_dir)\n\t\t# directory where all remove images to be save\n\t\tremove_img_dir = self.remove_files(self.image_dir)\n\t\t# directory where all remove xml to be save\n\t\tremove_xml_dir = self.remove_files(self.xml_dir)\n\n\t\t#if the file from img_name is not in xml_name then remove it from img_name and vice-versa\n\t\tself.compare_directory(self.image_dir,remove_img_dir,img_name,xml_name,img_ext)\n\t\tself.compare_directory(self.xml_dir,remove_xml_dir,xml_name,img_name,xml_ext)",
"def base_data_check_shot(self):\n\n #alembic_dir\n alembic_dir = self.alembic_functionality.get_parm_value(self.node, 'alembic_dir')\n \n #is False\n if not (alembic_dir):\n #log\n self.logger.debug('Parameter alembic dir empty.')\n return False\n\n #dir exists\n if not (os.path.isdir(alembic_dir)):\n #log\n self.logger.debug('Alembic dir {0} does not exist.'.format(alembic_dir))\n return False\n\n\n #alembic_path_list\n alembic_path_list = [os.path.join(alembic_dir, file).replace('\\\\', '/') for \n file in \n os.listdir(alembic_dir) if \n (os.path.isfile(os.path.join(alembic_dir, file)) and file.split('.')[-1] == 'abc')]\n #alembic_path_list empty\n if not (alembic_path_list):\n #log\n self.logger.debug('alembic_path_list empty. Alembic dir {0} does not seem to contain alembic files.'.format(alembic_dir))\n return False\n\n\n #checked_alembic_path_list\n checked_alembic_path_list = []\n\n #iterate\n for alembic_path in alembic_path_list:\n\n #object_path_list\n object_path_list = self.alembic_functionality.get_alembic_object_path_list(alembic_path)\n #object_path_list empty\n if not (object_path_list):\n #log\n self.logger.debug('Object path list for alembic {0} empty. Continuing'.format(alembic_path))\n continue\n\n #iterate, check and create\n for object_path in object_path_list:\n\n #helga_locator_attr_exists\n helga_locator_attr_exists = self.alembic_functionality.alembic_attribute_exists(alembic_path, object_path, 'helga_locator')\n\n #helga_highpoly_rendergeo_attr_exists\n helga_highpoly_rendergeo_attr_exists = self.alembic_functionality.alembic_attribute_exists(alembic_path, object_path, 'helga_highpoly_rendergeo')\n\n #if attr exists append and break\n if (helga_locator_attr_exists and helga_highpoly_rendergeo_attr_exists):\n\n #append\n checked_alembic_path_list.append(alembic_path)\n break\n\n #checked_alembic_path_list empty\n if not (checked_alembic_path_list):\n #log\n self.logger.debug('checked_alembic_path_list empty. Alembic dir {0} does not seem to contain alembic files with helga_highpoly_rendergeo attribute.'.format(alembic_dir))\n return False\n\n\n #alembic_highpoly_rendergeo_dir\n alembic_highpoly_rendergeo_dir = self.alembic_functionality.get_parm_value(self.node, 'alembic_highpoly_rendergeo_dir')\n \n #is False\n if not (alembic_highpoly_rendergeo_dir):\n #log\n self.logger.debug('Parameter alembic highpoly rendergeo dir empty.')\n return False\n\n #dir exists\n if not (os.path.isdir(alembic_highpoly_rendergeo_dir)):\n #log\n self.logger.debug('Alembic highpoly rendergeo dir {0} does not exist.'.format(alembic_highpoly_rendergeo_dir))\n return False\n\n\n #return\n return [checked_alembic_path_list, alembic_highpoly_rendergeo_dir]",
"def test_exist_entry_on_rebuild(self):\n self.validate_attributes_in_exist_response()",
"def check_folder_state(self):\n while self:\n diff = self.get_diff()\n print(diff or 'No changes detected')\n if diff:\n self.parent.send_diff_data(diff)\n time.sleep(1)",
"def verify(self) -> None:\n # Verify folder existance\n if not Path(self.path).is_dir():\n Path(self.path).mkdir(parents=True, exist_ok=True)\n\n # Update info file\n with open(self.info_path, \"w\", encoding=\"utf-8\") as file:\n json.dump({\"id\": self.id, \"name\": self.guild.name}, file)"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Callback to be called whenever the system state has changed. Checks whether or not the step has to be advanced | def updateState(self):
if ('cutting' in self.step_ops) and (self.cut_state.user_cutting):
self.step_ops['cutting'] = True
if ('cooking' in self.step_ops) and (self.cut_state.user_cooking):
self.step_ops['cooking'] = True
# TODO: add the rest of the operations
advance = True
# Check if ALL operations are complete
for op in self.step_ops:
if self.step_ops[op] == False:
advance = False
break
if advance:
self.nextStep() | [
"def update_aux_state(self, step, *args, **kwargs):\n return",
"def _voltage_changed(self):\n if self.checkValueBool:\n self.check_status()",
"def has_state_changed(self) -> bool:\r\n ...",
"def update_step(self):\n #---------------\n # set next step\n #---------------\n next_step = self.curr_step + 1\n #--------------\n # did we fail?\n #--------------\n if self.state == \"failed\":\n self.fwk.logEvent(self.name, None, 'all_fail', 'simulation failed before completing its work')\n if self.fwk.debug:\n print(\" ##### failed\", self.fwk.fwk_global_time, self.curr_phase, self.curr_step)\n print(\"bad things happened and now we are shutting down\")\n del self.my_comps\n self.my_comps = {}\n #self.state = 'shutdown'\n #self.my_comps.update({'shutdown':self.my_overheads['shutdown']})\n #self.my_comps['shutdown'].state = \"ready\"\n #if self.fwk.debug:\n # print \"new my comps\", self.my_comps.keys()\n self.is_done = True\n return False\n\n #-----------------------------------------\n # are we ready for the next step??\n #-----------------------------------------\n for c in list(self.my_comps.values()):\n if isinstance(c, component) and c.ready_for_step < next_step:\n if c.ready_for_step < next_step - 1:\n print('ready for step not equal to curr step!!! (%s-%s: ready for step: %d -- curr step: %d)' % (c.phase, c.name, c.ready_for_step, self.curr_step))\n raise\n return False\n\n #-------------------------------------------------\n # ready to go to the next step\n # - if ckpt_on, checkpoint if necessary\n # - check to see if we have entered a new phase\n #-------------------------------------------------\n\n if self.ckpt_on:\n if self.curr_phase_index == (len(self.phases) - 1) and next_step > self.phases[self.curr_phase].nsteps:\n if self.fwk.debug:\n print(\"not checkpointing because it is the last step of the last phase!!!!!!!!!!!!!!!!!!!\")\n else:\n if self.ckpt_mode == 'phys_explicit':\n #print \">\",\n if self.next_ckpt and self.curr_step >= self.next_ckpt:\n try:\n self.next_ckpt = self.ckpt_values.pop(0)\n except:\n #print \" &&&&&&&\"\n self.next_ckpt = None\n return True\n elif self.ckpt_mode == 'phys_regular':\n if self.curr_step >= self.next_ckpt:\n self.next_ckpt += self.ckpt_interval\n return True\n elif self.ckpt_mode == 'wall_explicit':\n if self.next_ckpt and self.fwk.fwk_global_time >= self.next_ckpt:\n try:\n self.next_ckpt = self.ckpt_values.pop(0)\n except:\n #print \" &&&&&&&\"\n self.next_ckpt = None\n return True\n elif self.ckpt_mode == 'wall_regular':\n if self.fwk.fwk_global_time >= self.next_ckpt:\n self.next_ckpt += self.ckpt_interval\n return True\n\n self.curr_step = next_step\n #print 'new step for', self.name\n\n #------------------------------\n # write log message\n #------------------------------\n self.fwk.logEvent(self.name, None, \"end_step\", \"ending step %d\" % (self.curr_step - 1))\n if self.fwk.debug:\n print(\"curr step\", self.curr_step, \"next phase at\", self.phases[self.curr_phase].nsteps, \"steps\")\n\n #------------------------------\n # done with rework?\n #------------------------------\n if self.state == \"rework\" and self.curr_phase == self.rework_done_phase and self.curr_step >= self.rework_done_step:\n #print '>'\n self.state = \"work\"\n #self.total_rework_time += self.fwk.fwk_global_time - self.start_rework_time\n if self.fwk.debug:\n print(\" ##### work\", self.fwk.fwk_global_time, self.curr_phase, self.curr_step)\n self.fwk.logEvent(self.name, None, \"state_change\", \"work\")\n\n #------------------------------------------\n # account for newly completed step as work\n #------------------------------------------\n if self.state == 'work':\n self.total_steps += 1\n #print \"adding a step\", self.curr_step\n\n 
#------------------------------\n        # ready for next phase?\n        #------------------------------\n        if self.curr_step > self.phases[self.curr_phase].nsteps:\n            # end of phase\n            self.curr_step = 1\n            self.is_done = self.get_next_phase()\n\n        #------------------------------\n        # if not done, set up comps for new step\n        #------------------------------\n        if not self.is_done:\n            #self.fwk.logEvent(self.name, None, \"start_step\", \"starting new step %d\" % self.curr_step)\n            # set all components to beginning of step\n            for c in list(self.my_comps.values()):\n                c.state = 'not_done'\n                c.ready_for_step = self.curr_step\n        return False",
"def state_changed(self, game_state: GameState):\n pass",
"def update_on_timestep(self):\n raise NotImplementedError",
"def update(self):\n\t\t# If being controlled by COM\n\t\tif self.controled_by_com :\n\t\t\t# Substract 1 from the update counter\n\t\t\tself.update_counter -= 1\n\t\t\t# If the update counter reaches zero\n\t\t\tif self.update_counter == 0. :\n\t\t\t\t# then ask for an action \n\t\t\t\tif self.intermediate_phase is False :\n\t\t\t\t\tself.action_required = True \n\t\t\t\t\t\t\n\t\t\t\t# if during a change\n\t\t\t\t# then make the change\n\t\t\t\tif self.intermediate_phase is True : \n\t\t\t\t\tself.action_required = False\n\t\t\t\t\tself._color_changer() #Make the change in the Simulator\n\t\telse :\n\t\t\tpass",
"def onCheckStateChanged(self, state):",
"def notify_change(self):\r\n data = (self.busy, self.debug, self.profile)\r\n self.publish_msg( eng_messages.ENGINE_STATECHANGE+'.'+self.name, data)",
"def state_changed(self):\n return self.checked.stateChanged",
"def step(self, state):",
"def increase_global_step(self):\n pass",
"def _step_callback(self):\n pass",
"def stepText2Changed(build, step, text2):",
"def user_state_changed(self, state):\n pass",
"def __call__(self, valid_stats, step):\n\n if self.status == PatienceEnum.STOPPED:\n # Don't do anything\n return\n\n if all([scorer.is_improving(valid_stats) for scorer\n in self.early_stopping_scorers]):\n self._update_increasing(valid_stats, step)\n\n elif all([scorer.is_decreasing(valid_stats) for scorer\n in self.early_stopping_scorers]):\n self._update_decreasing()\n\n else:\n self._update_stalled()",
"def state_wait_do(cfg, app, win, events):",
"def try_advance(self):\n if not self.step.toclick:\n self.step.finished = True\n return True\n return False",
"def _on_step(self) -> None:\n self._n_calls += 1\n # Account for multiple environments\n # each call to step() corresponds to n_envs transitions\n if self._n_calls % max(self.target_update_interval // self.n_envs, 1) == 0:\n polyak_update(self.q_net.parameters(), self.q_net_target.parameters(), self.tau)\n # Copy running stats, see GH issue #996\n polyak_update(self.batch_norm_stats, self.batch_norm_stats_target, 1.0)\n\n self.exploration_rate = self.exploration_schedule(self._current_progress_remaining)\n self.logger.record(\"rollout/exploration_rate\", self.exploration_rate)"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Constructor for thread that will request the RSS of a particular podcast series, parse the series details and episode information, and save the information w/`storer` | def __init__(self, storer, series, i):
super(EpisodeWorker, self).__init__()
self.storer = storer
self.series = series # All series
self.i = i | [
"def __init__(self, url):\n\n self.url = url\n self.feed, self.keys, self.entries = self.parse_rss_feed()",
"def __init__(self, url=URL):\n self.entries = feedparser.parse(url).entries",
"def __init__(self, thoonk, feed):\n Queue.__init__(self, thoonk, feed)\n\n self.feed_publishes = 'feed.publishes:%s' % feed\n self.feed_published = 'feed.published:%s' % feed\n self.feed_cancelled = 'feed.cancelled:%s' % feed\n self.feed_retried = 'feed.retried:%s' % feed\n self.feed_finishes = 'feed.finishes:%s' % feed\n self.feed_claimed = 'feed.claimed:%s' % feed\n self.feed_stalled = 'feed.stalled:%s' % feed\n self.feed_running = 'feed.running:%s' % feed\n \n self.job_finish = 'job.finish:%s' % feed",
"def load_podcasts(channel, channel_id):\n\n print \"Podcasts\", channel[\"feed\"].get(\"title\")\n\n # channel_name = channel[\"feed\"].get(\"title\")\n\n # channel_id = Channel.query.filter_by(channel_name=channel_name).one().channel_id\n\n for podcast in channel[\"items\"]:\n # iterating through keys in items dict\n all_links = podcast.get(\"links\")\n all_images = podcast.get(\"image\")\n\n author = podcast.get(\"author\")\n title = podcast.get(\"title\")\n podcast_url = podcast.get(\"link\")\n summary = podcast.get(\"summary\")\n\n # duration is in a different format for each rss feed, so this uniforms all into seconds\n podcast_duration = podcast.get(\"itunes_duration\")\n\n if podcast_duration and \":\" in podcast_duration.encode('utf-8'):\n\n splitted = podcast_duration.encode('utf-8').split(\":\")\n\n if len(splitted) == 3:\n hours_to_secs = int(splitted[0]) * 3600\n mins_to_secs = int(splitted[1]) * 60\n podcast_duration = hours_to_secs + mins_to_secs + int(splitted[2])\n\n if len(splitted) == 2:\n mins_to_secs = int(splitted[0])*60\n podcast_duration = mins_to_secs + int(splitted[1])\n else:\n podcast_duration = 0\n\n # converting to a datetime from a python timestruct:\n # http://stackoverflow.com/questions/1697815/how-do-you-convert-a-python-time-struct-time-object-into-a-datetime-object/18726020\n python_timestruct = podcast.get(\"published_parsed\")\n\n if python_timestruct:\n released_at = datetime.fromtimestamp(mktime(python_timestruct))\n\n # creating an image url variable, to avoid 'referenced before created error'\n image_url = None\n\n if all_images:\n image_url = all_images.get(\"href\")\n\n # creating an play url variable, to avoid 'referenced before created error'\n play_url = None\n\n if all_links:\n for link in all_links:\n if link.type == \"audio/mpeg\" or link.type == \"video/mp4\" or link.type == \"audio/x-mpeg\":\n # checking for type of link so we get the correct url\n play_url = link.get(\"href\")\n\n # we only want to add episodes that have a play_url to the database\n if play_url:\n podcast = Podcast(channel_id=channel_id,\n author=author,\n title=title,\n podcast_url=podcast_url,\n play_url=play_url,\n released_at=released_at,\n image_url=image_url,\n summary=summary,\n podcast_duration=podcast_duration,)\n\n # adds instance to the session so it will be stored\n db.session.add(podcast)\n\n # committing to the database\n db.session.commit()",
"def __init__(self, channelMeta={}, parse=False, filename=None):\n global minidom, random\n if not minidom: raise ImportError('minidom not imported')\n if not random: raise ImportError('random not imported')\n object.__init__(self)\n self.feed = minidom.Document()\n self.rss = self.feed.createElement('rss')\n self.rss.setAttribute('version', '2.0')\n self.channel = self.feed.createElement('channel')\n self.channelMeta = channelMeta\n self.filename = filename\n self.items = []\n self.itemsQuaDict = []\n if parse: self.parse()",
"def __init__(self):\n\t\tsuper(SpyEyeTrackerCnC, self).__init__(name=\"SpyEye Tracker C&C feed\", rss_url=\"https://spyeyetracker.abuse.ch/monitor.php?rssfeed=tracker\")",
"def __init__( self ):\n\n self.log = gLogger.getSubLogger( self.__class__.__name__ )\n self.rssConfig = RssConfiguration()\n self.__opHelper = Operations()\n self.rssClient = None\n\n # We can set CacheLifetime and CacheHistory from CS, so that we can tune them.\n cacheLifeTime = int( self.rssConfig.getConfigCache() )\n\n # RSSCache only affects the calls directed to RSS, if using the CS it is not\n # used.\n self.seCache = RSSCache( 'StorageElement', cacheLifeTime, self.__updateSECache )",
"def from_rss(\n cls, entry: feedparser.FeedParserDict, feed: feedparser.FeedParserDict\n ) -> \"FeedEntry\":\n try:\n time = datetime.datetime(*entry.published_parsed[:6]).isoformat()\n except (AttributeError, TypeError):\n time = datetime.datetime.now().isoformat()\n return cls(\n title=entry.get(\"title\"),\n summary=entry.get(\"summary\"),\n link=sanitise_url(entry.get(\"link\")),\n time=time,\n feed=feed.feed.get(\"title\"),\n source=feed.get(\"href\"),\n )",
"def episode():\n return pyres.episode.Episode(base_path='path', date=time.localtime(),\n title='title', url='link',\n podcast='podcast_name')",
"def __init__(self, rss_id):\n npr_url = \"http://www.npr.org/rss/rss.php?id=%s\" % rss_id\n self._root = ElementTree.fromstring(urllib2.urlopen(npr_url).read())",
"def __init__(self, title):\n # will hit the TMDB API on every instantiation\n search = tmdb.Search()\n response = search.movie({'query': title})\n\n # if there are any results to querying for the title, take the first result\n if len(search.results) > 0:\n self.ID = uuid.uuid4()\n self.TMDB_ID = search.results[0]['id']\n movie = tmdb.Movies(self.TMDB_ID).info() # get all the information available\n\n # save off a few interesting attributes\n self.title = movie['title']\n self.release_date = movie['release_date']\n self.popularity = movie['popularity']\n self.overview = movie['overview']\n else:\n self.initialize()\n print \" ##### Warning: could not find any matches for %s\" % title",
"def __init__(self, title, storyline, poster_image_url, trailer_url, release_year,\n duration, mpaa_rating):\n self.title = title\n self.storyline = storyline\n self.poster_image_url = poster_image_url\n self.trailer_youtube_url = trailer_url\n self.release_year = release_year\n self.duration_in_minutes = duration\n self.mpaa_rating = mpaa_rating",
"def __init__(self, movie_title, release_date, movie_stars, movie_storyline,\n poster_image, trailer_youtube):\n self.title = movie_title\n self.release_date = release_date\n self.movie_stars = movie_stars\n self.storyline = movie_storyline\n self.poster_image_url = poster_image\n self.trailer_youtube_url = trailer_youtube",
"def do_pre_parsing(self) -> None:\n self.episode.index = generate_post_index(self.final_location, self.used_indexes)\n if self.episode.index == 0:\n raise NotEpisodeURLError(self.final_location, \"Can't parse episode number\")\n\n self.episode.url = self.final_location\n current_date_utc = datetime.now(timezone.utc)\n self.episode.parsed_at = current_date_utc.strftime(r\"%Y-%m-%dT%H:%M:%S.%fZ\")\n self.episode.updated_at = self.episode.parsed_at\n\n self.episode.episode = parse_episode_number(self.episode.post_title)\n\n full_soup = BeautifulSoup(self.content, \"lxml\")\n if full_soup.title is not None:\n self.episode._title = full_soup.title.string\n else:\n self.episode._title = \"NO TITLE!\"\n del full_soup\n\n if not self.is_url_ok:\n self.episode.url = self.final_location\n self.episode.admin_note = self.content[:50]\n raise LepEpisodeNotFoundError(self.episode)",
"def run_newsreel(self):\n\n self.feed_title.config(text='Getting next feed')\n\n \"\"\" \n TODO: Consider using kwargs and queue threads for updating settings at \n runtime-\n TODO: see if that is even possible.\n \"\"\"\n\n try:\n if self.thread_queue.empty():\n _next_feed = self.ctrl.next_feed()\n self.new_thread = threading.Thread(\n target=update_feed\n , kwargs={\n 'thread_queue': self.thread_queue,\n 'feed': _next_feed\n }\n )\n self.new_thread.start()\n self.after(100, self.listen_for_result)\n except Exception as e:\n print(e)",
"def extract_tvseries(dom):\n\n # opening the url, offload it to variable\n Client = urlopen(TARGET_URL)\n page_html = Client.read()\n Client.close()\n\n # parse the html\n page_soup = BeautifulSoup(page_html, \"html.parser\")\n\n # get each serie\n items = page_soup.findAll(\"div\",{\"class\":\"lister-item-content\"})\n\n # loop for every item on page\n tvseries = []\n for item in items:\n\n # get title\n title = item.a.text \n \n # get rating\n rating = item.strong.text\n \n # get genres\n genre = item.find(\"span\",{\"class\":\"genre\"}).text.strip()\n\n # get actors\n actor_html = item.findAll(\"a\", href=re.compile(\"name\"))\n actors = []\n\n # iterate over different actors in html\n for actor in actor_html:\n actor_names = actor.text\n actors.append(actor_names)\n\n actors = \", \".join(actors)\n \n # get runtime\n runtime = item.find(\"span\",{\"class\":\"runtime\"}).text.strip(' min')\n\n # append to series\n tvseries.append((title, rating, genre, actors, runtime))\n\n return tvseries",
"def scrap_episode(self,epi_tag,season):\n # get the div where the episode info is and scrap the usefull content \n info = epi_tag.find(attrs={'class':'info'})\n \n # get some info of the episode, can be improved and get more if necessaryY\n air_date = re.sub(rx,' ',info.find(attrs={'class':'airdate'}).get_text()).strip()\n try:\n air_date: datetime.strptime(air_date,'%d %b. %Y')\n except:\n # no specific date to episode, only year\n air_date: datetime.strptime(air_date,'%Y')\n try:\n rating = float(info.find(attrs={'class':'ipl-rating-star__rating'}).get_text())\n total_votes = int(info.find(attrs={'class':'ipl-rating-star__total-votes'}).get_text()[1:-1].replace(',',''))\n except:\n rating = None\n total_votes = None\n episode_number = int(info.find(attrs={'itemprop':'episodeNumber'})['content'])\n episode_description = re.sub(rx,' ',info.find(attrs={'itemprop':'description'}).get_text()).strip()\n episode_title = info.find(attrs={'itemprop':'name'}).get_text()\n episode_info = {\n 'episode_number': episode_number,\n 'info':{\n 'rating': rating,\n 'total_votes': total_votes,\n 'air_date': air_date,\n 'description': episode_description,\n 'episode_title': episode_title,\n 'season':season,\n 'short':season+'.E'+str(episode_number)\n }\n }\n # print(episode_info)\n return episode_info",
"def parse(self, filename=None, rawfeed=None, parsedfeed=None, itemsonly=False):\n if filename:\n if not os.path.isfile(filename): return None\n p = feedparser.parse(filename)\n elif rawfeed: p = feedparser.parse(rawfeed)\n elif parsedfeed: p = parsedfeed\n elif self.filename:\n if not os.path.isfile(self.filename): return None\n p = feedparser.parse(self.filename)\n else: raise Exception, \"Must give either a rawfeed, filename, set self.filename, or parsedfeed\"\n if not itemsonly:\n if 'updated' in p['feed']:\n p['feed']['pubDate'] = p['feed']['pubdate'] =p['feed']['updated']\n elif 'updated_parsed' in p['feed']:\n p['feed']['pubDate'] = p['feed']['pubdate'] = time.strftime(self._date_fmt, p['feed']['updated_parsed'])\n self.channelMeta = p['feed']\n self.itemsQuaDict.extend(p['entries'])",
"def scan_thread(self, response):\n story_item = response.meta.get(\"story_item\")\n print(\"\\nscraping thread {0}\\n\".format(response.url))\n\n # div_tmarks is a list of all threadmarked posts on this story thread\n # ...at least on this PAGE of the story.\n div_tmarks = response.xpath(\"//li[contains(@class, 'hasThreadmark')]\")\n \n if div_tmarks is not None and len(div_tmarks) > 0:\n\n for div_tmark in div_tmarks:\n # story_seg = StorySegment()\n\n author = div_tmark.xpath(\"@data-author\").extract_first()\n\n author_seg, created = Author.objects.get_or_create(name=author)\n\n title = \"\".join(div_tmark.xpath(\"div/span/text()\").extract()).encode('utf-8')\n title = \" \".join(title.split())\n\n # Get the Date and clean it up/format it ======================================\n date = div_tmark.xpath(\".//span[@class='DateTime' and ../@class!='editDate']/@title\").extract_first()\n if date is None:\n date = div_tmark.xpath(\".//abbr[@class='DateTime']/text()\").extract_first()\n date_obj = datetime.strptime(date, \"%b %d, %Y at %I:%M %p\")\n date_obj = date_obj.replace(tzinfo=utc)\n # story_seg.published = date_obj\n # =============================================================================\n\n story_seg, seg_created = StorySegment.objects.get_or_create(story=story_item,\n title=title,\n published=date_obj)\n\n # If you want to include the formatting of the original page, change the following\n # line to ..... .//blockquote/node()\").extract()\n # As it stands, we don't necessarily need the <br /> tags and such.\n content = \"\".join(div_tmark.xpath(\".//blockquote//text()\").extract())\n story_seg.contents = content\n\n story_item.authors.add(author_seg)\n\n print(\"Title: {0} Author: {1}\".format(story_seg.title, author))\n print(\"date_time: {0}\".format(date_obj))\n print(\"content length: {0}\".format(len(content)))\n\n story_seg.save()\n story_item.save()\n\n div_next_tmark = div_tmarks[-1].xpath(\".//span[@class='next']\")\n\n # navigate to the next threadmark.\n if div_next_tmark is not None:\n next_mark = div_next_tmark.xpath(\"a/@href\").extract_first() \n print(\"Next url: {0}\".format(next_mark))\n next_mark_url = response.urljoin(next_mark)\n yield scrapy.Request(\n next_mark_url,\n callback=self.scan_thread,\n priority=2,\n meta={\"story_item\": story_item}\n )"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Uses information in `line` to request and return the RSS feed | def request_rss(self, url):
return feedparser.parse(url) | [
"def add_rss(url):",
"def get_news(url):\r\n \r\n # parse RSS feed into list of dictionaries\r\n feed = feedparser.parse(url)\r\n\r\n # no RSS feed articles for url\r\n if len(feed['entries']) == 0:\r\n return []\r\n \r\n # get first ten articles from the RSS feed\r\n news = []\r\n i = 0\r\n while True:\r\n if i == len(feed['entries']) or i > 30:\r\n break\r\n \r\n try:\r\n # get link to article\r\n link = feed[\"entries\"][i][\"link\"]\r\n\r\n # get title of article\r\n title = feed[\"entries\"][i][\"title\"]\r\n \r\n try:\r\n # get raw summary of article\r\n summary_raw = feed[\"entries\"][i][\"summary\"]\r\n \r\n # format summary\r\n summary = \"\"\r\n for c in summary_raw:\r\n if c == \"<\":\r\n summary += \"...\"\r\n break\r\n summary += c\r\n except KeyError as e:\r\n logging.error(\"no summary for RSS feed article: {}\".format(link))\r\n summary = \"read more here...\"\r\n \r\n # get raw date \r\n date_raw = feed[\"entries\"][i][\"published_parsed\"]\r\n \r\n if date_raw is None:\r\n date = feed[\"entries\"][i][\"published\"]\r\n \r\n else:\r\n # format date\r\n year = str(date_raw.tm_year)\r\n months = [\"January\", \"February\", \"March\", \"April\", \"May\", \"June\", \"July\", \"August\", \"September\", \"October\", \"November\", \"December\"]\r\n month = months[date_raw.tm_mon - 1]\r\n day = str(date_raw.tm_mday)\r\n weekdays = [\"Monday\", \"Tuesday\", \"Wednesday\", \"Thursday\", \"Friday\", \"Saturday\", \"Sunday\"]\r\n wday = weekdays[date_raw.tm_wday]\r\n hour = str(date_raw.tm_hour)\r\n hour = \"{:2}\".format(hour).format(' ','0')\r\n min = str(date_raw.tm_min)\r\n min = \"{:2}\".format(min).replace(' ','0')\r\n date = hour + \":\" + min + \" - \" + wday + \" \" + month + \" \" + day + \", \" + year\r\n \r\n # compile entry and append to news list\r\n entry = {\"link\":link, \"title\":title, \"date\":date, \"summary\":summary}\r\n \r\n # sanitize entry\r\n for key in entry:\r\n # apostrophe\r\n entry[key] = entry[key].replace(\"'\", \"'\")\r\n # right single quotation mark\r\n entry[key] = entry[key].replace(\"’\", \"’\")\r\n # left single quotation mark\r\n entry[key] = entry[key].replace('\"', \"‘\")\r\n # right double quotation mark\r\n entry[key] = entry[key].replace(\"'\", \"”\")\r\n # left double quotation mark\r\n entry[key] = entry[key].replace(\"'\", \"“\")\r\n # Weird ampersand formatting\r\n entry[key] = entry[key].replace(\"&\", \"&\")\r\n \r\n # prepare entry for sqlite queries\r\n entry[key] = surround(entry[key])\r\n \r\n # add entry to news list\r\n news.append(entry)\r\n \r\n # max 10 entries\r\n if len(news) == 10:\r\n break\r\n i += 1\r\n \r\n except Exception as e:\r\n logging.error(e)\r\n i += 1\r\n pass\r\n \r\n # success\r\n return news",
"def scrape_rss(self):\n return self.scrape(self.RSS_ENTRY_TMPL, \n self.RSS_FEED_TMPL, self.RSS_DATE_FMT)",
"def process_entry(cls, entry, feed):\n entry_id = None\n content = None\n published = None\n updated = None\n link = entry.get('link', '')\n if hasattr(entry, 'feedburner_origlink'):\n link = entry.get('feedburner_origlink')\n #StringProperty doesn't allow linebreaks, so strip them out\n title = ' '.join(entry.get('title', '').splitlines())\n author = ' '.join(entry.get('author', '').splitlines())\n\n #if hasattr(entry, 'content'):\n # # This is Atom.\n # entry_id = entry.id\n # content = entry.content[0].value\n #else:\n # Per RSS spec, at least one of title or description must be present.\n content = (entry.get('description', '') or title)\n entry_id = (entry.get('id', '') or link or title)\n\n if hasattr(entry, 'published'):\n try:\n published = datetime.datetime(*entry.published_parsed[:6])\n except:\n logging.warn(\"Unable to parse published time\")\n if hasattr(entry, 'updated'):\n try:\n updated = datetime.datetime(*entry.updated_parsed[:6])\n except:\n logging.warn(\"Unable to parse updated time\")\n \n feeditem = None \n try:\n entry_key_name = 'z' + hashlib.sha1(link + '\\n' + entry_id + '\\n' + feed.stream_id).hexdigest()\n feeditem = cls(key_name=entry_key_name,\n stream=feed,\n id=entry_id,\n title=title,\n url=link,\n summary=content,\n author=author,\n published=published,\n updated=updated)\n except:\n logging.warn(\"Failed to process: %s %s\" % (link, entry_id))\n logging.error(sys.exc_info()[0])\n\n return feeditem",
"def get_rss(limit):\n rss_data = feedparser.parse(URL)\n if limit == 1:\n title = rss_data.entries[0].title\n link = rss_data.entries[0].link\n rss_print(title, link)\n else:\n for i in range(0, limit):\n title = rss_data.entries[i].title\n link = rss_data.entries[i].link\n\n print(Back.CYAN + str(i + 1) + \"\\t\")\n rss_print(title, link)",
"def process(url):\n response = requests.get(url)\n root = ET.fromstring(response.content)\n ret = []\n for entry in root.findall('channel/item'):\n dict = build_item_dict(entry)\n guid = dict['guid']\n title = translate_html(dict['title'])\n link = dict['link']\n description = translate_html(dict['description'])\n pubdate = translate_html(dict['pubDate'])\n\n try:\n pubdate = datetime.strptime(pubdate, \"%Y-%m-%dT%H:%M:%S%z\")\n except ValueError:\n pubdate = datetime.strptime(pubdate, \"%a, %d %b %Y %H:%M:%S %Z\")\n\n newsStory = NewsStory(guid, title, description, link, pubdate)\n ret.append(newsStory)\n return ret",
"def rssParse(url):\n req = urllib.request.Request(url)\n contex = ignoreCertificate()\n req.add_header(\"User-Agent\", \"Luchiana 3.0\")\n darticles = {}\n site = \"\"\n if re.search(\"feedburner\", url):\n site = url.split(\"/\")[-1]\n elif re.search(\"www\", url):\n site = url.split(\".\")[1]\n else:\n site = url.split(\".\")[0]\n site = site.split(\"//\")[1]\n try:\n resp = urllib.request.urlopen(req, context=contex)\n data = resp.read().decode('utf-8')\n root = ET.fromstring(data)\n if re.search(\"atom\", url):\n for item in root:\n if item.tag == \"{http://www.w3.org/2005/Atom}entry\":\n #title=description;date;link\n darticles[item.find(\"{http://www.w3.org/2005/Atom}title\").text+\\\n \"(\"+site+\")\"] = \\\n item.find(\"{http://www.w3.org/2005/Atom}content\").text+\";\"+\\\n item.find(\"{http://www.w3.org/2005/Atom}published\").text+\";\"+\\\n item.find(\"{http://www.w3.org/2005/Atom}link\").attrib['href']\n else:\n for item in root[0]:\n if item.tag == \"item\":\n #title=description;date;link\n darticles[item.find('title').text] = site+\";\"+\\\n item.find('description').text+\";\"+item.find('pubDate').text+\\\n \";\"+item.find('link').text\n except:\n darticles[\"Erreur de récupération (\"+site+\")\"] = \"Erreur de\"\\\n \" récupération (\"+site+\")\"\n return darticles",
"def zhihu_rss_fetcher(ctx):\n URL = 'http://www.zhihu.com/rss'\n coll = ctx.get_mongo_collection()\n\n for entry in fetch_rss(URL).entries:\n try:\n coll.insert({'_id': entry.link})\n except DuplicateKeyError:\n continue\n ctx.new_item(TextOnlyItem(entry.title, entry.description), ['zhihu'],\n parse_entry_time(entry),\n {'id': entry.link})\n log_info(u'zhihu: new entry: {} {}'.format(entry.link,\n entry.title))",
"def rss_feed(rss_url):\n try:\n # Use feedparser to analyze given RSS feed, if it is valid RSS.\n d = feedparser.parse(rss_url)\n except:\n return \"Sorry, invalid RSS feed. Please check and try again later.\"\n \n total = len(d['entries'])\n updates = dict()\n for index, item in enumerate(d['entries']):\n # Convert publish time from ctime format to iso-time format.\n a_time = time_convert(item.published)\n # Set article url ad dictionary key, with publish date as value. \n updates[str(item.link)] = a_time \n return (total, updates)",
"def info(self, irc, msg, args, url):\n try:\n url = self.registryValue('feeds.%s' % url)\n except registry.NonExistentRegistryEntry:\n pass\n feed = self.getFeed(url)\n conv = self._getConverter(feed)\n info = feed.get('feed')\n if not info:\n irc.error('I couldn\\'t retrieve that RSS feed.')\n return\n # check the 'modified_parsed' key, if it's there, convert it here first\n if 'modified' in info:\n seconds = time.mktime(info['modified_parsed'])\n now = time.mktime(time.gmtime())\n when = utils.timeElapsed(now - seconds) + ' ago'\n else:\n when = 'time unavailable'\n title = conv(info.get('title', 'unavailable'))\n desc = conv(info.get('description', 'unavailable'))\n link = conv(info.get('link', 'unavailable'))\n # The rest of the entries are all available in the channel key\n response = format('Title: %s; URL: %u; '\n 'Description: %s; Last updated: %s.',\n title, link, desc, when)\n irc.reply(utils.str.normalizeWhitespace(response))",
"def build_from_single_rss(self, rss_url):\n debug.header(\"Building cache from single RSS feed...\")\n\n runtime_start = dt.datetime.now()\n added_items_count = 0\n items_skipped_count = 0\n\n d = feedparser.parse(rss_url)\n\n for entry in d.entries:\n # Get title\n title = entry.title\n source = entry.author\n\n # Get link\n url = feeds.lex_url(entry.link)\n\n if feeds.is_forbidden(url) or hash(url) in self.url_hashes or hash(\n title) in self.title_hashes:\n debug.skip_item(title, source, url)\n items_skipped_count += 1\n continue\n\n # Get publication date\n pub_date = dt.datetime(*entry.published_parsed[:6])\n\n # Get content\n headers = {\n \"User-Agent\": \"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, \"\n \"like Gecko) Chrome/51.0.2704.103 Safari/537.36\"\n }\n try:\n r = requests.get(url, headers=headers, )\n except:\n debug.skip_item(title, source, url + \" (unable to connect)\")\n items_skipped_count += 1\n continue\n content = feeds.extract_content(r.content)\n content = content.encode(\"ascii\", \"ignore\")\n content = content.decode()\n\n if hash(content) in self.content_hashes:\n debug.skip_item(title, source, url)\n items_skipped_count += 1\n continue\n\n # Get keywords\n # keywords = preprocess(content)\n\n # Add entry to cache.\n item_dict = {\"title\": title,\n \"source\": source,\n \"link\": url,\n \"pub_date\": pub_date,\n \"content\": content\n }\n self.add_item(item_id=self.n_items, item_dict=item_dict)\n added_items_count += 1\n run_time = (dt.datetime.now() - runtime_start).seconds\n debug.footer(f\"Build complete in {run_time} seconds!\",\n f\"{added_items_count} items added — {items_skipped_count} items skipped\")",
"def from_rss(\n cls, entry: feedparser.FeedParserDict, feed: feedparser.FeedParserDict\n ) -> \"FeedEntry\":\n try:\n time = datetime.datetime(*entry.published_parsed[:6]).isoformat()\n except (AttributeError, TypeError):\n time = datetime.datetime.now().isoformat()\n return cls(\n title=entry.get(\"title\"),\n summary=entry.get(\"summary\"),\n link=sanitise_url(entry.get(\"link\")),\n time=time,\n feed=feed.feed.get(\"title\"),\n source=feed.get(\"href\"),\n )",
"def __init__(self, url=URL):\n self.entries = feedparser.parse(url).entries",
"def process_line(self, line):\n find_result = re.findall(LINE_REGEX, line)\n line_data = {r[0]: r[1] for r in find_result}\n self.process_url(line_data.get('request_to'))\n self.process_status_code(line_data.get('response_status'))",
"def process(url):\n feed = feedparser.parse(url)\n entries = feed.entries\n ret = []\n for entry in entries:\n guid = entry.guid\n title = translate_html(entry.title)\n link = entry.link\n summary = translate_html(entry.summary)\n try:\n subject = translate_html(entry.tags[0]['term'])\n except AttributeError:\n subject = \"\"\n newsStory = NewsStory(guid, title, subject, summary, link)\n ret.append(newsStory)\n return ret",
"def parse_shaarli_rss_export(rss_file):\n\n rss_file.seek(0)\n entries = rss_file.read().split('<entry>')[1:]\n for entry in entries:\n # example entry:\n # <entry>\n # <title>Aktuelle Trojaner-Welle: Emotet lauert in gefälschten Rechnungsmails | heise online</title>\n # <link href=\"https://www.heise.de/security/meldung/Aktuelle-Trojaner-Welle-Emotet-lauert-in-gefaelschten-Rechnungsmails-4291268.html\" />\n # <id>https://demo.shaarli.org/?cEV4vw</id>\n # <published>2019-01-30T06:06:01+00:00</published>\n # <updated>2019-01-30T06:06:01+00:00</updated>\n # <content type=\"html\" xml:lang=\"en\"><![CDATA[<div class=\"markdown\"><p>— <a href=\"https://demo.shaarli.org/?cEV4vw\">Permalink</a></p></div>]]></content>\n # </entry>\n\n trailing_removed = entry.split('</entry>', 1)[0]\n leading_removed = trailing_removed.strip()\n rows = leading_removed.split('\\n')\n\n def get_row(key):\n return [r.strip() for r in rows if r.strip().startswith('<{}'.format(key))][0]\n\n title = str_between(get_row('title'), '<title>', '</title>').strip()\n url = str_between(get_row('link'), '<link href=\"', '\" />')\n ts_str = str_between(get_row('published'), '<published>', '</published>')\n time = datetime.strptime(ts_str, \"%Y-%m-%dT%H:%M:%S%z\")\n\n yield {\n 'url': url,\n 'timestamp': str(time.timestamp()),\n 'title': title or None,\n 'tags': '',\n 'sources': [rss_file.name],\n }",
"def article_extractor(rss_feed_link):\n user_agent = {\"user-agent\": \"Mozilla/5.0 (Windows NT 6.2; Win64;\\\n x64; rv:16.0.1) Gecko/20121011 Firefox/16.0.1\"}\n try:\n feed = requests.get(rss_feed_link, headers=user_agent)\n except requests.exceptions.ConnectionError:\n print(\"No internet connection\")\n exit()\n\n dirty_content = BeautifulSoup(feed.text, \"xml\")\n return dirty_content",
"def workAFeed(feed):\n print(\"::working \",feed)\n\n # add http\n if feed.find(\"http\") == -1:\n feed = \"http://\" + feed\n print (\"::feed=\",feed)\n\n return feed",
"def __init__(self, url):\n\n self.url = url\n self.feed, self.keys, self.entries = self.parse_rss_feed()"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
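The positive document in this record is a thin wrapper around feedparser, which fetches and parses a feed in one call. A minimal usage sketch, assuming the feedparser package is installed; the feed URL is a made-up example, not taken from the record:

import feedparser

def request_rss(url):
    # feedparser downloads and parses the RSS/Atom feed in a single call
    return feedparser.parse(url)

feed = request_rss("https://hnrss.org/frontpage")  # example URL only
for entry in feed.entries[:3]:
    print(entry.title, entry.link)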
Variable assignment can include assigning array elements. | def assign_variable(executor, variable, value):
variable = variable.replace(" ", "")
# TODO Should move parsing of this to ParsedStatementLet.
# TODO Need to handle N-dimensional array element assignment.
i = variable.find("(")
if i != -1:
# Array reference
j = variable.find(")", i+1)
if j == -1:
raise BasicSyntaxError(F"Missing ) in in array assignment to {variable}")
if i+1 == j:
raise BasicSyntaxError(F"Missing array subscript in assignment to {variable}")
subscripts = variable[i+1:j].split(",")
variable = variable[:i]
is_valid_identifier(variable)
subscripts = [int(eval_expression(executor._symbols, subscript)) - 1 for subscript in subscripts]
executor.put_symbol_element(variable, value, subscripts)
else:
is_valid_identifier(variable)
executor.put_symbol(variable, value, symbol_type=SymbolType.VARIABLE, arg=None) | [
"def test_assign_vars(self):\n\n\t\tself.latt_a.assign (2, 3, [1,1], \"abcd\")\n\t\tself.assertTrue(self.latt_a.meta[\"name\"] == \"abcd\")\n\t\tself.assertTrue(np.all(self.latt_a.meta[\"stoich\"] == 1))\n\t\tself.assertTrue(self.latt_a.meta[\"nspec\"] == 2)\n\t\tself.assertTrue(self.latt_a.meta[\"dim\"] == 3)",
"def assign(self, target: AstNode, value):\n if isinstance(target, IdentifierNode):\n self.evaluator.set_variable(target.value, value)\n elif isinstance(target, ArrayAccessNode):\n array = self.evaluator.eval_node(target.array)\n\n if not type(array) == list:\n self.log.error(translate(\"Algo\", \"Array access target must be of array type\"))\n self.finish()\n return\n\n index = self.evaluator.eval_node(target.index)\n\n while index >= len(array):\n array.append(0)\n\n if index < len(array):\n array[index] = value\n else:\n self.log.error(translate(\"Algo\", \"Index '{idx}' too big for array\").format(idx=index))\n return None\n else:\n self.log.error(translate(\"Algo\", \"Assignment target must be either variable or array item\"))\n self.finish()\n return",
"def test_46_assign_statement(self):\n\t\tinput = \"\"\"var x,y:integer;\n\t\tprocedure main(); var x:array[1 .. 3]of real; begin x[1]:=1;\n\t\twith y:integer;y:real; do begin end end\"\"\"\n\t\texpect = \"Redeclared Variable: y\"\n\t\tself.assertTrue(TestChecker.test(input,expect,446))",
"def test_49_assign_statement(self):\n\t\tinput = \"\"\"var x,y:integer;\n\t\tfunction f(): array[1 .. 2] of real;\n\t\tvar a: array[1 .. 3] of real;\n\t\tbegin a[2]:=1.1; return a; end\n\t\tprocedure main(); var x:array[1 .. 3]of real;\n\t\tbegin f()[1]:=x[1]:=1; end\"\"\"\n\t\texpect = \"Type Mismatch In Statement: Return(Some(Id(a)))\"\n\t\tself.assertTrue(TestChecker.test(input,expect,449))",
"def visit_Assign(self, node):\n if type(node.value).__name__ == \"Num\":\n self.var_type = 'scalar'\n else:\n x = TypeDeducer(self.type_deducer_state)\n x.visit(node.value)\n if x.type_deducer_state.new_variable_ref:\n raise Exception(\"Attempting to use undeclared variable in\"\n \" assignment: Line number: {} Column Offset: {}\".format(\n node.lineno, node.col_offset))\n self.var_type = x.var_type\n self.dims = x.dims\n self.type_deducer_state.add_to_target_list(node.targets[0], self.var_type,\n self.dims)\n node = ast.AnnAssign(lineno=node.lineno, col_offset=node.col_offset,\n target=node.targets, annotation=self.var_type,\n value=node.value, simple=1)\n self.type_deducer_state.assign_list.append(node)",
"def visit_VarAssignNode(self, node: VarAssignNode, symbol_table: SymbolTable) -> None:\n if isinstance(node.name, AccessNode) and isinstance(node.name.item_to_access, NumberNode):\n var = self.visit(node.name.accessor, symbol_table)\n var.vals[int(node.name.item_to_access.tok.value)] = self.visit(node.value, symbol_table)\n if isinstance(var, List):\n var.value = [item[idx].value for idx, item in enumerate(var.vals.values())]\n else:\n return f'Strings are immutable'\n else:\n assignment = self.visit(node.value, symbol_table)\n\n symbol_table[node.name] = assignment",
"def assign(array1, array2):\n for i in range(len(array1)):\n array2[i] = array1[i]",
"def test_varAugmentedAssignment(self):\r\n self.flakes('''\r\n foo = 0\r\n foo += 1\r\n ''')",
"def checkVarArray(self, script, node):\n\n if isinstance(node.value, ast.Call):\n if isinstance(node.value.func, ast.Name):\n if node.value.func.id == 'Var':\n if len(node.value.args) > 0:\n for target in node.targets:\n if isinstance(target, ast.Attribute):\n if isinstance(target.value, ast.Name):\n if target.value.id in script.modelVars:\n if target.value.id not in self.varArrays:\n self.varArrays[target.value.id] = []\n self.varArrays[target.value.id].append(target.attr)",
"def assign_variable(self, name, value):\n return self.set_variable(name, value)",
"def assign(self, *args):\n return _ida_hexrays.cloop_t_assign(self, *args)",
"def assign(ary, out):\n\n from . import _bh\n\n if not np.isscalar(ary):\n (ary, out) = broadcast_arrays(ary, out)[0]\n # We ignore self assignments\n if _bh.same_view(ary, out):\n return\n\n # Assigning empty arrays doesn't do anything\n if hasattr(ary, \"size\"):\n if ary.size == 0:\n return\n if hasattr(out, \"size\"):\n if out.size == 0:\n return\n\n # We use a tmp array if the in-/out-put has memory conflicts\n if overlap_conflict(out, ary):\n tmp = array_create.empty_like(out)\n assign(ary, tmp)\n return assign(tmp, out)\n\n if bhary.check(out):\n _bh.ufunc(UFUNCS[\"identity\"].info['id'], (out, ary))\n else:\n if bhary.check(ary):\n if \"BH_SYNC_WARN\" in os.environ:\n import warnings\n warnings.warn(\"BH_SYNC_WARN: Copying the array to NumPy\", RuntimeWarning, stacklevel=2)\n ary = ary.copy2numpy()\n out[...] = ary",
"def visit_AugAssign(self, node):\n target = node.target\n\n rhs_target = copy.deepcopy(target)\n rhs_target.ctx = ast.Load()\n ast.fix_missing_locations(rhs_target)\n\n bin_op = ast.BinOp(rhs_target, node.op, node.value)\n assignment = ast.Assign([target], bin_op)\n assignment.inplace_op = node.op\n return self.visit(assignment)",
"def set(self, variable_name, value):\r\n if isinstance(variable_name, basestring):\r\n self._set(variable_name, value) #Scalar case\r\n else:\r\n for i in xrange(len(variable_name)): #A list of variables\r\n self._set(variable_name[i], value[i])",
"def setUniformValueArray(self, *__args): # real signature unknown; restored from __doc__ with multiple overloads\n pass",
"def multiple_value_call_assignment_handler(target, value, assign_stmts, node, id_str):\n #print(\"multiple_value_call_assignment_handler\")\n target_stmts, value_var = stypy_functions.create_temp_Assign(value, node.lineno, node.col_offset,\n \"{0}_assignment\".format(id_str))\n assign_stmts.append(target_stmts)\n\n #value_var_to_load = copy.deepcopy(value_var)\n value_var_to_load = ast.Name()\n value_var_to_load.col_offset = value_var.col_offset\n value_var_to_load.lineno = value_var.lineno\n value_var_to_load.id = value_var.id\n value_var_to_load.ctx = ast.Load()\n\n for i in xrange(len(target.elts)):\n # Assign values to each element.\n # getitem_att = core_language.create_attribute(value_var_to_load, '__getitem__', context=ast.Load(),\n # line=node.lineno,\n # column=node.col_offset)\n # item_call = functions.create_call(getitem_att, [core_language.create_num(i, node.lineno, node.col_offset)])\n # temp_stmts, temp_value = stypy_functions.create_temp_Assign(item_call, node.lineno, node.col_offset,\n # \"{0}_assignment\".format(id_str))\n stypy_interface = core_language.create_Name('stypy_interface')\n get_tuple_call = core_language.create_attribute(stypy_interface, 'stypy_get_value_from_tuple', context=ast.Load(),\n line=node.lineno,\n column=node.col_offset)\n\n item_call = functions.create_call(get_tuple_call, [value_var_to_load,\n core_language.create_num(len(target.elts), node.lineno, node.col_offset),\n core_language.create_num(i, node.lineno, node.col_offset)])\n temp_stmts, temp_value = stypy_functions.create_temp_Assign(item_call, node.lineno, node.col_offset,\n \"{0}_assignment\".format(id_str))\n if hasattr(node, 'lineno'):\n temp_stmts.lineno = node.lineno\n temp_stmts.col_offset = node.col_offset\n\n assign_stmts.append(temp_stmts)\n\n temp_stmts = core_language.create_Assign(target.elts[i], temp_value)\n if hasattr(node, 'lineno'):\n temp_stmts.lineno = node.lineno\n temp_stmts.col_offset = node.col_offset\n\n assign_stmts.append(temp_stmts)\n\n return True",
"def visit_simple_assign(self, node):\n temp = gensym()\n temp_target = to_name(temp, ast.Store())\n stmts = [ ast.Assign([temp_target], node.value) ]\n stmts += [ ast.Assign([target], to_name(temp))\n for target in node.targets ]\n return stmts",
"def visit_compound_assign(self, node):\n # Determine number of values (arity) of compound assignment.\n nvalues = { len(target.elts) for target in node.targets \n if is_sequence_node(target) }\n if len(nvalues) > 1:\n # A multiple, compound assignment with different arities, e.g.,\n # `x,y = a,b,c = ...` is not a syntax error in Python, though it\n # probably should be because it's guaranteed to cause a runtime\n # error. Raise the error here, since we cannot proceed.\n raise SyntaxError(\"Multiple assignment with different arities\")\n nvalues = nvalues.pop()\n\n # Assign temporary variables.\n temps = [ gensym() for i in range(nvalues) ]\n stmts = []\n if is_sequence_node(node.value) and len(node.value.elts) == nvalues:\n # Special case: RHS is sequence literal of correct length.\n for i in range(nvalues):\n temp_target = to_name(temps[i], ast.Store())\n stmts.append(ast.Assign([temp_target], node.value.elts[i]))\n else:\n # General case.\n temp_target = to_tuple(\n (to_name(temp, ast.Store()) for temp in temps), ast.Store())\n stmts.append(ast.Assign([temp_target], node.value))\n\n # Rewrite assignments as sequence of assignments.\n for target in reversed(node.targets):\n if is_sequence_node(target):\n stmts.extend(ast.Assign([target.elts[i]], to_name(temps[i]))\n for i in range(nvalues))\n else:\n temp_tuple = to_tuple(to_name(temp) for temp in temps)\n stmts.append(ast.Assign([target], temp_tuple))\n \n return stmts",
"def test_assign_bad_stoich1(self):\n\n\t\ttry:\n\t\t\tself.latt_a.assign (1, 3, [1,1], \"abcd\")\n\t\texcept:\n\t\t\tself.failed = True\n\t\tself.assertTrue(self.failed)"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
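The assign_variable document splits a reference such as "A(2, 3)" into a variable name and zero-based subscripts before storing the value. A stripped-down sketch of just that parsing step; the identifier validation, error handling, and expression evaluation of the original are replaced by a plain int() here:

def split_array_ref(variable):
    # "A(2, 3)" -> ("A", [1, 2]); plain variables come back with no subscripts
    variable = variable.replace(" ", "")
    i = variable.find("(")
    if i == -1:
        return variable, None
    j = variable.find(")", i + 1)
    name = variable[:i]
    subscripts = [int(s) - 1 for s in variable[i + 1:j].split(",")]
    return name, subscripts

print(split_array_ref("A(2, 3)"))  # ('A', [1, 2])
print(split_array_ref("X"))        # ('X', None)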
An if statement works by skipping to the next line if the THEN clause is false; otherwise it continues to execute the clauses after the THEN. | def stmt_if(executor, stmt):
e = Expression()
result = e.eval(stmt._tokens, symbols=executor._symbols)
if not result:
executor.goto_next_line() | [
"def exec_else(self, stmt: ElseStmt):\n if self.if_status is None:\n self.log.error(translate(\"Algo\", \"ELSE can only be used after an IF block\"))\n self.finish()\n return\n\n if not self.if_status[1]:\n self.enter_block(stmt)\n\n self.if_status = None",
"def else_clause(self, ind):\n self._vmcode += \"goto IF_END\" + str(ind) + \\\n \"\\nlabel IF_FALSE\" + str(ind) + \"\\n\"",
"def test_conditional_instruction(self):\n LEXER.input('if x:')\n self.checks_tokens(['IF', 'ID', 'COL'])\n LEXER.input('else:')\n self.checks_tokens(['ELSE', 'COL'])",
"def new_style_if(list_of_lines):\n then = [token\n for token in name_tokens(list_of_lines.tokens_after)\n if token == 'then']\n return len(then) > 0",
"def end_ifeq(self):\n self.indent_left()\n self.write_line(\"endif\")",
"def test_30_if(self):\n\t\tinput = \"\"\"procedure main(); var x:integer; begin x:=foo(); end\n\t\tfunction foo():integer; var a:real; begin\n\t\twith a:integer;b:real; do begin if a>0 then a:=1; else return 0; end\n\t\tend\"\"\"\n\t\texpect = \"Function foo Not Return\"\n\t\tself.assertTrue(TestChecker.test(input,expect,430))",
"def conditional(self) -> global___Statement.Conditional:",
"def exec_if(self, stmt: IfStmt):\n self.enter_block(stmt)\n\n condition = bool(self.evaluator.eval_node(stmt.condition))\n self.if_status = (len(self.stack) - 1, condition)\n\n if not condition:\n self.exit_block()",
"def test_29_if(self):\n\t\tinput = \"\"\"procedure main(); var x:integer; begin x:=foo(); end\n\t\tfunction foo():integer; var a:real; begin\n\t\twith a:integer;b:real; do begin if a>0 then return 1; else b:=0; end\n\t\tend\"\"\"\n\t\texpect = \"Function foo Not Return\"\n\t\tself.assertTrue(TestChecker.test(input,expect,429))",
"def test_if_failed_condition_goes_to_otherwise():\n test = [\n 'not_target_value',\n [\n {\n 'condition': 'is',\n 'target': 'target_value',\n 'then': 'no',\n 'otherwise': 'yes',\n },\n ],\n ]\n assert apply_if_statements(*test) == Success('yes')",
"def _screen_if_then(variable, value, then):\n return '''\n if [ -z ${variable} ];\n then if [ \"\\${variable}\" -eq \"{value}\" ] ;\n then {then};\n fi;\n else if [ \"${variable}\" -eq \"{value}\" ] ;\n then {then};\n fi;\n fi;\n '''.format(\n variable=variable,\n value=value,\n then=then,\n )",
"def test_if_else_str(dummy_code_block):\n else_if = make_dummy_if_else(call_if=False)\n assert (\n str(else_if)\n == dedent(\n \"\"\"\n if return_false():\n return_true()\n return_true()\n return_true()\n else:\n return_true()\n return_true()\n return_true()\n \"\"\"\n ).strip()\n )",
"def with_if_statement():\n if cond():\n return true_func()\n else:\n return false_func()",
"def test_about_else_clause(self):\n result=None\n try:\n pass\n except RuntimeError:\n result='exception clause'\n pass\n else:\n result='else clause'\n\n assert \"else clause\" == result",
"def identify_ifelse_block():\n pass",
"def else_if_statement(outfile: TextIO, condition: str, indent: int=0):\n write_indent(outfile, indent)\n outfile.write(\"}\\n\")\n write_indent(outfile, indent)\n outfile.write(\"else \")\n if_statement(outfile, condition, indent)",
"def eliminate_ifones(body):\n def isifone(tree):\n if type(tree) is If:\n if type(tree.test) is Num: # TODO: Python 3.8+: ast.Constant, no ast.Num\n if tree.test.n == 1:\n return \"then\"\n elif tree.test.n == 0:\n return \"else\"\n elif type(tree.test) is NameConstant: # TODO: Python 3.8+: ast.Constant, no ast.NameConstant\n if tree.test.value is True:\n return \"then\"\n elif tree.test.value in (False, None):\n return \"else\"\n return False\n\n def optimize(tree): # stmt -> list of stmts\n t = isifone(tree)\n if t:\n branch = tree.body if t == \"then\" else tree.orelse\n return branch\n return [tree]\n\n return transform_statements(optimize, body)",
"def check_while(self, ind):\n self._vmcode += \"not\\nif-goto WHILE_END\" + str(ind) + \"\\n\"",
"def test_nested_if_str(dummy_code_block, another_dummy_code_block):\n else_ = Else(\n code_block=CodeBlock(\n instructions=[If(method=return_true, code_block=another_dummy_code_block,)]\n )\n )\n elif_ = IfElifElse(method=return_false, code_block=dummy_code_block, else_=else_,)\n assert (\n str(elif_)\n == dedent(\n \"\"\"\n if return_false():\n return_true()\n return_true()\n return_true()\n else:\n if return_true():\n return_true()\n return_true()\n return_true()\n \"\"\"\n ).strip()\n )"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
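The stmt_if document implements single-line BASIC IF semantics: a false condition abandons the rest of the current line, so the THEN clauses never run. A self-contained sketch of that control flow; ToyExecutor is a hypothetical stand-in for the real executor and only records the skip:

class ToyExecutor:
    def __init__(self):
        self.skipped = False

    def goto_next_line(self):
        # the real executor moves program control to the next line here
        self.skipped = True

def stmt_if(executor, condition_value):
    if not condition_value:
        executor.goto_next_line()

ex = ToyExecutor()
stmt_if(ex, 0)
print(ex.skipped)  # True -> the THEN clause would not execute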
Calculate tips over the past X amount of time and write JSON output | def aggregate_tips():
# The SQL query to perform
now = time.time()
print("Computing tip stats...", end="", flush=True)
labels = ["30_days", "7_days", "24_hours", "1_hour"]
windows = [30*86400.0, 7*86400.0, 1*86400.0, 3600.0]
result = {}
result["unix_time"] = now
result["human_time_utc"] = str(datetime.datetime.utcfromtimestamp(int(now))) + " UTC"
# Agrees with old method, but should it be SUM(amount)?
query = "SELECT support_id, amount, time, claim_name, claim_id, is_nsfw, SUM(to_claim_address) tot FROM (SELECT support.id as support_id, support.support_amount amount,\
transaction.transaction_time time, claim.is_nsfw is_nsfw,\
claim.claim_id claim_id, claim.name claim_name,\
(CASE WHEN (output.address_list LIKE CONCAT('%25', claim_address, '%25')) THEN '1' ELSE '0' END) to_claim_address\
FROM claim\
INNER JOIN support ON support.supported_claim_id = claim.claim_id\
INNER JOIN transaction ON support.transaction_hash_id = transaction.hash\
INNER JOIN output ON transaction.hash = output.transaction_hash \
WHERE transaction.transaction_time > ({now} - {window})\
AND transaction.transaction_time <= {now}) AS result\
GROUP BY support_id, amount;".format(now=now, window=windows[0])
request = requests.get("https://chainquery.lbry.com/api/sql?query=" + query)
the_dict = request.json()
# Get tips into numpy array
times = []
tips = []
is_tip = []
links = []
is_nsfw = []
for row in the_dict["data"]:
times.append(float(row["time"]))
tips.append(float(row["amount"]))
links.append("https://open.lbry.com/" + str(row["claim_name"]) + ":"\
+ str(row["claim_id"]))
is_nsfw.append(row["is_nsfw"])
if row["tot"] > 0:
is_tip.append(True)
else:
is_tip.append(False)
times = np.array(times)
tips = np.array(tips)
is_tip = np.array(is_tip)
links = np.array(links)
is_nsfw = np.array(is_nsfw)
# Write tips
for i in range(len(labels)):
keep = (times > (now - windows[i])) & is_tip
_times = times[keep]
_tips = tips[keep]
_links = links[keep]
_is_nsfw = is_nsfw[keep]
result["num_tips_{label}".format(label=labels[i])] = len(_tips)
result["lbc_tipped_{label}".format(label=labels[i])] = float(_tips.sum())
maxtip = 0
maxtip_link = None
maxtip_is_nsfw = None
if len(_tips) > 0:
maxtip = float(_tips.max())
index = np.argmax(_tips)
maxtip_link = _links[index]
maxtip_is_nsfw = _is_nsfw[index]
result["biggest_tip_{label}".format(label=labels[i])] = maxtip
result["biggest_tip_{label}_link".format(label=labels[i])] = maxtip_link
result["biggest_tip_{label}_is_nsfw".format(label=labels[i])] = bool(maxtip_is_nsfw)
# Write supports
for i in range(len(labels)):
keep = (times > (now - windows[i])) & (~is_tip)
_times = times[keep]
_tips = tips[keep]
_links = links[keep]
_is_nsfw = is_nsfw[keep]
result["num_supports_{label}".format(label=labels[i])] = len(_tips)
result["lbc_supports_{label}".format(label=labels[i])] = float(_tips.sum())
maxtip = 0
maxtip_link = None
maxtip_is_nsfw = None
if len(_tips) > 0:
maxtip = float(_tips.max())
index = np.argmax(_tips)
maxtip_link = _links[index]
maxtip_is_nsfw = _is_nsfw[index]
result["biggest_support_{label}".format(label=labels[i])] = maxtip
result["biggest_support_{label}_link".format(label=labels[i])] = maxtip_link
result["biggest_support_{label}_is_nsfw".format(label=labels[i])] = bool(maxtip_is_nsfw)
f = open("tips_stats.json", "w")
f.write(json.dumps(result))
f.close()
print("done. ", flush=True, end="") | [
"def aggregate_tips():\n\n # The SQL query to perform\n now = time.time()\n print(\"Computing tip stats...\", end=\"\", flush=True)\n labels = [\"all_time\", \"30_days\", \"7_days\", \"24_hours\", \"1_hour\"]\n windows = [None, 30*86400.0, 7*86400.0, 1*86400.0, 3600.0]\n result = {}\n result[\"unix_time\"] = now\n result[\"human_time_utc\"] =\\\n str(datetime.datetime.utcfromtimestamp(int(now))) + \" UTC\"\n block = daemon_command(\"status\")[\"wallet\"][\"blocks\"]\n\n # Open the DB\n db_file = \"/home/brewer/local/lbry-sdk/lbry/lbryum_data/claims.db\"\n conn = sqlite3.connect(db_file)\n c = conn.cursor()\n conn.create_function(\"log\", 1, math.log)\n conn.create_function(\"exp\", 1, math.exp)\n\n for i in range(len(labels)):\n\n # Count and aggregate tips and supports for the time window\n query = \\\n \"\"\"\n SELECT\n COUNT(amount) num,\n exp(AVG(log(amount))) size,\n MAX(amount) max\n FROM\n support\n \"\"\"\n\n if i > 0:\n query += \\\n \"\"\" WHERE\n height >= {cutoff};\n \"\"\".format(cutoff=block - windows[i]/(2.5*60))\n\n for row in c.execute(query):\n biggest = row[2]\n result[\"num_{label}\".format(label=labels[i])] = row[0]\n result[\"typical_{label}\".format(label=labels[i])] = row[1]/1.0E8\n result[\"biggest_{label}\".format(label=labels[i])] = row[2]/1.0E8\n break\n\n # Get claim name and ID for max\n query = \\\n \"\"\"\n SELECT\n claim_name, claim_id\n FROM\n claim INNER JOIN support ON claim.claim_hash = support.claim_hash\n WHERE\n support.amount = {amount}\n \"\"\".format(amount=biggest)\n\n if i > 0:\n query += \\\n \"\"\" AND\n support.height >= {cutoff};\n \"\"\".format(cutoff=block - windows[i]/(2.5*60))\n\n for row in c.execute(query):\n claim_name, claim_id = row[0:2]\n\n result[\"tv_url_{label}\".format(label=labels[i])] = \"https://lbry.tv/\" \\\n + claim_name + \":\" + claim_id\n\n # Get claim name and ID for max\n query = \\\n \"\"\"\n SELECT\n COUNT(claim_id)\n FROM\n claim INNER JOIN tag ON claim.claim_hash = tag.claim_hash\n INNER JOIN support ON support.claim_hash = claim.claim_hash\n WHERE ((tag.tag = \"mature\" OR tag.tag = \"nsfw\" OR\n tag.tag = \"porn\" OR tag.tag = \"xxx\")\n AND support.amount = {amount})\n \"\"\".format(amount=biggest)\n\n if i > 0:\n query += \\\n \"\"\" AND\n support.height >= {cutoff};\n \"\"\".format(cutoff=block - windows[i]/(2.5*60))\n\n for row in c.execute(query):\n result[\"is_nsfw_{label}\".format(label=labels[i])] = row[0] != 0\n break\n\n f = open(\"supports_and_tips.json\", \"w\")\n f.write(json.dumps(result, indent=2))\n f.close()\n conn.close()\n print(\"done. \", flush=True, end=\"\")",
"def calculate_tip(meal_base, tip_rate):",
"def time_taken(json_cutlist, laser):\r\n\tcutlist = json.loads(json_cutlist)\r\n\ttime = 0\r\n\tcoordinate_array = [0, 0]\r\n\tfor a in cutlist:\r\n\t\tif a[0] == \"jump\" or a[0] == \"mark\":\r\n\t\t\tcoordinate_array = [float(a[1]) - coordinate_array[0], float(a[2]) - coordinate_array[1]]\r\n\t\t\tmag = math.sqrt(coordinate_array[0]**2 + coordinate_array[1]**2)\r\n\t\t\tif a[0] == \"jump\":\r\n\t\t\t\ttime += mag/laser[\"jump_speed\"]\r\n\t\t\telse:\r\n\t\t\t\ttime += mag/laser[\"mark_speed\"]\r\n\t\t\tcoordinate_array = [float(a[1]), float(a[2])]\r\n\t\telif a[0] == \"z_abs\" or a[0] == \"z_rel\":\r\n\t\t\tzSet = float(a[1])\r\n\t\telif a[0] == \"c_abs\" or a[0] == \"c_rel\":\r\n\t\t\tcSet = float(a[1])\r\n\t\telif a[0] == \"a_abs\" or a[0] == \"a_rel\":\r\n\t\t\taSet = float(a[1])\r\n\t\telse:\r\n\t\t\tpass\r\n\treturn str(datetime.timedelta(seconds=int(time)))",
"def scoreboard_json_ctftime(_):\n\n standings = []\n scores = calculations.scores(['team', 'team__user'], ['team__user__username'])\n\n for rank, (team, team_points) in enumerate(scores.items(), start=1):\n task_stats = defaultdict(lambda: {'points': 0.0})\n for point_type in ('offense', 'defense', 'sla'):\n for service, points in team_points[point_type][0].items():\n task_stats[service.name]['points'] += points\n\n for service_name in task_stats:\n task_stats[service_name] = {'points': round(task_stats[service_name]['points'], 4)}\n\n standings.append({\n 'pos': rank,\n 'team': team.user.username,\n 'score': round(team_points['total'], 4),\n 'taskStats': task_stats\n })\n\n return JsonResponse({'tasks': list(task_stats.keys()), 'standings': standings})",
"def data_outages():\n conn = connect_db()\n cursor = conn.cursor()\n start_date = request.args.get('startDate')\n end_date = request.args.get('endDate')\n start_date_sql = sqlite_date_parse(start_date)\n end_date_sql = sqlite_date_parse(end_date)\n icao = request.args.get('icao')\n cursor.execute(\"SELECT fname FROM hourly WHERE day BETWEEN '%s' AND '%s' AND icao = '%s'\" % (start_date_sql, end_date_sql, icao))\n hourly = cursor.fetchall()\n hourly_times = [str(time[0][6:18]) for time in hourly]\n hourly_outages = []\n for dt in generate_expected_dates(start_date, end_date, True):\n timestamp = dt.strftime('%Y%m%d%H%M')\n data_present_boolean = 0 if timestamp not in hourly_times else 1\n hourly_outages.append({'x': timestamp[:8] + 'T' + timestamp[8:12], 'y': data_present_boolean})\n cursor.execute(\"SELECT fname FROM sixmin WHERE day BETWEEN '%s' AND '%s' AND icao = '%s'\" % (start_date_sql, end_date_sql, icao))\n six_minute = cursor.fetchall()\n six_minute_times = [str(time[0][6:18]) for time in six_minute]\n six_minute_outages = []\n for dt in generate_expected_dates(start_date, end_date, False):\n timestamp = dt.strftime('%Y%m%d%H%M')\n data_present_boolean = 0 if timestamp not in six_minute_times else 1\n six_minute_outages.append({'x': timestamp[:8] + 'T' + timestamp[8:12], 'y': data_present_boolean})\n conn.close()\n return json.dumps({'hourly': hourly_outages, 'sixmin': six_minute_outages})",
"def timed_recipes():\n time = request.args.get('time', 0, type=int) #raw input from HTML page\n global time_global\n time_global = time #sets global time to inputted time, for use in search function\n return jsonify(cooktime=time_global) #returns a confirmation of the input tiime",
"def tip_summary(game, tips):\n summary = {}\n\n winners = []\n summary['margin'] = []\n for club in (game.afl_home, game.afl_away):\n winners.append((club, len([t for t in tips if t.winner == club])))\n\n margins = [t.margin\n for t in tips\n if t.winner == club and t.margin is not None]\n\n # There will be no margins if nobody has tipped club\n if margins:\n min_max = {\n 'max': max(margins),\n 'min': min(margins),\n 'avg': mean(margins)\n }\n else:\n min_max = {'max': 0, 'min': 0, 'avg': 0}\n\n summary['margin'].append((club, min_max))\n\n summary['winner'] = winners\n\n # Crowds\n crowds = [t.crowd for t in tips if t.crowd is not None]\n if crowds:\n min_max = {\n 'max': max(crowds), 'min': min(crowds), 'avg': mean(crowds)}\n else:\n min_max = {'max': 0, 'min': 0, 'avg': 0}\n summary['crowd'] = min_max\n\n # Supercoach\n player_count = []\n players = [s.player for t in tips for s in t.supercoach_tips.all()]\n\n for player in set(players):\n if player is not None:\n player_count.append((player, players.count(player)))\n player_count = sorted(player_count, key=lambda x: x[1], reverse=True)\n\n grouped_player_count = []\n for key, group in groupby(player_count, lambda x: x[1]):\n group = sorted(list(group), key=lambda x: x[0].last_name)\n grouped_player_count.append((key, [g[0] for g in group]))\n\n summary['supercoach'] = grouped_player_count\n\n return summary",
"def send_get_tips(self, timestamp: Optional[int] = None, include_hashes: bool = False, offset: int = 0) -> None:\n if timestamp is None:\n self.send_message(ProtocolMessages.GET_TIPS)\n else:\n payload = json_dumps(dict(\n timestamp=timestamp,\n include_hashes=include_hashes,\n offset=offset,\n ))\n self.send_message(ProtocolMessages.GET_TIPS, payload)",
"def handle_get_tips(self, payload: str) -> None:\n if not payload:\n self.send_tips()\n else:\n data = json_loads(payload)\n args = GetTipsPayload(**data)\n self.send_tips(args.timestamp, args.include_hashes, args.offset)",
"def moderator_points():\n moderators = {}\n collection = constants.DB.moderators\n\n community_managers = [\n moderator[\"account\"] for moderator in\n collection.find({\"supermoderator\": True})]\n\n utopian_fest = constants.UTOPIAN_FEST.col_values(1)\n\n for moderator in set(community_managers + utopian_fest):\n moderators.setdefault(moderator, 0)\n if moderator in community_managers:\n moderators[moderator] += 100.0\n\n # Check for BOSSPOEM or TECHSLUT\n if moderator == \"espoem\" or moderator == \"techslut\":\n moderators[moderator] = 400.0\n\n # Utopian Fest bonus\n if moderator in utopian_fest:\n moderators[moderator] += 50.0\n\n # Save dictionary as JSON with date of last Thursday\n with open(\n f\"/home/amos/utopian/utopian/static/{constants.THIS_WEEK}.json\",\n \"w\") as fp:\n json.dump(moderators, fp, indent=4)",
"def post_data():\n # Note: updating global variables endangers threadsafety, improve this \n # later (see below).\n global logindata, hours, usage, hourly_usage_stats \n\n # split the concatenated timestamp string into a list of strings:\n new_timestamps = (request.data).split(',')\n\n # bin correctly-formatted timestamps into correpsonding hour-long time windows:\n (hours_new, usage_new), skipped_timestamps = dataprep.bin_timestamp(new_timestamps, fmt, seconds_per_hour)\n\n # filter out misformatted timestamp strings & incorporate correct raw data:\n new_timestamps = [ts for i, ts in enumerate(new_timestamps) if i not in skipped_timestamps]\n logindata.extend(new_timestamps)\n\n # append to the global hours, usage counts:\n for h,u in zip(hours_new, usage_new):\n match_idx = find(hours, h)\n if match_idx:\n usage[match_idx[0]] += u\n else:\n hours.append(h)\n usage.append(u)\n\n # update any stats accordingly:\n # hours_et = dataprep.convert_timezone_eastern(hours)\n weekhour_agg = predict.weekday_hour_grouping(hours, usage)\n hourly_usage_stats = predict.average_all_hours(weekhour_agg, 'exp-downweight')\n\n # return results:\n result = '%i of %i timestamp(s) inserted. \\n(%i timestamp(s) skipped due to misformatting.)' \\\n % (len(new_timestamps)-len(skipped_timestamps), len(new_timestamps), len(skipped_timestamps)) \n\n return result",
"def getDailyPnl():\n\n pnlEuro=0.0 #PNL en Euro\n nbTrades=0 #Nombre de trades\n pnlPoints = 0.0 #PNL en points\n size=0.0\n\n daydate = time.strftime('%d-%m-%Y',time.localtime()) # recup de la date du jour\n #url = 'https://'+ urls.ig_host +'/gateway/deal/history/transactions/ALL/'+daydate+'/'+daydate #Formatage de l'url avec la date du jour\n r = requests.get(urls.transactionhistoryurl %(daydate, daydate), headers=urls.fullheaders, proxies=personal.proxies)\n s = json.loads(r.content).get(u'transactions')\n\n\n for gain in s:\n if gain.get(u'transactionType')=='ORDRE': #On ne calcule que si le type de la transaction est \"ordre\"\n\n # Calcul du PNL Journalier en Euro\n b = gain.get(u'profitAndLoss') # on recupere le pnl de la transaction\n b = b[1:] # on supprime le 'E'\n b = b.replace(',','') # on supprime la ',' ex 2,500.50 -> 2500.50\n pnlEuro += float(b) # on additionne toutes les transactions\n\n # Calcul du nombre de point\n openLevel = gain.get(u'openLevel') #recupere opellevel\n closeLevel = gain.get(u'closeLevel') #recupere closelevel\n\n directionLevel = gain.get(u'size') #recupere la taille pour avoir le sens (+ ou -)\n size = directionLevel[1:]\n directionLevel = directionLevel[:1] # split pour recuperer '+' ou '-'\n\n\n if directionLevel == '+' : #si + la difference est close - open\n diffLevel = float(closeLevel) - float(openLevel)\n diffLevel = diffLevel * float(size)\n diffLevel = round(diffLevel,1) # on arrondi pour ne pas avoir 0,2999999 point\n\n if directionLevel =='-': #si - la difference est open-close\n diffLevel = float(openLevel) - float(closeLevel)\n diffLevel = diffLevel * float(size)\n diffLevel = round(diffLevel,1) # on arrondi pour ne pas avoir 0,2999999 point\n\n\n #print str(diffLevel) +'('+directionLevel+''+size+')' + ' p: '+str(diffLevel)\n\n #on additionne les points (+ et-)\n pnlPoints += diffLevel\n\n # Incrementation du nombre de trades\n nbTrades+=1\n\n #renvoi des 3 variables (pnlEuro, pnlPoints, nbTrades)\n return pnlEuro,pnlPoints, nbTrades",
"def tip_calulator(total, people, tip):\n tip = tip / 100\n total = total / people\n tip_amount = total * tip\n new_total = total + tip_amount\n\n return tip_amount, new_total\n # pass",
"def range():\n\n # Time this functions.\n timer = coils.Timer()\n\n # Parse the URL parameter \"amount\".\n errors = list()\n try:\n amount = flask.request.args.get('amount')\n amount = float(amount)\n except:\n errors.append('Failed to parse \"amount\" parameter.')\n\n # Bail on any errors.\n if errors:\n return flask.jsonify(errors=errors)\n\n\n latest_tstring = db.session.query(mapping.Datum).\\\n filter(mapping.Datum.name=='latest_tstamp')[0].value\n latest_time = coils.string2time(latest_tstring)\n start_time = latest_time - dt.timedelta(seconds=amount)\n start_tstring = getNearestTime(start_time)\n \n return flask.jsonify(\n begin_time=start_tstring,\n end_time=latest_tstring,\n )",
"def do_metrics():\n do_delay_metric()\n do_rtt_metric()",
"def test_generate_speed_json(self):\n # Record how long it takes for the generation to take place\n start_time = datetime.now()\n self.generate_large_cassette_json()\n stop_time = datetime.now()\n\n # Verify the file generates in under 2 seconds\n two_seconds = timedelta(seconds=2)\n self.assertLess(stop_time - start_time, two_seconds)",
"def add_popular_times(places):\n \n print(\"Adding popular times and time spend to polyline_coor_POI list\")\n for i in range(len(places['results'])): \n pop_times_res = pop_times(places['results'][i]['place_id'])\n pop_times_fields = dict()\n d_items = list(['rating', 'rating_n', 'time_spent'])\n for j in d_items:\n pop_times_fields[j] = pop_times_res.get(j, -1)\n \n if pop_times_fields['time_spent'] != -1:\n pop_times_fields['time_spent'][0] = pop_times_fields['time_spent'][0] * 60\n pop_times_fields['time_spent'][1] = pop_times_fields['time_spent'][1] * 60\n \n places['results'][i].update(pop_times_fields)\n \n print(\"Popular times were added to the places\")\n \n return places",
"def pickup_rush_hours():\n query = ''' WITH t1 AS (\n SELECT STRFTIME('%H:00', tpep_pickup_datetime) AS pickup_time\n FROM trips)\n SELECT pickup_time, COUNT(pickup_time) AS pickup_counts\n FROM t1\n GROUP BY pickup_time\n ORDER BY pickup_time; \n '''\n result = execute_query(query) \n return jsonify([{'time': a[0], 'pickups': a[1]} for a in result])",
"def write_stacked_response_times(self):\r\n results_dirname = get_param(\"results_dir\")\r\n filename = os.path.join(results_dirname, \"%s_%s\" % (get_param(\"file_prefix\"),\r\n \"stacked_fairness\"))\r\n file = open(filename, \"w\")\r\n file.write(\"time\\trunning_tasks\\n\")\r\n previous_time = -1\r\n # Write in reverse order so that we automatically get the last event\r\n # for each time.\r\n for time, running_tasks in reversed(self.new_running_tasks):\r\n if time != previous_time:\r\n if previous_time != -1:\r\n file.write(\"%d\\t\" % time)\r\n for user in range(get_param(\"num_users\")):\r\n file.write(\"%d\\t\" % running_tasks[user])\r\n file.write(\"\\n\")\r\n previous_time = time"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
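The aggregate_tips document selects tips for each reporting window with boolean numpy masks over the transaction times before summing and taking maxima. The windowing step in isolation, using synthetic times and amounts (numpy assumed available):

import time
import numpy as np

now = time.time()
times = now - np.array([100.0, 5000.0, 90000.0])  # tip times, seconds in the past
tips = np.array([1.5, 20.0, 300.0])               # tip amounts in LBC

for label, window in [("24_hours", 86400.0), ("1_hour", 3600.0)]:
    keep = times > (now - window)
    print(label, int(keep.sum()), "tips totalling", float(tips[keep].sum()), "LBC")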
Publish files to somewhere on the internet. | def publish_files():
print("Publishing files to the internet...", end="", flush=True)
import subprocess
try:
subprocess.run("./upload.sh", timeout=120.0)
print("done.\n")
except:
print("failed.\n") | [
"def publish(self, filename):\n # 1) Encrypt file\n # 2) Publish to remote cloud server\n # 3) Wait for the result\n # 4) Store results in files located inside RAM folder",
"def publish():\n if sys.argv[-1] == 'publish':\n os.system('python setup.py sdist upload')\n sys.exit()",
"def publish():\n fab.local(\"env/bin/python setup.py sdist\")\n tar_filename = fab.local(\n \"env/bin/python setup.py --fullname\", capture=True\n )\n dist_filename = \"dist/{}.tar.gz\".format(tar_filename)\n fab.put(dist_filename, PYREPO_DIR)",
"def publish(c):\n c.run('pelican -s publishconf.py')\n c.run(\n 'rsync --delete --exclude \".DS_Store\" -pthrvz -c '\n '{} {production}:{dest_path}'.format(\n CONFIG['deploy_path'].rstrip('/') + '/',\n **CONFIG))",
"def publish(self, path):\n raise NotImplementedError",
"def publish(self, path):\n self.logger.info(\"Publishing %s\", path)\n try:\n self.set_workspace()\n workspace_path = getcwd()\n if workspace_path != commonpath([workspace_path, abspath(path)]):\n self.logger.error(\"Attempt to publish a non-local file %s\", path)\n raise ContextError(\n f\"Only local workspace files can be published! PATH={path}\"\n )\n if not isfile(path):\n self.logger.error(\"Attempt to publish a non-file path %s\", path)\n raise ContextError(f\"Only files can be published! PATH={path}\")\n # publish the file\n target_path = join(self._path_perm, relpath(path))\n targer_url = urljoin(self._url_base, relpath(path))\n if not isdir(self._path_perm):\n raise MissingContextError(\n f\"Permanent directory does not exist! PATH={self._path_perm}\"\n )\n if not exists(dirname(target_path)):\n makedirs(dirname(target_path))\n move(path, target_path)\n except Exception as error:\n self.logger.warning(\"Failed to publish %s! %s\", path, error)\n raise\n self.logger.debug(\"moved %s -> %s\", path, target_path)\n return target_path, targer_url",
"def publish():\n reset()\n compress()\n build()\n s3deploy()\n log_success()",
"def publish(session):\n print(\"REMINDER: Has the changelog been updated?\")\n session.run(\"rm\", \"-rf\", \"dist\", \"build\", external=True)\n publish_deps = [\"setuptools\", \"wheel\", \"twine\"]\n session.install(*publish_deps)\n session.run(\"make\", \"build_frontend\", external=True)\n session.run(\"python\", \"setup.py\", \"--quiet\", \"sdist\", \"bdist_wheel\")\n session.run(\"python\", \"-m\", \"twine\", \"upload\", \"dist/*\")\n publish_docs(session)\n publish_static_webapp(session)",
"def publish_file(directory, file_name, file_type):\n\n folder = create_folder(os.path.join(output_folder,file_type))\n \n logger.info('Copying ' + file_name + ' to ' + folder)\n shutil.copyfile(os.path.join(directory,file_name), os.path.join(folder,file_name))",
"def publish_packages(c):\n c.run(\n \"poetry publish --repository shipstation \"\n f\"--password {os.environ.get('GITHUB_TOKEN')}\"\n )",
"def pub(package):\n scp(os.path.join(common.BUILD_DIR, package.name) + \"*.deb\", common.REPOSITORY)\n scp(os.path.join(common.BUILD_DIR, package.name) + \"*.changes\", common.REPOSITORY)",
"def publish(paths, targets=None):\n if not any(paths):\n raise RuntimeError(\"No publish paths specified\")\n\n from openpype import install, uninstall\n from openpype.api import Logger\n\n # Register target and host\n import pyblish.api\n import pyblish.util\n\n env = get_app_environments_for_context(\n os.environ[\"AVALON_PROJECT\"],\n os.environ[\"AVALON_ASSET\"],\n os.environ[\"AVALON_TASK\"],\n os.environ[\"AVALON_APP_NAME\"]\n )\n os.environ.update(env)\n\n log = Logger.get_logger()\n\n install()\n\n pyblish.api.register_target(\"filesequence\")\n pyblish.api.register_host(\"shell\")\n\n if targets:\n for target in targets:\n pyblish.api.register_target(target)\n\n os.environ[\"OPENPYPE_PUBLISH_DATA\"] = os.pathsep.join(paths)\n\n log.info(\"Running publish ...\")\n\n # Error exit as soon as any error occurs.\n error_format = \"Failed {plugin.__name__}: {error} -- {error.traceback}\"\n\n for result in pyblish.util.publish_iter():\n if result[\"error\"]:\n log.error(error_format.format(**result))\n uninstall()\n sys.exit(1)\n\n log.info(\"Publish finished.\")\n uninstall()",
"def publish_images():\n copy_tree(src_image_dir, dst_image_dir)",
"def pubone(file_name,alg,host):\n\n hash_alg=alg\n scheme=\"ni\"\n rform=\"json\"\n ext=\"{ \\\"meta\\\": { \\\"pubdirs\\\" : \\\"yep\\\" } }\"\n\n # record start time of this\n stime=time.time()\n\n # Create NIdigester for use with form encoder and StreamingHTTP\n ni_digester = NIdigester()\n # Install the template URL built from the scheme, the authority and the digest algorithm\n rv = ni_digester.set_url((scheme, host, \"/%s\" % hash_alg))\n if rv != ni_errs.niSUCCESS:\n nilog(\"Cannot construct valid ni URL: %s\" % ni_errs_txt[rv])\n return\n debug(ni_digester.get_url())\n # Open the file if possible\n try:\n f = open(file_name, \"rb\")\n except Exception, e :\n debug(\"Cannot open file %s: Error: %s\" %(file_name, str(e)))\n return\n # Guess the mimetype of the file\n m = magic.Magic(mime=True)\n ctype = m.from_file(file_name)\n debug(\"Content-Type: %s\" % ctype)\n if ctype is None:\n # Guessing didn't work - default\n ctype = \"application/octet-stream\"\n # Set up HTTP form data for publish request\n # Make parameter for file with digester\n octet_param = MultipartParam(\"octets\",\n fileobj=f,\n filetype=ctype,\n filename=file_name,\n digester = ni_digester)\n # Make dictionary that will dynamically retrieve ni URI when it has been made\n uri_dict = { \"generator\": octet_param.get_url,\n \"length\": (len(ni_digester.get_url()) + len(\";\") +\n ni_digester.get_b64_encoded_length())}\n msgid=str(random.randint(1, 2**64)) \n param_list = [octet_param,\n (\"URI\", uri_dict),\n (\"msgid\", msgid),\n (\"ext\", ext),\n (\"fullPut\", \"yes\"),\n (\"rform\", rform)]\n # Construct data generator and header strings\n datagen, headers = multipart_encode(param_list)\n if verbose:\n debug(\"Parameters prepared: %s\"% \"\".join(datagen))\n\n # Set up streaming HTTP mechanism - register handlers with urllib2\n # get out for now, don't do it\n opener = streaminghttp.register_openers()\n # Where to send the publish request.\n http_url = \"http://%s/netinfproto/publish\" % host\n # debug(\"Accessing: %s\" % http_url)\n # Send POST request to destination server\n fsize=os.path.getsize(file_name)\n nilog(\"%s,PUBLISH tx,file,%s,size,%d,to,%s\" % (msgid,file_name,fsize,host))\n try:\n req = urllib2.Request(http_url, datagen, headers)\n except Exception, e:\n nilog(\"%s,PUBLISH tx error\" % msgid);\n if verbose:\n nilog(\"Error: Unable to create request for http URL %s: %s\" %\n (http_url, str(e)))\n f.close()\n return\n # Get HTTP results\n try:\n http_object = urllib2.urlopen(req)\n except Exception, e:\n nilog(\"%s,PUBLISH rx error\" % msgid);\n if verbose:\n nilog(\"Error: Unable to access http URL %s: %s\" % (http_url, str(e)))\n f.close()\n return\n f.close()\n if verbose:\n nilog(\"Digester result: %s\" % octet_param.get_url())\n # Get message headers\n http_info = http_object.info()\n http_result = http_object.getcode()\n if verbose:\n debug(\"HTTP result: %d\" % http_result)\n debug(\"Response info: %s\" % http_info)\n debug(\"Response type: %s\" % http_info.gettype())\n\n # Read results into buffer\n payload = http_object.read()\n http_object.close()\n # debug(payload)\n # Report outcome\n if (http_result != 200):\n if verbose:\n debug(\"Unsuccessful publish request returned HTTP code %d\" %\n http_result) \n nilog(\"%s,PUBLISH rx error bad response status,%d\" % (msgid,http_result));\n return\n # Check content type of returned message matches requested response type\n ct = http_object.headers[\"content-type\"]\n if ct != \"application/json\":\n if verbose:\n debug(\"Error: Expecting JSON 
coded (application/json) \"\n \"response but received Content-Type: %s\" % ct)\n nilog(\"%s,PUBLISH rx error bad content type,%s\" % (msgid,ct));\n return\n # If output of response is expected, print in the requested format\n if verbose:\n nilog( \"Publication of %s successful:\" % target)\n\n # JSON cases\n try:\n json_report = json.loads(payload)\n except Exception, e:\n if verbose:\n nilog(\"Error: Could not decode JSON report '%s': %s\" % (payload,\n str(e)))\n nilog(\"%s, PUBLISH rx error bad json decode\" % msgid);\n return\n\n if verbose: \n print json.dumps(json_report, indent = 4)\n etime=time.time()\n duration=etime-stime\n niuri=json_report[\"ni\"]\n nilog(\"%s,PUBLISH rx fine,ni,%s,size,%d,time,%10.10f\" % (msgid,niuri,fsize,duration*1000))\n\n return niuri",
"def cvmfsPublish(reponame = None):\n if reponame == None:\n reponame = _getRepoName()\n\n rc = subprocess.call([\"cvmfs_server\", \"publish\", \"-f\", reponame])\n if rc != 0:\n raise RuntimeError(\"Could not publish CVMFS transaction\")",
"def publish_metadata(): \n \n folder = 'metadata'\n name = get_dataset_filename()\n \n # Create a kml folder in the temp directory if it does not exist\n temp_working_folder = os.path.join(temp_workspace,folder)\n \n # Publish the metadata to the download folder\n publish_file(temp_working_folder, name + '.xml','metadata')",
"def _publish():\n modlog.info(\"publishing release...\")\n\n try:\n result = subprocess.check_output([\"python\", \"setup.py\", \"bdist_wheel\", \"upload\"],\n stderr=subprocess.STDOUT, universal_newlines=True)\n except subprocess.CalledProcessError as err:\n modlog.error(\"Failed to publish new PyJen release ({0})\".format(err.returncode))\n modlog.error(err.output)\n exit(1)\n modlog.debug(result)\n\n # todo: after the publish completes, auto-update the version number\n # todo: lay tag on release\n modlog.info(\"release published successfully\")",
"def download(urls, dest_folder):\n pass",
"def publish(self):\n # Write the models locally\n local_path_dist = self.dump_distributions()\n local_path_model = self.dump_model()\n\n # Write them to cloud storage\n bucket_path_dist = self.get_bucket_path(self.filename_distributions)\n bucket_path_model = self.get_bucket_path(self.filename_model)\n\n config = self.services.config\n lake = self.services.lake\n\n\n lake.upload(bucket_path_dist, local_path_dist, bucket_name=config.lake_bucket)\n lake.upload(bucket_path_model, local_path_model, bucket_name=config.lake_bucket)\n\n # Now finally we want to write our reference file to our repository and build a merge request\n reference = {\n \"model\": {\n \"bucket\": config.lake_bucket,\n \"path\": bucket_path_model,\n \"md5\": file_md5(local_path_model),\n },\n \"distributions\": {\n \"bucket\": config.lake_bucket,\n \"path\": bucket_path_dist,\n \"md5\": file_md5(local_path_dist),\n },\n }\n\n return reference"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
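The publish_files document delegates the actual upload to a shell script and treats any failure or timeout as non-fatal. An equivalent sketch with narrower exception handling; ./upload.sh is whatever deployment script exists on the host, and check=True is an added assumption so that a non-zero exit status also counts as failure:

import subprocess

def publish_files(script="./upload.sh", timeout=120.0):
    print("Publishing files to the internet...", end="", flush=True)
    try:
        # raise on timeout or on a non-zero exit status from the script
        subprocess.run([script], timeout=timeout, check=True)
        print("done.\n")
    except (subprocess.TimeoutExpired, subprocess.CalledProcessError, OSError):
        print("failed.\n")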
Start the Microblaze Processor. The processor instance will start automatically after instantiation. | def start(self):
self.microblaze.run()
self.microblaze.write(MAILBOX_OFFSET + MAILBOX_PY2IOP_CMD_OFFSET, 0)
self.load_switch_config(self.iop_switch_config) | [
"def platform_start(self):\n self.platform.start()",
"def start(self, memory=3):\n if self.cromwell_proc:\n logging.info(\"Request to start Cromwell: already running\")\n return\n\n self.cromwell_proc = subprocess.Popen([\n 'java',\n '-Dconfig.file=' + self.cromwell_conf,\n '-Xmx%dg'%memory,\n '-jar', self.cromwell_jar,\n 'server'])\n\n self.mem = memory\n\n logging.info(\"Started Cromwell\")\n self.logger.log(\n 'Launching cromwell process',\n memory=memory\n )",
"def start(self):\n\t\tGPIO.output(self._gpio, GPIO.LOW)\n\t\tself._pumping = True\n\t\tlogger.info(\"Pump received start request, pumpNr: %d\", self._pumpNr)",
"def start(self) -> None:\n context = self._get_multiprocessing_context()\n self._last_parsing_stat_received_at = time.monotonic()\n\n self._parent_signal_conn, child_signal_conn = context.Pipe()\n process = context.Process(\n target=type(self)._run_processor_manager,\n args=(\n self._dag_directory,\n self._max_runs,\n self._processor_timeout,\n child_signal_conn,\n self._dag_ids,\n self._pickle_dags,\n self._async_mode,\n ),\n )\n self._process = process\n\n process.start()\n\n self.log.info(\"Launched DagFileProcessorManager with pid: %s\", process.pid)",
"def start(self) -> None:\n with self._engine_start_lock:\n if self._engine_start == 0:\n self.rpc_server.start()\n self.start_engine()\n self._engine_start += 1",
"def start(self):\n self.lmotor_power = 0.0\n self.lmotor.start()\n self.rmotor_power = 0.0\n self.rmotor.start()",
"def initialize_multiprocessing(self):\n if self.multiprocessing_controller is not None:\n MPControl.set_multiprocess_engine(self.multiprocessing_controller)\n MPControl.connect()",
"def start(self):\n \n rpc = self.smartstarter.rpcsystem\n \n process = yield self.smartstarter.start()\n \n try:\n \n make_worker_url = yield process.get_function_url(make_worker)\n make_worker_stub = rpc.create_function_stub(make_worker_url)\n \n worker = yield make_worker_stub(\"local\") # TODO remove network\n \n worker.get_function_url = process.get_function_url_stub\n \n worker.reset = rpc.create_local_function_stub(process.reset)\n worker.stop = rpc.create_local_function_stub(process.stop)\n worker.kill = rpc.create_local_function_stub(process.kill)\n worker.stdout = process.stdout.make_stub(rpc)\n worker.stderr = process.stderr.make_stub(rpc)\n worker.exited = process.exited.make_stub(rpc)\n\n except:\n process.kill()\n raise \n \n\n \n # worker.stdout.add_callback(stdout)\n # worker.stderr.add_callback(stderr)\n \n# receiver_stub = rpc.create_local_function_stub(hook.receiver)\n# hookinstall_url = yield process.get_function_url(hook.install_hook)\n# hookinstall_url_stub = rpc.create_function_stub(hookinstall_url)\n# yield hookinstall_url_stub(receiver_stub)\n \n defer.returnValue(worker)",
"def start(self):\n logger.debug('Starting controller')\n pass",
"def launch(self):\n logger.debug(f'{self} Running control...')\n if self._initiate():\n self._spawn()",
"def start(self):\n self.log.debug(\"Mode started\")\n self.active = True\n self.task = Task.Create(self.tick, sleep=0)\n #self.machine.events.post('machineflow_' + self.name + '_start')",
"def start(self):\n print(\"Starting pyAudio mic\")\n self.stream = self.p.open(\n format=self.sample_format,\n input_device_index=self.input_device_index,\n channels=self.channels,\n rate=self.bit_rate,\n frames_per_buffer=self.frames_per_buffer,\n input=True,\n output=False,\n stream_callback=self.callback)",
"def _start_process(self):\n\t\tself._proc = subprocess.Popen(self.argv)",
"def start(self):\n if self._thread is not None:\n self._thread.start()\n else:\n self._run()\n return None",
"def set_processor(self, processor_name, processor_args=None):\n\t\tself.processor = self.build_processor(processor_name, processor_args)\n\t\tself.processor.set_bpm(self.bpm, self.downbeat)\n\n\t\tself.processor.set_drivers(self.drivers)\n\t\tself.processor.initialise_processor()\n\t\tself.current_processor = processor_name\n\t\tself.current_args = processor_args",
"def add_cpu(self):\n cpu_worker = CPUCmdRunner(self.host, 'cpu')\n self.cpu_workers.append(cpu_worker)\n cpu_worker.start()\n self.log.info('CPU worker added')",
"def start(self):\n for workload in self._workloads:\n self.log.info(\"%-20s STARTING port=%s\" % (workload.name(), workload.port()))\n workload.pre_start()\n workload.start()\n self._monitor_loop()\n self._cleanup()",
"def do_start(self,processor):\n # app_logger = self.construct_logger(rta_constants.PROPERTIES_LOG_FILE)\n running_dict = {}\n for item in self.get_running_status():\n running_dict[item.get('processor')]=item.get('status')\n\n if processor == 'spark':\n if running_dict:\n if running_dict['spark<spark_worker>'] != 'Running' and running_dict['spark<spark_master>'] != 'Running':\n try:\n cmd_line = self.cmd_start_spark\n cmd = subprocess.Popen([cmd_line],shell=True,stdout=subprocess.PIPE)\n (output,err) = cmd.communicate()\n # app_logger.info('*********output logging **************')\n print(output)\n return\n except Exception as ex:\n print(\" Failed to run processor with ERROR(%s).\", str(ex))\n sys.exit(1)\n else:\n if running_dict['spark<spark_worker>'] == 'Running' or running_dict['spark<spark_master>'] == 'Running':\n print('Spark Server is running!! please trying to stop it before it starts.')\n return\n else:\n print('Please type correct command! You may use \"help start\" see more help')\n return\n\n elif processor == 'tomcat':\n if running_dict.has_key('tomcat') and running_dict['tomcat'] != 'Running':\n try:\n cmd_line = self.cmd_start_tomcat\n # print('staring tomcat server------->')\n print cmd_line\n\n # 2311 Vpl update to fix problem of catalina shutdown when term exit (10.x timeout)\n cmd = subprocess.call(['nohup',cmd_line,'start'])\n #cmd = subprocess.Popen([cmd_line],shell=True,stderr=subprocess.PIPE)\n #(output,err) = cmd.communicate()\n\n # app_logger.info('*********output logging **************')\n #print(output)\n return\n except Exception as ex:\n print(\" Failed to run processor with ERROR(%s).\", str(ex))\n sys.exit(1)\n else:\n if not running_dict.has_key('tomcat'):\n print('Please type correct command! You may use \"help start\" see more help')\n return\n else:\n print('Tomcat Server is running!! please trying to stop it before it start.')\n return\n\n elif processor == 'HDFS':\n #1/5/2017 Commit by JOJO\n '''\n if running_dict.has_key('HDFS') and running_dict['HDFS'] != 'Running':\n try:\n cmd_line = self.cmd_start_hadoop_hdfs\n cmd = subprocess.Popen([cmd_line],shell=True,stderr=subprocess.PIPE)\n # (output,err) = cmd.communicate()\n # print(output)\n # app_logger.info('*********output logging **************')\n # print(output)\n print('HDFS has been started!')\n except Exception as ex:\n print(\" Failed to run processor with ERROR(%s).\", str(ex))\n sys.exit(1)\n else:\n if not running_dict.has_key('HDFS'):\n print('Please type correct command! You may use \"help start\" see more help')\n return\n else:\n print('HDFS server is running!! please trying to stop it before it start.')\n return\n '''\n print('Please type correct command! You may use \"help start\" see more help')\n return\n elif processor == 'web_management':\n if running_dict.has_key('web_management') and running_dict['web_management'] != 'Running':\n try:\n cmd_line = 'python '+self.cmd_start_web_management\n print('starting web_management webserver------->')\n # print cmd_line\n cmd = subprocess.Popen([cmd_line],shell=True,stderr=subprocess.PIPE)\n (output,err) = cmd.communicate()\n print(output)\n # app_logger.info('*********output logging **************')\n # print(output)\n print('web_management webserver has been started!')\n except Exception as ex:\n print(\" Failed to run processor with ERROR(%s).\", str(ex))\n sys.exit(1)\n else:\n if not running_dict.has_key('web_management'):\n print('Please type correct command! 
You may use \"help start\" see more help')\n return\n else:\n print('Flask webserver is running!! please trying to stop it before it start.')\n return\n\n elif processor == 'novelty':\n if running_dict.has_key('novelty') and running_dict['novelty'] != 'Running' and running_dict['spark<spark_worker>'] == 'Running' and running_dict['spark<spark_master>'] == 'Running':\n try:\n cmd_line = self.cmd_start_novelty_detector\n # print('staring novelty------->')\n # print cmd_line\n cmd = subprocess.Popen([cmd_line],shell=True,stderr=subprocess.PIPE)\n # (output,err) = cmd.communicate()\n\n # app_logger.info('*********output logging **************')\n print('novelty has been started!')\n return\n except Exception as ex:\n print(\" Failed to run processor with ERROR(%s).\", str(ex))\n sys.exit(1)\n else:\n if not running_dict.has_key('novelty'):\n print('Please type correct command! You may use \"help start\" see more help')\n return\n elif running_dict['novelty'] == 'Running':\n print('novelty processor is running!! please trying to stop it before it start.')\n return\n elif running_dict['spark<spark_worker>'] == 'Stopped' or running_dict['spark<spark_master>'] == 'Stopped':\n print('Please start spark first!! trying to use command \"start spark\"')\n return\n\n elif processor == 'raw_writer':\n if running_dict.has_key('raw_writer') and running_dict['raw_writer'] != 'Running' and running_dict['spark<spark_worker>'] == 'Running' and running_dict['spark<spark_master>'] == 'Running':\n try:\n cmd_line = self.cmd_start_raw_writer\n # print('staring raw_writer------->')\n # print cmd_line\n cmd = subprocess.Popen([cmd_line],shell=True,stderr=subprocess.PIPE)\n # (output,err) = cmd.communicate()\n print('raw_writer has been started!')\n return\n\n # app_logger.info('*********output logging **************')\n # print(output)\n except Exception as ex:\n print(\" Failed to run processor with ERROR(%s).\", str(ex))\n sys.exit(1)\n else:\n if not running_dict.has_key('raw_writer'):\n print('Please type correct command! You may use \"help start\" see more help')\n return\n elif running_dict['raw_writer'] == 'Running':\n print('raw_writer processor is running!! please trying to stop it before it start.')\n return\n elif running_dict['spark<spark_worker>'] == 'Stopped' or running_dict['spark<spark_master>'] == 'Stopped':\n print('Please start spark first!! trying to use command \"start spark\"')\n return\n\n elif processor == 'cassandra':\n if running_dict.has_key('cassandra') and running_dict['cassandra'] != 'Running':\n try:\n cmd_line = self.cmd_start_cassandra\n # print('starting cassandra------->')\n # print cmd_line\n\n #2311 Vpl update to fix problem of cassandra shutdown when term exit (10.x timeout)\n #cmd = subprocess.Popen([cmd_line],shell=True,stderr=subprocess.PIPE)\n cmd = subprocess.call(['nohup',cmd_line])\n #(output,err) = cmd.communicate()\n\n # app_logger.info('*********output logging **************')\n # print(output)\n print ('cassandra has been started!')\n return\n except Exception as ex:\n print(\" Failed to run processor with ERROR(%s).\", str(ex))\n sys.exit(1)\n else:\n if not running_dict.has_key('cassandra'):\n print('Please type correct command! You may use \"help start\" see more help')\n return\n else:\n print('cassandra Server is running!! 
please trying to stop it before it start.')\n return\n\n elif processor == 'kairosDb':\n if running_dict.has_key('kairosDb') and running_dict['kairosDb'] != 'Running' and running_dict['cassandra']=='Running':\n try:\n cmd_line = self.cmd_start_kairosDB\n # print('staring kairosDB------->')\n\n # print cmd_line\n\t\t\t\t\t#2311 Vpl update to fix problem of kairosDb shutdown when term exit (10.x timeout)\n\t\t\t\t\t#cmd = subprocess.Popen([cmd_line],shell=True,stderr=subprocess.PIPE)\n cmd = subprocess.call(['nohup',cmd_line,'start'])\n #(output,err) = cmd.communicate()\n\n # app_logger.info('*********output logging **************')\n print('kairosDb has been started!')\n return\n except Exception as ex:\n print(\" Failed to run processor with ERROR(%s).\", str(ex))\n sys.exit(1)\n else:\n if not running_dict.has_key('kairosDb'):\n print('Please type correct command! You may use \"help start\" see more help')\n return\n elif running_dict['cassandra']=='Stopped':\n print('cassandra required starting before kairosDb is running!! please trying to \"start cassandra\" first')\n return\n elif running_dict['kairosDB'] == 'Running':\n print('kairosDB Server is running!! please trying to stop it before it starts.')\n return\n\n elif processor == 'grafana':\n if running_dict.has_key('grafana') and running_dict['grafana'] != 'Running' and running_dict['kairosDb']=='Running':\n try:\n cmd_line = self.cmd_start_grafana\n # print('staring grafana------->')\n # print cmd_line\n cmd = subprocess.Popen([cmd_line],shell=True,stderr=subprocess.PIPE)\n # (output,err) = cmd.communicate()\n # app_logger.info('*********output logging **************')\n # print(output)\n print ('grafana has been started!')\n return\n except Exception as ex:\n print(\" Failed to run processor with ERROR(%s).\", str(ex))\n sys.exit(1)\n else:\n if not running_dict.has_key('grafana'):\n print('Please type correct command! You may use \"help start\" see more help')\n return\n elif running_dict['kairosDb']=='Stopped':\n print('kairosDb required starting before grafana is running!! please trying to \"start kairoseDb\" first')\n return\n elif running_dict['grafana'] == 'Running':\n print('grafana Server is running!! please trying to stop it before it starts.')\n return\n\n elif processor == 'kafka':\n if running_dict.has_key('kafka') and running_dict['kafka'] != 'Running' and running_dict['zookeeper']=='Running':\n try:\n cmd_line = self.cmd_start_kafka\n print('starting kafka------->')\n # print cmd_line\n\n #2311 Vpl update to fix problem of zookeeper shutdown when term exit (10.x timeout)\n #cmd = subprocess.Popen([cmd_line],shell=True,stderr=subprocess.PIPE)\n cmd = subprocess.Popen(cmd_line)\n # (output,err) = cmd.communicate()\n # print (output)\n print ('kafka has been started!')\n return\n # app_logger.info('*********output logging **************')\n # print(output)\n except Exception as ex:\n print(\" Failed to run processor with ERROR(%s).\", str(ex))\n sys.exit(1)\n else:\n if not running_dict.has_key('kafka'):\n print('Please type correct command! You may use \"help start\" see more help')\n return\n elif running_dict['zookeeper']=='Stopped':\n print('zookeeper required starting before kafka is running!! please trying to \"start zookeeper\" first')\n return\n elif running_dict['kafka'] == 'Running':\n print('Kafka Server is running!! 
please trying to stop it before it starts.')\n return\n\n elif processor == 'zookeeper':\n if running_dict.has_key('zookeeper') and running_dict['zookeeper'] != 'Running':\n try:\n cmd_line = self.cmd_start_zookeeper\n # print('staring zookeeper------->')\n # print (cmd_line)\n\n #2311 Vpl update to fix problem of zookeeper shutdown when term exit (10.x timeout)\n #cmd = subprocess.Popen([cmd_line],shell=True,stderr=subprocess.PIPE)\n cmd = subprocess.Popen(cmd_line)\n # (output,err) = cmd.communicate()\n # print (output)\n\n print('zookeeper has been started!')\n return\n except Exception as ex:\n print(\" Failed to stop processor with ERROR(%s).\", str(ex))\n sys.exit(1)\n else:\n if not running_dict.has_key('zookeeper'):\n print('Please type correct command! You may use \"help start\" see more help')\n return\n else:\n print('Zookeeper Server is running!! please trying to stop it before it starts.')\n return\n\n elif processor == 'accl_processor':\n if running_dict:\n if running_dict['accl_processor'] != 'Running' and running_dict['spark<spark_worker>'] == 'Running' and running_dict['spark<spark_master>'] == 'Running' and running_dict['zookeeper'] == 'Running' and running_dict['kafka'] == 'Running':\n try:\n cmd_line = self.cmd_start_accl_processor\n print cmd_line\n cmd = subprocess.Popen([cmd_line],shell=True,stderr=subprocess.PIPE)\n #cmd = subprocess.Popen(['nohup',cmd_line])\n # cmd = subprocess.Popen(cmd_line)\n\n print ('Accelerometer processor has been started')\n return\n except Exception as ex:\n print(\" Failed to run processor with ERROR(%s).\", str(ex))\n sys.exit(1)\n else:\n if running_dict['accl_processor'] == 'Running':\n print('Accelerometer processor is running!! please trying to stop it before it starts.')\n return\n elif running_dict['spark<spark_worker>'] == 'Stopped' or running_dict['spark<spark_master>'] == 'Stopped':\n print('Please start spark first!! trying to use command \"start spark\"')\n return\n elif running_dict['zookeeper'] == 'Stopped':\n print('Please start zookeeper server first!! trying to use command \"start zookeeper\"')\n return\n elif running_dict['kafka'] == 'Stopped':\n print('Please start kafka server first!! trying to use command \"start kafka\"')\n return\n else:\n print('Please type correct command! You may use \"help start\" see more help')\n sys.exit(1)\n\n elif processor == 'baro_processor':\n if running_dict:\n if running_dict['baro_processor'] != 'Running' and running_dict['spark<spark_worker>'] == 'Running' and running_dict['spark<spark_master>'] == 'Running' and running_dict['zookeeper'] == 'Running' and running_dict['kafka'] == 'Running':\n try:\n cmd_line = self.cmd_start_baro_processor\n cmd = subprocess.Popen([cmd_line],shell=True,stderr=subprocess.PIPE)\n print ('Barometer processor has been started')\n\t\t\tprint (cmd_line)\n return\n except Exception as ex:\n print(\" Failed to run processor with ERROR(%s).\", str(ex))\n sys.exit(1)\n else:\n if running_dict['baro_processor'] == 'Running':\n print('Barometer processor is running!! please trying to stop it before it starts.')\n return\n elif running_dict['spark<spark_worker>'] == 'Stopped' or running_dict['spark<spark_master>'] == 'Stopped':\n print('Please start spark first!! trying to use command \"start spark\"')\n return\n elif running_dict['zookeeper'] == 'Stopped':\n print('Please start zookeeper server first!! trying to use command \"start zookeeper\"')\n return\n elif running_dict['kafka'] == 'Stopped':\n print('Please start kafka server first!! 
trying to use command \"start kafka\"')\n return\n else:\n print('Please type correct command! You may use \"help start\" see more help')\n sys.exit(1)\n\n elif processor == 'gyro_processor':\n if running_dict:\n if running_dict['gyro_processor'] != 'Running' and running_dict['spark<spark_worker>'] == 'Running' and running_dict['spark<spark_master>'] == 'Running' and running_dict['zookeeper'] == 'Running' and running_dict['kafka'] == 'Running':\n try:\n cmd_line = self.cmd_start_gyro_processor\n cmd = subprocess.Popen([cmd_line],shell=True,stderr=subprocess.PIPE)\n print ('Gyroscope processor has been started')\n return\n except Exception as ex:\n print(\" Failed to run processor with ERROR(%s).\", str(ex))\n sys.exit(1)\n else:\n if running_dict['gyro_processor'] == 'Running':\n print('Gyroscope processor is running!! please trying to stop it before it starts.')\n return\n elif running_dict['spark<spark_worker>'] == 'Stopped' or running_dict['spark<spark_master>'] == 'Stopped':\n print('Please start spark first!! trying to use command \"start spark\"')\n return\n elif running_dict['zookeeper'] == 'Stopped':\n print('Please start zookeeper server first!! trying to use command \"start zookeeper\"')\n return\n elif running_dict['kafka'] == 'Stopped':\n print('Please start kafka server first!! trying to use command \"start kafka\"')\n return\n else:\n print('Please type correct command! You may use \"help start\" see more help')\n sys.exit(1)\n\n elif processor == 'aggr_processor':\n if running_dict:\n if running_dict['aggr_processor'] != 'Running' and running_dict['spark<spark_worker>'] == 'Running' and running_dict['spark<spark_master>'] == 'Running' and running_dict['zookeeper'] == 'Running' and running_dict['kafka'] == 'Running':\n try:\n cmd_line = self.cmd_start_aggr_naiv\n cmd = subprocess.Popen([cmd_line],shell=True,stderr=subprocess.PIPE)\n print ('Aggregator processor has been started')\n return\n except Exception as ex:\n print(\" Failed to run processor with ERROR(%s).\", str(ex))\n sys.exit(1)\n else:\n if running_dict['aggr_processor'] == 'Running':\n print('Aggregator processor is running!! please trying to stop it before it starts.')\n return\n elif running_dict['spark<spark_worker>'] == 'Stopped' or running_dict['spark<spark_master>'] == 'Stopped':\n print('Please start spark first!! trying to use command \"start spark\"')\n return\n elif running_dict['zookeeper'] == 'Stopped':\n print('Please start zookeeper server first!! trying to use command \"start zookeeper\"')\n return\n elif running_dict['kafka'] == 'Stopped':\n print('Please start kafka server first!! trying to use command \"start kafka\"')\n return\n else:\n print ('Please type correct command! You may use \"help start\" see more help')\n sys.exit(1)\n\n else:\n print ('Please type correct command! You may use \"help start\" see more help')",
"def start(self):\n self.__parse_model()\n self.__start_server()"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
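A hardware-free sketch of the start() handshake in the record above: run the soft processor, then clear the Python-to-IOP command word in the shared mailbox before loading the switch configuration. FakeMicroblaze and the two offset constants are placeholders made up for illustration; they are not the real PYNQ definitions.

MAILBOX_OFFSET = 0xF000             # placeholder, not the real PYNQ value
MAILBOX_PY2IOP_CMD_OFFSET = 0xFFC   # placeholder, not the real PYNQ value

class FakeMicroblaze:
    """Stand-in for the memory-mapped Microblaze object used in the record above."""
    def __init__(self):
        self.mem = {}
        self.state = 'STOPPED'

    def run(self):
        self.state = 'RUNNING'

    def reset(self):
        self.state = 'STOPPED'

    def write(self, addr, value):
        self.mem[addr] = value

    def read(self, addr):
        return self.mem.get(addr, 0)

mb = FakeMicroblaze()
mb.run()                                                  # start the soft processor
mb.write(MAILBOX_OFFSET + MAILBOX_PY2IOP_CMD_OFFSET, 0)   # clear any pending command
assert mb.state == 'RUNNING'
assert mb.read(MAILBOX_OFFSET + MAILBOX_PY2IOP_CMD_OFFSET) == 0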
Put the Microblaze processor into reset. This method will set the processor status to "STOPPED". | def stop(self):
self.microblaze.reset() | [
"def reset(self):\n self._internal_pong.machine_reset()",
"def reset(self):\n if self._reset is not None:\n self._reset.value = False\n time.sleep(0.001)\n self._reset.value = True\n time.sleep(0.001)\n else:\n raise RuntimeError(\"No reset pin defined\")",
"def system_reset(self):\n self.send([SYSTEM_RESET])",
"def hard_reset(self):\n self._get_controller().hard_reset()",
"def resetBoard(self):\n #Reset registers\n self.femb.write_reg( self.REG_RESET, 2)\n time.sleep(1.)\n\n #Reset state machines\n self.femb.write_reg( self.REG_RESET, 4)\n time.sleep(1.)\n\n #Reset reset register to 0\n self.femb.write_reg( self.REG_RESET, 0)\n time.sleep(0.2)",
"def sensor_reset():\n print(\"Sending reset signal to sensor\")\n PMS_RESET.value(0)\n time.sleep(0.5)\n PMS_RESET.value(1)\n time.sleep(1.0)",
"def _global_reset(self):\n self.reg.set(types.IXGBE_CTRL, types.IXGBE_CTRL_RST_MASK)\n self.reg.wait_clear(types.IXGBE_CTRL, types.IXGBE_CTRL_RST_MASK)\n time.sleep(0.01)",
"def reset(self, mode='soft'):\r\n return self.vmrun('reset', mode)",
"def svc_reset_system_mode(self) -> None:\n self._call_client_api(self._device.reset_mode)",
"def hard_reset(self, reset_pin):\n # tDRESET, tRESET, figure 7 in datasheet\n if reset_pin is not None:\n reset_pin.value(0)\n utime.sleep_ms(1) #\n reset_pin.value(1)\n utime.sleep_ms(2) # tSTART, figure 7 in datasheet",
"def test_reset():\n dev = _aws_device(wires=2)\n dev._circuit = CIRCUIT\n dev._task = TASK\n\n dev.reset()\n assert dev.circuit is None\n assert dev.task is None",
"def reset():\n for cpu_id in POSSIBLE_CPUS:\n set_cpu(cpu_id, True)",
"def set_working_state(self):\n self.state = 0\n self.port = None",
"def reset_ms_status():\n with GlobalVars.MSStatus.rw_lock:\n GlobalVars.MSStatus.ms_is_up = True\n GlobalVars.MSStatus.counter = 0",
"def reset(self):\n Generic.reset(self)\n self.prev_run_time = self.p - 1\n self.pending_time = 0",
"def reset(self):\n self.current_state = None",
"def reset(self):\n# \n self.end_and_close()\n# self.sim.start()\n\n # Start the next simulation\n self.sim._model.swmm_open()\n self.sim._model.swmm_start()\n\n # get the state\n state = self._state()\n return state",
"def reset(self):\n command = \"export STLINK_DEVICE=\" + self.stlink.port + \"; st-flash reset\"\n subprocess.run(command, shell=True)\n time.sleep(1)",
"def nmt_service_reset_node(self):\n self._node.nmt.send_command(0x81)"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Load the Microblaze processor's switch configuration. This method will update the switch config. Each pin requires 8 bits for configuration. | def load_switch_config(self, config=None):
if config is None:
config = ARDUINO_SWCFG_DIOALL
elif not len(config) == 4*ARDUINO_SWITCHCONFIG_NUMREGS:
raise TypeError('Invalid switch config {}.'.format(config))
# Build switch config word
self.iop_switch_config = config
sw_config_words = [0]*ARDUINO_SWITCHCONFIG_NUMREGS
for ix, cfg in enumerate(self.iop_switch_config):
if ix < 4:
sw_config_words[0] |= (cfg << ix*8)
elif ix < 8:
sw_config_words[1] |= (cfg << (ix-4)*8)
elif ix < 12:
sw_config_words[2] |= (cfg << (ix-8)*4)
elif ix < 16:
sw_config_words[3] |= (cfg << (ix-12)*4)
else:
sw_config_words[4] |= (cfg << (ix-16)*4)
# Configure switch
for i in range(ARDUINO_SWITCHCONFIG_NUMREGS):
self.write_cmd(ARDUINO_SWITCHCONFIG_BASEADDR + 4*i,
sw_config_words[i]) | [
"def configure_switch(self, number: str, config: SwitchConfig, platform_config: dict) -> \"SwitchPlatformInterface\":\n raise NotImplementedError",
"def configure_switch(self, config):\n raise NotImplementedError",
"def telnet_switch_reload_and_verify_using_cached_SWI_and_new_config(self,\n node):\n t = test.Test()\n con = t.dev_console(node)\n con.enable(\"show running-config\")\n helpers.log(con.cli('')['content'])\n con.enable(\"enable\")\n con.send(\"reload now\")\n con.expect(\"Discovered Switch Light manifest from neighbor discovery\")\n con.expect(\"Using cached ZTN SWI\")\n con.expect(\"Downloading new startup-config\")\n con.expect(\"Caching ZTN startup-config\")\n con.expect(r'.*login: $')",
"def telnet_switch_reload_and_verify_using_cached_SWI_and_config(self, node):\n t = test.Test()\n con = t.dev_console(node)\n con.enable(\"show running-config\")\n helpers.log(con.cli('')['content'])\n con.enable(\"enable\")\n con.send(\"reload now\")\n con.expect(\"Discovered Switch Light manifest from neighbor discovery\")\n con.expect(\"Using cached ZTN SWI\")\n con.expect(\"Using cached ZTN startup-config\")\n con.expect(r'.*login: $')",
"def handle_set_config(self, switch_config):\n pass",
"def load(self, config):\n toggle = NanpyGPIOToggle(self.mudpi, config)\n if toggle:\n node = self.extension.nodes[config['node']]\n if node:\n toggle.node = node\n self.add_component(toggle)\n else:\n raise MudPiError(f'Nanpy node {config[\"node\"]} not found trying to connect {config[\"key\"]}.')\n return True",
"def initialize(self):\n self.log.info(\"Initialize raspPinball hardware.\")\n\n self.config = self.machine.config['rasppinball']\n self.machine.config_validator.validate_config(\"rasppinball\", self.config)\n print(\"***************************\")\n print(self.config)\n #self.machine_type = (\n # self.machine.config['hardware']['driverboards'].lower())\n\n self._connect_to_hardware()\n\n\n # keypad\n self._kp = Keypad()\n self.old_key = \"\"\n self.key = \"\"\n # leds\n self.init_strips()",
"def activateBootloader(self):\n self._harness.pinMode(PIN_HWB, OUTPUT)\n self._harness.digitalWrite(PIN_HWB, 0)\n self._harness.pinMode(PIN_RESET, OUTPUT)\n self._harness.digitalWrite(PIN_RESET, 0)\n time.sleep(0.1)\n self._harness.pinMode(PIN_RESET, INPUT)\n time.sleep(0.1)\n self._harness.pinMode(PIN_HWB, INPUT)",
"def setup_traffic_light():\n gpio.setmode(gpio.BCM)\n for pin in [9, 10, 11]:\n gpio.setup(pin, gpio.OUT)",
"def _use_existing_configuration(self):\n HW_Init(self.ftdi, None)",
"def load_switches(self):\n new_switches = list()\n for site in self.sites:\n switches = self.get_switches_stats(site_id=site['id'])\n for switch in switches:\n if len(switch['name']) < 1:\n switch['name'] = ':'.join([switch['mac'][i:i + 2].upper() for i in range(0, len(switch['mac']), 2)])\n new_switch = {\n \"name\": switch['name'],\n \"site\": site['name'],\n \"site_id\": site['id'],\n \"device_id\": switch['id'],\n \"mac\": switch['mac'],\n \"mac_str\": ':'.join([switch['mac'][i:i + 2].upper() for i in range(0, len(switch['mac']), 2)]),\n \"ip_config\": switch['ip_config'],\n \"ip_actual\": switch['ip_stat'],\n \"net_obj\": get_network(address=switch['ip_config']['ip'], netmask=switch['ip_config']['netmask']) if 'ip' in switch['ip_config'] else None\n }\n for vlan, addr in new_switch['ip_actual']['ips'].items():\n if new_switch['ip_actual']['ip'] == addr:\n new_switch['ip_actual']['vlan'] = vlan.strip('vlan')\n else:\n new_switch['ip_actual']['vlan'] = 0\n if new_switch['ip_config']['network'] and new_switch['ip_config']['network'] != \"default\":\n new_switch['ip_config']['vlan'] = site['network_template']['networks'][new_switch['ip_config']['network']]['vlan_id']\n logger.debug(f\"Matched {new_switch['name']} management network '{new_switch['ip_config']['network']}' to VLAN {new_switch['ip_config']['vlan']}\")\n elif new_switch['ip_config']['network'] and new_switch['ip_config']['network'] == \"default\":\n new_switch['ip_config']['vlan'] = 1\n logger.debug(f\"Matched {new_switch['name']} management network '{new_switch['ip_config']['network']}' to VLAN {new_switch['ip_config']['vlan']}\")\n else:\n new_switch['ip_config']['vlan'] = 0\n logger.error(f\"Did not match {new_switch['name']} management network '{new_switch['ip_config']['network']}' to VLAN {new_switch['ip_config']['vlan']}\")\n new_switches.append(new_switch)\n self.switches = new_switches",
"def request_led_switches(self):\r\n self.KCube.CC_RequestLEDswitches(self.serial)",
"def read_switch(self):\n switch = GPIO.input(SWITCH_PIN)\n\n if (switch == 0):\n switch=1\n else:\n switch=0\n\n return switch",
"def configure_light(self, number: str, subtype: str, config: LightConfig,\n platform_settings: dict) -> \"LightPlatformInterface\":\n raise NotImplementedError",
"def start(self):\n self.microblaze.run()\n self.microblaze.write(MAILBOX_OFFSET + MAILBOX_PY2IOP_CMD_OFFSET, 0)\n self.load_switch_config(self.iop_switch_config)",
"def configureP4Switch(**switch_args):\n\n if \"sw_path\" in switch_args and 'grpc' in switch_args['sw_path']:\n # If grpc appears in the BMv2 switch target, we assume will start P4 Runtime\n class ConfiguredP4RuntimeSwitch(P4RuntimeSwitch):\n def __init__(self, *opts, **kwargs):\n kwargs.update(switch_args)\n P4RuntimeSwitch.__init__(self, *opts, **kwargs)\n\n def describe(self):\n print((\"%s -> gRPC port: %d\" % (self.name, self.grpc_port)))\n\n return ConfiguredP4RuntimeSwitch\n else:\n class ConfiguredP4Switch(P4Switch):\n next_thrift_port = 9090\n\n def __init__(self, *opts, **kwargs):\n global next_thrift_port\n kwargs.update(switch_args)\n kwargs['thrift_port'] = ConfiguredP4Switch.next_thrift_port\n ConfiguredP4Switch.next_thrift_port += 1\n P4Switch.__init__(self, *opts, **kwargs)\n\n def describe(self):\n print((\"%s -> Thrift port: %d\" % (self.name, self.thrift_port)))\n\n return ConfiguredP4Switch",
"def curl_get_switch_startup_config(self, mac):\n t = test.Test()\n bsn_common = bsnCommon()\n master_ip = bsn_common.get_node_ip('master')\n single = False\n try:\n slave_ip = bsn_common.get_node_ip('slave')\n except:\n helpers.log(\"Single node cluster\")\n single = True\n\n if not single:\n url = (\"http://%s/ztn/switch/%s/startup_config?proxy=1\"\n % (str(slave_ip), str(mac)))\n helpers.log(\"Verifying that Slave controller does not provide\"\n \" any startup-config for the switch\")\n helpers.log(\"Trying to get switch startup config at %s\" % url)\n try:\n req = urllib2.Request(url)\n res = urllib2.urlopen(req)\n helpers.log(\"Response is: %s\" % res)\n config = res.read()\n helpers.log(\"Response is: %s\" % ''.join(config))\n helpers.log(\"Slave responded with startup-config. Erroring out\")\n helpers.test_failure(\"Slave responded with startup-config\")\n except urllib2.HTTPError as err:\n if err.code == 404:\n helpers.log(\"Error 404 trying to get startup-config\"\n \" from Slave - it is expected\")\n else:\n return helpers.test_failure(\"Error connecting to Slave\")\n except:\n return helpers.test_failure(\"Other error connecting to Slave\")\n\n url = (\"http://%s/ztn/switch/%s/startup_config?internal=1&proxy=1\"\n % (str(slave_ip), str(mac)))\n helpers.log(\"Verifying that Slave can compute startup config\"\n \" for us if internal=1 flag attached\")\n helpers.log(\"Trying to get switch startup config at %s\" % url)\n try:\n req = urllib2.Request(url)\n res = urllib2.urlopen(req)\n helpers.log(\"Response is: %s\" % res)\n slave_config = res.read()\n helpers.log(\"Response is: %s\" % ''.join(slave_config))\n except:\n helpers.log(traceback.print_exc())\n return helpers.test_failure(\"Other error connecting to Slave\")\n\n url = (\"http://%s/ztn/switch/%s/startup_config?proxy=1\"\n % (str(master_ip), str(mac)))\n helpers.log(\"Trying to get switch startup config at %s\" % url)\n try:\n req = urllib2.Request(url)\n res = urllib2.urlopen(req)\n helpers.log(\"Response is: %s\" % res)\n config = res.read()\n helpers.log(\"Response is: %s\" % ''.join(config))\n except:\n helpers.log(traceback.print_exc())\n return helpers.test_failure(\"Error trying to get startup-config\"\n \" from Master\")\n config = config.replace('\\\\x', '\\\\0x')\n config = config.split('\\n')\n\n if not single:\n slave_config = slave_config.replace('\\\\x', '\\\\0x')\n slave_config = slave_config.split('\\n')\n if slave_config == config:\n helpers.log(\"Master: %s\" % config)\n helpers.log(\"Slave: %s\" % slave_config)\n helpers.log(\"Configs generated by Master and Slave are equal\")\n return config\n else:\n helpers.log(\"Master: %s\" % config)\n helpers.log(\"Slave: %s\" % slave_config)\n return helpers.test_failure(\"Slave and Master generated\"\n \" different startup configs\")\n return config",
"def configureP4Switch(**switch_args):\n if \"sw_path\" in switch_args and 'grpc' in switch_args['sw_path']:\n # If grpc appears in the BMv2 switch target, we assume will start P4Runtime\n class ConfiguredP4RuntimeSwitch(P4RuntimeSwitch):\n def __init__(self, *opts, **kwargs):\n kwargs.update(switch_args)\n P4RuntimeSwitch.__init__(self, *opts, **kwargs)\n\n def describe(self):\n print \"%s -> gRPC port: %d\" % (self.name, self.grpc_port)\n\n return ConfiguredP4RuntimeSwitch\n else:\n class ConfiguredP4Switch(P4Switch):\n next_thrift_port = 9090\n def __init__(self, *opts, **kwargs):\n global next_thrift_port\n kwargs.update(switch_args)\n kwargs['thrift_port'] = ConfiguredP4Switch.next_thrift_port\n ConfiguredP4Switch.next_thrift_port += 1\n P4Switch.__init__(self, *opts, **kwargs)\n\n def describe(self):\n print \"%s -> Thrift port: %d\" % (self.name, self.thrift_port)\n\n return ConfiguredP4Switch",
"def switch_changed(self, switch, name):\n section, option = name\n v = (\"1\" if switch.value else \"0\")\n _stash.config.set(section, option, v)\n self.save()"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
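The load_switch_config() record above spends most of its body packing one per-pin configuration value into 32-bit words before writing them to the switch registers. The sketch below isolates just that packing step so the shift pattern is easy to verify by hand: the code as shown gives the first eight entries 8 bits each (words 0-1) and later entries 4 bits each (words 2-4). The register count and the demo config values are assumptions chosen for illustration.

ARDUINO_SWITCHCONFIG_NUMREGS = 5   # assumed value, sized for the 20-entry demo config below

def pack_switch_config(config):
    """Mirror of the bit-packing loop in the record above (illustration only)."""
    words = [0] * ARDUINO_SWITCHCONFIG_NUMREGS
    for ix, cfg in enumerate(config):
        if ix < 4:
            words[0] |= cfg << (ix * 8)
        elif ix < 8:
            words[1] |= cfg << ((ix - 4) * 8)
        elif ix < 12:
            words[2] |= cfg << ((ix - 8) * 4)
        elif ix < 16:
            words[3] |= cfg << ((ix - 12) * 4)
        else:
            words[4] |= cfg << ((ix - 16) * 4)
    return words

# Example: 20 made-up nibble-sized values (0..3 repeating).
demo_config = [ix % 4 for ix in range(20)]
print([hex(w) for w in pack_switch_config(demo_config)])
# words[0] holds entries 0..3 as bytes: 0x3020100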
Returns the status of the Microblaze processor. Returns str: the processor status ("IDLE", "RUNNING", or "STOPPED"). | def status(self):
return self.microblaze.state | [
"def processor_status(self):\n return self._processor_status",
"def hardware_status(self):\n return self._hardware_status",
"def read_MPPT_Status(self):\n bitstream = self._port.read_holding_registers(0x0040, 1) # 0x0040 Device Status uint16 r\n result = unpack('<H', pack('<H', bitstream[0]))[0]\n if result == 0:\n return str('Hibernate')\n elif result == 1:\n return str('Power Save')\n elif result == 2:\n return str('Safe Mode')\n elif result == 3:\n return str('Operating')\n elif result == 4:\n return str('Diagnostic Mode')\n elif result == 5:\n return str('Remote Power Off')\n elif result == 255:\n return str('Data Not Available')\n else:\n return str('UNKNOWN STATE!')",
"def determineProcessorType(self): \n if commands.getstatusoutput('uname -p')[0] == 0: \n self.processor_type = commands.getoutput('uname -p')",
"def processor(self):\n msg = self.run('/Status')\n processor = None\n matched_line = [line for line in msg.split('\\n') if \"Current routine\" in line]\n if matched_line:\n # get the processor\n processor = re.findall(r'\\(([^)]+)\\)', matched_line[0])[0]\n return processor",
"def ha_rsc_status():\n if not path.isfile('/usr/bin/cl_status'):\n return None\n try:\n c = popen('/usr/bin/cl_status hbstatus >/dev/null 2>&1 && /usr/bin/cl_status rscstatus 2>/dev/null').read().strip()\n return c\n except:\n return None",
"def processor():\n return uname().processor",
"def hardware_status(self):\n stat = structs.JLinkHardwareStatus()\n res = self._dll.JLINKARM_GetHWStatus(ctypes.byref(stat))\n if res == 1:\n raise errors.JLinkException('Error in reading hardware status.')\n return stat",
"def runtime_status(self):\n try:\n return self.yarn_api.state(self.app_id)\n except:\n return \"NONE\"",
"def platform_status(self):\n # curl https://api.bitfinex.com/v2/platform/status\n path = \"/v2/platform/status\"\n return bool(self._get(path)[0])",
"def status(self):\n if self._proc is None:\n return self.IDLE\n elif self._proc.poll() is None:\n return self.RUNNING\n else:\n return self.CRASHED",
"def get_processor_info():\n with open('/proc/cpuinfo') as cpuinfo:\n processor = re.findall(r'model name\\s+: (.+)\\n', cpuinfo.read())[0]\n return processor.replace('(R)', '').replace('(TM)', '')",
"def get_power_status(self):\n try:\n power = tuple()\n power = self.command('system-power query')\n except:\n raise ReceiverError('Query power failed', 'get_power_status')\n if power[1] != 'on' and power[1] != 'standby':\n return 'unknown'\n else:\n return power[1]",
"def get_status(self):\n\n return str(self.le_status.text())",
"def get_cpuinfo() -> str:\n\n # Read /proc/cpuinfo\n try:\n with open('/proc/cpuinfo', 'r') as f:\n return f.read()\n except IOError:\n print('Error: Could not read /proc/cpuinfo', file = sys.stderr)\n return ''",
"def get_cpu_info():\n cpu = subprocess.check_output('lscpu | grep \"Model name\"', shell=True).decode()\n return cpu.split(':')[-1].strip()",
"def get_cpu_mode(self):\n\t\treturn call_sdk_function('PrlSrvCfg_GetCpuMode', self.handle)",
"def get_cpu_info():\n try:\n cpu_info = subprocess.check_output('lscpu')\n return cpu_info\n except OSError:\n return None",
"def cpuinfo(self):\n \n command = 'cat /proc/cpuinfo'\n\tpipe = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n stdout, stderr = pipe.communicate()\n\tinfo = stdout.strip()\n cpu_type = None\n\tn_proc = 0\n\tfor line in info.split('\\n'):\n if 'model name' in line:\n\t n_proc += 1\n if cpu_type is None:\n\t\t cpu_type = ' '.join(line.split(':')[-1].strip().split())\n\t\n\treturn (cpu_type, n_proc)"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Check whether the command mailbox is idle. Returns bool: True if the command in the mailbox is idle. | def is_cmd_mailbox_idle(self):
mb_cmd_word = self.microblaze.read(MAILBOX_OFFSET +
MAILBOX_PY2IOP_CMD_OFFSET)
return (mb_cmd_word & 0x1) == 0 | [
"def is_idle(self) -> bool:\n\n return self.get_runningstate == self.cmd.C815_IDLE_STATE",
"def is_idle(self) -> bool:",
"def is_idle(self):\n return self._idle",
"def any_idle(self) -> bool:\n return self.counts()[JobStatus.IDLE] > 0",
"def connection_is_idle(self):\r\n return self._idle_byte_intervals >= self._max_idle_count",
"def is_idle(self):\n if not g.ADDON.getSettingBool('wait_idle'):\n return True\n\n lastidle = xbmc.getGlobalIdleTime()\n if xbmc.Player().isPlaying():\n self.startidle = lastidle\n if lastidle < self.startidle:\n self.startidle = 0\n idletime = lastidle - self.startidle\n return idletime >= 300",
"def _isInIdle(self):\r\n if core.FW_conf['blackbox'].isVideoRecorderAvailable() and core.FW_conf['blackbox'].videoClient.GetCurrentState() == 'idle':\r\n self.inIdle = True\r\n return True\r\n else:\r\n return False",
"def idle(self):\n self._idle_tag = self._imap._command('IDLE')\n resp = self._imap._get_response()\n if resp is not None:\n raise self.Error('Unexpected IDLE response: %s' % resp)",
"def still_active(pid: int, cmd: str) -> bool:\n os_cmd = get_command_for_pid(pid)\n return cmd in os_cmd",
"def is_idle(self):\n idle = len(self.__tasks) == 0, self.__queue.qsize() == 0\n return collections.namedtuple('TaskletIdle', ['tasklet', 'worker'])(*idle)",
"def check_command(self):\n return self.process is not None and self.process.poll() is None",
"def should_poll(self):\r\n return self._command_state is not None",
"async def _mpdIdle(self):\n msg = message()\n # Issue idle command\n try:\n response = await self._mpdCommand(\n \"idle\\n\", lambda x: parser.listOfKvp(x, \":\", \"changed\")\n )\n except MessageTimeout:\n try:\n await self._mpdCommand(\"noidle\\n\")\n except MessageException as ex:\n raise MessageException(\n f\"Got unexpected error while attempting to leave idle mode: {ex.message}\"\n )\n\n # Report Success but with no data as this is a normal exit for idle\n msg.status = status.SUCCESS\n return msg\n\n try:\n changed = [i[\"changed\"] for i in response.data]\n except (ValueError, AttributeError):\n raise MessageException(\"Unable to parse idle message\", response)\n\n # Based upon what subsystems have changed determine which command to issue\n # to get current system state\n commands = set()\n for item in changed:\n action = {\n \"player\": (\"currentsong\", \"status\"),\n \"mixer\": (\"status\",),\n \"options\": (\"status\",),\n \"playlist\": (\"playlistinfo\",),\n }.get(item, [])\n for a in action:\n commands.add(a)\n\n # Issue commands and collect results\n msg.data = {}\n for c in commands:\n parsefunc = {\n \"currentsong\": parser.kvp,\n \"status\": parser.kvp,\n \"playlistinfo\": lambda msg: parser.listOfKvp(msg, \":\", \"file\"),\n }.get(c)\n await self._collectData(c, parsefunc)",
"def should_poll(self):\n return self._command_state is not None",
"def is_busy(self):\n cons = self.rpc.call(MsfRpcMethod.ConsoleList)['consoles']\n for c in cons:\n if c['id'] == self.cid:\n return c['busy']",
"def is_not_busy():\n\n async def predicate(ctx: commands.SlashContext) -> bool:\n if ctx.bot.commands_in_use.get(ctx.author.id, None):\n raise IsBusy(ctx)\n return True\n\n return commands.check(predicate)",
"def poll_until_idle(self):\n return self.parent.poll_until_idle(self.number)",
"def is_alive(self):\n return not self.actor_stopped.is_set()",
"def is_locked():\n if Quartz is None:\n return None\n current_session = Quartz.CGSessionCopyCurrentDictionary()\n if not current_session:\n return None\n return bool(current_session.get('CGSSessionScreenIsLocked', False))"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
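In the is_cmd_mailbox_idle() record above, only bit 0 of the Python-to-IOP command word matters: presumably the host sets it when posting a command and the firmware clears it once the command has been consumed, so a cleared bit means the mailbox is free. A tiny stand-alone illustration of that bit test, using made-up command words instead of real mailbox reads:

def cmd_word_is_idle(mb_cmd_word):
    """Bit 0 clear means the command mailbox is idle, as in the record above."""
    return (mb_cmd_word & 0x1) == 0

assert cmd_word_is_idle(0x0)         # nothing pending
assert not cmd_word_is_idle(0x5)     # bit 0 set: a command is still in flight
assert cmd_word_is_idle(0x4)         # higher bits may carry data; only bit 0 gates idleness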
Computes the hamming distance for sequences in seqs_mat indicated by pairs of indices. | def nb_vector_hamming_distance(indices, seqs_mat, seqs_L, check_lengths=True):
return _nb_vector_hamming_distance(indices, seqs_mat, seqs_L, check_lengths) | [
"def get_pairwise_hamming_dists(seqs):\n return get_pairwise_dists(seqs, hamming)",
"def hamming_distance(s1, s2):\n assert(len(s1) == len(s2))\n return np.sum([1 if c1 != c2 else 0 for c1, c2 in zip(s1, s2)])",
"def hamming_distance(seq1, seq2):\n dist = sum([char1 != char2 for char1, char2 in zip(seq1, seq2)])\n return dist",
"def matrix_distance(pattern, dna_strings):\n\n k = len(pattern)\n distance = 0\n\n for dna_string in dna_strings:\n\n found_hamming_distance = len(dna_string) ** len(dna_strings) # Initialize a maximum\n\n for i in range(len(dna_string) - k):\n\n dna_kmer = dna_string[i: i + k]\n hd = hamming_distance(dna_kmer, pattern)\n\n if found_hamming_distance > hd:\n found_hamming_distance = hd\n\n distance += found_hamming_distance\n\n return distance",
"def hamming_dist(sequence1, sequence2):\n assert len(sequence1) == len(sequence2), 'Unequal sequence length. ' \\\n + '{} compared to {}. '.format(len(sequence1), len(sequence2))\n \n dist = 0\n for sym1, sym2 in zip(sequence1, sequence2):\n if sym1 != sym2:\n dist += 1\n\n # for pos in range(len(sequence1)):\n # if sequence1[pos] != sequence2[pos]:\n # dist += 1\n \n return dist",
"def distance_matrix(sequences):\n\n if len(sequences) == 0:\n print(\"FATAL: No sequences found\")\n sys.exit(-1)\n else:\n print(\"Found %d sequences\" % len(sequences))\n \n print(\"Creating distance matrix start.\")\n dmx = PairwiseSimilarity(sequences)\n print(\"Distance matrix complete.\")\n return dmx",
"def compute_distance_matrix_hamming(a, b):\n\n dims = a.size(1)\n dmat = torch.matmul(a, torch.transpose(b, 0, 1))\n dmat = (dims - dmat) * 0.5\n return dmat",
"def modified_hamming_distance(s1, s2):\n if len(s1) != len(s2):\n raise ValueError(\"Undefined for sequences of unequal length\")\n score = 0\n for el1, el2 in zip(s1, s2):\n if el2 != 0:\n \n return sum(el1 != el2 for el1, el2 in zip(s1, s2))",
"def hamming_distance(array1, array2):\n if (array1.shape != array2.shape):\n raise ValueError(\"Input arrays must have same shape!\")\n distance = 0\n for i in range(array1.shape[0]):\n if (array1[i] != array2[i]):\n distance += 1\n return distance",
"def PairwiseSimilarity(sequences):\n\n seq = sequences\n N = len(seq)\n\n # Define distance matrix\n d_matrix = zeros((N, N), float64)\n\n for i in range(len(seq)):\n for j in range(len(seq)):\n d_matrix[i][j] = -1\n\n # Similarity matrix\n s_matrix = zeros((N, N), float64)\n\n for i in range(N):\n for j in range(N):\n s_matrix[i][j] = -1\n\n # Find pairs\n for i in range(N):\n for j in range(N):\n\n if s_matrix[i][j] >= 0:\n continue\n\n seq1 = seq[i][1]\n seq2 = seq[j][1]\n minlen = min(len(seq1), len(seq2))\n \n len1 = len2 = sims = 0\n for x in range(minlen):\n if seq1[x] != 256:\n len1 += 1.0\n\n if seq1[x] == seq2[x]:\n sims += 1.0\n\n if seq2[x] != 256:\n len2 += 1.0\n\n maxlen = max(len1, len2)\n s_matrix[i][j] = sims / maxlen\n\n # Get distance matrix\n for i in range(N):\n for j in range(N):\n d_matrix[i][j] = s_matrix[i][i] - s_matrix[i][j]\n \n return d_matrix",
"def hamming_distance(words: Iterator[str], vocabulary: Dict[str, int]):\n\n for word in words:\n distances = []\n suggestions = []\n vocab_list = list(vocabulary)\n for (i,vocab) in enumerate(vocab_list):\n if len(vocab) == len(word):\n distances.append(hamming(word, vocab))\n else:\n distances.append(120)\n \n idx = np.array(distances).argsort()[:5]\n \n for i in range(5):\n for j in range(i+1,5):\n if distances[idx[i]] == distances[idx[j]]:\n if vocabulary.get(vocab_list[idx[i]]) < vocabulary.get(vocab_list[idx[j]]):\n temp = idx[i] \n idx[i] = idx[j]\n idx[j] = temp \n\n for i in idx:\n suggestions.append(vocab_list[i])\n\n output(\"{misspelled}\\t{corrections}\".format(\n misspelled=word,\n corrections=\"\\t\".join(suggestions)\n )) # may cause IO bottleneck",
"def hamming_distance(X, X_train):\n def hamming(vector_x):\n return np.sum(np.logical_xor(vector_x, X_train.toarray()), axis=1)\n\n return np.apply_along_axis(hamming, axis=1, arr=X.toarray())",
"def hamming_distance(graph: nx.DiGraph) -> Tuple[Dict[Any, int], np.array]:\n x = []\n map = {}\n for i, u in enumerate(graph.nodes()):\n map[u] = i\n x.append(graph.nodes[u]['embedding'])\n x = np.row_stack(x)\n dist_mat = np.dot(x, (1 - x).T) + np.dot(1-x, x.T)\n return map, dist_mat",
"def hamming_distance(cs):\n d = 0.0\n end = len(cs) - 1\n for idx in range(end):\n s1 = cs[idx]\n s2 = cs[idx + 1]\n assert len(s1) == len(s2)\n s1_bits = ''.join('{:b}'.format(c).zfill(8) for c in s1)\n s2_bits = ''.join('{:b}'.format(c).zfill(8) for c in s2)\n d += sum(c1 != c2 for c1, c2 in zip(s1_bits, s2_bits))\n return d / end",
"def _PD_hamming(alignA, alignB, subst, bySite, withinA, ignoreGaps=True):\n L = len(alignA.iloc[0])\n gapCode = AA2CODE['-']\n\n \"\"\"Convert alignments into integer arrays first to speed comparisons\"\"\"\n matA = np.zeros((len(alignA), L))\n for seqi, s in enumerate(alignA):\n matA[seqi,:] = _seq2vec(s)\n if not withinA:\n matB = np.zeros((len(alignB), L))\n for seqi, s in enumerate(alignB):\n matB[seqi,:] = _seq2vec(s)\n\n \"\"\"Dist will be 1 where equal, 0 where not and nan if one is a gap\"\"\"\n if withinA:\n dist=np.zeros((int(scipy.special.comb(len(alignA), 2)), L))\n allPairs = itertools.combinations(np.arange(len(alignA)), 2)\n for j, (seqi1, seqi2) in enumerate(allPairs):\n dist[j,:] = matA[seqi1,:]!=matA[seqi2,:]\n if ignoreGaps:\n gapInd = (matA[seqi1,:]==gapCode) | (matA[seqi2,:]==gapCode)\n dist[j, gapInd] = np.nan\n else:\n dist=np.zeros((len(alignA)*len(alignB), L))\n allPairs = itertools.product(np.arange(len(alignA)), np.arange(len(alignB)))\n for j, (seqiA, seqiB) in enumerate(allPairs):\n dist[j,:] = matA[seqiA,:]!=matB[seqiB,:]\n if ignoreGaps:\n gapInd = (matA[seqiA,:]==gapCode) | (matB[seqiB,:]==gapCode)\n dist[j, gapInd] = np.nan\n\n if not bySite:\n dist=np.nanmean(dist, axis=1)\n return np.nanmean(dist, axis=0)",
"def hammingDistance(s1 = \"\", s2 = \"\"):\n # if len(s1) != len(s2):\n # raise ValueError(\"Undefined for sequences of unequal length\")\n return sum(bool(ord(ch1) - ord(ch2)) for ch1, ch2 in zip(s1, s2))",
"def hamming_distance(\n self, reference_seqs, positions_to_compare=None, ref_seq_positions=None,\n set_diff=False, ignore_characters=[], treat_as_match=[], normalized=False,\n names=None, return_as_dataframe=True, reference_seq_ids=None\n ):\n if normalized is True:\n diffs, bases = self.compare_to_references(\n reference_seqs, positions_to_compare, ref_seq_positions,\n flip=True, set_diff=set_diff, ignore_characters=ignore_characters, treat_as_match=treat_as_match,\n names=names, return_num_bases=True, return_as_dataframe=return_as_dataframe, reference_seq_ids=reference_seq_ids\n )\n hamming_result = ((diffs.sum(axis=1) / bases))\n else:\n hamming_result = self.compare_to_references(\n reference_seqs, positions_to_compare, ref_seq_positions,\n flip=True, set_diff=set_diff, ignore_characters=ignore_characters, treat_as_match=treat_as_match,\n names=names, return_as_dataframe=return_as_dataframe, reference_seq_ids=reference_seq_ids\n ).sum(axis=1)\n\n if return_as_dataframe:\n hamming_result = hamming_result.unstack()\n\n return hamming_result",
"def distance_embeddings(embeddings_matrix):\n distance = 0\n l = 0\n for row1, row2 in it.combinations(embeddings_matrix, 2):\n new_distance = np.sqrt(np.sum(np.power(row1-row2, 2)))\n distance += new_distance\n l += 1\n\n av_distance = distance / l\n return av_distance",
"def distance_matrix(sequences, substitution_mat):\n distance_mat = numpy.empty((len(sequences), len(sequences)), dtype='float')\n\n print(\"Building distance matrix\")\n # Get similarity score\n for i, seqA in enumerate(sequences):\n sys.stdout.write(\"\\r%.f%%\" % (float(i+1)/len(sequences)*100))\n sys.stdout.flush()\n for j, seqB in enumerate(sequences[i:], start=i):\n score = substitution_score(substitution_mat, seqA, seqB)\n distance_mat[i, j] = score\n distance_mat[j, i] = score\n print(\"\")\n # Set equal the diagonal\n diag_mini = numpy.min(distance_mat.diagonal())\n for i in range(len(sequences)):\n distance_mat[i, i] = diag_mini\n # Convert similarity score into a distance\n mini = numpy.min(distance_mat)\n maxi = numpy.max(distance_mat)\n return 1 - (distance_mat + abs(mini))/(maxi - mini)"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
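The nb_vector_hamming_distance record above is only a thin wrapper around a numba kernel, so here is a plain-NumPy sketch of the computation it names: for each (i, j) pair in indices, count the mismatching positions between rows i and j of seqs_mat over their stated lengths. This is an illustrative re-implementation, not the library's code; the argument names mirror the record and the check_lengths behaviour is a guess.

import numpy as np

def vector_hamming_distance(indices, seqs_mat, seqs_L, check_lengths=True):
    """Plain-NumPy stand-in for the numba kernel referenced in the record above."""
    dists = np.zeros(len(indices), dtype=np.int64)
    for k, (i, j) in enumerate(indices):
        if check_lengths and seqs_L[i] != seqs_L[j]:
            # Assumed behaviour: Hamming distance is undefined for unequal lengths.
            raise ValueError('hamming distance requires equal-length sequences')
        L = min(seqs_L[i], seqs_L[j])
        dists[k] = int(np.sum(seqs_mat[i, :L] != seqs_mat[j, :L]))
    return dists

# Toy example: three length-4 sequences encoded as small integers.
seqs_mat = np.array([[1, 2, 3, 4],
                     [1, 2, 9, 4],
                     [1, 2, 3, 4]])
seqs_L = np.array([4, 4, 4])
indices = np.array([[0, 1], [0, 2]])
print(vector_hamming_distance(indices, seqs_mat, seqs_L))   # [1 0]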
Computes the Levenshtein edit distance between two sequences, with the AA substitution distances provided in distance_matrix. The default distance matrix has a 1 for mismatches and 0 for matches. | def nb_editdistance(seq_vec1, seq_vec2, distance_matrix=identity_nb_distance_matrix, gap_penalty=1):
q_L = seq_vec1.shape[0]
s_L = seq_vec2.shape[0]
if q_L == s_L:
"""No gaps: substitution distance
This will make it differ from a strict edit-distance since
the optimal edit-distance may insert same number of gaps in both sequences"""
dist = 0
for i in range(q_L):
dist += distance_matrix[seq_vec1[i], seq_vec2[i]]
return dist
ldmat = np.zeros((q_L, s_L), dtype=np.int16)
for row in range(1, q_L):
ldmat[row, 0] = row * gap_penalty
for col in range(1, s_L):
ldmat[0, col] = col * gap_penalty
for col in range(1, s_L):
for row in range(1, q_L):
ldmat[row, col] = min(ldmat[row-1, col] + gap_penalty,
ldmat[row, col-1] + gap_penalty,
ldmat[row-1, col-1] + distance_matrix[seq_vec1[row-1], seq_vec2[col-1]]) # substitution
return ldmat[row, col] | [
"def damerau_levenshtein_distance(seq1, seq2):\r\n # codesnippet:D0DE4716-B6E6-4161-9219-2903BF8F547F\r\n # Conceptually, this is based on a len(seq1) + 1 * len(seq2) + 1 matrix.\r\n # However, only the current and two previous rows are needed at once,\r\n # so we only store those.\r\n oneago = None\r\n thisrow = list(range(1, len(seq2) + 1)) + [0]\r\n for x in range(len(seq1)):\r\n # Python lists wrap around for negative indices, so put the\r\n # leftmost column at the *end* of the list. This matches with\r\n # the zero-indexed strings and saves extra calculation.\r\n twoago, oneago, thisrow = (oneago, thisrow, [0] * len(seq2) + [x + 1])\r\n for y in range(len(seq2)):\r\n delcost = oneago[y] + 1\r\n addcost = thisrow[y - 1] + 1\r\n subcost = oneago[y - 1] + (seq1[x] != seq2[y])\r\n thisrow[y] = min(delcost, addcost, subcost)\r\n # This block deals with transpositions\r\n if (x > 0 and y > 0 and seq1[x] == seq2[y - 1]\r\n and seq1[x - 1] == seq2[y] and seq1[x] != seq2[y]):\r\n thisrow[y] = min(thisrow[y], twoago[y - 2] + 1)\r\n return thisrow[len(seq2) - 1]",
"def word_embedding_levenshtein(seq1, seq2, embeddings, average_distance, r=0.9, normalise=False):\n\tx1 = 1 + len(seq1)\n\tx2 = 1 + len(seq2)\n\n\talpha = r / ((1 - r) * average_distance)\n\n\t# Initialisation of the matrix\n\td = [] # Using Numpy structures for this is probably not more efficient\n\td.append(list(range(x2)))\n\tfor i in range(1, x1):\n\t\td.append([i] * x2)\n\n\t# Core of the algorithm\n\tfor i in range(1, x1):\n\t\tfor j in range(1, x2):\n\t\t\te1 = seq1[i-1]\n\t\t\te2 = seq2[j-1]\n\n\t\t\tif(e1 == e2): c = 0\n\t\t\telse:\n\t\t\t\tv1 = embeddings[e1]\n\t\t\t\tv2 = embeddings[e2]\n\n\t\t\t\tif((v1 is None) or (v2 is None)): c = 1\n\t\t\t\telse:\n\t\t\t\t\tdst = np.linalg.norm(v1 - v2) # Distance 2 (or L2 norm of the difference)\n\n\t\t\t\t\t# Now, we need a function increasing function mapping 0 to 0 and +inf to 1\n\t\t\t\t\tc = 1 - (1 / (1 + (alpha * dst)))\n\n\t\t\t\t\t#c /= r # If you uncomment this line, the cost of a substitution at distance `average_distance` will be 1 and substitutions might have higher cost, up to 1/r. This might be justified as long as `r` is above 0.5 (otherwise, some substitutions might be more expensive than an insertion followed by a deletion).\n\n\t\t\td[i][j] = min(\n\t\t\t\t(d[(i-1)][j] + 1), # Deletion of seq1[i]\n\t\t\t\t(d[i][(j-1)] + 1), # Insertion of seq2[j]\n\t\t\t\t(d[(i-1)][(j-1)] + c) # Substitution from seq1[i] to seq2[j]\n\t\t\t)\n\n\traw = d[-1][-1]\n\n\tif(normalise): return (raw / (len(seq1) + len(seq2)))\n\treturn raw",
"def levenshtein_distance(sentence1, sentence2):\n seq1 = lemmatizer.lemmatize(sentence1, ignore_stop_words=True)\n seq2 = lemmatizer.lemmatize(sentence2, ignore_stop_words=True)\n return nltk.edit_distance(seq1, seq2, transpositions=False) / max(len(seq1), len(seq2))",
"def levenshtein_align(a, b, score):\n\n\t# reconstruct the sequence from by back-walking the matrix from the bottom right corner\n\ti = len(a)\n\tj = len(b)\n\n\t# initialize lists to track combined sequence and components\n\tseq = []\n\tseq_a = []\n\tseq_b = []\n\n\twhile i > 0 or j > 0:\n\n\t\tif i == 0:\n\t\t\t# add\n\t\t\tseq.append(b[j-1])\n\t\t\tseq_a.append(0)\n\t\t\tseq_b.append(1)\n\t\t\tj -= 1\n\t\t\tcontinue\n\t\tif j == 0:\n\t\t\t# subtract\n\t\t\tseq.append(a[i-1])\n\t\t\tseq_a.append(1)\n\t\t\tseq_b.append(0)\n\t\t\ti -= 1\n\t\t\tcontinue\n\n\t\tcur_val = score[i,j]\n\t\teq_val = score[i-1, j-1]\n\t\tsub_val = score[i-1, j]\n\t\tadd_val = score[i, j-1]\n\n\t\tif sub_val == cur_val - 1:\n\t\t\t# subtract\n\t\t\tseq.append(a[i-1])\n\t\t\tseq_a.append(1)\n\t\t\tseq_b.append(0)\n\t\t\ti -= 1\n\t\t\tcontinue\n\n\t\tif add_val == cur_val - 1:\n\t\t\t# add\n\t\t\tseq.append(b[j-1])\n\t\t\tseq_a.append(0)\n\t\t\tseq_b.append(1)\n\t\t\tj -= 1\n\t\t\tcontinue\n\n\t\tif eq_val == cur_val - 1 or eq_val == cur_val:\n\t\t\t# move up the diagonal\n\t\t\tseq.append(a[i-1])\n\t\t\tseq_a.append(1)\n\t\t\tseq_b.append(1)\n\t\t\ti -= 1\n\t\t\tj -= 1\n\t\t\tcontinue\n\n\t# reverse sequences\n\tseq.reverse()\n\tseq_a.reverse()\n\tseq_b.reverse()\n\n\treturn seq, seq_a, seq_b",
"def levenshtein_distance(self, a,b):\n\n n, m = len(a), len(b)\n if n > m:\n a,b = b,a\n n,m = m,n\n current = range(n+1)\n for i in range(1,m+1):\n previous, current = current, [i]+[0]*n\n for j in range(1,n+1):\n add, delete = previous[j]+1, current[j-1]+1\n change = previous[j-1]\n if a[j-1] != b[i-1]:\n change = change + 1\n current[j] = min(add, delete, change)\n return current[n]",
"def distance_matrix(sequences, substitution_mat):\n distance_mat = numpy.empty((len(sequences), len(sequences)), dtype='float')\n\n print(\"Building distance matrix\")\n # Get similarity score\n for i, seqA in enumerate(sequences):\n sys.stdout.write(\"\\r%.f%%\" % (float(i+1)/len(sequences)*100))\n sys.stdout.flush()\n for j, seqB in enumerate(sequences[i:], start=i):\n score = substitution_score(substitution_mat, seqA, seqB)\n distance_mat[i, j] = score\n distance_mat[j, i] = score\n print(\"\")\n # Set equal the diagonal\n diag_mini = numpy.min(distance_mat.diagonal())\n for i in range(len(sequences)):\n distance_mat[i, i] = diag_mini\n # Convert similarity score into a distance\n mini = numpy.min(distance_mat)\n maxi = numpy.max(distance_mat)\n return 1 - (distance_mat + abs(mini))/(maxi - mini)",
"def _DamerauLevenshtein(a, b):\n memo = {}\n\n def Distance(x, y):\n \"\"\"Recursively defined string distance with memoization.\"\"\"\n if (x, y) in memo:\n return memo[x, y]\n if not x:\n d = len(y)\n elif not y:\n d = len(x)\n else:\n d = min(\n Distance(x[1:], y) + 1, # correct an insertion error\n Distance(x, y[1:]) + 1, # correct a deletion error\n Distance(x[1:], y[1:]) + (x[0] != y[0])) # correct a wrong character\n if len(x) >= 2 and len(y) >= 2 and x[0] == y[1] and x[1] == y[0]:\n # Correct a transposition.\n t = Distance(x[2:], y[2:]) + 1\n if d > t:\n d = t\n\n memo[x, y] = d\n return d\n return Distance(a, b)",
"def calculate_levenshtein_distance(str_1, str_2):\n distance = 0\n buffer_removed = buffer_added = 0\n for x in ndiff(str_1, str_2):\n code = x[0]\n # Code ? is ignored as it does not translate to any modification\n if code == ' ':\n distance += max(buffer_removed, buffer_added)\n buffer_removed = buffer_added = 0\n elif code == '-':\n buffer_removed += 1\n elif code == '+':\n buffer_added += 1\n distance += max(buffer_removed, buffer_added)\n return distance",
"def levenshtein_distance(str1, str2):\n m = len(str1)\n n = len(str2)\n lensum = float(m + n)\n d = [] \n for i in range(m+1):\n d.append([i]) \n del d[0][0] \n for j in range(n+1):\n d[0].append(j) \n for j in range(1,n+1):\n for i in range(1,m+1):\n if str1[i-1] == str2[j-1]:\n d[i].insert(j,d[i-1][j-1]) \n else:\n minimum = min(d[i-1][j]+1, d[i][j-1]+1, d[i-1][j-1]+2) \n d[i].insert(j, minimum)\n ldist = d[-1][-1]\n ratio = (lensum - ldist)/lensum\n return {'distance':ldist, 'ratio':ratio}",
"def matrix_distance(self, other, matrix, use_indices=False):\n return self.distance(other, DistanceFromMatrix(matrix))",
"def full_matrix(word_one, word_two, print_matrix=False):\n distance = 0\n word_one_len = len(word_one)\n word_two_len = len(word_two)\n\n if word_one == word_two:\n return distance\n elif not word_one or not word_two:\n return max(word_one_len, word_two_len)\n\n distance_matrix = init_matrix(word_one, word_two)\n # sinc this only looks at things previously created, it seems like\n # I should be able to just do this in the previous iteration\n # over `word_two` and combine initializing the matrix with this iteration.\n # I think with minimal special casing this could just become O(N) as well\n # by just iterating the length of the shorter of the two strings\n # and then tacking on however many inserts are needed to make the longer string.\n # That would kill printing the final matrix, of course.\n for idx, row in enumerate(distance_matrix):\n # switch this of an n^2 iteration over the row now\n if idx == 0:\n continue\n # row is length of word_one + 1\n for idx2, col in enumerate(row):\n # continue if idx2 == 0?\n action_cost = 1\n # if the letters we are comparing are the same, then there is no cost to this action\n if idx2 <= word_one_len and idx <= word_two_len and word_one[idx2-1] == word_two[idx-1]:\n action_cost = 0\n\n before = row[idx2-1]\n above = distance_matrix[idx-1][idx2]\n angle = distance_matrix[idx-1][idx2-1]\n\n # add the action cost to lowest of previous cost changes\n current_cost = min(before, above, angle) + action_cost \n row[idx2] = current_cost\n\n if print_matrix:\n print([' ', ' '] + list(word_one))\n\n for idx, x in enumerate(distance_matrix):\n if idx > 0:\n line = [word_two[idx-1]] \n else:\n line = [' ']\n line += [str(y) for y in x]\n print(line)\n\n return current_cost",
"def matrix_distance(self, other, matrix):\n return self.distance(other, DistanceFromMatrix(matrix))",
"def test_matrix_distance(self):\n # note that the score matrix must contain 'diagonal' elements m[i][i]\n # to avoid failure when the sequences match.\n m = {\"U\": {\"U\": 0, \"C\": 1, \"A\": 5}, \"C\": {\"C\": 0, \"A\": 2, \"G\": 4}}\n self.assertEqual(self.RNA(\"UUUCCC\").matrix_distance(\"UCACGG\", m), 14)\n self.assertEqual(self.RNA(\"UUUCCC\").matrix_distance(\"\", m), 0)\n self.assertEqual(self.RNA(\"UUU\").matrix_distance(\"CAC\", m), 7)\n self.assertRaises(KeyError, self.RNA(\"UUU\").matrix_distance, \"CAG\", m)",
"def levenshtein(s1, s2):\n if len(s1) < len(s2):\n return levenshtein(s2, s1)\n\n # len(s1) >= len(s2)\n if len(s2) == 0:\n return len(s1)\n\n previous_row = range(len(s2) + 1)\n for i, c1 in enumerate(s1):\n current_row = [i + 1]\n for j, c2 in enumerate(s2):\n insertions = previous_row[j + 1] + 1 # j+1 instead of j since previous_row and current_row are one character longer\n deletions = current_row[j] + 1 # than s2\n substitutions = previous_row[j] + (c1 != c2)\n current_row.append(min(insertions, deletions, substitutions))\n previous_row = current_row\n \n return previous_row[-1]",
"def needleman_wunsch(gseq1, gseq2, match, mismatch, gap, similarity_score_matrix): \n if(not (type(gseq1)== str and type(gseq2) == str)):\n print(\"DNA sequences pass to this function must be of string type\")\n \n \n # set the variables that we are going to use\n score_match = match\n score_mistach = mismatch\n score_gap = gap\n sscm = similarity_score_matrix\n \n \n alignmentA = gseq1\n alignmentB = gseq2\n i = len(gseq1)\n j = len(gseq2)\n \n # declare the 2d matrix to represent the similarity matrix\n # [r],[c]\n Fmatrix = [[],[]]\n \n while(i > 0 or j > 0):\n \n if(i > 0 and j > 0 and Fmatrix[i][j] == Fmatrix[i-1][j-1] + gs\n \n \n \n match = 1\n mistmatch = -1\n gap = -1\n \n #create the 2d grid\n grid = []\n grid.append([])\n grid.append([])",
"def _pairwise_dist(self,seq1,seq2):\n \n return jf.damerau_levenshtein_distance(str(seq1), str(seq2))",
"def damerau_levenshtein_similarity(s1, s2):\n max_cost = max(len(s1), len(s2))\n\n if max_cost == 0:\n return 1.0\n\n return 1.0 - float(damerau_levenshtein_distance(s1, s2)) / max_cost",
"def matrix_distance(pattern, dna_strings):\n\n k = len(pattern)\n distance = 0\n\n for dna_string in dna_strings:\n\n found_hamming_distance = len(dna_string) ** len(dna_strings) # Initialize a maximum\n\n for i in range(len(dna_string) - k):\n\n dna_kmer = dna_string[i: i + k]\n hd = hamming_distance(dna_kmer, pattern)\n\n if found_hamming_distance > hd:\n found_hamming_distance = hd\n\n distance += found_hamming_distance\n\n return distance",
"def levenshteinDistanceWord(s1, s2):\n if len(s1) > len(s2):\n s1, s2 = s2, s1\n distances = range(len(s1) + 1)\n for i2, c2 in enumerate(s2):\n distances_ = [i2+1]\n for i1, c1 in enumerate(s1):\n if c1 == c2:\n distances_.append(distances[i1])\n else:\n distances_.append(1 + min((distances[i1], distances[i1 + 1], distances_[-1])))\n distances = distances_\n return distances[-1]"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Compute "tcrdist" distance between two TCR CDR3 sequences. Using default weight, gap penalty, ntrim and ctrim is equivalent to the original distance published in Dash et al, (2017). By setting ntrim and ctrim to 0 and adjusting the dist_weight, it is also possible to compute the CDR1/2 loop distances which can be combined with the CDR3 distance for overall distance. See tcrdist2 package for details. | def nb_tcrdist(seq_vec1, seq_vec2, distance_matrix=tcr_nb_distance_matrix, dist_weight=3, gap_penalty=4, ntrim=3, ctrim=2, fixed_gappos=True):
q_L = seq_vec1.shape[0]
s_L = seq_vec2.shape[0]
if q_L == s_L:
"""No gaps: substitution distance"""
tmp_dist = 0
for i in range(ntrim, q_L - ctrim):
tmp_dist += distance_matrix[seq_vec1[i], seq_vec2[i]]
return tmp_dist * dist_weight
short_len = min(q_L, s_L)
len_diff = abs(q_L - s_L)
if fixed_gappos:
"""If we are not aligning, use a fixed gap position relative to the start of the CDR3
that reflects the typically longer and more variable-length contributions to
the CDR3 from the J than from the V. For a normal-length
CDR3 this would be after the Cys+5 position (ie, gappos = 6; align 6 rsds on N-terminal side of CDR3).
        Use an earlier gappos if short_len is less than 11."""
min_gappos = min(6, 3 + (short_len - 5) // 2)
max_gappos = min_gappos
else:
"""The CYS and the first G of the GXG are 'aligned' in the beta sheet
the alignment seems to continue through roughly CYS+4
ie it's hard to see how we could have an 'insertion' within that region
        gappos=1 would be an insertion after CYS
        gappos=5 would be an insertion after CYS+4 (5 rsds before the gap)
the full cdr3 ends at the position before the first G
so gappos of len(shortseq)-1 would be gap right before the 'G'
shifting this back by 4 would be analogous to what we do on the other strand, ie len(shortseq)-1-4"""
min_gappos = 5
max_gappos = short_len - 1 - 4
while min_gappos > max_gappos:
min_gappos -= 1
max_gappos += 1
min_dist = -1
# min_count = -1
for gappos in range(min_gappos, max_gappos + 1):
tmp_dist = 0
# tmp_count = 0
remainder = short_len - gappos
for n_i in range(ntrim, gappos):
"""n_i refers to position relative to N term"""
# print (n_i, shortseq[i], longseq[i], distance_matrix[shortseq[i]+longseq[i]])
tmp_dist += distance_matrix[seq_vec1[n_i], seq_vec2[n_i]]
# tmp_count += 1
#print('sequence_distance_with_gappos1:', gappos, remainder, dist[seq_i])
for c_i in range(ctrim, remainder):
"""c_i refers to position relative to C term, counting upwards from C term"""
tmp_dist += distance_matrix[seq_vec1[q_L - 1 - c_i], seq_vec2[s_L - 1 - c_i]]
# tmp_count += 1
#print('sequence_distance_with_gappos2:', gappos, remainder, dist[seq_i])
if tmp_dist < min_dist or min_dist == -1:
min_dist = tmp_dist
# min_count = tmp_count
if min_dist == 0:
break
"""Note that weight_cdr3_region is not applied to the gap penalty"""
return min_dist * dist_weight + len_diff * gap_penalty | [
"def cdtw(c1, c2, num_steiner=5, interp=0.3, r=0):\n\n print(\"Note: Parameter r={} is not used in cdtw_plot. Use cdtw_noplot.cdtw if required.\".format(r))\n d = graph_distance(c1, c2, num_steiner, interp=interp)\n return d",
"def func_c_align_split_n(self, args):\n tik_instance, ub_ori, ub_trans, n_before, n_len = args\n\n n_d, d_d, h_d, w_d, c_d = self.dst_shape\n dhw_d = d_d * h_d * w_d\n hw_d = h_d * w_d\n\n data_offset = n_before * self.c_0\n ub_offset = 0\n ori_nburst = dhw_d * self.c_1\n burst_len = n_len * self.c_0 // self.cp_align_len\n src_stride = (n_d - n_len) * self.c_0 // self.cp_align_len\n dst_stride = 0\n args = tik_instance, self.src_gm, ub_ori, data_offset, ub_offset, \\\n ori_nburst, burst_len, src_stride, dst_stride, self.cp_align_len\n _gm_to_ub_one(args)\n\n hwnoni = hw_d * n_len\n with tik_instance.for_range(0, d_d) as num_d:\n with tik_instance.for_range(0, self.c_1) as num_c1:\n ori_cur = num_d * self.c_1 * hwnoni * self.c_0 \\\n + num_c1 * hwnoni * self.c_0\n trans_cur = num_d * self.c_1 * hwnoni * self.c_0 \\\n + num_c1 * self.c_0\n nburst = hwnoni\n burst_len = self.c_0 // self.cp_align_len\n src_stride = 0\n dst_stride = (self.c_1 - 1) * self.c_0 // self.cp_align_len\n tik_instance.data_move(\n ub_trans[trans_cur],\n ub_ori[ori_cur],\n 0, nburst, burst_len, src_stride, dst_stride)\n\n with tik_instance.for_range(0, dhw_d) as num_dhw:\n src_cur = num_dhw * n_len * c_d\n dst_cur = num_dhw * c_d\n nburst = n_len\n burst_len = c_d // self.cp_align_len\n src_stride = 0\n dst_stride = (dhw_d - 1) * c_d // self.cp_align_len\n tik_instance.data_move(\n ub_ori[dst_cur],\n ub_trans[src_cur],\n 0, nburst, burst_len, src_stride, dst_stride)\n\n dst_offset = n_before * dhw_d * c_d\n burst_len = n_len * dhw_d * c_d // self.cp_align_len\n tik_instance.data_move(self.dst_gm[dst_offset],\n ub_ori,\n 0, 1, burst_len, 0, 0)",
"def _dtw_distance(self, ts_a, ts_b, d = lambda x,y: abs(x-y)):\n\n # Create cost matrix via broadcasting with large int\n ts_a, ts_b = np.array(ts_a), np.array(ts_b)\n M, N = len(ts_a), len(ts_b)\n cost = sys.maxint * np.ones((M, N))\n\n # Initialize the first row and column\n cost[0, 0] = d(ts_a[0], ts_b[0])\n for i in xrange(1, M):\n cost[i, 0] = cost[i-1, 0] + d(ts_a[i], ts_b[0])\n\n for j in xrange(1, N):\n cost[0, j] = cost[0, j-1] + d(ts_a[0], ts_b[j])\n\n # Populate rest of cost matrix within window\n for i in xrange(1, M):\n for j in xrange(max(1, i - self.max_warping_window),\n min(N, i + self.max_warping_window)):\n choices = cost[i - 1, j - 1], cost[i, j-1], cost[i-1, j]\n cost[i, j] = min(choices) + d(ts_a[i], ts_b[j])\n\n # Return DTW distance given window \n return cost[-1, -1]",
"def get_dist_wgt(mesh, rxLoc, actv, R, R0):\n\n # Find non-zero cells\n if actv.dtype == 'bool':\n inds = np.where(actv)[0]\n else:\n inds = actv\n\n nC = len(inds)\n\n # Create active cell projector\n P = sp.csr_matrix((np.ones(nC), (inds, range(nC))),\n shape=(mesh.nC, nC))\n\n # Geometrical constant\n p = 1 / np.sqrt(3)\n\n # Create cell center location\n Ym, Xm, Zm = np.meshgrid(mesh.vectorCCy, mesh.vectorCCx, mesh.vectorCCz)\n hY, hX, hZ = np.meshgrid(mesh.hy, mesh.hx, mesh.hz)\n\n # Remove air cells\n Xm = P.T * mkvc(Xm)\n Ym = P.T * mkvc(Ym)\n Zm = P.T * mkvc(Zm)\n\n hX = P.T * mkvc(hX)\n hY = P.T * mkvc(hY)\n hZ = P.T * mkvc(hZ)\n\n V = P.T * mkvc(mesh.vol)\n wr = np.zeros(nC)\n\n ndata = rxLoc.shape[0]\n count = -1\n print(\"Begin calculation of distance weighting for R= \" + str(R))\n\n for dd in range(ndata):\n\n nx1 = (Xm - hX * p - rxLoc[dd, 0])**2\n nx2 = (Xm + hX * p - rxLoc[dd, 0])**2\n\n ny1 = (Ym - hY * p - rxLoc[dd, 1])**2\n ny2 = (Ym + hY * p - rxLoc[dd, 1])**2\n\n nz1 = (Zm - hZ * p - rxLoc[dd, 2])**2\n nz2 = (Zm + hZ * p - rxLoc[dd, 2])**2\n\n R1 = np.sqrt(nx1 + ny1 + nz1)\n R2 = np.sqrt(nx1 + ny1 + nz2)\n R3 = np.sqrt(nx2 + ny1 + nz1)\n R4 = np.sqrt(nx2 + ny1 + nz2)\n R5 = np.sqrt(nx1 + ny2 + nz1)\n R6 = np.sqrt(nx1 + ny2 + nz2)\n R7 = np.sqrt(nx2 + ny2 + nz1)\n R8 = np.sqrt(nx2 + ny2 + nz2)\n\n temp = (R1 + R0)**-R + (R2 + R0)**-R + (R3 + R0)**-R + \\\n (R4 + R0)**-R + (R5 + R0)**-R + (R6 + R0)**-R + \\\n (R7 + R0)**-R + (R8 + R0)**-R\n\n wr = wr + (V * temp / 8.)**2.\n\n count = progress(dd, count, ndata)\n\n wr = np.sqrt(wr) / V\n wr = mkvc(wr)\n wr = np.sqrt(wr / (np.max(wr)))\n\n print(\"Done 100% ...distance weighting completed!!\\n\")\n\n return wr",
"def chord_dist(n1, n2):\n return min(((n2.node_id - n1.node_id) % (2 ** config.ring_size_bits)),\n ((n1.node_id - n2.node_id) % (2 ** config.ring_size_bits)),\n ) / float(2 ** config.ring_size_bits)",
"def calculate_auto_travel_time_for_pathway3(tm_run_id, origin_city_abbreviation):\n grouping1 = ' '\n grouping2 = ' '\n grouping3 = ' '\n METRIC_ID = 'Affordable 2'\n\n LOGGER.info(\"calculate_auto_travel_time_for_pathway3() for {}, metric: {}, city: {}\".format(tm_run_id, METRIC_ID, origin_city_abbreviation))\n LOGGER.info(\"Calculating {} for {}\".format(METRIC_ID, tm_run_id))\n\n # load tables that contain TAZs for trips originating within and outside of the cordons and headed to the cordons + the cordons itself\n NGFS_OD_ORIGINS_FILE = os.path.join(TM1_GIT_DIR, \"utilities\", \"NextGenFwys\", \"metrics\", \"Input Files\", \"taz_with_origins.csv\")\n NGFS_OD_ORIGINS_DF = pd.read_csv(NGFS_OD_ORIGINS_FILE)\n LOGGER.info(\" Read {:,} rows from {}\".format(len(NGFS_OD_ORIGINS_DF), NGFS_OD_ORIGINS_FILE))\n\n NGFS_OD_CORDONS_FILE = os.path.join(TM1_GIT_DIR, \"utilities\", \"NextGenFwys\", \"metrics\", \"Input Files\", \"taz_with_cordons.csv\")\n NGFS_OD_CORDONS_DF = pd.read_csv(NGFS_OD_CORDONS_FILE)\n LOGGER.info(\" Read {:,} rows from {}\".format(len(NGFS_OD_CORDONS_DF), NGFS_OD_CORDONS_FILE))\n\n # columns: orig_taz, dest_taz, trip_mode, timeperiod_label, incQ, incQ_label, num_trips, avg_travel_time_in_mins\n ODTravelTime_byModeTimeperiod_file = os.path.join(NGFS_SCENARIOS, tm_run_id, \"OUTPUT\", \"core_summaries\", ODTRAVELTIME_FILENAME) #changed \"ODTravelTime_byModeTimeperiodIncome.csv\" to a variable for better performance during debugging\n # this is large so join/subset it immediately\n trips_od_travel_time_df = pd.read_csv(ODTravelTime_byModeTimeperiod_file)\n LOGGER.info(\" Read {:,} rows from {}\".format(len(trips_od_travel_time_df), ODTravelTime_byModeTimeperiod_file))\n\n trips_od_travel_time_df = trips_od_travel_time_df.loc[ trips_od_travel_time_df.timeperiod_label == 'AM Peak' ]\n LOGGER.info(\" Filtered to AM only: {:,} rows\".format(len(trips_od_travel_time_df)))\n\n # pivot out the income since we don't need it\n trips_od_travel_time_df = pd.pivot_table(trips_od_travel_time_df,\n index=['orig_taz','dest_taz','trip_mode'],\n values=['num_trips','avg_travel_time_in_mins'],\n aggfunc={'num_trips':numpy.sum, 'avg_travel_time_in_mins':numpy.mean})\n trips_od_travel_time_df.reset_index(inplace=True)\n LOGGER.info(\" Aggregated income groups: {:,} rows\".format(len(trips_od_travel_time_df)))\n\n # join to OD cities for origin\n origin_column_name = \"ORIGIN_\" + origin_city_abbreviation\n trips_od_travel_time_df = pd.merge(left=trips_od_travel_time_df,\n right=NGFS_OD_ORIGINS_DF,\n left_on=\"orig_taz\",\n right_on=\"taz1454\")\n trips_od_travel_time_df.rename(columns={origin_column_name:\"orig_ZONE\"}, inplace=True)\n trips_od_travel_time_df.drop(columns=[\"taz1454\"], inplace=True)\n # join to OD cities for destination\n trips_od_travel_time_df = pd.merge(left=trips_od_travel_time_df,\n right=NGFS_OD_CORDONS_DF,\n left_on=\"dest_taz\",\n right_on=\"taz1454\")\n trips_od_travel_time_df.rename(columns={\"CORDON\":\"dest_CORDON\"}, inplace=True)\n trips_od_travel_time_df.drop(columns=[\"taz1454\"], inplace=True)\n LOGGER.info(\" Joined with {} for origin, destination: {:,} rows\".format(NGFS_OD_CITIES_FILE, len(trips_od_travel_time_df)))\n LOGGER.debug(\"trips_od_travel_time_df.head():\\n{}\".format(trips_od_travel_time_df.head()))\n\n # filter again to only those of interest\n trips_od_travel_time_df = pd.merge(left=trips_od_travel_time_df,\n right=NGFS_OD_CORDONS_OF_INTEREST_DF,\n indicator=True)\n trips_od_travel_time_df = trips_od_travel_time_df.loc[ 
trips_od_travel_time_df._merge == 'both']\n LOGGER.info(\" Filtered to only NGFS_OD_CORDONS_OF_INTEREST: {:,} rows\".format(len(trips_od_travel_time_df)))\n\n # we're going to aggregate trip modes; auto includes TAXI and TNC\n trips_od_travel_time_df['agg_trip_mode'] = \"N/A\"\n trips_od_travel_time_df.loc[ trips_od_travel_time_df.trip_mode.isin(MODES_PRIVATE_AUTO), 'agg_trip_mode' ] = \"auto\"\n trips_od_travel_time_df.loc[ trips_od_travel_time_df.trip_mode.isin(MODES_TAXI_TNC), 'agg_trip_mode' ] = \"auto\"\n\n # to get weighted average, transform to total travel time\n trips_od_travel_time_df['tot_travel_time_in_mins'] = \\\n trips_od_travel_time_df['avg_travel_time_in_mins']*trips_od_travel_time_df['num_trips']\n\n # pivot down to orig_ZONE x dest_CORDON x agg_trip_mode\n trips_od_travel_time_df = pd.pivot_table(trips_od_travel_time_df, \n index=['orig_ZONE','dest_CORDON','agg_trip_mode'],\n values=['num_trips','tot_travel_time_in_mins'],\n aggfunc={'num_trips':numpy.sum, 'tot_travel_time_in_mins':numpy.sum})\n trips_od_travel_time_df.reset_index(inplace=True)\n trips_od_travel_time_df['avg_travel_time_in_mins'] = \\\n trips_od_travel_time_df['tot_travel_time_in_mins']/trips_od_travel_time_df['num_trips']\n LOGGER.debug(trips_od_travel_time_df)\n\n # pivot again to move agg_mode to column\n # columns will now be: orig_ZONE_, dest_CORDON_, avg_travel_time_in_mins_auto, avg_travel_time_in_mins_transit, num_trips_auto, num_trips_transit\n trips_od_travel_time_df = pd.pivot_table(trips_od_travel_time_df, \n index=['orig_ZONE','dest_CORDON'],\n columns=['agg_trip_mode'],\n values=['num_trips','avg_travel_time_in_mins'])\n trips_od_travel_time_df.reset_index(inplace=True)\n # flatten resulting MultiIndex column names\n # rename from ('orig_ZONE',''), ('dest_CORDON',''), ('avg_travel_time_in_mins','auto'), ('avg_travel_time_in_mins', 'transit'), ...\n # to orig_ZONE, dest_CORDON, avg_travel_time_in_mins_auto, avg_travel_time_in_mins_transit, ...\n trips_od_travel_time_df.columns = ['_'.join(col) if len(col[1]) > 0 else col[0] for col in trips_od_travel_time_df.columns.values]\n\n # convert to metrics dataframe by pivoting one last time to just columns orig_ZONE, dest_CORDON\n trips_od_travel_time_df = pd.melt(trips_od_travel_time_df, \n id_vars=['orig_ZONE','dest_CORDON'], \n var_name='metric_desc',\n value_name='value')\n # travel times and num trips are extra\n trips_od_travel_time_df['intermediate/final'] = 'intermediate'\n \n # key is orig_ZONE, dest_CORDON\n trips_od_travel_time_df['key'] = trips_od_travel_time_df['orig_ZONE'] + \"_into_\" + trips_od_travel_time_df['dest_CORDON']\n trips_od_travel_time_df.drop(columns=['orig_ZONE','dest_CORDON'], inplace=True)\n\n trips_od_travel_time_df['modelrun_id'] = tm_run_id\n trips_od_travel_time_df['year'] = tm_run_id[:4]\n trips_od_travel_time_df['metric_id'] = METRIC_ID\n\n LOGGER.info(trips_od_travel_time_df)\n\n for OD in trips_od_travel_time_df['key']:\n # add travel times to metric dict\n OD_cordon_travel_time_df = trips_od_travel_time_df.loc[trips_od_travel_time_df['key'] == OD]\n LOGGER.info(OD_cordon_travel_time_df)\n OD_cordon_travel_time = OD_cordon_travel_time_df.loc[OD_cordon_travel_time_df['metric_desc'] == 'avg_travel_time_in_mins_auto'].iloc[0]['value']\n LOGGER.info(OD_cordon_travel_time)\n LOGGER.info(type(OD_cordon_travel_time))\n metrics_dict[OD + '_AM', 'Travel Time', grouping3, tm_run_id,METRIC_ID,'extra','By Corridor','travel_time_%s' % OD + '_AM',year] = OD_cordon_travel_time",
"def nb_vector_tcrdist(indices, seqs_mat, seqs_L, distance_matrix=tcr_nb_distance_matrix, dist_weight=3, gap_penalty=4, ntrim=3, ctrim=2, fixed_gappos=True):\n\n return _nb_vector_tcrdist(indices, seqs_mat, seqs_L, distance_matrix, dist_weight, gap_penalty, ntrim, ctrim, fixed_gappos)",
"def compute_weights_from_drift_along_arcs(self):\n arc_drifts = np.empty((len(self.old_stringpath) - 1,))\n for start_point in range(len(self.old_stringpath) - 1):\n end_point = start_point + 1\n # if self.fixed_endpoints and start_point == 0):\n # path[start_point, :] = self.old_stringpath[start_point]\n transition_forward = self.swarmProcessor.compute_average_transition_metric(start_point, end_point)\n transition_backward = self.swarmProcessor.compute_average_transition_metric(end_point, start_point)\n arc_drifts[start_point] = max(arcweight_epsilon, transition_forward, transition_backward)\n return arc_drifts",
"def tnc(self):\n\n if os.path.isfile(self.scenario_path + \"/output/TNCTrips.csv\"):\n\n # load the output folder tnc trip list\n trips = pd.read_csv(self.scenario_path + \"/output/TNCTrips.csv\",\n usecols=[\"trip_ID\", # unique trip surrogate key\n \"originTaz\", # trip origin TAZ\n \"destinationTaz\", # trip destination TAZ\n \"totalPassengers\"]) # passengers in vehicle excluding driver (0-6)\n\n # append distance and time skims\n # using am peak period hov-2 low value of time\n am_skims = om.open_file(self.scenario_path + \"/output/traffic_skims_AM.omx\")\n\n trips[\"distanceTotal\"] = [\n am_skims[\"AM_HOV2_L_DIST\"][o - 1, d - 1]\n for o, d in zip(trips[\"originTaz\"], trips[\"destinationTaz\"])\n ]\n\n trips[\"timeTotal\"] = [\n am_skims[\"AM_HOV2_L_TIME\"][o - 1, d - 1]\n for o, d in zip(trips[\"originTaz\"], trips[\"destinationTaz\"])\n ]\n\n am_skims.close()\n\n # create person and trip-based weights based on occupancy\n trips[\"passengers\"] = trips[\"totalPassengers\"]\n trips[\"weightPersonTrip\"] = (trips[\"totalPassengers\"] + 1) * 1 / self.sample_rate\n trips[\"weightTrip\"] = 1 * 1 / self.sample_rate\n\n return trips[[\"trip_ID\",\n \"passengers\",\n \"distanceTotal\",\n \"timeTotal\",\n \"weightPersonTrip\",\n \"weightTrip\"]]\n\n else:\n return False",
"def distanceTo(\n self,\n trgtstn=None,\n instofst=None,\n trgtofst=None,\n refcoef=None,\n ddxyz=False,\n offsettype=None,\n ):\n diff = self.vectorTo(trgtstn, instofst, trgtofst, offsettype=offsettype)\n dist = np.sqrt(np.vdot(diff, diff))\n if not ddxyz:\n return dist\n diff /= dist\n return dist, -diff, diff",
"def distance_path(self, src, dst, middle_nodes):\n\n distance = 0.0\n last_node = src\n for curr_node in middle_nodes:\n distance += self.weight(last_node, curr_node)\n last_node = curr_node\n distance += self.weight(last_node, dst)\n return distance",
"def calculate_distance_between_phi_w_and_input_distances(self, input_dataset):\n distance = cdist(\n input_dataset,\n self.phi_of_map_rbf_grids.dot(self.W)\n + np.ones((np.prod(self.shape_of_map), 1)).dot(\n np.reshape(self.bias, (1, len(self.bias)))\n ),\n 'sqeuclidean')\n return distance",
"def distance(self, t1, t2, costs=unit_costs):\r\n #print costs\r\n #raw_input(\"pause\")\r\n # Cf. Zhang & Shasha:p.1252-1253\r\n #===========================================================================\r\n # Use an embedded function, so T1,T2, l1,l2, and TD are available from the\r\n # name space of the outer function and don't need to be dragged around in\r\n # each function call\r\n # TREEDIST function\r\n #===========================================================================\r\n def edit_dist(i, j):\r\n \"\"\"\r\n compute edit distance between two subtrees rooted in nodes i and j\r\n respectively\r\n \"\"\"\r\n # temporary array for forest distances\r\n FD = ForestDist()\r\n for n in range(l1[i], i+1):\r\n FD[ (l1[i],n), None ] = ( FD[ (l1[i],n-1), None ] + \r\n costs(T1[n], None) ) #NOT SURE ABOUT THE T1[n].label --> TO BE CHECKED\r\n \r\n for m in range(l2[j], j+1):\r\n FD[ None, (l2[j],m) ] = ( FD[ None, (l2[j],m-1) ] + \r\n costs(None, T2[m]) )\r\n \r\n for n in range(l1[i], i+1):\r\n for m in range(l2[j], j+1):\r\n if l1[n] == l1[i] and l2[m] == l2[j]:\r\n FD[ (l1[i],n), (l2[j],m) ] = min(\r\n FD[(l1[i],n-1),(l2[j],m)] + costs(T1[n], None),\r\n FD[(l1[i],n),(l2[j],m-1)] + costs(None, T2[m]),\r\n FD[(l1[i],n-1),(l2[j],m-1)] + costs(T1[n], T2[m]))\r\n \r\n TD[n, m] = FD[ (l1[i],n), (l2[j],m) ]\r\n else:\r\n FD[ (l1[i],n), (l2[j],m) ] = min(\r\n FD[(l1[i],n-1),(l2[j],m)] + costs(T1[n], None),\r\n FD[(l1[i],n),(l2[j],m-1)] + costs(None, T2[m]),\r\n FD[(l1[i],n-1),(l2[j],m-1)] + TD[n,m])\r\n return TD[i,j]\r\n \r\n \r\n #Compute T1[] and T2[]\r\n T1 = self.postorder(t1)\r\n T2 = self.postorder(t2)\r\n \r\n # Compute l()\r\n l1 = self.leftmost_leaf_descendant_indices(T1)\r\n l2 = self.leftmost_leaf_descendant_indices(T2)\r\n \r\n # LR_keyroots1 and LR_keyroots2\r\n kr1 = self.key_root_indices(l1)\r\n kr2 = self.key_root_indices(l2)\r\n \r\n # permanent treedist array\r\n TD = dict()\r\n for i in kr1:\r\n for j in kr2:\r\n edit_dist(i, j)\r\n \r\n #self.print_matrix(T1, T2, TD)\r\n \r\n return TD[i,j]",
"def rccf(tab, tab1, tab2, par, nthreads=-1, write=True, plot=False, **kwargs):\n lj = 27 # nr of characters for left justification of some status msgs\n \n # Initialize logs, check if par has the right kind, etc. Common to all CF functions\n t0 = time.time()\n (par,log,logf,logff,runspyder,t0) = initialize('rccf', par, nthreads=nthreads, write=write, plot=plot)\n\n # Find number of particles in input tables and set nr of threads ---------\n npt, npt1, npt2 = len(tab), len(tab1), len(tab2)\n nt = set_threads(nthreads)\n \n # Log calling information ------------------------------------------------\n logcallinfo(log, par, npts=[npt,npt1,npt2])\n \n # Create bins in redshift space -------------------------------------------\n seps,sepsout = makebins(par.nseps,par.sepsmin,par.dseps,par.logseps)\n\n # Unpack column names in params just for shorter writing ------\n cra, cdec, cred, cwei = par.cra, par.cdec, par.cred, par.cwei\n cra1, cdec1, cred1, cwei1 = par.cra1, par.cdec1, par.cred1, par.cwei1\n cra2, cdec2, cred2, cwei2 = par.cra2, par.cdec2, par.cred2, par.cwei2\n\n # Get comoving distances --------------------------------------------------\n if par.calcdist:\n tstart = time.time()\n dc = comdis(tab[cred].data, par, nt)\n tend = time.time()\n log.info('Comov_dist_tab compute time (s)'.ljust(lj)+' : {:0.3f}'.format(tend-tstart))\n tstart = time.time()\n dc1 = comdis(tab1[cred1].data, par, nt)\n tend = time.time()\n log.info('Comov_dist_tab1 compute time (s)'.ljust(lj)+' : {:0.3f}'.format(tend-tstart))\n tstart = time.time()\n dc2 = comdis(tab2[cred2].data, par, nt)\n tend = time.time()\n log.info('Comov_dist_tab2 compute time (s)'.ljust(lj)+' : {:0.3f}'.format(tend-tstart))\n else:\n log.info('Using input comov. distances')\n dc = tab[par.cdcom].data\n dc1 = tab1[par.cdcom1].data\n dc2 = tab2[par.cdcom2].data\n \n # Write out the boundary of the survey ------------------------------------\n par.sbound = bound3d([tab[cdec].data,tab1[cdec1].data,tab2[cdec2].data],[dc,dc1,dc2])\n log.info('Sample boundaries : ('+('{:0.5f}, '*6).format(*par.sbound)[:-2]+')')\n\n # Guess if the sample cross the 360-0 deg division\n cross0 = cross0guess(tab[cra].data)\n log.info('Sample seems to cross RA=0 : ' + str(cross0) )\n if cross0 is True : log.info('Custom RA boundaries : ' + str(par.custRAbound))\n\n # Adequate pars to CD and CR counts --------------------------------------\n par_cd = deepcopy(par)\n par_cd.kind = 'sC'\n par_cd.cntid = 'CD'\n par_cr = deepcopy(par)\n par_cr.kind = 'sC'\n par_cr.cntid = 'CR'\n par_cr.wfib = False # don't do fiber corrections in crounts counts ? \n par_cr.doboot = False # don't do bootstraping in cross counts ?\n \n # If requested, try to find the best skip grid size ----------------------\n if par.autogrid:\n log.info('SK Autogrid ON')\n # We choose to use a single SK grid for all tables, instead of two different\n # for cd and cr counts. 
By feeding bestSKgrid3d() with the combination of\n # the 2 largest samples among C,D,R, we get a very good (h1,h2,h3) set.\n # Most likely, its dominated by the random sample R when npt1 is large\n ltn = [npt,npt1,npt2]\n mxposA = np.argmax(ltn)\n ltn[mxposA] = -1\n mxposB = np.argmax(ltn)\n ltn = [npt, npt1, npt2]\n ltras = [tab[cra].data, tab1[cra1].data, tab2[cra2].data]\n argn = [ltn[i] for i in [mxposA,mxposB]]\n argras = [ltras[i] for i in [mxposA,mxposB]]\n par.mxh1, par.mxh2, par.mxh3, tdens = bestSKgrid3d(par_cr, argn, argras, dens=par.dens)\n par_cd.mxh1, par_cd.mxh2, par_cd.mxh3 = par.mxh1, par.mxh2, par.mxh3\n par_cr.mxh1, par_cr.mxh2, par_cr.mxh3 = par.mxh1, par.mxh2, par.mxh3\n log.info('SK cell target density'.ljust(lj)+' : {:0.3f}'.format(tdens))\n else:\n log.info('Autogrid OFF')\n log.info('SK grid size [dec,ra,dcom]'.ljust(lj)+' : ' + str([par.mxh1,par.mxh2,par.mxh3]))\n\n # Sort table/s according to some order -----------------------------------\n tstart = time.time()\n sidx = pixsort(tab,[cra, cdec, cred], par) #par or par_cd, par_cr ??? XXXXX\n tab, dc = tab[sidx], dc[sidx]\n sidx1 = pixsort(tab1,[cra1, cdec1, cred1], par)\n tab1, dc1 = tab1[sidx1], dc1[sidx1]\n sidx2 = pixsort(tab2,[cra2, cdec2, cred2], par)\n tab2, dc2 = tab2[sidx2], dc2[sidx2]\n tend = time.time()\n log.info('Pixsort time (s)'.ljust(lj)+' : {:0.3f}'.format(tend-tstart))\n \n # Create SK and LL tables -----------------------------------------------\n tstart = time.time()\n sk,ll = cff.mod.skll3d(par.mxh1,par.mxh2,par.mxh3,npt,tab[cra],tab[cdec],dc,par.sbound,seps,par.nseps)\n sk1,ll1 = cff.mod.skll3d(par.mxh1,par.mxh2,par.mxh3,npt1,tab1[cra1],tab1[cdec1],dc1,par.sbound,seps,par.nseps)\n tend = time.time()\n log.info('SK-LL tables build time (s)'.ljust(lj)+' : {:0.3f}'.format(tend-tstart))\n\n # Convert ra,dec,z to spherical coords ------------\n x, y, z = radec2xyz(tab[cra].data*np.pi/180., tab[cdec].data*np.pi/180.)\n x1, y1, z1 = radec2xyz(tab1[cra1].data*np.pi/180., tab1[cdec1].data*np.pi/180.)\n x2, y2, z2 = radec2xyz(tab2[cra2].data*np.pi/180., tab2[cdec2].data*np.pi/180.)\n\n # Find out if all weights are 1.0 to later call slighly faster functions\n wunit = (tab[cwei].data==1).all()\n wunit1 = (tab1[cwei1].data==1).all()\n wunit2 = (tab2[cwei2].data==1).all()\n wunit_cd = wunit2 and wunit\n wunit_cr = wunit2 and wunit1\n\n\n #==========================================================================\n #========================== COUNT PAIRS ===============================\n log.info('==== Counting ' + par_cd.cntid + ' pairs in ' + np.str(par_cd.mxh1) + ' DEC strips =====')\n if runspyder : log.info(' [for progress updates check ' + logff + ']')\n tstart = time.time()\n tt1 = pairs_cross(par_cd, wunit_cd, logff, tab2, x2, y2, z2, \n tab, x, y, z, sk, ll, dc=dc2, dc1=dc)\n tend = time.time()\n logtimming(log,par_cd.cntid,tend-tstart)\n tacc = tend-tstart\n\n log.info('==== Counting ' + par_cr.cntid + ' pairs in ' + np.str(par_cr.mxh1) + ' DEC strips =====')\n if runspyder : log.info(' [for progress updates check ' + logff + ']')\n tstart = time.time()\n tt2 = pairs_cross(par_cr, wunit_cr, logff, tab2, x2, y2, z2, \n tab1, x1, y1, z1, sk1, ll1, dc=dc2, dc1=dc1)\n tend = time.time()\n logtimming(log,par_cd.cntid,tend-tstart)\n tacc = tacc + (tend-tstart)\n #========================= END PAIR COUNTS ==============================\n #==========================================================================\n# tt1 = fcall_sC_serial(npt2, tab2[cra2].data, tab2[cdec2].data, dc2, tab2[cwei2].data, x2, 
y2, z2, \n# npt, tab[cra], tab[cdec].data, dc, tab[cwei].data, x, y, z, \n# seps, sk, ll, allw_cd, par_cd)\n#\n# tt2 = fcall_sC_serial(npt2, tab2[cra2].data, tab2[cdec2].data, dc2, tab2[cwei2].data, x2, y2, z2, \n# npt1, tab1[cra1], tab1[cdec1].data, dc1, tab1[cwei1].data, x1, y1, z1, \n# seps, sk1, ll1, allw_cr, par_cr)\n\n # Tidy ouput counts ------------------------------------------------------\n cd,bcd = tidy_counts(tt1,par_cd)\n cr,dum = tidy_counts(tt2,par_cr)\n\n # Compute projected correlation function estimate ------------------------\n (xis, xiserr) = tpccf(npt, npt1, cd, bcd, cr, estimator=par.estimator)\n\n # Do plot if desired -----------------------------------------------------\n if plot:\n try:\n #plt.figure(1)\n plt.figure('RCCF plot 1')\n plotcf(sepsout[1], xis, xiserr, fac=1., write=write, par=par, **kwargs)\n except ValueError:\n print('Warning: there is a problem with the plot !!!') \n\n # Build ouput ------------------------------------------------------------\n counts = buildoutput(par, npts=[npt,npt1,npt2], binslmr=sepsout, cd=cd, cr=cr,\n bootc=bcd, cf=xis, cferr=xiserr)\n\n # Finalize ---------------------------------------------------------------\n finalize(log,logf,logff,tacc,t0,counts)\n \n # Write ascii counts, correlations and parameters ------------------------\n if write:\n writeasc_cf(*sepsout, xis, xiserr, par)\n writeasc_counts(*sepsout, cd, par,cntid='cd')\n writeasc_counts(*sepsout, cr, par,cntid='cr')\n savepars(par)\n savecounts(counts)\n\n # Close log --------------------------------------------------------------\n closelog(log,runspyder=runspyder)\n \n return counts",
"def get_distances(self, crds):\n self.all_dist = np.zeros((self.natom, self.natom))\n # Loop over upper triangle of atom pairs\n for iat in range(self.natom-1):\n # Get the atom indices\n at_inds = np.arange(len(crds))\n\n # Calc distances between atoms (only upper triangle though)\n at_msk = at_inds > iat\n all_ut_dist = crds[at_msk] - crds[iat]\n all_ut_dist = np.linalg.norm(all_ut_dist, axis=1)\n\n self.all_dist[iat, iat+1:] = all_ut_dist\n\n # Get lower triangle indices\n self.all_dist = self.all_dist + self.all_dist.T",
"def pairwiseNucleosomeDistance(self):\n try :\n if (self.center_r == None):\n self.centerBeads() \n except:\n pass # centerbeads has already been run\n nucLocs = np.asarray(np.linspace(0,self.n_beads-1,self.n_beads)[self.wrapped>0],dtype='int')\n self.pair_dist = scipy.spatial.distance.pdist(self.center_r[nucLocs,:])",
"def _cluster_distances(traj, atom_selection):\n\n\tatom_pairs = list(combinations(atom_selection, 2))\n\tpairwise_distances = mdt.compute_distances(traj=traj, atom_pairs=atom_pairs)\n\n\treturn pairwise_distances",
"def tcode_spacing_between_codingblocks(self,others):\n\n if type(others) not in (type([]),type(())): others = [ others ]\n distdict = {}\n target = self._get_target_organism()\n maxsr = dict([ (self.organism_by_node(n), r) for n,r in self.maximal_spanning_range().iteritems() ])\n orfObjs = self.get_orfs_of_graph()\n\n for other in others:\n _maxsr = dict([ (other.organism_by_node(n), r) for n,r in other.maximal_spanning_range().iteritems() ])\n for organism,setrange in _maxsr.iteritems():\n if distdict.has_key(organism): continue\n if not maxsr.has_key(organism): continue\n # calculate distance\n if not setrange.symmetric_difference(maxsr[organism]):\n # identical PacbPORFS\n distdict[organism] = None\n else:\n prev_dna_end = max(maxsr[organism])*3\n next_dna_sta = min(setrange)*3\n if next_dna_sta <= prev_dna_end:\n # no distance left to calculate TCODE score for\n distdict[organism] = None\n else:\n orfObj = orfObjs[organism][0]\n tcode = orfObj.find_lowest_scoring_tcode_stretch(\n orfObj._RAW_TCODE_DATA,\n prev_dna_end,next_dna_sta)\n distdict[organism] = tcode\n\n\n # remove None values in distdict\n while None in distdict.values():\n for k,v in distdict.iteritems():\n if v == None:\n del( distdict[k] )\n break\n # return TCODE distance dictionary\n return distdict",
"def prep_distance(self, t: str = 'float') -> np.ndarray:\n d = np.zeros([self.ic.shape[0]*self.ic.shape[1],\n self.ic.shape[1]*self.ic.shape[0]])\n\n u,v = np.meshgrid(np.arange(self.ic.shape[0]),\n np.arange(self.ic.shape[1]),\n sparse=False, indexing='xy')\n u = u.ravel()\n v = v.ravel()\n z = np.array([u,v]).T\n\n for (k,x) in enumerate(z):\n if not self.boundary:\n d[k,:] = np.array(np.sqrt((u - x[0])**2 + (v - x[1])**2),dtype=t)\n\n else:\n d[k,:] = self.torus(x[0],x[1],\n self.ic.shape[0],\n self.ic.shape[1]\n ).ravel()\n\n return d"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
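A minimal usage sketch for the nb_tcrdist document above, assuming nb_tcrdist is importable from the module the row's code comes from. The amino-acid encoding helper and the toy substitution matrix (0 on a match, 4 on a mismatch) are illustrative stand-ins, not the published tcr_nb_distance_matrix.

import numpy as np

# Hypothetical encoder: maps a CDR3 string to the integer vector nb_tcrdist expects.
alphabet = 'ACDEFGHIKLMNPQRSTVWY'
aa2idx = {aa: i for i, aa in enumerate(alphabet)}

def encode(cdr3):
    return np.array([aa2idx[aa] for aa in cdr3], dtype=np.int8)

# Stand-in substitution matrix: 0 on a match, 4 on a mismatch.
toy_matrix = 4 * (1 - np.eye(len(alphabet), dtype=np.int16))

a = encode('CASSLGTDTQYF')
b = encode('CASSPGTDTQYF')
# Equal lengths, so only positions ntrim..L-ctrim-1 are compared; the single L->P mismatch
# contributes 4 (mismatch cost) * 3 (dist_weight) = 12.
print(nb_tcrdist(a, b, distance_matrix=toy_matrix,
                 dist_weight=3, gap_penalty=4, ntrim=3, ctrim=2))

With sequences of unequal length, the function instead scans the allowed gap positions and adds len_diff * gap_penalty on top of the best substitution score.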
Computes the tcrdist distance for sequences in seqs_mat indicated by pairs of indices. | def nb_vector_tcrdist(indices, seqs_mat, seqs_L, distance_matrix=tcr_nb_distance_matrix, dist_weight=3, gap_penalty=4, ntrim=3, ctrim=2, fixed_gappos=True):
return _nb_vector_tcrdist(indices, seqs_mat, seqs_L, distance_matrix, dist_weight, gap_penalty, ntrim, ctrim, fixed_gappos) | [
"def calc_dist_mat(\n self, seqs: Sequence[str], seqs2: Optional[Sequence[str]] = None\n ) -> coo_matrix:\n pass",
"def distances(s, t_subs, ord=ord):\n return (np.linalg.norm((s - t_subs), ord=ord, axis=1)) / s.shape[0]",
"def compute_trans_dist(lmtrans):\n\n dmatrix = np.zeros((len(lmtrans), len(lmtrans)))\n for i in range(len(lmtrans)):\n for j in range(len(lmtrans)):\n dmatrix[i, j] = dfuns[dist](lmtrans[i], lmtrans[j])\n return dmatrix",
"def distance_matrix(sequences):\n\n if len(sequences) == 0:\n print(\"FATAL: No sequences found\")\n sys.exit(-1)\n else:\n print(\"Found %d sequences\" % len(sequences))\n \n print(\"Creating distance matrix start.\")\n dmx = PairwiseSimilarity(sequences)\n print(\"Distance matrix complete.\")\n return dmx",
"def PairwiseSimilarity(sequences):\n\n seq = sequences\n N = len(seq)\n\n # Define distance matrix\n d_matrix = zeros((N, N), float64)\n\n for i in range(len(seq)):\n for j in range(len(seq)):\n d_matrix[i][j] = -1\n\n # Similarity matrix\n s_matrix = zeros((N, N), float64)\n\n for i in range(N):\n for j in range(N):\n s_matrix[i][j] = -1\n\n # Find pairs\n for i in range(N):\n for j in range(N):\n\n if s_matrix[i][j] >= 0:\n continue\n\n seq1 = seq[i][1]\n seq2 = seq[j][1]\n minlen = min(len(seq1), len(seq2))\n \n len1 = len2 = sims = 0\n for x in range(minlen):\n if seq1[x] != 256:\n len1 += 1.0\n\n if seq1[x] == seq2[x]:\n sims += 1.0\n\n if seq2[x] != 256:\n len2 += 1.0\n\n maxlen = max(len1, len2)\n s_matrix[i][j] = sims / maxlen\n\n # Get distance matrix\n for i in range(N):\n for j in range(N):\n d_matrix[i][j] = s_matrix[i][i] - s_matrix[i][j]\n \n return d_matrix",
"def nb_vector_editdistance(indices, seqs_mat, seqs_L, distance_matrix=identity_nb_distance_matrix, gap_penalty=1):\n #print(indices.shape)\n #print(seqs_mat.shape)\n #print(seqs_L.shape)\n return _nb_vector_editdistance(indices, seqs_mat, seqs_L, distance_matrix, gap_penalty)",
"def _cluster_distances(traj, atom_selection):\n\n\tatom_pairs = list(combinations(atom_selection, 2))\n\tpairwise_distances = mdt.compute_distances(traj=traj, atom_pairs=atom_pairs)\n\n\treturn pairwise_distances",
"def _pairwise_dist(self,seq1,seq2):\n \n return jf.damerau_levenshtein_distance(str(seq1), str(seq2))",
"def ComputeDistMatrix(dict_alignedSequences):\r\n \r\n # check if dictionary with keys as tuples containing integers and values as tuples containing strings\r\n check = True \r\n #1 Check Input is dict\r\n if isinstance(dict_alignedSequences, dict) == False:\r\n check = False\r\n \r\n #2 Check are the keys and values tuples. Do the keys only contain integers and the vlaues only strings\r\n i = 0\r\n while len(dict_alignedSequences) > i:\r\n #checking for keys and values as tuples\r\n if isinstance(list(dict_alignedSequences.keys())[i], tuple) == False or isinstance(list(dict_alignedSequences.values())[i], tuple) == False:\r\n check = False\r\n break\r\n #checking keys for integers\r\n if isinstance(list(dict_alignedSequences.keys())[i][0], int) == False or isinstance(list(dict_alignedSequences.keys())[i][1], int) == False:\r\n check = False\r\n break\r\n #checking values for strings\r\n if isinstance(list(dict_alignedSequences.values())[i][0], str) == False or isinstance(list(dict_alignedSequences.values())[i][1], str) == False:\r\n check = False\r\n break\r\n \r\n #increment the counter for while loop\r\n i += 1\r\n \r\n #3 Check sequences contain aligned DNA and are of equal length\r\n for key in dict_alignedSequences:\r\n if is_aligned_dna(dict_alignedSequences[key][0]) == False or is_aligned_dna(dict_alignedSequences[key][1]) == False:\r\n check = False\r\n break\r\n if len(dict_alignedSequences[key][0]) != len(dict_alignedSequences[key][1]):\r\n check = False\r\n break\r\n \r\n #final evalauation if data is usable\r\n if check == False:\r\n raise TypeError ('malformed input')\r\n \r\n #get number of sequences\r\n matrixdim = howmany_sequences(dict_alignedSequences)\r\n #initialize dist matrix\r\n distMatrix = init_Dist_Matrix(matrixdim)\r\n \r\n \r\n for i in dict_alignedSequences.keys():\r\n # useing the key i to get the corisponding aligned sequences \r\n seq = dict_alignedSequences[i]\r\n #calculate distances between the sequences\r\n distance = calculate_distance(seq[0],seq[1])\r\n #markdown result at the corrsiponding place in the distmatrix\r\n distMatrix[i[0]][i[1]] = distance\r\n distMatrix[i[1]][i[0]] = distance\r\n \r\n return(distMatrix)",
"def cal_distances(embeddings):\n # calculate\n dist = np.zeros([len(embeddings), len(embeddings)], dtype=float)\n for ii in xrange(len(embeddings)):\n for jj in xrange(ii + 1, len(embeddings)):\n dist[ii, jj] = np.linalg.norm(embeddings[ii] - embeddings[jj])\n dist[jj, ii] = dist[ii, jj] \n \n # return\n return dist",
"def build_distance_matrix(artists, artist_index):\n from util.io_helper import unpickle_object\n from rnn import ArtistLSTM\n\n n = len(artists)\n net = unpickle_object('rnn.pickle')\n p = [np.array(x.detach()) for x in net.out.weight]\n EDM = np.zeros((n, n))\n for i in range(n):\n for j in range(n):\n if i != j:\n a1, a2 = artist_index[artists[i]], artist_index[artists[j]]\n dist = np.linalg.norm(p[a1] - p[a2])\n EDM[i][j] = dist\n else:\n EDM[i][j] = float('inf')\n return EDM",
"def distance_matrix(sequences, substitution_mat):\n distance_mat = numpy.empty((len(sequences), len(sequences)), dtype='float')\n\n print(\"Building distance matrix\")\n # Get similarity score\n for i, seqA in enumerate(sequences):\n sys.stdout.write(\"\\r%.f%%\" % (float(i+1)/len(sequences)*100))\n sys.stdout.flush()\n for j, seqB in enumerate(sequences[i:], start=i):\n score = substitution_score(substitution_mat, seqA, seqB)\n distance_mat[i, j] = score\n distance_mat[j, i] = score\n print(\"\")\n # Set equal the diagonal\n diag_mini = numpy.min(distance_mat.diagonal())\n for i in range(len(sequences)):\n distance_mat[i, i] = diag_mini\n # Convert similarity score into a distance\n mini = numpy.min(distance_mat)\n maxi = numpy.max(distance_mat)\n return 1 - (distance_mat + abs(mini))/(maxi - mini)",
"def get_distances(self, crds):\n self.all_dist = np.zeros((self.natom, self.natom))\n # Loop over upper triangle of atom pairs\n for iat in range(self.natom-1):\n # Get the atom indices\n at_inds = np.arange(len(crds))\n\n # Calc distances between atoms (only upper triangle though)\n at_msk = at_inds > iat\n all_ut_dist = crds[at_msk] - crds[iat]\n all_ut_dist = np.linalg.norm(all_ut_dist, axis=1)\n\n self.all_dist[iat, iat+1:] = all_ut_dist\n\n # Get lower triangle indices\n self.all_dist = self.all_dist + self.all_dist.T",
"def calc_CA_dist_matrix(chain, idx_subset):\n idx_subset = set(idx_subset)\n residue_coords = [residue[\"CA\"].coord for i, residue in enumerate(chain.get_residues()) if i in idx_subset]\n\n return squareform(pdist(residue_coords))",
"def matrix_distance(pattern, dna_strings):\n\n k = len(pattern)\n distance = 0\n\n for dna_string in dna_strings:\n\n found_hamming_distance = len(dna_string) ** len(dna_strings) # Initialize a maximum\n\n for i in range(len(dna_string) - k):\n\n dna_kmer = dna_string[i: i + k]\n hd = hamming_distance(dna_kmer, pattern)\n\n if found_hamming_distance > hd:\n found_hamming_distance = hd\n\n distance += found_hamming_distance\n\n return distance",
"def calc_distances(self, templates_features=None, batch_size=50000, th=0.2, beta=1.1):\n if templates_features is None:\n templates_features = self.calc_templates()\n distances = np.empty((self.pairs.shape[0]), dtype=np.float32)\n start, end = 0, 0\n for batch in self.batches(self.pairs, batch_size):\n t1 = np.empty((len(batch), self.features_dim), dtype=np.float32)\n t2 = np.empty((len(batch), self.features_dim), dtype=np.float32)\n start = end\n end += len(batch)\n # attenuate = np.empty((len(batch)), dtype=np.bool)\n for i, pair in enumerate(batch):\n t1[i] = templates_features[pair[0]]\n t2[i] = templates_features[pair[1]]\n # lomax1 = np.max(self.quality_scores[pair[0]])\n # lomax2 = np.max(self.quality_scores[pair[1]])\n # attenuate[i] = lomax1 <= th or lomax2 <= th\n\n ## find cosine distance, assume template descriptors are normalized\n distances[start:end] = 1 - np.einsum(\"ij,ij->i\", t1, t2)\n # distances[start:end] = np.where(attenuate, distances[start:end], distances[start:end] / beta)\n return distances",
"def create_dist_matrix(self):\n dm = np.zeros((self.size, self.size))\n for i in range(0, self.size):\n for j in range(i+1, self.size):\n align = pairwise2.align.globalxx(self.records[i].seq, self.records[j].seq, one_alignment_only=1)\n dm[i, j] = 1-align[0][2]/len(align[0][0])\n print(time.process_time())\n dm = dm + dm.T\n return dm",
"def subject_distances(subjects):\n sim = cosine_similarity(subjects)\n return (1.0 - ((sim + 1.0) / 2.0)) ** 2",
"def _seq_to_cell_idx(\n unique_seqs: np.ndarray, cdr_seqs: np.ndarray\n ) -> Dict[int, List[int]]:\n # 1) reverse mapping of amino acid sequence to index in sequence-distance matrix\n seq_to_index = {seq: i for i, seq in enumerate(unique_seqs)}\n\n # 2) indices of cells in adata that have a CDR3 sequence.\n cells_with_chain = np.where(~_is_na(cdr_seqs))[0]\n\n # 3) indices of the corresponding sequences in the distance matrix.\n seq_inds = {\n chain_id: seq_to_index[cdr_seqs[chain_id]] for chain_id in cells_with_chain\n }\n\n # 4) list of cell-indices in the cell distance matrix for each sequence\n seq_to_cell = {seq_id: list() for seq_id in seq_to_index.values()}\n for cell_id in cells_with_chain:\n seq_id = seq_inds[cell_id]\n seq_to_cell[seq_id].append(cell_id)\n\n return seq_to_cell"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
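A minimal sketch of the inputs the vectorized wrappers in the rows above appear to expect: seqs_mat as a right-padded matrix of integer-encoded sequences, seqs_L holding the true lengths, and indices giving one (i, j) row per comparison. The padding value, dtypes, and encoding are assumptions for illustration, not a documented API.

import numpy as np

seqs = ['CASSLGTDTQYF', 'CASSPGTDTQYF', 'CASRRGDTQYF']
alphabet = 'ACDEFGHIKLMNPQRSTVWY'
aa2idx = {aa: i for i, aa in enumerate(alphabet)}

max_L = max(len(s) for s in seqs)
seqs_mat = np.zeros((len(seqs), max_L), dtype=np.int8)   # zero-padded on the right
seqs_L = np.array([len(s) for s in seqs], dtype=np.int64)
for row, s in enumerate(seqs):
    seqs_mat[row, :len(s)] = [aa2idx[aa] for aa in s]

# One row per pairwise comparison; here, every unique pair of the three sequences.
indices = np.array([(i, j) for i in range(len(seqs)) for j in range(i + 1, len(seqs))],
                   dtype=np.int64)

# dists = nb_vector_tcrdist(indices, seqs_mat, seqs_L)       # one tcrdist per index pair
# edits = nb_vector_editdistance(indices, seqs_mat, seqs_L)  # one edit distance per pair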
Computes the Levenshtein edit distance for sequences in seqs_mat indicated by pairs of indices. | def nb_vector_editdistance(indices, seqs_mat, seqs_L, distance_matrix=identity_nb_distance_matrix, gap_penalty=1):
#print(indices.shape)
#print(seqs_mat.shape)
#print(seqs_L.shape)
return _nb_vector_editdistance(indices, seqs_mat, seqs_L, distance_matrix, gap_penalty) | [
"def nb_editdistance(seq_vec1, seq_vec2, distance_matrix=identity_nb_distance_matrix, gap_penalty=1):\n \n q_L = seq_vec1.shape[0]\n s_L = seq_vec2.shape[0]\n if q_L == s_L:\n \"\"\"No gaps: substitution distance\n This will make it differ from a strict edit-distance since\n the optimal edit-distance may insert same number of gaps in both sequences\"\"\"\n dist = 0\n for i in range(q_L):\n dist += distance_matrix[seq_vec1[i], seq_vec2[i]]\n return dist\n\n ldmat = np.zeros((q_L, s_L), dtype=np.int16)\n for row in range(1, q_L):\n ldmat[row, 0] = row * gap_penalty\n\n for col in range(1, s_L):\n ldmat[0, col] = col * gap_penalty\n \n for col in range(1, s_L):\n for row in range(1, q_L):\n ldmat[row, col] = min(ldmat[row-1, col] + gap_penalty,\n ldmat[row, col-1] + gap_penalty,\n ldmat[row-1, col-1] + distance_matrix[seq_vec1[row-1], seq_vec2[col-1]]) # substitution\n return ldmat[row, col]",
"def damerau_levenshtein_distance(seq1, seq2):\r\n # codesnippet:D0DE4716-B6E6-4161-9219-2903BF8F547F\r\n # Conceptually, this is based on a len(seq1) + 1 * len(seq2) + 1 matrix.\r\n # However, only the current and two previous rows are needed at once,\r\n # so we only store those.\r\n oneago = None\r\n thisrow = list(range(1, len(seq2) + 1)) + [0]\r\n for x in range(len(seq1)):\r\n # Python lists wrap around for negative indices, so put the\r\n # leftmost column at the *end* of the list. This matches with\r\n # the zero-indexed strings and saves extra calculation.\r\n twoago, oneago, thisrow = (oneago, thisrow, [0] * len(seq2) + [x + 1])\r\n for y in range(len(seq2)):\r\n delcost = oneago[y] + 1\r\n addcost = thisrow[y - 1] + 1\r\n subcost = oneago[y - 1] + (seq1[x] != seq2[y])\r\n thisrow[y] = min(delcost, addcost, subcost)\r\n # This block deals with transpositions\r\n if (x > 0 and y > 0 and seq1[x] == seq2[y - 1]\r\n and seq1[x - 1] == seq2[y] and seq1[x] != seq2[y]):\r\n thisrow[y] = min(thisrow[y], twoago[y - 2] + 1)\r\n return thisrow[len(seq2) - 1]",
"def levenshtein_align(a, b, score):\n\n\t# reconstruct the sequence from by back-walking the matrix from the bottom right corner\n\ti = len(a)\n\tj = len(b)\n\n\t# initialize lists to track combined sequence and components\n\tseq = []\n\tseq_a = []\n\tseq_b = []\n\n\twhile i > 0 or j > 0:\n\n\t\tif i == 0:\n\t\t\t# add\n\t\t\tseq.append(b[j-1])\n\t\t\tseq_a.append(0)\n\t\t\tseq_b.append(1)\n\t\t\tj -= 1\n\t\t\tcontinue\n\t\tif j == 0:\n\t\t\t# subtract\n\t\t\tseq.append(a[i-1])\n\t\t\tseq_a.append(1)\n\t\t\tseq_b.append(0)\n\t\t\ti -= 1\n\t\t\tcontinue\n\n\t\tcur_val = score[i,j]\n\t\teq_val = score[i-1, j-1]\n\t\tsub_val = score[i-1, j]\n\t\tadd_val = score[i, j-1]\n\n\t\tif sub_val == cur_val - 1:\n\t\t\t# subtract\n\t\t\tseq.append(a[i-1])\n\t\t\tseq_a.append(1)\n\t\t\tseq_b.append(0)\n\t\t\ti -= 1\n\t\t\tcontinue\n\n\t\tif add_val == cur_val - 1:\n\t\t\t# add\n\t\t\tseq.append(b[j-1])\n\t\t\tseq_a.append(0)\n\t\t\tseq_b.append(1)\n\t\t\tj -= 1\n\t\t\tcontinue\n\n\t\tif eq_val == cur_val - 1 or eq_val == cur_val:\n\t\t\t# move up the diagonal\n\t\t\tseq.append(a[i-1])\n\t\t\tseq_a.append(1)\n\t\t\tseq_b.append(1)\n\t\t\ti -= 1\n\t\t\tj -= 1\n\t\t\tcontinue\n\n\t# reverse sequences\n\tseq.reverse()\n\tseq_a.reverse()\n\tseq_b.reverse()\n\n\treturn seq, seq_a, seq_b",
"def iterative_levenshtein(s, t):\n rows = len(s)+1\n cols = len(t)+1\n dist = [[0 for x in range(cols)] for x in range(rows)]\n # source prefixes can be transformed into empty strings \n # by deletions:\n for i in range(1, rows):\n dist[i][0] = i\n # target prefixes can be created from an empty source string\n # by inserting the characters\n for i in range(1, cols):\n dist[0][i] = i\n \n for col in range(1, cols):\n for row in range(1, rows):\n if s[row-1] == t[col-1]:\n cost = 0\n else:\n cost = 1\n dist[row][col] = min(dist[row-1][col] + 1, # deletion\n dist[row][col-1] + 1, # insertion\n dist[row-1][col-1] + cost) # substitution\n #for r in range(rows):\n #print(dist[r])\n \n \n return dist[row][col]",
"def _pairwise_dist(self,seq1,seq2):\n \n return jf.damerau_levenshtein_distance(str(seq1), str(seq2))",
"def word_embedding_levenshtein(seq1, seq2, embeddings, average_distance, r=0.9, normalise=False):\n\tx1 = 1 + len(seq1)\n\tx2 = 1 + len(seq2)\n\n\talpha = r / ((1 - r) * average_distance)\n\n\t# Initialisation of the matrix\n\td = [] # Using Numpy structures for this is probably not more efficient\n\td.append(list(range(x2)))\n\tfor i in range(1, x1):\n\t\td.append([i] * x2)\n\n\t# Core of the algorithm\n\tfor i in range(1, x1):\n\t\tfor j in range(1, x2):\n\t\t\te1 = seq1[i-1]\n\t\t\te2 = seq2[j-1]\n\n\t\t\tif(e1 == e2): c = 0\n\t\t\telse:\n\t\t\t\tv1 = embeddings[e1]\n\t\t\t\tv2 = embeddings[e2]\n\n\t\t\t\tif((v1 is None) or (v2 is None)): c = 1\n\t\t\t\telse:\n\t\t\t\t\tdst = np.linalg.norm(v1 - v2) # Distance 2 (or L2 norm of the difference)\n\n\t\t\t\t\t# Now, we need a function increasing function mapping 0 to 0 and +inf to 1\n\t\t\t\t\tc = 1 - (1 / (1 + (alpha * dst)))\n\n\t\t\t\t\t#c /= r # If you uncomment this line, the cost of a substitution at distance `average_distance` will be 1 and substitutions might have higher cost, up to 1/r. This might be justified as long as `r` is above 0.5 (otherwise, some substitutions might be more expensive than an insertion followed by a deletion).\n\n\t\t\td[i][j] = min(\n\t\t\t\t(d[(i-1)][j] + 1), # Deletion of seq1[i]\n\t\t\t\t(d[i][(j-1)] + 1), # Insertion of seq2[j]\n\t\t\t\t(d[(i-1)][(j-1)] + c) # Substitution from seq1[i] to seq2[j]\n\t\t\t)\n\n\traw = d[-1][-1]\n\n\tif(normalise): return (raw / (len(seq1) + len(seq2)))\n\treturn raw",
"def iterative_levenshtein(s, t):\n rows = len(s) + 1\n cols = len(t) + 1\n dist = [[0 for x in range(cols)] for x in range(rows)]\n # source prefixes can be transformed into empty strings\n # by deletions:\n for i in range(1, rows):\n dist[i][0] = i\n # target prefixes can be created from an empty source string\n # by inserting the characters\n for i in range(1, cols):\n dist[0][i] = i\n\n for col in range(1, cols):\n for row in range(1, rows):\n if s[row - 1] == t[col - 1]:\n cost = 0\n else:\n cost = 1\n dist[row][col] = min(dist[row - 1][col] + 1, # deletion\n dist[row][col - 1] + 1, # insertion\n dist[row - 1][col - 1] + cost) # substitution\n # for r in range(rows):\n # print(dist[r])\n\n # return dist[row][col]\n return dist[row][col]/(len(s)+len(t))",
"def levenshtein(s1, s2):\n if len(s1) < len(s2):\n return levenshtein(s2, s1)\n\n # len(s1) >= len(s2)\n if len(s2) == 0:\n return len(s1)\n\n previous_row = range(len(s2) + 1)\n for i, c1 in enumerate(s1):\n current_row = [i + 1]\n for j, c2 in enumerate(s2):\n insertions = previous_row[j + 1] + 1 # j+1 instead of j since previous_row and current_row are one character longer\n deletions = current_row[j] + 1 # than s2\n substitutions = previous_row[j] + (c1 != c2)\n current_row.append(min(insertions, deletions, substitutions))\n previous_row = current_row\n \n return previous_row[-1]",
"def iterative_levenshtein(s, t):\n rows = len(s) + 1\n cols = len(t) + 1\n dist = [[0 for x in range(cols)] for x in range(rows)]\n # source prefixes can be transformed into empty strings\n # by deletions:\n for i in range(1, rows):\n dist[i][0] = i\n # target prefixes can be created from an empty source string\n # by inserting the characters\n for i in range(1, cols):\n dist[0][i] = i\n\n for col in range(1, cols):\n for row in range(1, rows):\n if s[row - 1] == t[col - 1]:\n cost = 0\n else:\n cost = 1\n dist[row][col] = min(dist[row - 1][col] + 1, # deletion\n dist[row][col - 1] + 1, # insertion\n dist[row - 1][col - 1] + cost) # substitution\n return dist[rows - 1][cols - 1]",
"def compute_backpointers(s0, s1): #Tillverkar en array med backpointrs\r\n if s0 == None or s1 == None:\r\n raise Exception('Both s0 and s1 have to be set')\r\n rows = len(s0)+1 # antalet rader\r\n columns = len(s1)+1 # antalet kolumner\r\n\r\n ####### Tillverkar Levenshtein matrisen ########\r\n # Gör en tom matris med nollor\r\n distance = [[0 for y in range(len(s1)+1)] for x in range(len(s0)+1)]\r\n\r\n # Gör de yttre lagrerna i matrisen 0 -> len(str) vertikalt och horisontellt\r\n for i in range(1,rows):\r\n distance[i][0] = i\r\n for i in range(1,columns):\r\n distance[0][i] = i\r\n\r\n # Beräknar kostnaderna för varje plats inne i matrisen och sätter in dem\r\n # kollar om bokstaven på indexet i de två orden är samma i sådana fall kostar det 0\r\n # och skall ha samma värde som diagonalt innan, annars kostar det 1 från över eller underself.\r\n for column in range(1,columns):\r\n for row in range(1,rows): # kolla varje rad i vare column\r\n if s0[row-1] == s1[column -1]: # om det är samma bokstav kostar det 0\r\n c = 0\r\n else: # annars kostar det 2\r\n c = 2\r\n distance[row][column] = min(distance[row-1][column] + 1,distance[row][column-1] + 1,distance[row-1][column-1] + c)\r\n # raden över säger att det minsta värdet av över eller bredvid + 1 eller diagonalt innan plus (0 eller 2)\r\n # skall sättas in på platsen i matrisen.\r\n\r\n # det minsta avståndet är\r\n cost = distance[row][column]\r\n print(\"totalkostnaden är\")\r\n print(cost)\r\n\r\n\r\n ####### Tillverkar backptr-matrisen ########\r\n # Tillverkar en tom matris med [0,0] för till backptr-matrisen\r\n backptr = [[[0, 0] for y in range(len(s1)+1)] for x in range(len(s0)+1)]\r\n\r\n # går igenom platserna i Levenshtein matrisen bakirfrån\r\n for column in range(columns-1,0,-1):\r\n for row in range(rows-1,0,-1):\r\n # Om värdet till vänster är det minsta: peka vänster\r\n if distance[row][column-1] == min(distance[row-1][column-1],distance[row][column-1],distance[row-1][column]):\r\n backptr[row][column][0] = row\r\n backptr[row][column][1] = column -1\r\n # Om värdet över är det minsta: peka upp\r\n if distance[row-1][column] == min(distance[row-1][column-1],distance[row][column-1],distance[row-1][column]):\r\n backptr[row][column][0] = row -1\r\n backptr[row][column][1] = column\r\n # om värdet diagonalt är minst: peka på diagonalt\r\n if distance[row-1][column-1] == min(distance[row-1][column-1],distance[row][column-1],distance[row-1][column]):\r\n backptr[row][column][0] = row-1\r\n backptr[row][column][1] = column -1\r\n\r\n # Gör yttervärdena i matrisen, (OBS behövs ej)\r\n for i in range(0,rows):\r\n j = i-1\r\n backptr[i][0][0] = j\r\n backptr[i][0][1] = 0\r\n for i in range(0,columns):\r\n j = i-1\r\n backptr[0][i][1] = j\r\n backptr[0][i][0] = 0\r\n\r\n return backptr",
"def distance_matrix(sequences, substitution_mat):\n distance_mat = numpy.empty((len(sequences), len(sequences)), dtype='float')\n\n print(\"Building distance matrix\")\n # Get similarity score\n for i, seqA in enumerate(sequences):\n sys.stdout.write(\"\\r%.f%%\" % (float(i+1)/len(sequences)*100))\n sys.stdout.flush()\n for j, seqB in enumerate(sequences[i:], start=i):\n score = substitution_score(substitution_mat, seqA, seqB)\n distance_mat[i, j] = score\n distance_mat[j, i] = score\n print(\"\")\n # Set equal the diagonal\n diag_mini = numpy.min(distance_mat.diagonal())\n for i in range(len(sequences)):\n distance_mat[i, i] = diag_mini\n # Convert similarity score into a distance\n mini = numpy.min(distance_mat)\n maxi = numpy.max(distance_mat)\n return 1 - (distance_mat + abs(mini))/(maxi - mini)",
"def nb_vector_tcrdist(indices, seqs_mat, seqs_L, distance_matrix=tcr_nb_distance_matrix, dist_weight=3, gap_penalty=4, ntrim=3, ctrim=2, fixed_gappos=True):\n\n return _nb_vector_tcrdist(indices, seqs_mat, seqs_L, distance_matrix, dist_weight, gap_penalty, ntrim, ctrim, fixed_gappos)",
"def PairwiseSimilarity(sequences):\n\n seq = sequences\n N = len(seq)\n\n # Define distance matrix\n d_matrix = zeros((N, N), float64)\n\n for i in range(len(seq)):\n for j in range(len(seq)):\n d_matrix[i][j] = -1\n\n # Similarity matrix\n s_matrix = zeros((N, N), float64)\n\n for i in range(N):\n for j in range(N):\n s_matrix[i][j] = -1\n\n # Find pairs\n for i in range(N):\n for j in range(N):\n\n if s_matrix[i][j] >= 0:\n continue\n\n seq1 = seq[i][1]\n seq2 = seq[j][1]\n minlen = min(len(seq1), len(seq2))\n \n len1 = len2 = sims = 0\n for x in range(minlen):\n if seq1[x] != 256:\n len1 += 1.0\n\n if seq1[x] == seq2[x]:\n sims += 1.0\n\n if seq2[x] != 256:\n len2 += 1.0\n\n maxlen = max(len1, len2)\n s_matrix[i][j] = sims / maxlen\n\n # Get distance matrix\n for i in range(N):\n for j in range(N):\n d_matrix[i][j] = s_matrix[i][i] - s_matrix[i][j]\n \n return d_matrix",
"def distance_embeddings(embeddings_matrix):\n distance = 0\n l = 0\n for row1, row2 in it.combinations(embeddings_matrix, 2):\n new_distance = np.sqrt(np.sum(np.power(row1-row2, 2)))\n distance += new_distance\n l += 1\n\n av_distance = distance / l\n return av_distance",
"def _levenshtein_distance(t1: Trace, t2: Trace):\n if t1.length > t2.length:\n t1, t2 = t2, t1\n\n distances = range(t1.length + 1)\n for i2, c2 in enumerate(t2.event_list):\n distances_ = [i2 + 1]\n for i1, c1 in enumerate(t1.event_list):\n if c1 == c2:\n distances_.append(distances[i1])\n else:\n distances_.append(1 + min((distances[i1], distances[i1 + 1], distances_[-1])))\n distances = distances_\n return distances[-1]",
"def sentence_distance(d, words, vecs, s1, s2, sigmasq=1.0):\n pairs = itertools.product(s1,s2)\n numpairs = 0\n distance = 0\n # this for loop is annoyingly long\n for pair in pairs:\n numpairs += 1\n # get the distance\n p1, p2 = pair\n if type(p1) == int:\n i1 = p1\n elif type(p1) == str or type(p1) == unicode:\n try:\n # this is a slow step\n i1 = words.index(p1)\n except ValueError:\n # don't know this word\n pair_dist = 0\n distance += np.exp(-(pair_dist*pair_dist)/sigmasq)\n continue\n else:\n print 'WTF???'\n if type(p2) == int:\n i2 = p2\n elif type(p2) == str or type(p2) == unicode:\n try:\n i2 = words.index(p2)\n except ValueError:\n # don't know this word\n pair_dist = 0\n distance += np.exp(-(pair_dist*pair_dist)/sigmasq)\n continue\n if not d == None:\n # already have pairwise distances\n if len(d.shape) == 2:\n # d is a matrix\n # NOTE d MUST CORRESPOND TO INDEXING OF THE WORDS\n pair_dist = d[i1,i2]\n elif len(d.shape) == 1:\n # d is a distance vector (:()\n n = d.shape[0]\n pair_dist = d[square_to_condensed(i1,i2,n)]\n else:\n # no pairwise distances\n # these vectors should exist becuse we always checked for invalid indices\n v1 = vecs[i1, :]\n v2 = vecs[i2, :]\n # OBSERVE CHOICE OF METRIC\n # this is a slow step\n pair_dist = sps.distance.cosine(v1,v2)\n distance += np.exp(-(pair_dist*pair_dist)/sigmasq)\n return distance/numpairs",
"def levenshtein_distance(self, a,b):\n\n n, m = len(a), len(b)\n if n > m:\n a,b = b,a\n n,m = m,n\n current = range(n+1)\n for i in range(1,m+1):\n previous, current = current, [i]+[0]*n\n for j in range(1,n+1):\n add, delete = previous[j]+1, current[j-1]+1\n change = previous[j-1]\n if a[j-1] != b[i-1]:\n change = change + 1\n current[j] = min(add, delete, change)\n return current[n]",
"def levinshtein(u, v):\n n = len(u)+1\n m = len(v)+1\n d = np.zeros((n,m))\n #La matriz d nos ayudará a almacenar cálculos, pues la \n #distancia se computa de manera recursiva\n for i in range(n):\n d[i][0]=i\n for i in range(m):\n d[0][i] = i\n #Los ciclos for's anteriores son la base recursiva del \n #cálculo\n for i in range(1, n):\n for j in range(1, m):\n #Ecuación recursiva\n d[i][j] = min(d[i][j-1]+1, d[i-1][j]+1, d[i-1][j-1]+(not u[i-1] == v[j-1]))\n #La última entrada de la matriz es igual a d(u,v)\n return float(d[n-1][m-1])",
"def levenshtein_distance(sentence1, sentence2):\n seq1 = lemmatizer.lemmatize(sentence1, ignore_stop_words=True)\n seq2 = lemmatizer.lemmatize(sentence2, ignore_stop_words=True)\n return nltk.edit_distance(seq1, seq2, transpositions=False) / max(len(seq1), len(seq2))"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
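The negative examples in the row above are all variants of the same edit-distance recurrence: each cell takes the minimum of a deletion, an insertion, and a match-or-substitution step. A short worked sketch of that recurrence follows; the function name and the test strings are illustrative and not taken from the row itself.

def edit_distance(a, b):
    # Classic dynamic-programming Levenshtein distance: prev/curr hold one
    # row of the DP table; every edit operation costs 1.
    prev = list(range(len(b) + 1))
    for i, ca in enumerate(a, start=1):
        curr = [i]
        for j, cb in enumerate(b, start=1):
            curr.append(min(prev[j] + 1,                  # deletion
                            curr[j - 1] + 1,              # insertion
                            prev[j - 1] + (ca != cb)))    # substitution / match
        prev = curr
    return prev[-1]

print(edit_distance("kitten", "sitting"))  # 3: k->s, e->i, append g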
Store the names and grades of school students. | def __init__(self):
self.students = {} | [
"def add_student(self, name: str, grade: int) -> None:\n school_grade = self.students.setdefault(grade, [])\n school_grade.append(name)\n school_grade.sort()",
"def __init__(self, name, grade):\n self.student_info = [name, grade]",
"def __init__(self, name, grade):\n self.student_info = {'name': name, 'grade': grade}",
"def add_student(student_name):\n students = load_students()\n students.append({'name': student_name, 'words': [], 'review words': [], 'reading strategy': [], 'books' : [], 'group': \"\"})\n print \"{} has been added\".format(student_name)\n save_students(students)",
"def addGrade(self, student, grade):\n try:\n self.grades[student.getIdNum()].append(grade\n except KeyError:\n raise ValueError(\"Student not in grade book.\")\n\n def getGrades(self, student):\n \"\"\"Return a list of grades for a student.\"\"\"\n try: # return a copy of a student grades\n self.grades[student.getIdNum()][:]\n except KeyError:\n raise ValueError(\"Student not in grade book.\")",
"def show_student_grades(self):\n assignments_with_grades = [assignment for assignment in self.student.assignments\n if assignment.grade is not None]\n if not self.student.assignments:\n StudentView.print_user_have_no_grades()\n assignments_as_strings_list = []\n for assignment in assignments_with_grades:\n grade_with_assignment_name = 'Grade: {}, Assignment title: {}'.format(assignment.grade, assignment.title)\n assignments_as_strings_list.append(grade_with_assignment_name)\n StudentView.display_user_grades(assignments_as_strings_list)",
"def add_student():\n\n\tprint('You must enter the student as is:\\n'\n\t\t\"'First name', 'middle name', 'Last name', 'major', 'major', 'gpa', id_number, 'minor'\"\n\t\t\" 'minor' graduation year, advisor number\\n For example: 'Kyle', 'Jacob', 'Ranney', 'Insurance'\"\n\t\t\", 'Chemistry', 3.0, 93988, 'Biology', 'NULL', 2016, 2234\\n\")\n\t# use sql insert statement\n\t# become familiar with this!\t",
"def initialize_data():\r\n \r\n \r\n # Import student data from school district program output. \r\n all_students = pd.read_excel(SCHOOL_FILE, index_col = [1], header=[10])\r\n\r\n # Convert string categories to integers and floats.\r\n conv_students = convert_attribs(all_students)\r\n\r\n # Group students by grade.\r\n gradegroups = conv_students.groupby(conv_students.Grade)\r\n kg = gradegroups.get_group('KG')\r\n firstg = gradegroups.get_group('01')\r\n secondg = gradegroups.get_group('02')\r\n thirdg = gradegroups.get_group('03')\r\n fourthg = gradegroups.get_group('04')\r\n fifthg = gradegroups.get_group('05')\r\n\r\n students_by_grade = [kg, firstg, secondg, thirdg, fourthg, fifthg]\r\n\r\n return students_by_grade",
"def student_grades(student, course):\n cg = CourseGradeFactory().create(student, course)\n return cg.summary",
"def add_student(student):",
"def _get_grades(self, path):\n try:\n for cwid, course, grade, instructor_cwid in file_reading_gen(path,4, sep='\\t',header=False):\n if cwid in self._students.keys():\n self._students[cwid].add_course(course, grade)\n else: \n print(f\"dint find student with {cwid}\")\n \n if instructor_cwid in self._instructor.keys():\n self._instructor[instructor_cwid].get_student_no(course)\n else:\n print(f\"didnt find prof {instructor_cwid} whose course was mentioned\")\n except ValueError:\n print(\"Not getting the details for the user\")",
"def __init__(self):\r\n self.dict_of_students = {}\r\n self.dict_of_courses = {}",
"def __init__(self, name, surname):\n\t\t\n\t\tself.grades = {}\n\t\tself.attendance = 0\n\t\t\n\t\tif not (isinstance(name, str) and isinstance(surname, str)):\n\t\t\tname, surname = \"None\", \"None\"\n\t\tself.name, self.surname = name, surname",
"def test_student(self):\n student = Student('11788',' Fuller, E ',' SYEN')\n student.add_course_grade('SSW 555', 'A')\n self.assertEqual(student.student_info(), ['11788',' Fuller, E ', ['SSW 555']])",
"def _get_grades(self, path):\n try:\n for cwid, course, grade, instructor_cwid in file_reading_gen(path, 4, sep='|', header=True):\n if cwid in self._students.keys():\n self._students[cwid].add_course(course, grade)\n else:\n print(f\"didnt find student {cwid} whose grade was mentioned\")\n if instructor_cwid in self._instructors.keys():\n self._instructors[instructor_cwid].get_student_no(course)\n else:\n print(f\"didnt find prof with cwid = {cwid} whose course was mentioned\")\n except ValueError as ve:\n print(f\"exception {ve} occured\")",
"def grades(self, grades):\n\n self._grades = grades",
"def make_gradebook(roster, grades, sub_info):\n gradebook = []\n for student in roster.keys():\n s = {}\n # fill student file with evaluation grades\n for day, score in zip(sub_info.keys(), grades):\n s[str(day)] = score[student]\n s['total'] = sum(s.values())\n s['username'] = student\n gradebook.append(s)\n return gradebook",
"def add_teachers_to_school(school, teacher_name, teacher_age, teacher_salary):\n pass",
"def get_students_for_gradebook(self):\n raise Exception('Not implemented')"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Add a student to a grade in the roster. | def add_student(self, name: str, grade: int) -> None:
school_grade = self.students.setdefault(grade, [])
school_grade.append(name)
school_grade.sort() | [
"def add_student(student):",
"def add_grade(self, student, grade):\n try:\n self.grades[student.id].append(grade)\n except KeyError:\n raise ValueError('Student not in Grade Book.')",
"def add_student(self, student: 'Student') -> None:\n self.students.append(student)",
"def add_student(self, student):\n if student in self.students:\n raise ValueError('Duplicate Student.')\n self.students.append(student)\n self.grades[student.id] = []\n self.is_sorted = False",
"def add_student(self, student):\n self.student_list.append(student)",
"def add_student(student_name):\n students = load_students()\n students.append({'name': student_name, 'words': [], 'review words': [], 'reading strategy': [], 'books' : [], 'group': \"\"})\n print \"{} has been added\".format(student_name)\n save_students(students)",
"def add_student():\n\n\tprint('You must enter the student as is:\\n'\n\t\t\"'First name', 'middle name', 'Last name', 'major', 'major', 'gpa', id_number, 'minor'\"\n\t\t\" 'minor' graduation year, advisor number\\n For example: 'Kyle', 'Jacob', 'Ranney', 'Insurance'\"\n\t\t\", 'Chemistry', 3.0, 93988, 'Biology', 'NULL', 2016, 2234\\n\")\n\t# use sql insert statement\n\t# become familiar with this!\t",
"def AddStudent(self, event):\n pass",
"def addGrade(self, student, grade):\n try:\n self.grades[student.getIdNum()].append(grade\n except KeyError:\n raise ValueError(\"Student not in grade book.\")\n\n def getGrades(self, student):\n \"\"\"Return a list of grades for a student.\"\"\"\n try: # return a copy of a student grades\n self.grades[student.getIdNum()][:]\n except KeyError:\n raise ValueError(\"Student not in grade book.\")",
"def grant_student_access(account_id, grade, level):\n query = 'INSERT INTO student VALUES( %s, %s, %s );'\n args = (account_id, grade, level)\n database.connection.save_data(query, args)",
"def addStud(self,ID,name,attNr,grade):\n if ID < 0: raise Exception(\"Invalid ID!\")\n parts = name.split(' ')\n if len(parts) < 2: raise Exception('Invalid name!')\n for part in parts:\n if len(part)<3: raise Exception('Invalid name!')\n if attNr < 0: raise Exception('Invalid number of attendances!')\n if grade not in range(0,11): raise Exception('Invalid grade!')\n self.__studRepo.add(Student(ID,name,attNr,grade))",
"def add_student(user_inputs):\r\n no_space = (remove_space(user_inputs))\r\n student_tuple = student_info._make(no_space.split(\",\"))\r\n StudentRoster.append(student_tuple)",
"def add_grade(self, grade):\n if self._grades is None:\n self._grades = []\n self._grades.append(grade)",
"def add_course(self, course, grade):\n \n self._courses[course] = grade",
"def test_add_student():\n classroom = setup_for_test()\n student = Student(\"Andrew Tsukuda\")\n classroom.add_student(student)\n assert len(classroom.student_dir) == 1\n assert classroom.student_dir[0].ID == 1",
"def add_course_grade(self, course, grade):\n self._course_grade[course] = grade",
"def add_course_grade(self, course, grade):\n course_grade_tuple = (course, grade)\n self.courses_grades.append(course_grade_tuple)",
"def set_grade(student, assignment, points, cur):\n \n sql = \"INSERT INTO GradeEntries VALUES({}, {}, {})\".format(student[0], assignment[0], points)\n cur.execute(sql)",
"def update_grade(grade, student_id):\n query = 'UPDATE student SET grade = %s WHERE student_id = %s;'\n args = (grade, student_id)\n database.connection.save_data(query, args)"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
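The two rows above split one roster pattern across two snippets: a constructor that keeps a dict keyed by grade, and an add_student method that appends into a sorted per-grade list via setdefault. A minimal sketch putting them together; the class name School and the grade() accessor are assumptions added for the example, not part of either row.

class School:
    def __init__(self):
        # Maps grade number -> alphabetically sorted list of student names.
        self.students = {}

    def add_student(self, name: str, grade: int) -> None:
        school_grade = self.students.setdefault(grade, [])
        school_grade.append(name)
        school_grade.sort()

    def grade(self, grade: int) -> list:
        # Return a copy so callers cannot mutate the roster in place.
        return list(self.students.get(grade, []))

school = School()
school.add_student("Jim", 3)
school.add_student("Anna", 3)
print(school.grade(3))  # ['Anna', 'Jim']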
Compute initial values for position and velocity in the GCRS system, for later use in orbit integration, from tables in the prediction files. Use a Lagrange polynomial to interpolate in the tables. | def calculate_initial_values(eph, rundate):
data = sorted(eph["positions"].items())
pos_itrs = np.zeros((len(data), 3))
mjd1, mjd2 = zip(*[t for t, d in data])
rotation_mat = rotation.trs2gcrs(time.Time(val=mjd1, val2=mjd2, fmt="mjd", scale="utc"))
tbl = time.Time(val=mjd1, val2=mjd2, fmt="mjd", scale="utc")
for i in range(0, len(data)):
pos_itrs[i] = data[i][1]["pos"]
diffsec = np.array([(t - rundate).total_seconds() for t in tbl.utc.datetime])
# Table given in ITRF coordinate system. Convert to GCRS, where the integration of the satellite orbit will
# be done
pos_gcrs = np.sum(rotation_mat @ pos_itrs[:, :, None], axis=2)
log.info("Interpolating data from prediction file in order to get initial pos/vel")
pos_gcrs_ip, vel_gcrs_ip = interpolation.interpolate_with_derivative(
diffsec, pos_gcrs, np.array([0.0]), kind="lagrange", window=10, bounds_error=False
)
eph["initial_pos"] = pos_gcrs_ip[0]
eph["initial_vel"] = vel_gcrs_ip[0]
return eph | [
"def compute_velocities(self):\n \n xfunc = interpolate.splrep(self.TIME, self.XPOS, s=0)\n yfunc = interpolate.splrep(self.TIME, self.YPOS, s=0)\n zfunc = interpolate.splrep(self.TIME, self.ZPOS, s=0)\n \n self.XVEL = interpolate.splev(self.TIME, xfunc, der=1)\n self.YVEL = interpolate.splev(self.TIME, yfunc, der=1)\n self.ZVEL = interpolate.splev(self.TIME, zfunc, der=1)",
"def calc_velocity(self):\n\n\t\t# get position\n\t\tyear, month, day, hour, minutes, sec = self.orbital_time\n\t\tvelo = self.sat_pos_obj.propagate(year, month, day, hour, minutes, sec)[1]\n\n\t\t# calculate velocity from vector components\n\t\tvelocity = np.sqrt(velo[0]**2 + velo[1]**2 +velo[2]**2) * 3600\n\n\t\treturn velocity",
"def initialize():\n\n global z_from_t_interp\n\n # Logarithmic spacing\n log_z_set = np.linspace(0.0, 3.0, 300)\n z_set = 10**(log_z_set) - 1.0\n\n t_set = np.zeros(len(z_set))\n for i, z in enumerate(z_set):\n t_set[i] = calc_lookback_time(z) / 1.0e6 # in Myr\n\n z_from_t_interp = interp1d(t_set, z_set, bounds_error=False, fill_value=100.0)",
"def __init__(self, v_0, alpha_0, time, gamma=1, x_init=0,y_init=0):\n self.v_init=np.array([v_0*np.cos(alpha_0),v_0*np.sin(alpha_0)])\n self.r_init=np.array([x_init,y_init])\n self.delta=time\n self.gamma=gamma\n self.g=9.81 #gravitational const in [m/s]\n self.v_old=[]\n self.v_new=[]",
"def __init__(self, timestep=1.0 * simtk.unit.femtoseconds):\n\n super(VelocityVerletIntegrator, self).__init__(timestep)\n\n self.addPerDofVariable(\"x1\", 0)\n\n self.addUpdateContextState()\n self.addComputePerDof(\"v\", \"v+0.5*dt*f/m\")\n self.addComputePerDof(\"x\", \"x+dt*v\")\n self.addComputePerDof(\"x1\", \"x\")\n self.addConstrainPositions()\n self.addComputePerDof(\"v\", \"v+0.5*dt*f/m+(x-x1)/dt\")\n self.addConstrainVelocities()",
"def main():\n \n def get_x_input():\n \"\"\"\n This gets the initial x position and velocity values\n Param:none\n Return:Tuple with x pos and vel\n \"\"\"\n # Ask for and validate user input for x pos and vel\n while True:\n try:\n posx = float(input(\"Please enter the initial x position in m: \"))\n except ValueError:\n print(\"Invalid Input\")\n continue\n else:\n break\n\n while True:\n try:\n velx = float(input(\"Please enter the initial x velocity in m/s: \"))\n except ValueError:\n print(\"Invalid Input\")\n continue\n else:\n break\n \n #return tuple\n xinput = (posx, velx)\n return xinput\n\n def get_y_input():\n \"\"\"\n This gets the initial y position and velocity values\n Param:none\n Return:Tuple with y pos and vel\n \"\"\" \n # Ask for and validate user input for y pos and vel\n while True:\n try:\n posy = float(input(\"Please enter the initial y position in m: \"))\n\n #start at ground\n if posy < 0:\n print(\"Please enter a positive value.\")\n continue\n\n except ValueError:\n print(\"Invalid input\")\n continue\n else:\n break\n\n while True:\n try:\n vely = float(input(\"Please enter the initial y velocity in m/s: \"))\n except ValueError:\n print(\"Invalid Input\")\n continue\n else:\n break\n \n # Return tuple\n yinput = (posy, vely)\n return yinput\n\n #Inital position and velocity of user input x and y\n posx0, velx0 = get_x_input()\n posy0, vely0 = get_y_input()\n \n #acceleration y acceleration is gravity\n accelx = 0.0\n GRAVITY = -9.8 \n \n #Initial time of 0s, time intervals of .01 s\n deltat = .01\n t = 0.0\n \n #lists of all x and y positions in the motion \n x = [posx0]\n y = [posy0]\n \n #limit of time intervals to calculate\n intervals = 4000\n\n for i in range(0, intervals):\n #increment time, add xy positions at that time\n t = t + deltat\n x.append(position(posx0, velx0, t, accelx))\n y.append(position(posy0, vely0, t, GRAVITY))\n \n #if the projectile has hit the ground, break\n if y[i+1] <= 0:\n break\n\n plot_motion(x, y)",
"def solar_param((y,mo,d,h,mi),latitude,longitude, UTC_diff=0, groundalbedo=0.18):\n time_shift = datetime.timedelta(hours=UTC_diff) #SGT is UTC+8 \n thistime = pd.DatetimeIndex([pd.Timestamp(np.datetime64(datetime.datetime(y,mo,d,h,mi) + time_shift), tz='UTC')]) \n thisloc = pvlib.location.Location(latitude, longitude, tz='UTC', altitude=0, name=None)\n solpos = thisloc.get_solarposition(thistime) \n \n sunpz = np.sin(np.radians(solpos.elevation[0])); hyp = np.cos(np.radians(solpos.elevation[0]))\n sunpy = hyp*np.cos(np.radians(solpos.azimuth[0]))\n sunpx = hyp*np.sin(np.radians(solpos.azimuth[0]))\n \n solar_pmt = thisloc.get_clearsky(thistime,model='ineichen') #\n E_sol= solar_pmt.dni[0] #direct normal solar irradiation [W/m^2]\n Ground_Diffuse = pvlib.irradiance.grounddiffuse(90,solar_pmt.ghi,albedo =groundalbedo)[0] #Ground Reflected Solar Irradiation - vertical asphalt surface [W/m^2]\n Sky_Diffuse = pvlib.irradiance.isotropic(90, solar_pmt.dhi)[0] #Diffuse Solar Irradiation - vertical surface[W/m^2]. \n \n #Formula 9 in Huang et. al. for a standing person, largely independent of gender, body shape and size. For a sitting person, approximately 0.25\n solarvf=abs(0.0355*np.sin(solpos.elevation[0])+2.33*np.cos(solpos.elevation[0])*(0.0213*np.cos(solpos.azimuth[0])**2+0.00919*np.sin(solpos.azimuth[0])**2)**(0.5)); \n results = pd.DataFrame({\n 'solarvector':[(sunpx,sunpy,sunpz)],\n 'solarviewfactor':[solarvf],\n 'direct_sol':[E_sol],\n 'diffuse_frm_sky':[Sky_Diffuse],\n 'diffuse_frm_ground':[Ground_Diffuse]\n }) \n return results",
"def initialise_calibration(self):\n for i in range(0, self.NUM_SENSORS):\n self.calibratedMax[i] = 0\n self.calibratedMin[i] = self.READING_TIMEOUT",
"def initial_conditions(self):\n\n n = self.num_states() # (Maximal) Number of states in MultiCellModel\n VS = VectorFunctionSpace(self.mesh(), \"DG\", 0, n + 1)\n vs = Function(VS)\n vs_tmp = Function(VS)\n\n markers = self.markers()\n\n u = TrialFunction(VS)\n v = TestFunction(VS)\n\n dy = Measure(\"dx\", domain=self.mesh(), subdomain_data=markers)\n\n # Define projection into multiverse\n a = inner(u, v)*dy(i_k)\n\n Ls = list()\n for k, model in enumerate(self.models()):\n i_k = self.keys()[k] # Extract domain index of cell model k\n ic = model.initial_conditions() # Extract initial conditions\n n_k = model.num_states() # Extract number of local states\n L_k = sum(ic[j]*v[j]*dy(i_k) for j in range(n_k + 1)) # include v and s\n Ls.append(L_k)\n L = sum(Ls)\n solve(a == L, vs)\n return vs",
"def setupVelocities(self, N):\n #get velocity space phase angles\n self.uniformVelPhaseAngle()\n\n if self.vMode == 'single':\n print(\"Gyro orbit calculation from single plasma temperature\")\n log.info(\"Gyro orbit calculation from single plasma temperature\")\n self.T0 = np.ones((N))*self.gyroT_eV\n #get average velocity for each temperature point\n self.vThermal = self.temp2thermalVelocity(self.T0)\n #set upper bound of v*f(v) (note that this cuts off high energy particles)\n self.vMax = 5 * self.vThermal\n #get 100 points to initialize functional form of f(v) (note this is a 2D matrix cause vMax is 2D)\n self.vScan = np.linspace(0,self.vMax,10000).T\n #get velocity slices for each T0\n self.pullEqualProbabilityVelocities()\n\n else:\n #TO ADD THIS YOU WILL NEED TO PASS IN XYZ COORDINATES OF CTRS AND INTERPOLATE\n print(\"3D plasma temperature interpolation from file not yet supported. Run gyro orbits in single mode\")\n log.info(\"3D plasma temperature interpolation from file not yet supported. Run gyro orbits in single mode\")\n\n return",
"def __call__(self, t_):\n X, Y, t, _n = self.X, self.Y, self.t, self._n\n x, y = 0, 0 # initial x and y return values\n for i in _n:\n p_i = 1 # initial lagrange polynomial value\n for j in _n:\n # if i != j: update lagrange polynomial\n if i != j: p_i *= (t_ - t[j]) / (t[i] - t[j])\n # mult ith control point by ith lagrange polynomial\n # (ith control point maps to ith time point)\n x += X[i] * p_i\n y += Y[i] * p_i\n return x, y",
"def calc_refl(velocity, shotloc_x, shotloc_z, layer_idxs):\n solver_dg = pykonal.EikonalSolver(coord_sys=\"cartesian\")\n solver_dg.vv.min_coords = velocity.min_coords\n solver_dg.vv.node_intervals = velocity.node_intervals\n solver_dg.vv.npts = velocity.npts\n solver_dg.vv.values = velocity.values\n\n #shotloc = 2.56 # km\n src_idx = (int((shotloc_x - velocity.min_coords[0])/velocity.node_intervals[0]), int(shotloc_z/velocity.node_intervals[1]), 0)\n solver_dg.tt.values[src_idx] = 0\n solver_dg.unknown[src_idx] = False\n solver_dg.trial.push(*src_idx)\n solver_dg.solve()\n\n solver_ug = pykonal.EikonalSolver(coord_sys=\"cartesian\")\n solver_ug.vv.min_coords = solver_dg.vv.min_coords\n solver_ug.vv.node_intervals = solver_dg.vv.node_intervals\n solver_ug.vv.npts = solver_dg.vv.npts\n solver_ug.vv.values = solver_dg.vv.values\n\n for ix in range(solver_ug.tt.npts[0]):\n #idx = (ix, solver_ug.tt.npts[1]-1, 0)\n idx = (ix, layer_idxs[ix], 0)\n solver_ug.tt.values[idx] = solver_dg.tt.values[idx]\n #print(idx, solver_dg.tt.values[idx])\n solver_ug.unknown[idx] = False\n solver_ug.trial.push(*idx)\n solver_ug.solve()\n \n return solver_ug.tt.values[:,0,0]",
"def initial_velocity(self) -> float:\n return self._initial_velocity",
"def initialize(self, state_space, state_positions, **__):\n # for organization purposes\n interval = self._initializer['interval']\n random_dist = self._initializer['random_init']\n random_params = self._initializer['random_params']\n self._initial_states.update(self._default_initializer['states'])\n if self._initializer['states'] is not None:\n self._initial_states.update(self._initializer['states'])\n\n # different limits for InductionMotor\n if any(state in self._initial_states for state in ['psi_ralpha', 'psi_rbeta']):\n # caution: _initial_limits sometimes contains singleton ndarrays, they must be\n # extracted with .item()\n nominal_values_ =\\\n [self._initial_limits[state].item() if isinstance(self._initial_limits[state], np.ndarray)\n else self._initial_limits[state] for state in self._initial_states]\n upper_bound = np.asarray(np.abs(nominal_values_), dtype=float)\n # state space for Induction Envs based on documentation\n # ['i_salpha', 'i_sbeta', 'psi_ralpha', 'psi_rbeta', 'epsilon']\n # hardcoded for induction motors currently given in the toolbox\n state_space_low = np.array([-1, -1, -1, -1, -1])\n lower_bound = upper_bound * state_space_low\n else:\n if isinstance(self._nominal_values, dict):\n nominal_values_ = [self._nominal_values[state]\n for state in self._initial_states.keys()]\n nominal_values_ = np.asarray(nominal_values_)\n else:\n nominal_values_ = np.asarray(self._nominal_values)\n\n state_space_idx = [\n state_positions[state] for state in self._initial_states.keys()\n ]\n\n upper_bound = np.asarray(nominal_values_, dtype=float)\n lower_bound = upper_bound * \\\n np.asarray(state_space.low, dtype=float)[state_space_idx]\n # clip nominal boundaries to user defined\n if interval is not None:\n lower_bound = np.clip(\n lower_bound,\n a_min=np.asarray(interval, dtype=float).T[0],\n a_max=None\n )\n upper_bound = np.clip(\n upper_bound,\n a_min=None,\n a_max=np.asarray(interval, dtype=float).T[1]\n )\n # random initialization for each motor state (current, epsilon)\n if random_dist is not None:\n if random_dist == 'uniform':\n initial_value = (upper_bound - lower_bound) \\\n * self._random_generator.uniform(size=len(self._initial_states.keys())) \\\n + lower_bound\n # writing initial values in initial_states dict\n random_states = {\n state: initial_value[idx] for idx, state in enumerate(self._initial_states.keys())\n }\n self._initial_states.update(random_states)\n\n elif random_dist in ['normal', 'gaussian']:\n # specific input or middle of interval\n mue = random_params[0] or (\n upper_bound - lower_bound) / 2 + lower_bound\n sigma = random_params[1] or 1\n a, b = (lower_bound - mue) / sigma, (upper_bound - mue) / sigma\n initial_value = truncnorm.rvs(\n a, b, loc=mue, scale=sigma, size=(\n len(self._initial_states.keys())),\n random_state=self.seed_sequence.pool[0]\n )\n # writing initial values in initial_states dict\n random_states = {\n state: initial_value[idx] for idx, state in enumerate(self._initial_states.keys())\n }\n self._initial_states.update(random_states)\n\n else:\n raise NotImplementedError\n # constant initialization for each motor state (current, epsilon)\n elif self._initial_states is not None:\n initial_value = np.atleast_1d(list(self._initial_states.values()))\n # check init_value meets interval boundaries\n if ((lower_bound <= initial_value).all()\n and (initial_value <= upper_bound).all()):\n initial_states_ = \\\n {state: initial_value[idx]\n for idx, state in enumerate(self._initial_states.keys())}\n 
self._initial_states.update(initial_states_)\n else:\n raise Exception(\n 'Initialization value has to be within nominal boundaries')\n else:\n raise Exception('No matching Initialization Case')",
"def computeVelSetpoint(self):\n # Compute commands\n self.body_vx_cmd_ = self.kP_xy_*self.ex_ + self.kI_xy_*self.ex_int_\n self.body_vy_cmd_ = self.kP_xy_*self.ey_ + self.kI_xy_*self.ey_int_\n self.body_vz_cmd_ = self.kP_z_*self.ez_ + self.kI_z_*self.ez_int_\n\n # Horizontal velocity constraints\n vel_magnitude = sqrt(self.body_vx_cmd_**2 + self.body_vy_cmd_**2)\n if vel_magnitude > self.vXYMAX_ : # anti-windup scaling \n scale = self.vXYMAX_/vel_magnitude\n self.body_vx_cmd_ = self.body_vx_cmd_*scale\n self.body_vy_cmd_ = self.body_vy_cmd_*scale\n else:\n if self.engaged_: # if armed & offboard\n self.ex_int_ = self.ex_int_ + self.ex_ # You can divide self.ex_ by the controller rate, but you can just tune self.kI_xy_ for now!\n self.ey_int_ = self.ey_int_ + self.ey_\n\n # Vertical velocity constraints\n if self.body_vz_cmd_ > self.vUpMAX_ : # anti-windup scaling \n self.body_vz_cmd_ = self.vUpMAX_\n elif self.body_vz_cmd_ < -self.vDownMAX_:\n self.body_vz_cmd_ = -self.vDownMAX_\n else:\n if self.engaged_: # if armed & offboard\n self.ez_int_ = self.ez_int_ + self.ez_ # You can divide self.ex_ by the controller rate, but you can just tune self.kI_z_ for now!\n\n return self.body_vx_cmd_, self.body_vy_cmd_, self.body_vz_cmd_",
"def _create_latent_variables(self):\n\n self.latent_variables.add_z('Constant', fam.Normal(0,3,transform=None), fam.Normal(0, 3))\n\n for ar_term in range(self.ar):\n self.latent_variables.add_z('AR(' + str(ar_term+1) + ')', fam.Normal(0,0.5,transform=None), fam.Normal(0, 3))\n\n for ma_term in range(self.ma):\n self.latent_variables.add_z('MA(' + str(ma_term+1) + ')', fam.Normal(0,0.5,transform=None), fam.Normal(0, 3))\n\n self.latent_variables.add_z('Sigma', fam.Flat(transform='exp'), fam.Normal(0, 3))\n\n self.latent_variables.z_list[0].start = np.mean(self.data)",
"def linearize(self, x0, u0):\n\t\tlr = self.lr\n\t\tdr = self.dr\n\t\tacc = u0[0]\n\t\tsteer = u0[1]\n\t\tpsi = x0[2]\n\t\tvel = x0[3]\n\n\t\ttandelta = np.tan(steer)\n\t\tcosdelta = np.cos(steer)\n\t\trtandelta = dr*tandelta\n\t\tbeta = np.arctan(rtandelta)\n\t\tcospsibeta = np.cos(psi + beta)\n\t\tsinpsibeta = np.sin(psi + beta)\n\t\tsinbeta = np.sin(beta)\n\t\tcosbeta = np.cos(beta)\n\t\tdelarctan = 1/(1+(dr*tandelta)**2)\n\t\tsec2delta = 1/(cosdelta**2)\n\n\t\tA = np.array([\n\t\t\t[0, 0, -vel*sinpsibeta, \t cospsibeta],\n\t\t\t[0, 0, vel*cospsibeta, \t sinpsibeta],\n\t\t\t[0, 0, \t\t\t 0, \t sinbeta/lr],\n\t\t\t[0, 0, \t\t\t\t 0, \t\t\t 0]\n\t\t\t])\n\t\tB = np.array([\n\t\t\t[0, -vel*sinpsibeta*delarctan*dr*sec2delta],\n\t\t\t[0, vel*cospsibeta*delarctan*dr*sec2delta],\n\t\t\t[0, vel*cosbeta*delarctan*dr*sec2delta/lr],\n\t\t\t[1, \t\t\t \t\t\t\t\t\t 0],\n\t\t\t])\n\t\tg = np.array([\n\t\t\t[vel*cospsibeta],\n\t\t\t[vel*sinpsibeta],\n\t\t\t[vel*sinbeta/lr],\n\t\t\t[\t\t\tacc],\n\t\t\t]).reshape(-1,)\n\t\treturn A, B, g",
"def generate_velocity_vectors(self):\n self.Magnitude, self.Direction = EarthVelocity.generate_vectors(self.Velocities)",
"def initialise(self):\n for i in range(self.nx):\n self.T[:, i] = (\n self.t_sun\n + self.mu\n * self.m_u\n * self.nabla\n * self.g\n * (self.y - self.y_max)\n / self.kb\n )\n self.P = self.p_sun * (self.T / self.t_sun) ** (1 / self.nabla)\n\n if self.Gaussian_perturbation:\n x_mean = 6e6\n y_mean = 2e6\n sigma = 8e5\n xx, yy = np.meshgrid(self.x, self.y)\n gaussian = self.t_sun * np.exp(\n -((xx - x_mean) ** 2 + (yy - y_mean) ** 2) / (2 * sigma ** 2)\n )\n self.T[:, :] = self.T[:, :] + gaussian\n\n self.rho[:, :] = self.P * self.mu * self.m_u / (self.kb * self.T[:, :])\n self.e[:, :] = self.P[:, :] / (self.Y - 1)"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
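The positive example above interpolates tabulated satellite positions with a Lagrange polynomial and takes the derivative at the run date to obtain the initial velocity. The interpolation helper it calls is library-specific; as a rough standalone sketch of the same idea, scipy's Lagrange polynomial can be differentiated directly. The epochs and position values below are made-up toy numbers, not data from the row.

import numpy as np
from scipy.interpolate import lagrange

# Ten tabulated epochs (seconds relative to the run date) and a toy
# x-coordinate of the satellite position at each epoch, in metres.
t = np.linspace(-300.0, 300.0, 10)
pos_x = 7000e3 * np.cos(2 * np.pi * t / 5400.0)

poly = lagrange(t, pos_x)      # degree-9 interpolating polynomial (numpy.poly1d)
dpoly = np.polyder(poly)       # its first derivative

initial_pos_x = poly(0.0)      # interpolated position component at t = 0
initial_vel_x = dpoly(0.0)     # interpolated velocity component at t = 0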
Do the initialization and setup for building a postage stamp. In the base class, we check for and parse the appropriate size and position values in config (aka base['stamp'] or base['image']). Values given in base['stamp'] take precedence if these are given in both places (which would be confusing, so probably shouldn't do that, but there might be a use case where it would make sense). Parameters: config - the configuration dict for the stamp field; base - the base configuration dict; xsize - the xsize of the image to build (if known); ysize - the ysize of the image to build (if known); ignore - a list of parameters that are allowed to be in config that we can ignore here, i.e. it won't be an error if these parameters are present; logger - if given, a logger object to log progress. Returns: xsize, ysize, image_pos, world_pos | def setup(self, config, base, xsize, ysize, ignore, logger):
# .. Do any custom setup you need to do.
# Probably want to call the base class setup function to do the normal determination
# of the size and position values.
# Extra processing of 'bandpass' argument
# Most needed type-checking is done in galsim.bandpass
self._req_bp_fields = ['throughput', 'wave_type']
self._opt_bp_fields = ['red_limit', 'blue_limit', 'zeropoint']
try:
bp = config['bandpass']
for req in self._req_bp_fields:
if req not in bp.keys():
raise ValueError('Must pass field {} for a bandpass object!'.format(req))
# for opt in self._opt_bp_fields:
# if opt not in bp.keys():
# config['bandpass'][opt] = None
for key in bp.keys():
if key not in (self._req_bp_fields+self._opt_bp_fields):
raise ValueError('Field {} is not a valid entry for a bandpass!'.format(key))
except KeyError:
raise KeyError('`bandpass` is a required field for a COSMOSChromatic stamp!')
extra_ignore = ignore + ['bandpass']
return super(self.__class__, self).setup(config, base, xsize, ysize, extra_ignore, logger) | [
"def __init__(self, **log_config):\n super(LoggingConfig, self).__init__(**log_config)\n self.level = self._parse_string('level')\n self.format = self._parse_string('format')\n self.show_statsd_messages = self._parse_bool('show_statsd_messages')\n self.show_sql_messages = self._parse_bool('show_sql_messages')\n self.show_werkzeug_messages = self._parse_bool('show_werkzeug_messages')\n self.enable_scrubbing = self._parse_bool('enable_scrubbing')\n self.log_directory = self._parse_string('log_directory', optional=True)\n self.file_prefix = self._parse_string('file_prefix', optional=True)\n self.file_rotation_backup_count = self._parse_positive_int('file_rotation_backup_count', allow_zero=True)\n self.file_rotation_max_bytes = self._parse_positive_int('file_rotation_max_bytes', allow_zero=True)",
"def prepare(self):\n\t\tpath = os.path.expanduser(os.path.expandvars(self.path))\n\n\t\tif os.path.exists(path):\n\t\t\tif os.path.isdir(path):\n\t\t\t\tlogger.info('Loading log data: %s', path)\n\n\t\t\t\tsummary_path = os.path.join(path, self.SUMMARY)\n\t\t\t\tif os.path.exists(summary_path):\n\t\t\t\t\tself.load_summary()\n\t\t\t\t\thas_summary = True\n\t\t\t\telse:\n\t\t\t\t\tlogger.trace('Loading old-style binary logger.')\n\t\t\t\t\thas_summary = False\n\n\t\t\t\t_, training_time, training_loss = self.load_statistic(\n\t\t\t\t\tStatistic(Statistic.Type.TRAINING, 'loss', 'total')\n\t\t\t\t)\n\t\t\t\tif training_loss is None:\n\t\t\t\t\tself.best_training_loss = None\n\t\t\t\telse:\n\t\t\t\t\tself.best_training_loss = training_loss.min()\n\n\t\t\t\t\t# Handle the old log format.\n\t\t\t\t\tif not has_summary:\n\t\t\t\t\t\tself.epochs = len(training_loss)\n\n\t\t\t\t_, validation_time, validation_loss = self.load_statistic(\n\t\t\t\t\tStatistic(Statistic.Type.VALIDATION, 'loss', 'total')\n\t\t\t\t)\n\t\t\t\tif validation_loss is None:\n\t\t\t\t\tself.best_validation_loss = None\n\t\t\t\telse:\n\t\t\t\t\tself.best_validation_loss = validation_loss.min()\n\n\t\t\t\t_, batch_time, _ = self.load_statistic(\n\t\t\t\t\tStatistic(Statistic.Type.BATCH, 'loss', 'total')\n\t\t\t\t)\n\n\t\t\t\tif batch_time is not None:\n\t\t\t\t\tself.latest_timestamp = batch_time[-1]\n\t\t\t\telif validation_time is not None:\n\t\t\t\t\tself.latest_timestamp = validation_time[-1]\n\t\t\t\telif training_time is not None:\n\t\t\t\t\tself.latest_timestamp = training_time[-1]\n\t\t\t\telse:\n\t\t\t\t\tself.latest_timestamp = 0\n\n\t\t\t\tself.timestamper.duration = self.latest_timestamp\n\n\t\t\telse:\n\t\t\t\traise ValueError('Binary logger stores its information in a '\n\t\t\t\t\t'directory. The supplied log path already exists, but it '\n\t\t\t\t\t'is a file: {}'.format(path))\n\n\t\telse:\n\t\t\tlogger.info('Log does not exist. Creating path: %s', path)\n\t\t\tos.makedirs(path, exist_ok=True)\n\n\t\t\tself.best_training_loss = None\n\t\t\tself.best_validation_loss = None",
"def __init__(self, logger, config):\n\n self._logger = logger\n self._config = config",
"def __init__(self, config, logger):\n self.cfg = config\n self.log = logger\n\n # This log line is here for illustrative purposes and is only active if you change config.default_log_level\n # to DEBUG in the code. Command-line options have not been processed yet so --verbose cannot take effect yet.\n self.log.debug(\"Base class __init__ executed.\")",
"def config(data_folder=settings.data_folder,\n logs_folder=settings.logs_folder,\n imgs_folder=settings.imgs_folder,\n cache_folder=settings.cache_folder,\n use_cache=settings.use_cache,\n log_file=settings.log_file,\n log_console=settings.log_console,\n log_level=settings.log_level,\n log_name=settings.log_name,\n log_filename=settings.log_filename,\n useful_tags_node=settings.useful_tags_node,\n useful_tags_path=settings.useful_tags_path,\n osm_xml_node_attrs=settings.osm_xml_node_attrs,\n osm_xml_node_tags=settings.osm_xml_node_tags,\n osm_xml_way_attrs=settings.osm_xml_way_attrs,\n osm_xml_way_tags=settings.osm_xml_way_tags,\n default_access=settings.default_access,\n default_crs=settings.default_crs,\n default_user_agent=settings.default_user_agent,\n default_referer=settings.default_referer,\n default_accept_language=settings.default_accept_language,\n nominatim_endpoint=settings.nominatim_endpoint,\n nominatim_key=settings.nominatim_key,\n overpass_endpoint=settings.overpass_endpoint):\n\n # set each global variable to the passed-in parameter value\n settings.use_cache = use_cache\n settings.cache_folder = cache_folder\n settings.data_folder = data_folder\n settings.imgs_folder = imgs_folder\n settings.logs_folder = logs_folder\n settings.log_console = log_console\n settings.log_file = log_file\n settings.log_level = log_level\n settings.log_name = log_name\n settings.log_filename = log_filename\n settings.useful_tags_node = useful_tags_node\n settings.useful_tags_path = useful_tags_path\n settings.useful_tags_node = list(set(useful_tags_node + osm_xml_node_attrs + osm_xml_node_tags))\n settings.useful_tags_path = list(set(useful_tags_path + osm_xml_way_attrs + osm_xml_way_tags))\n settings.osm_xml_node_attrs = osm_xml_node_attrs\n settings.osm_xml_node_tags = osm_xml_node_tags\n settings.osm_xml_way_attrs = osm_xml_way_attrs\n settings.osm_xml_way_tags = osm_xml_way_tags\n settings.default_access = default_access\n settings.default_crs = default_crs\n settings.default_user_agent = default_user_agent\n settings.default_referer = default_referer\n settings.default_accept_language = default_accept_language\n settings.nominatim_endpoint = nominatim_endpoint\n settings.nominatim_key = nominatim_key\n settings.overpass_endpoint = overpass_endpoint\n\n # if logging is turned on, log that we are configured\n if settings.log_file or settings.log_console:\n log('Configured osmnx')",
"def __init__(self, filename=None, directory=None, piece_width=25,\n piece_height=25, debug=False):\n super().__init__(filename)\n self.directory = directory\n\n self.piece_width = piece_width\n self.piece_height = piece_height\n self.palette = self.img.convert('P', palette=Image.ADAPTIVE, colors=16)\n self.regions_with_colors = self.get_avg_color_for_regions()\n\n # Logger\n self.the_logger = None if not debug else Logger(log_file_name='test_log_1.log')",
"def configure_loggers(self):\n self.logger_utils = {}\n if hasattr(self, 'train_metrics') and hasattr(self, 'val_metrics'):\n try:\n self.logger_utils['train_metrics'] = self.train_metrics.compute_update\n self.logger_utils['val_metrics'] = self.val_metrics.compute_update\n except:\n warn('{} is in-built, using pytorch-lightning metrics'.format(self.config.train.metrics))\n self.logger_utils['train_metrics'] = self.train_metrics.compute\n self.logger_utils['val_metrics'] = self.val_metrics.compute\n if 'TensorBoardLogger' in self.config.train.log.keys(): # TODO: Test image logger\n self.logger_utils['image'] = self._pre_process_image",
"def __init__(self, config, world, object_ID, x, y, image,\r\n horizontal_offset, vertical_offset):\r\n \r\n self.config = config\r\n self.world = world\r\n self.object_ID = object_ID\r\n self.x = x\r\n self.y = y\r\n self.image = image\r\n self.horizontal_offset = horizontal_offset\r\n self.vertical_offset = vertical_offset",
"def setup_logger_kwargs(exp_name, seed=None, data_dir=None, datestamp=False):\n\n # Datestamp forcing\n datestamp = datestamp or FORCE_DATESTAMP\n\n # Make base path\n ymd_time = time.strftime(\"%Y-%m-%d_\") if datestamp else ''\n relpath = ''.join([ymd_time, exp_name])\n\n if seed is not None:\n # Make a seed-specific subfolder in the experiment directory.\n if datestamp:\n hms_time = time.strftime(\"%Y-%m-%d_%H-%M-%S\")\n subfolder = ''.join([hms_time, '-', exp_name, '_s', str(seed)])\n else:\n subfolder = ''.join([exp_name, '_s', str(seed)])\n relpath = osp.join(relpath, subfolder)\n print(\"relative path: \", relpath)\n\n print(\"default data dir: \", DEFAULT_DATA_DIR)\n\n data_dir = data_dir or DEFAULT_DATA_DIR\n logger_kwargs = dict(output_dir=osp.join(data_dir, relpath),\n exp_name=exp_name)\n return logger_kwargs",
"def __init__(self, exp_params, stamp_unique=True):\n self._main_thread = True\n self.params = copy.deepcopy(exp_params)\n self.params['class'] = self.__class__.__name__\n self._check_required_params()\n self.__check_exist_path()\n self.__create_folder(stamp_unique)\n set_experiment_logger(self.params['path_exp'], FILE_LOGS)\n # set stream logging to info level\n for lh in logging.getLogger().handlers:\n if isinstance(lh, logging.StreamHandler) and \\\n not isinstance(lh, logging.FileHandler):\n lh.setLevel(logging.INFO)\n logging.info('initialise experiment...')\n logging.info(string_dict(self.params, 'PARAMETERS:'))\n logging.info('COMPUTER: %r', computer_info())",
"def __init__(self, pyconfig):\n self.pylot_cfg = pyconfig\n self.logfile = pyconfig.dir_logs + 'Pylot.log'",
"def __init__(self, *args, **kwargs):\n super(ModeratedGradingAPI, self).__init__(*args, **kwargs)\n self.logger = logging.getLogger(\"py3canvas.ModeratedGradingAPI\")",
"def configure_tracker(self, filename):\n\n try:\n config = open_configuration(filename) \n except IOError:\n eprint('File not found:', filename)\n exit(EX_CONFIG)\n\n # Parse tracker configuration..\n try:\n self.exportas = parse_str(config, 'tp-exportas')\n if not self.exportas in ['hdf', 'csv', 'json']:\n raise ValueError('<tp-exportas> must be either \\'hdf\\', \\'csv\\', or \\'json\\'')\n except ValueError as err:\n eprint('Invalid attribute: ', str(err), '.', sep='')\n exit(EX_CONFIG)\n except KeyError: pass\n \n try:\n self.locate_diameter = parse_int(config, 'tp-locate-diameter')\n if self.locate_diameter % 2 == 0:\n raise ValueError('<tp-locate-diameter> must be odd')\n except ValueError as err:\n eprint('Invalid attribute: ', str(err), '.', sep='')\n exit(EX_CONFIG)\n except KeyError:\n eprint('Attribute <tp-locate-diameter> is required.')\n exit(EX_CONFIG)\n \n try:\n self.locate_featuresdark = parse_bool(config, 'tp-locate-featuresdark')\n except ValueError as err:\n eprint('Invalid attribute: ', str(err), '.', sep='')\n exit(EX_CONFIG)\n except KeyError: pass\n \n try:\n self.locate_minmass = parse_int_or_float(config, 'tp-locate-minmass')\n if self.locate_minmass < 0:\n raise ValueError('<tp-locate-minmass> must be non-negative')\n except ValueError as err:\n eprint('Invalid attribute: ', str(err), '.', sep='')\n exit(EX_CONFIG)\n except KeyError: pass\n\n try:\n self.locate_maxsize = parse_int_or_float(config, 'tp-locate-maxsize')\n if self.locate_maxsize <= 0:\n raise ValueError('<tp-locate-maxsize> must be positive')\n except ValueError as err:\n eprint('Invalid attribute: ', str(err), '.', sep='')\n exit(EX_CONFIG)\n except KeyError: pass\n\n try:\n self.locate_separation = parse_float(config, 'tp-locate-separation')\n if self.locate_separation < 0:\n raise ValueError('<tp-locate-separation> must be non-negative')\n except ValueError as err:\n eprint('Invalid attribute: ', str(err), '.', sep='')\n exit(EX_CONFIG)\n except KeyError: pass\n\n try:\n self.locate_noisesize = parse_float(config, 'tp-locate-noisesize')\n if self.locate_noisesize <= 0:\n raise ValueError('<tp-locate-noisesize> must be positive')\n except ValueError as err:\n eprint('Invalid attribute: ', str(err), '.', sep='')\n exit(EX_CONFIG)\n except KeyError: pass\n\n try:\n self.locate_smoothingsize = parse_float(config, 'tp-locate-smoothingsize')\n if self.locate_smoothingsize <= 0:\n raise ValueError('<tp-locate-smoothingsize> must be positive')\n except ValueError as err:\n eprint('Invalid attribute: ', str(err), '.', sep='')\n exit(EX_CONFIG)\n except KeyError: pass\n\n try:\n self.locate_threshold = parse_int_or_float(config, 'tp-locate-threshold')\n if self.locate_threshold <= 0:\n raise ValueError('<tp-locate-threshold> must be positive')\n except ValueError as err:\n eprint('Invalid attribute: ', str(err), '.', sep='')\n exit(EX_CONFIG)\n except KeyError: pass\n\n try:\n self.locate_percentile = parse_float(config, 'tp-locate-percentile')\n if self.locate_percentile < 0 or self.locate_percentile >= 100.0:\n raise ValueError('<tp-locate-percentile> must be in the interval [0, 100)')\n except ValueError as err:\n eprint('Invalid attribute: ', str(err), '.', sep='')\n exit(EX_CONFIG)\n except KeyError: pass\n\n try:\n self.locate_topn = parse_int(config, 'tp-locate-topn')\n if self.locate_topn <= 0:\n raise ValueError('<tp-locate-topn> must be positive')\n except ValueError as err:\n eprint('Invalid attribute: ', str(err), '.', sep='')\n exit(EX_CONFIG)\n except KeyError: pass\n\n try:\n 
self.locate_preprocess = parse_bool(config, 'tp-locate-preprocess')\n except ValueError as err:\n eprint('Invalid attribute: ', str(err), '.', sep='')\n exit(EX_CONFIG)\n except KeyError: pass\n\n try:\n self.link_searchrange = parse_int_or_float(config, 'tp-link-searchrange')\n if self.link_searchrange <= 0:\n raise ValueError('<tp-link-searchrange> must be positive')\n except ValueError as err:\n eprint('Invalid attribute: ', str(err), '.', sep='')\n exit(EX_CONFIG)\n except KeyError:\n eprint('Attribute <tp-link-searchrange> is required.')\n exit(EX_CONFIG)\n\n try:\n self.link_memory = parse_int(config, 'tp-link-memory')\n if self.link_memory < 0:\n raise ValueError('<tp-link-memory> must be non-negative')\n except ValueError as err:\n eprint('Invalid attribute: ', str(err), '.', sep='')\n exit(EX_CONFIG)\n except KeyError: pass\n\n try:\n self.link_predict = parse_bool(config, 'tp-link-predict')\n except ValueError as err:\n eprint('Invalid attribute: ', str(err), '.', sep='')\n exit(EX_CONFIG)\n except KeyError: pass\n\n try:\n self.link_adaptivestop = parse_float(config, 'tp-link-adaptivestop')\n if self.link_adaptivestop <= 0:\n raise ValueError('<tp-link-adaptivestop> must be positive')\n except ValueError as err:\n eprint('Invalid attribute: ', str(err), '.', sep='')\n exit(EX_CONFIG)\n except KeyError: pass\n\n try:\n self.link_adaptivestep = parse_float(config, 'tp-link-adaptivestep')\n if self.link_adaptivestep <= 0 or self.link_adaptivestep >= 1.0:\n raise ValueError('<tp-link-adaptivestep> must be in the interval (0.0, 1.0)')\n except ValueError as err:\n eprint('Invalid attribute: ', str(err), '.', sep='')\n exit(EX_CONFIG)\n except KeyError: pass\n\n try:\n self.filter_stubs_threshold = parse_int(config, 'tp-filter-st-threshold')\n if self.filter_stubs_threshold <= 0:\n raise ValueError('<tp-filter-st-threshold> must be positive')\n except ValueError as err:\n eprint('Invalid attribute: ', str(err), '.', sep='')\n exit(EX_CONFIG)\n except KeyError: pass\n \n try:\n self.filter_clusters_quantile = parse_float(config, 'tp-filter-cl-quantile')\n if self.filter_clusters_quantile <= 0 or self.filter_clusters_quantile >= 1.0:\n raise ValueError('<tp-filter-cl-quantile> must be in the interval (0.0, 1.0)')\n except ValueError as err:\n eprint('Invalid attribute: ', str(err), '.', sep='')\n exit(EX_CONFIG)\n except KeyError: pass\n\n try:\n self.filter_clusters_threshold = parse_int(config,\n 'tp-filter-cl-threshold')\n if self.filter_clusters_threshold <= 0:\n raise ValueError('<tp-filter-cl-threshold> must be positive')\n except ValueError as err:\n eprint('Invalid attribute: ', str(err), '.', sep='')\n exit(EX_CONFIG)\n except KeyError: pass\n \n # Parse jobs..\n self.jobs = configure_jobs(config['jobs'])\n if len(self.jobs) == 0:\n eprint('No job specified!')\n exit(EX_CONFIG)",
"def __init__ ( self,\n image_source=None,\n draw_each_event = 0,\n dark_img_file = None,\n output_file = None,\n plot_vrange = None,\n threshold = None,\n thr_area = None ):\n\n # initializations from argument list\n self.img_addr = image_source\n print \"Using image_source = \", self.img_addr\n\n self.draw_each_event = bool(draw_each_event)\n if self.draw_each_event and ( draw_each_event == \"No\" or\n draw_each_event == \"0\" or\n draw_each_event == \"False\" ) : self.draw_each_event = False\n print \"Using draw_each_event = \", self.draw_each_event\n\n\n self.dark_img_file = dark_img_file\n if dark_img_file == \"\" or dark_img_file == \"None\" : self.dark_img_file = None\n print \"Using dark image file: \", self.dark_img_file\n\n self.output_file = output_file\n if output_file == \"\" or output_file == \"None\" : self.output_file = None\n print \"Using output_file: \", self.output_file\n\n self.plot_vmin = None\n self.plot_vmax = None\n if plot_vrange is not None and plot_vrange is not \"\" : \n self.plot_vmin = float(plot_vrange.split(\"-\")[0])\n self.plot_vmax = float(plot_vrange.split(\"-\")[1])\n print \"Using plot_vrange = %f-%f\"%(self.plot_vmin,self.plot_vmax)\n\n self.threshold = None\n if threshold is not None :\n self.threshold = float(threshold)\n print \"Using threshold value \", self.threshold\n\n # subset of image where threshold is applied\n self.thr_area = None\n if thr_area is not None: \n self.thr_area = np.array([0.,0.,0.,0.])\n for i in range (4) : self.thr_area[i] = float(thr_area.split(\",\")[i])\n print \"Using threshold region \", self.thr_area\n\n # initializations of other class variables\n\n # sum of image data\n self.img_data = None\n\n # these will be plotted too\n self.lolimits = []\n self.hilimits = []\n\n # to keep track\n self.n_events = 0\n self.n_img = 0\n\n # load dark image\n self.dark_image = None\n if self.dark_img_file is None :\n print \"No dark-image file provided. The images will not be background subtracted.\"\n else :\n print \"Loading dark image from \", self.dark_img_file\n self.dark_image = np.load(self.dark_img_file)",
"def __init__(self, frame, template, **kwargs):\n\n super(MDParticleFilter, self).__init__(frame, template, **kwargs) # call base class constructor\n\n self.scale_min = kwargs.get('scale_min', 50)\n self.scale_max = kwargs.get('scale_max', 100)\n self.scale_sigma = kwargs.get('scale_sigma', 3)\n self.scale_mean = kwargs.get('scale_mean', 0)\n\n scales = np.random.randint(self.scale_min, self.scale_max+1, self.num_particles)\n\n self.particles = np.insert(self.particles, 2, scales, axis=1)",
"def _CreateBaseDrawConfig( self, ds_range, **kwargs ):\n font_scale = kwargs.get( 'font_scale', 1.0 )\n\n# -- Determine widget size\n# --\n if 'size' in kwargs:\n wd, ht = kwargs[ 'size' ]\n if self.logger.isEnabledFor( logging.DEBUG ):\n self.logger.debug( '%s: size=%d,%d', self.GetTitle(), wd, ht )\n else:\n wd = 1280\n ratio = float( self.cellRange[ -1 ] ) / float( self.cellRange[ -2 ] )\n if ratio >= 3.0:\n ratio = min( ratio, 3.0 )\n ht = 1900\n wd = int( math.ceil( ht / ratio ) )\n else:\n ht = int( math.ceil( wd * ratio ) )\n kwargs[ 'size' ] = ( wd, ht )\n\n# -- Get fonts, scaled if necessary\n# --\n if font_scale == 1.0:\n font_size = self.font.GetPointSize()\n font = self.font\n label_font = self.labelFont\n value_font = self.valueFont\n else:\n font_size = int( self.font.GetPointSize() * font_scale )\n font = self.font.Scaled( font_scale )\n label_font = self.labelFont.Scaled( font_scale )\n value_font = self.valueFont.Scaled( font_scale )\n\n# -- Create Mapper\n# --\n scale_type = kwargs.get( 'scale_type', 'linear' )\n# class_name = \\\n# 'colors.LogNorm' if scale_type == 'log' else \\\n# 'colors.Normalize'\n# params = '( vmin = ds_range[ 0 ], vmax = ds_range[ 1 ], clip = True )'\n# norm = eval( class_name + params )\n if scale_type == 'log':\n norm = colors.LogNorm(\n vmin = max( ds_range[ 0 ], 1.0e-16 ),\n vmax = max( ds_range[ 1 ], 1.0e-16 ),\n clip = True\n )\n else:\n norm = colors.Normalize(\n vmin = ds_range[ 0 ], vmax = ds_range[ 1 ], clip = True\n )\n cmap_name = kwargs.get( 'colormap_name', self.colormapName )\n mapper = cm.ScalarMappable(\n norm = norm,\n cmap = cm.get_cmap( cmap_name ) # Config.defaultCmapName_\n )\n\n# -- Create legend\n# --\n if self.showLegend:\n legend_bmap = self._CreateLegendBitmap(\n ds_range,\n font_size = font_size,\n mapper = mapper,\n ntick_values = 8,\n scale_type = scale_type,\n title = kwargs.get( 'legend_title' )\n )\n# legend_bmap = self._CreateLegendBitmap(\n# ds_range, font_size,\n# gray = kwargs.get( 'gray', False ),\n# scale_type = kwargs.get( 'scale_type', 'linear' ),\n# title = kwargs.get( 'legend_title' )\n# )\n legend_size = ( legend_bmap.GetWidth(), legend_bmap.GetHeight() )\n else:\n legend_bmap = None\n legend_size = ( 0, 0 )\n\n# -- Calculate label size\n# --\n dc = wx.MemoryDC()\n dc.SelectObject( self.emptyBitmap )\n dc.SetFont( label_font )\n if self.showLabels:\n #dc = wx.MemoryDC()\n #dc.SelectObject( self.emptyBitmap )\n #dc.SetFont( label_font )\n label_size = dc.GetTextExtent( \"99\" )\n #dc.SelectObject( wx.NullBitmap )\n else:\n label_size = ( 0, 0 )\n\n dc.SetFont( font )\n font_extent = dc.GetTextExtent( 'X' )\n dc.SelectObject( wx.NullBitmap )\n\n# -- Create dict\n# --\n config = \\\n {\n 'clientSize': kwargs[ 'size' ],\n 'dataRange': ds_range,\n 'font': font,\n# 'fontSmall': font_small,\n 'fontExtent': font_extent,\n 'fontSize': font_size,\n 'labelFont': label_font,\n 'labelSize': label_size,\n 'legendBitmap': legend_bmap,\n 'legendSize': legend_size,\n 'mapper': mapper,\n 'valueFont': value_font\n }\n# if 'size' in kwargs:\n# config[ 'clientSize' ] = kwargs[ 'size' ]\n\n return config",
"def __init__(self, prob_dist, samples, store_logs: bool = ...):\n ...",
"def __init__(self, config, world, object_ID, x, y, direction,\r\n counter_offset, age, images, gender, parent1=None, parent2=None):\r\n \r\n self.config = config\r\n self.world = world\r\n self.object_ID = object_ID\r\n self.x = x\r\n self.y = y\r\n self.direction = direction\r\n self.energy = self.config.initial_energy\r\n self.agent = Agent(self.config, parent1, parent2)\r\n self.images = images\r\n self.heart_image = get_heart()\r\n self.iteration_counter = counter_offset # counter used for animations and aging\r\n self.age = age\r\n self.heart_countdown = 0\r\n self.gender = gender",
"def __init__(self, name, config):\n self.name = name\n self.config = config\n self.logger = logging.getLogger(name)\n if 'type' not in config:\n self.config['type'] = DEFAULT_BACKUP_TYPE\n elif config['type'] not in SUPPORTED_BACKUP_TYPES:\n self.logger.error('Unknown dump type: %s', config['type'])\n sys.exit(-1)\n if 'retention' not in config:\n self.config['retention'] = DEFAULT_RETENTION_DAYS\n else:\n self.config['retention'] = int(config['retention'])"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
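The setup() in the row above only validates the 'bandpass' sub-dict of the stamp config: 'throughput' and 'wave_type' are required, 'red_limit', 'blue_limit' and 'zeropoint' are optional, and any other key raises a ValueError. A sketch of a config fragment that would pass that validation; the type name, concrete values and file name are illustrative assumptions rather than values taken from the row.

stamp_config = {
    'type': 'COSMOSChromatic',        # assumed registered name of this stamp type
    'bandpass': {
        'throughput': 'LSST_r.dat',   # required: file or formula accepted by galsim.Bandpass
        'wave_type': 'nm',            # required: wavelength unit of the throughput table
        'blue_limit': 550.0,          # optional
        'red_limit': 700.0,           # optional
    },
    # ... plus the usual size/position fields handled by the base-class setup ...
}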
Before drawing the profile, see whether this object can be trivially skipped. The base method checks if the object is completely off the main image, so the intersection bounds will be undefined. In this case, don't bother drawing the postage stamp for this object. Parameters: prof - the profile to draw; image - the image onto which to draw the profile (which may be None); method - the method to use in drawImage; offset - the offset to apply when drawing; config - the configuration dict for the stamp field; base - the base configuration dict; logger - if given, a logger object to log progress. Returns: whether to skip drawing this object. | def updateSkip(self, prof, image, method, offset, config, base, logger):
# NOTE: There are currently unresolved issues with the image size checking of chromatic
# objects. For now, we ignore any possible speed increases and skip the check.
# if isinstance(prof, galsim.ChromaticObject):
# return False
if prof is not None and base.get('current_image',None) is not None:
if image is None:
prof = base['wcs'].toImage(prof, image_pos=base['image_pos'])
# NOTE: Old version:
# N = prof.getGoodImageSize(1.)
if isinstance(prof, galsim.GSObject):
N = prof.getGoodImageSize(1.)
elif isinstance(prof, galsim.ChromaticObject):
# TODO: Finish implementation
# return False
pudb.set_trace()
# Find the suggested image size for each object given the choice of scale, and use the
# maximum just to be safe.
print '\nprof.original = {}'.format(prof.original)
print '\nprof.original.obj_list = {}'.format(prof.original.obj_list)
# print '\nprof.objlist = {}'.format(prof.original.obj_list)
obj_list = prof.original.obj_list
possible_im_sizes = []
for obj in obj_list:
print '\n obj : {}'.format(obj)
possible_im_sizes.append([ ob.getGoodImageSize(1.) for ob in obj])
print 'possible_im_sizes : {}'.format(possible_im_sizes)
N = np.max(possible_im_sizes)
N += 2 + int(np.abs(offset.x) + np.abs(offset.y))
bounds = galsim._BoundsI(1,N,1,N)
else:
bounds = image.bounds
# Set the origin appropriately
stamp_center = base['stamp_center']
if stamp_center:
bounds = bounds.shift(stamp_center - bounds.center)
else:
bounds = bounds.shift(base.get('image_origin',galsim.PositionI(1,1)) -
galsim.PositionI(bounds.xmin, bounds.ymin))
overlap = bounds & base['current_image'].bounds
if not overlap.isDefined():
logger.info('obj %d: skip drawing object because its image will be entirely off '
'the main image.', base['obj_num'])
return True
return False | [
"def draw(self, prof, image, method, offset, config, base, logger, **kwargs):\n # ... draw prof onto the given image (making a new Image if necessary)\n if prof is None:\n return image\n else:\n logger = galsim.config.LoggerWrapper(logger)\n # Setup the kwargs to pass to drawImage\n # (Start with any additional kwargs given as extra kwargs to DrawBasic and add to it.)\n kwargs['image'] = image\n kwargs['offset'] = offset\n kwargs['method'] = method\n if 'wmult' in config and 'wmult' not in kwargs: # pragma: no cover\n kwargs['wmult'] = galsim.config.ParseValue(config, 'wmult', base, float)[0]\n if 'wcs' not in kwargs and 'scale' not in kwargs:\n kwargs['wcs'] = base['wcs'].local(image_pos = base['image_pos'])\n if method == 'phot' and 'rng' not in kwargs:\n kwargs['rng'] = galsim.config.GetRNG(config, base, logger, \"method='phot'\")\n\n # Check validity of extra phot options:\n max_extra_noise = None\n if 'n_photons' in config and 'n_photons' not in kwargs:\n if method != 'phot':\n raise AttributeError('n_photons is invalid with method != phot')\n if 'max_extra_noise' in config:\n logger.warning(\n \"Both 'max_extra_noise' and 'n_photons' are set in config dict, \"+\n \"ignoring 'max_extra_noise'.\")\n kwargs['n_photons'] = galsim.config.ParseValue(config, 'n_photons', base, int)[0]\n elif 'max_extra_noise' in config:\n max_extra_noise = galsim.config.ParseValue(config, 'max_extra_noise', base, float)[0]\n if method != 'phot' and max_extra_noise is not None:\n raise AttributeError('max_extra_noise is invalid with method != phot')\n\n if 'poisson_flux' in config and 'poisson_flux' not in kwargs:\n if method != 'phot':\n raise AttributeError('poisson_flux is invalid with method != phot')\n kwargs['poisson_flux'] = galsim.config.ParseValue(config, 'poisson_flux', base, bool)[0]\n\n if max_extra_noise is not None and 'max_extra_noise' not in kwargs:\n if max_extra_noise < 0.:\n raise ValueError(\"image.max_extra_noise cannot be negative\")\n if 'image' in base and 'noise' in base['image']:\n noise_var = galsim.config.CalculateNoiseVariance(base)\n else:\n raise AttributeError(\"Need to specify noise level when using max_extra_noise\")\n if noise_var < 0.:\n raise ValueError(\"noise_var calculated to be < 0.\")\n max_extra_noise *= noise_var\n kwargs['max_extra_noise'] = max_extra_noise\n\n if logger.isEnabledFor(logging.DEBUG):\n # Don't output the full image array. Use str(image) for that kwarg.\n alt_kwargs = dict([(k,str(kwargs[k]) if isinstance(kwargs[k],galsim.Image) else kwargs[k])\n for k in kwargs])\n logger.debug('obj %d: drawImage kwargs = %s',base.get('obj_num',0), alt_kwargs)\n logger.debug('obj %d: prof = %s',base.get('obj_num',0),prof)\n try:\n # NOTE: Old version:\n # image = prof.drawImage(**kwargs)\n if isinstance(prof, galsim.GSObject):\n image = prof.drawImage(**kwargs)\n elif isinstance(prof, galsim.ChromaticObject):\n bp = {}\n for key in (self._req_bp_fields+self._opt_bp_fields):\n try:\n bp[key] = config['bandpass'][key]\n except KeyError:\n bp[key] = None\n\n bandpass = galsim.Bandpass(blue_limit=bp['blue_limit'], red_limit=bp['red_limit'],\n wave_type=bp['wave_type'], throughput=bp['throughput'],\n zeropoint=bp['zeropoint'])\n\n image = prof.drawImage(bandpass=bandpass, **kwargs)\n\n except Exception as e: # pragma: no cover\n logger.debug('obj %d: prof = %r', base.get('obj_num',0), prof)\n raise\n return image",
"def add_profile(self, skip_divisor=32, ws_in_knots=True, **kwargs):\n\n # I must be a dummy because I can't make\n # this work any other way!!\n if 'bloc' in kwargs:\n bloc = kwargs.pop('bloc')\n else:\n bloc = 0.5\n\n try:\n pres = ma.masked_invalid(self.soundingdata['pres'])\n except KeyError:\n raise KeyError(\"Pres in hPa (PRES) is required!\")\n\n try:\n tc = ma.masked_invalid(self.soundingdata['temp'])\n except KeyError:\n raise KeyError(\"Temperature in C (TEMP) is required!\")\n\n try:\n dwpt = ma.masked_invalid(self.soundingdata['dwpt'])\n except KeyError:\n print(\"Warning: No DWPT available\")\n dwpt = ma.masked_array(zeros(pres.shape), mask=True)\n\n try:\n sknt = self.soundingdata['sknt']\n drct = self.soundingdata['drct']\n rdir = (270.-drct)*(pi/180.)\n\n if ws_in_knots:\n uu = ma.masked_invalid(sknt*cos(rdir))\n vv = ma.masked_invalid(sknt*sin(rdir))\n else:\n uu = ma.masked_invalid(.514444 * sknt * cos(rdir))\n vv = ma.masked_invalid(.514444 * sknt * sin(rdir))\n except KeyError:\n print(\"Warning: No SKNT/DRCT available\")\n uu = ma.masked_array(zeros(pres.shape), mask=True)\n vv = ma.masked_array(zeros(pres.shape), mask=True)\n\n tcprof = self.skewxaxis.plot(tc, pres, zorder=5, **kwargs)\n dpprof = self.skewxaxis.plot(dwpt, pres, zorder=5, ls='--', **kwargs)\n\n # this line should no longer cause an exception\n nbarbs = (~uu.mask).sum()\n\n skip = max(1, int(nbarbs//skip_divisor))\n\n if 'color' in kwargs:\n bcol = kwargs['color']\n else:\n bcol = 'k'\n\n if 'alpha' in kwargs:\n balph = kwargs['alpha']\n else:\n balph = 1.\n\n self.wbax.barbs((zeros(pres.shape)+bloc)[::skip]-0.5, pres[::skip],\n uu[::skip], vv[::skip],\n length=5, color=bcol, alpha=balph, lw=0.5)\n\n self.skewxaxis.other_housekeeping()\n\n return tcprof",
"def testDiagonalProfile(self):\n # Use Plot backend widget to submit mouse events\n widget = self.plot.getWidgetHandle()\n\n self.plot.addImage(\n numpy.arange(100 * 100).reshape(100, -1))\n\n for method in ('sum', 'mean'):\n with self.subTest(method=method):\n # 2 positions to use for mouse events\n pos1 = widget.width() * 0.4, widget.height() * 0.4\n pos2 = widget.width() * 0.6, widget.height() * 0.6\n\n # Trigger tool button for diagonal profile mode\n self.toolBar.lineAction.trigger()\n\n # draw profile line\n widget.setFocus(qt.Qt.OtherFocusReason)\n self.mouseMove(widget, pos=pos1)\n self.qWait(100)\n self.mousePress(widget, qt.Qt.LeftButton, pos=pos1)\n self.qWait(100)\n self.mouseMove(widget, pos=pos2)\n self.qWait(100)\n self.mouseRelease(widget, qt.Qt.LeftButton, pos=pos2)\n self.qWait(100)\n\n manager = self.toolBar.getProfileManager()\n\n for _ in range(20):\n self.qWait(200)\n if not manager.hasPendingOperations():\n break\n\n roi = manager.getCurrentRoi()\n self.assertIsNotNone(roi)\n roi.setProfileLineWidth(3)\n roi.setProfileMethod(method)\n\n for _ in range(20):\n self.qWait(200)\n if not manager.hasPendingOperations():\n break\n\n curveItem = roi.getProfileWindow().getCurrentPlotWidget().getAllCurves()[0]\n if method == 'sum':\n self.assertTrue(curveItem.getData()[1].max() > 10000)\n elif method == 'mean':\n self.assertTrue(curveItem.getData()[1].max() < 10000)\n\n # Remove the ROI so the profile window is also removed\n roiManager = manager.getRoiManager()\n roiManager.removeRoi(roi)\n self.qWait(100)",
"def merge_profile(prof1, prof2):\r\n new_t = []\r\n new_l = []\r\n new_sub_profile = []\r\n #merge common(same object) opt\r\n for l in set(prof1[0]).intersection(set(prof2[0])):\r\n idx1 = prof1[0].index(l)\r\n idx2 = prof2[0].index(l)\r\n new_t.append(prof1[1][idx1] +\r\n prof2[1][idx2])\r\n new_l.append(l)\r\n if hasattr(l, 'merge_profile'):\r\n assert len(prof1[6][idx1]) == len(prof2[6][idx2])\r\n new_sub_profile.append(l.merge_profile(prof1[6][idx1],\r\n prof2[6][idx2]))\r\n else:\r\n new_sub_profile.append(None)\r\n\r\n # merge not common opt\r\n from theano.compat.six import StringIO\r\n for l in set(prof1[0]).symmetric_difference(set(prof2[0])):\r\n #The set trick above only work for the same object optimization\r\n #It don't work for equivalent optimization.\r\n #So we try to merge equivalent optimization here.\r\n new_l_names = [o.name for o in new_l]\r\n if l.name in new_l_names:\r\n idx = new_l_names.index(l.name)\r\n io1 = StringIO()\r\n io2 = StringIO()\r\n l.print_summary(io1)\r\n new_l[idx].print_summary(io2)\r\n if io1.read() == io2.read():\r\n if l in prof1[0]:\r\n p = prof1\r\n else:\r\n p = prof2\r\n new_t[idx] += p[1][p[0].index(l)]\r\n if hasattr(l, 'merge_profile'):\r\n assert len(p[6][p[0].index(l)]) == \\\r\n len(new_sub_profile[idx])\r\n new_sub_profile[idx] = l.merge_profile(\r\n new_sub_profile[idx], p[6][p[0].index(l)])\r\n else:\r\n new_sub_profile[idx] = None\r\n continue\r\n if l in prof1[0]:\r\n p = prof1\r\n else:\r\n p = prof2\r\n new_t.append(p[1][p[0].index(l)])\r\n idx = p[0].index(l)\r\n new_l.append(l)\r\n new_sub_profile.append(p[6][idx])\r\n\r\n new_opt = SeqOptimizer(*new_l)\r\n #We need to assert based on the name as we merge also based on\r\n #the name.\r\n assert set([l.name for l in prof1[0]]).issubset(\r\n set([l.name for l in new_l]))\r\n assert set([l.name for l in prof2[0]]).issubset(\r\n set([l.name for l in new_l]))\r\n assert len(new_t) == len(new_opt) == len(new_sub_profile)\r\n return (new_opt, new_t, prof1[2] + prof2[2],\r\n prof1[3] + prof2[3],\r\n -1, -1, new_sub_profile, [])",
"def test_profiler(self):\n cmdline = [\n \"starfish\",\n \"--profile\",\n \"noop\",\n ]\n if cmdline[0] == 'starfish':\n coverage_cmdline = [\n \"coverage\", \"run\",\n \"-p\",\n \"--source\", \"starfish\",\n \"-m\", \"starfish.starfish\",\n ]\n coverage_cmdline.extend(cmdline[1:])\n cmdline = coverage_cmdline\n env = os.environ.copy()\n env[PROFILER_NOOP_ENVVAR] = \"\"\n subprocess.check_call(cmdline, env=env)",
"def testAlignedProfile(self):\n # Use Plot backend widget to submit mouse events\n widget = self.plot.getWidgetHandle()\n for method in ('sum', 'mean'):\n with self.subTest(method=method):\n # 2 positions to use for mouse events\n pos1 = widget.width() * 0.4, widget.height() * 0.4\n pos2 = widget.width() * 0.6, widget.height() * 0.6\n\n for action in (self.toolBar.hLineAction, self.toolBar.vLineAction):\n with self.subTest(mode=action.text()):\n # Trigger tool button for mode\n action.trigger()\n # Without image\n self.mouseMove(widget, pos=pos1)\n self.mouseClick(widget, qt.Qt.LeftButton, pos=pos1)\n\n # with image\n self.plot.addImage(\n numpy.arange(100 * 100).reshape(100, -1))\n self.mousePress(widget, qt.Qt.LeftButton, pos=pos1)\n self.mouseMove(widget, pos=pos2)\n self.mouseRelease(widget, qt.Qt.LeftButton, pos=pos2)\n\n self.mouseMove(widget)\n self.mouseClick(widget, qt.Qt.LeftButton)\n\n manager = self.toolBar.getProfileManager()\n for _ in range(20):\n self.qWait(200)\n if not manager.hasPendingOperations():\n break",
"def test_remove_spawning_profile():\n center = Coordinates(1 , 1)\n radius = 10\n speed_limit = 20\n\n i = Intersection(center, radius, speed_limit)\n\n default_driver = DriverProfile(\"Default\", 8, 2, 2, 0, 30, 3, 1)\n default_vehicle = VehicleProfile(\"Default\", 5, 15, 2, 2, 1000, 65)\n default_spawn = SpawningProfile(\"Default\", default_driver, default_vehicle)\n spawn2 = SpawningProfile(\"spawn2\", default_driver, default_vehicle)\n spawn_not_in_list = SpawningProfile(\"spawn3\", default_driver, default_vehicle)\n\n i.add_spawning_profile(default_spawn)\n i.add_spawning_profile(spawn2)\n\n assert len(i.get_spawning_profile_list()) == 2\n\n i.remove_spawning_profile(spawn_not_in_list)\n\n assert len(i.get_spawning_profile_list()) == 2\n\n i.remove_spawning_profile(spawn2)\n\n assert len(i.get_spawning_profile_list()) == 1\n\n i.remove_spawning_profile(default_spawn)\n\n assert len(i.get_spawning_profile_list()) == 0\n assert not i.get_spawning_profile_list()",
"def _profile(self) -> None:\n if self.use_case.profile:\n if self._profile_stats is None:\n self._profile_stats = pstats.Stats()\n if self._current_profiler is not None:\n self._current_profiler.disable()\n self._profile_stats.add(self._current_profiler)\n # TODO: use clear() instead of always creating a new profile\n self._current_profiler = cProfile.Profile()\n self._current_profiler.enable()",
"def test_add_spawning_profile():\n center = Coordinates(1 , 1)\n radius = 10\n speed_limit = 20\n\n i = Intersection(center, radius, speed_limit)\n\n assert not i.get_spawning_profile_list()\n\n default_driver = DriverProfile(\"Default\", 8, 2, 2, 0, 30, 3, 1)\n default_vehicle = VehicleProfile(\"Default\", 5, 15, 2, 2, 1000, 65)\n default_spawn = SpawningProfile(\"Default\", default_driver, default_vehicle)\n spawn2 = SpawningProfile(\"spawn2\", default_driver, default_vehicle)\n\n i.add_spawning_profile(default_spawn)\n\n assert i.get_spawning_profile_list()\n assert len(i.get_spawning_profile_list()) == 1\n\n i.add_spawning_profile(spawn2)\n\n assert len(i.get_spawning_profile_list()) == 2",
"def test_remove_spawning_profile_from_intersection():\n tester = TestClass()\n intersections = tester.add_spawning_profile_to_intersection()\n\n for i in intersections:\n if len(i.get_spawning_profile_list()) != 0:\n assert True\n\n for spawn in i.get_spawning_profile_list():\n if spawn.get_spawning_profile_name() == 'Default':\n assert True\n break\n\n tester.delete_spawning_profile_from_intersection()\n\n for i in intersections:\n if len(i.get_spawning_profile_list()) == 0:\n assert True",
"def profile_start(self):\n\n # Start Profiling\n self.pr = cProfile.Profile()\n self.pr.enable()",
"def paintAvatar(self):\n self.paintBody()\n self.paintShoes()\n if self.avatarConfiguration[\"gender\"] == \"boy\":\n self.paintShirt()\n self.paintTrousers()\n else:\n self.paintSkirt()\n self.paintHead()\n self.paintHair()\n self.paintMask()",
"def plt_profile(body, profile_type, res_dir, sub_dir=None, fname=None):\n if fname is None:\n fname = body.name + '-profile-' + profile_type\n # 2D\n if profile_type == PltOpts.DD.value:\n plt_2d(body.x_axis, body.x_profile, body.x_label, body.y_label,\n (body.name + '-profile'), res_dir,\n sub_dir, fname)\n # 3D\n elif profile_type == PltOpts.DDD.value:\n plt_3d(body.x_axis, body.y_axis, body.profile, body.x_label,\n body.y_label, 'profile in mm',\n (body.name + '-profile'), res_dir, sub_dir, fname)",
"def profile_stop(self):\n # If profiling is activated:\n if self.pr is None:\n return False\n\n self.pr.disable()\n s = StringIO.StringIO()\n sortby = 'tottime'\n ps = pstats.Stats(self.pr, stream=s).sort_stats(sortby)\n ps.print_stats()\n\n idaapi.msg(\"%s\\n\" % (s.getvalue(), ))",
"def run(self, draw=None, setup=None, update=None, frames=100, sort=CUMULATIVE, top=30):\n # Register the setup, draw, update functions with the canvas (if given).\n if isinstance(setup, FunctionType):\n self.canvas.set_method(setup, name=\"setup\")\n if isinstance(draw, FunctionType):\n self.canvas.set_method(draw, name=\"draw\")\n if isinstance(update, FunctionType):\n self.canvas.set_method(update, name=\"update\")\n # If enabled, turn Psyco off.\n psyco_stopped = False\n try: \n psyco.stop()\n psyco_stopped = True\n except:\n pass\n # Set the current canvas and the number of frames to profile.\n # The profiler will then repeatedly execute canvas._update() and canvas._draw().\n # Statistics are redirected from stdout to a temporary file.\n global _profile_canvas, _profile_frames\n _profile_canvas = self.canvas\n _profile_frames = frames\n import cProfile\n import pstats\n cProfile.run(\"profile_run()\", \"_profile\")\n p = pstats.Stats(\"_profile\")\n p.stream = open(\"_profile\", \"w\")\n p.sort_stats(sort==SLOWEST and \"time\" or sort).print_stats(top)\n p.stream.close()\n s = open(\"_profile\").read()\n remove(\"_profile\")\n # Restart Psyco if we stopped it.\n if psyco_stopped:\n psyco.profile()\n return s",
"def plt_profile_approx(res_dir, sub_dir):\n for body in gc.get_objects():\n if isinstance(body, ContactBody):\n if body.type_profile == \"File\":\n fname = \"profile-approximation-{}\".format(body.name)\n plt_2d_scatt_line(body.file_x_axis, body.file_x_profile,\n body.x_axis, body.x_profile,\n body.x_label, body.z_label, fname, res_dir,\n sub_dir, fname)",
"def computeBGprofile(self, untfilename, verbal=True, **kwargs):\n \n if self.minreadcoverage is None:\n raise AttributeError('minreadcoverage not set!')\n\n\n bgrate, bgdepth = aFunc.compute1Dprofile(untfilename, self.seqlen, self.minreadcoverage)\n \n\n if self.profile is None:\n self.profile = ReactivityProfile()\n elif verbal:\n print('Overwriting bgrate from values computed from the raw mutation file {}'.format(untfilename))\n \n\n self.profile.backprofile = bgrate\n \n # reset rawprofile as well\n with np.errstate(divide='ignore',invalid='ignore'):\n self.profile.rawprofile = np.sum(self.mutations, axis=0, dtype=float)/np.sum(self.reads, axis=0)\n self.profile.backgroundSubtract(normalize=False)",
"def Profiles(self, ax, p, data, ind, loc='', xloc=[], turb_geo=[],scatter=True):\r\n #TODO找最近的索引值,避免插值\r\n zmax=np.shape(data)[2]\r\n marker=['o','s','v','3','*','h','+','D',\r\n '.','_','|','1','2','3','4','x']\r\n if loc == 'y':\r\n points=np.zeros((len(xloc),p.ny,3))\r\n for m in range(len(xloc)):\r\n points[m,:,0]=xloc[m]\r\n for n in range(p.ny):\r\n points[:,n,1]=p.y[n]\r\n points[:,:,2]=ind\r\n u=interpn([p.x,p.y,p.z_uv[0:zmax]],data,points,bounds_error=False)\r\n for i in range(len(xloc)):\r\n label = \"H:x=\"+str(int(round((xloc[i]-turb_geo[0])/turb_geo[1]))) + 'D'\r\n if scatter==True:\r\n self.Profile(ax, p, u[i, :], loc=loc, \r\n label=label,marker=marker[i])\r\n else:\r\n self.Profile(ax, p, u[i, :], loc=loc, \r\n label=label)\r\n if loc == 'z':\r\n points=np.zeros((len(xloc),zmax,3))\r\n for m in range(len(xloc)):\r\n points[m,:,0]=xloc[m]\r\n points[:,:,1]=ind\r\n for n in range(zmax):\r\n points[:,n,2]=p.z_uv[n]\r\n u=interpn([p.x,p.y,p.z_uv[0:zmax]],data,points,bounds_error=False)\r\n for i in range(len(xloc)):\r\n label = \"V:x=\"+str(int(round((xloc[i]-turb_geo[0])/turb_geo[1]))) + 'D'\r\n if scatter==True:\r\n self.Profile(ax, p, u[i, :], loc=loc, \r\n label=label, linestyle='--',marker=marker[i])\r\n else:\r\n self.Profile(ax, p, u[i, :], loc=loc, \r\n label=label,linestyle='--')",
"def profile_noInter(x, y, v, point1, point2, size=None, **kwargs):\n # https://stackoverflow.com/questions/7878398/how-to-extract-an-arbitrary-line-of-values-from-a-numpy-array\n\n Xaxis = []\n showfig = False\n for key, value in kwargs.items():\n if key == \"Xaxis\":\n Xaxis = value\n if key == \"showfig\":\n showfig = value\n\n x1, y1 = point1\n x2, y2 = point2\n maxdist = np.sqrt((x1 - x2) ** 2 + (y1 - y2) ** 2)\n\n if size is None:\n size = len(v)\n\n distances = np.linspace(0, maxdist, size)\n angle = np.arctan2(y2 - y1, x2 - x1)\n xp = x1 + distances * np.cos(angle)\n yp = y1 + distances * np.sin(angle)\n\n if Xaxis == \"dist\":\n xaxis = distances\n elif Xaxis == \"y\":\n xaxis = xp\n else:\n xaxis = yp\n\n nodes = np.array([x, y]).T\n points_p = np.array([xp, yp]).T\n # find nearest point\n\n # from progressbar import ProgressBar\n # pbar = ProgressBar()\n # vp = []\n # for p in pbar(points_p):\n # ind = _closest_node(p, nodes)\n # vp.append(v[ind])\n\n # from progressbar import ProgressBar\n # pbar = ProgressBar()\n vp = []\n for p in points_p:\n ind = _closest_node(p, nodes)\n vp.append(v[ind])\n\n vp_smooth_dict = _smooth_allfcts(xaxis, vp, showfig)\n\n return xp, yp, distances, vp, vp_smooth_dict"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Draw the profile on the postage stamp image. This is a slightly modified version of `stamp.DrawBasic()` which allows drawing of chromatic objects. prof: The profile to draw. image: The image onto which to draw the profile (which may be None). method: The method to use in drawImage. offset: The offset to apply when drawing. config: The configuration dict for the stamp field. base: The base configuration dict. logger: If given, a logger object to log progress. Returns: the resulting image. | def draw(self, prof, image, method, offset, config, base, logger, **kwargs):
# ... draw prof onto the given image (making a new Image if necessary)
if prof is None:
return image
else:
logger = galsim.config.LoggerWrapper(logger)
# Setup the kwargs to pass to drawImage
# (Start with any additional kwargs given as extra kwargs to DrawBasic and add to it.)
kwargs['image'] = image
kwargs['offset'] = offset
kwargs['method'] = method
if 'wmult' in config and 'wmult' not in kwargs: # pragma: no cover
kwargs['wmult'] = galsim.config.ParseValue(config, 'wmult', base, float)[0]
if 'wcs' not in kwargs and 'scale' not in kwargs:
kwargs['wcs'] = base['wcs'].local(image_pos = base['image_pos'])
if method == 'phot' and 'rng' not in kwargs:
kwargs['rng'] = galsim.config.GetRNG(config, base, logger, "method='phot'")
# Check validity of extra phot options:
max_extra_noise = None
if 'n_photons' in config and 'n_photons' not in kwargs:
if method != 'phot':
raise AttributeError('n_photons is invalid with method != phot')
if 'max_extra_noise' in config:
logger.warning(
"Both 'max_extra_noise' and 'n_photons' are set in config dict, "+
"ignoring 'max_extra_noise'.")
kwargs['n_photons'] = galsim.config.ParseValue(config, 'n_photons', base, int)[0]
elif 'max_extra_noise' in config:
max_extra_noise = galsim.config.ParseValue(config, 'max_extra_noise', base, float)[0]
if method != 'phot' and max_extra_noise is not None:
raise AttributeError('max_extra_noise is invalid with method != phot')
if 'poisson_flux' in config and 'poisson_flux' not in kwargs:
if method != 'phot':
raise AttributeError('poisson_flux is invalid with method != phot')
kwargs['poisson_flux'] = galsim.config.ParseValue(config, 'poisson_flux', base, bool)[0]
if max_extra_noise is not None and 'max_extra_noise' not in kwargs:
if max_extra_noise < 0.:
raise ValueError("image.max_extra_noise cannot be negative")
if 'image' in base and 'noise' in base['image']:
noise_var = galsim.config.CalculateNoiseVariance(base)
else:
raise AttributeError("Need to specify noise level when using max_extra_noise")
if noise_var < 0.:
raise ValueError("noise_var calculated to be < 0.")
max_extra_noise *= noise_var
kwargs['max_extra_noise'] = max_extra_noise
if logger.isEnabledFor(logging.DEBUG):
# Don't output the full image array. Use str(image) for that kwarg.
alt_kwargs = dict([(k,str(kwargs[k]) if isinstance(kwargs[k],galsim.Image) else kwargs[k])
for k in kwargs])
logger.debug('obj %d: drawImage kwargs = %s',base.get('obj_num',0), alt_kwargs)
logger.debug('obj %d: prof = %s',base.get('obj_num',0),prof)
try:
# NOTE: Old version:
# image = prof.drawImage(**kwargs)
if isinstance(prof, galsim.GSObject):
image = prof.drawImage(**kwargs)
elif isinstance(prof, galsim.ChromaticObject):
bp = {}
for key in (self._req_bp_fields+self._opt_bp_fields):
try:
bp[key] = config['bandpass'][key]
except KeyError:
bp[key] = None
bandpass = galsim.Bandpass(blue_limit=bp['blue_limit'], red_limit=bp['red_limit'],
wave_type=bp['wave_type'], throughput=bp['throughput'],
zeropoint=bp['zeropoint'])
image = prof.drawImage(bandpass=bandpass, **kwargs)
except Exception as e: # pragma: no cover
logger.debug('obj %d: prof = %r', base.get('obj_num',0), prof)
raise
return image | [
"def updateSkip(self, prof, image, method, offset, config, base, logger):\n\n # NOTE: There are currently unresolved issues with the image size checking of chromatic\n # objects. For now, we ignore any possible speed increases and skip the check.\n # if isinstance(prof, galsim.ChromaticObject):\n # return False\n\n if prof is not None and base.get('current_image',None) is not None:\n if image is None:\n prof = base['wcs'].toImage(prof, image_pos=base['image_pos'])\n # NOTE: Old version:\n # N = prof.getGoodImageSize(1.)\n if isinstance(prof, galsim.GSObject):\n N = prof.getGoodImageSize(1.)\n elif isinstance(prof, galsim.ChromaticObject):\n # TODO: Finish implementation\n # return False\n pudb.set_trace()\n # Find the suggested image size for each object given the choice of scale, and use the\n # maximum just to be safe.\n print '\\nprof.original = {}'.format(prof.original)\n print '\\nprof.original.obj_list = {}'.format(prof.original.obj_list)\n # print '\\nprof.objlist = {}'.format(prof.original.obj_list)\n obj_list = prof.original.obj_list\n possible_im_sizes = []\n for obj in obj_list:\n print '\\n obj : {}'.format(obj)\n possible_im_sizes.append([ ob.getGoodImageSize(1.) for ob in obj])\n print 'possible_im_sizes : {}'.format(possible_im_sizes)\n N = np.max(possible_im_sizes)\n N += 2 + int(np.abs(offset.x) + np.abs(offset.y))\n bounds = galsim._BoundsI(1,N,1,N)\n else:\n bounds = image.bounds\n\n # Set the origin appropriately\n stamp_center = base['stamp_center']\n if stamp_center:\n bounds = bounds.shift(stamp_center - bounds.center)\n else:\n bounds = bounds.shift(base.get('image_origin',galsim.PositionI(1,1)) -\n galsim.PositionI(bounds.xmin, bounds.ymin))\n\n overlap = bounds & base['current_image'].bounds\n if not overlap.isDefined():\n logger.info('obj %d: skip drawing object because its image will be entirely off '\n 'the main image.', base['obj_num'])\n return True\n\n return False",
"def plt_profile(body, profile_type, res_dir, sub_dir=None, fname=None):\n if fname is None:\n fname = body.name + '-profile-' + profile_type\n # 2D\n if profile_type == PltOpts.DD.value:\n plt_2d(body.x_axis, body.x_profile, body.x_label, body.y_label,\n (body.name + '-profile'), res_dir,\n sub_dir, fname)\n # 3D\n elif profile_type == PltOpts.DDD.value:\n plt_3d(body.x_axis, body.y_axis, body.profile, body.x_label,\n body.y_label, 'profile in mm',\n (body.name + '-profile'), res_dir, sub_dir, fname)",
"def run(self, draw=None, setup=None, update=None, frames=100, sort=CUMULATIVE, top=30):\n # Register the setup, draw, update functions with the canvas (if given).\n if isinstance(setup, FunctionType):\n self.canvas.set_method(setup, name=\"setup\")\n if isinstance(draw, FunctionType):\n self.canvas.set_method(draw, name=\"draw\")\n if isinstance(update, FunctionType):\n self.canvas.set_method(update, name=\"update\")\n # If enabled, turn Psyco off.\n psyco_stopped = False\n try: \n psyco.stop()\n psyco_stopped = True\n except:\n pass\n # Set the current canvas and the number of frames to profile.\n # The profiler will then repeatedly execute canvas._update() and canvas._draw().\n # Statistics are redirected from stdout to a temporary file.\n global _profile_canvas, _profile_frames\n _profile_canvas = self.canvas\n _profile_frames = frames\n import cProfile\n import pstats\n cProfile.run(\"profile_run()\", \"_profile\")\n p = pstats.Stats(\"_profile\")\n p.stream = open(\"_profile\", \"w\")\n p.sort_stats(sort==SLOWEST and \"time\" or sort).print_stats(top)\n p.stream.close()\n s = open(\"_profile\").read()\n remove(\"_profile\")\n # Restart Psyco if we stopped it.\n if psyco_stopped:\n psyco.profile()\n return s",
"def apply_profile(self, profile=None):\n raise NotImplementedError(\n 'operation apply_profile(...) not yet implemented')",
"def addProfile(self, profile, color=None, close=False):\n if close:\n e1 = profile[0] # should always be a point\n if e1[0] != 0.0:\n profile = [(0.0, e1[1])] + profile\n e2 = profile[-1]\n if e2[0] != 0.0:\n if len(e2) == 2:\n profile.append((0.0, e2[1]))\n else:\n # profile ends in an arc\n profile.append((0.0, e2[0][1]))\n # previous line start x/y, for line -> arc\n px1 = py1 = None\n for e1, e2 in windowItr(profile, 2, 1):\n if e2 is None:\n break\n le1 = len(e1)\n le2 = len(e2)\n # line or start -> line\n if le1 == 2 and le2 == 2:\n x1, y1 = e1\n x2, y2 = e2\n self.blendTangent(False)\n patch = Patch.fromRevLineSeg(x1, y1, x2, y2, self)\n if color:\n patch.setColor(color)\n self._patches.append(patch)\n px1 = x1\n py1 = y1\n # line or start -> arc\n elif le1 == 2 and le2 == 3:\n x1, y1 = e1\n (x2, y2), (cx, cy), d = e2\n if px1 is not None:\n self.blendTangent(self._isLineTanToArc(px1, py1, x1, y1,\n cx, cy, d))\n patch = Patch.fromRevArcSeg(x1, y1, x2, y2, cx, cy, d, self)\n if color:\n patch.setColor(color)\n self._patches.append(patch)\n # arc -> line\n elif le1 == 3 and le2 == 2:\n (aex, aey), (cx, cy), d = e1\n lex, ley = e2\n self.blendTangent(self._isLineTanToArc(lex, ley, aex, aey, cx,\n cy, d))\n patch = Patch.fromRevLineSeg(aex, aey, lex, ley, self)\n if color:\n patch.setColor(color)\n self._patches.append(patch)\n px1 = aex\n py1 = aey\n # arc -> arc\n else:\n (x1, y1), (cx1, cy1), d1 = e1\n (x2, y2), (cx2, cy2), d2 = e2\n self.blendTangent(self._isArcTangentToArc(x1, y1, cx1, cy1,\n cx2, cy2))\n patch = Patch.fromRevArcSeg(x1, y1, x2, y2, cx2, cy2, d2,\n self)\n if color:\n patch.setColor(color)\n self._patches.append(patch)\n self._bbox = BBox.fromVertices(self._sharedVertices)",
"def profile_start(self):\n\n # Start Profiling\n self.pr = cProfile.Profile()\n self.pr.enable()",
"def testDiagonalProfile(self):\n # Use Plot backend widget to submit mouse events\n widget = self.plot.getWidgetHandle()\n\n self.plot.addImage(\n numpy.arange(100 * 100).reshape(100, -1))\n\n for method in ('sum', 'mean'):\n with self.subTest(method=method):\n # 2 positions to use for mouse events\n pos1 = widget.width() * 0.4, widget.height() * 0.4\n pos2 = widget.width() * 0.6, widget.height() * 0.6\n\n # Trigger tool button for diagonal profile mode\n self.toolBar.lineAction.trigger()\n\n # draw profile line\n widget.setFocus(qt.Qt.OtherFocusReason)\n self.mouseMove(widget, pos=pos1)\n self.qWait(100)\n self.mousePress(widget, qt.Qt.LeftButton, pos=pos1)\n self.qWait(100)\n self.mouseMove(widget, pos=pos2)\n self.qWait(100)\n self.mouseRelease(widget, qt.Qt.LeftButton, pos=pos2)\n self.qWait(100)\n\n manager = self.toolBar.getProfileManager()\n\n for _ in range(20):\n self.qWait(200)\n if not manager.hasPendingOperations():\n break\n\n roi = manager.getCurrentRoi()\n self.assertIsNotNone(roi)\n roi.setProfileLineWidth(3)\n roi.setProfileMethod(method)\n\n for _ in range(20):\n self.qWait(200)\n if not manager.hasPendingOperations():\n break\n\n curveItem = roi.getProfileWindow().getCurrentPlotWidget().getAllCurves()[0]\n if method == 'sum':\n self.assertTrue(curveItem.getData()[1].max() > 10000)\n elif method == 'mean':\n self.assertTrue(curveItem.getData()[1].max() < 10000)\n\n # Remove the ROI so the profile window is also removed\n roiManager = manager.getRoiManager()\n roiManager.removeRoi(roi)\n self.qWait(100)",
"def set_profile(self, profile='default'):\n\n # parameters used by various subclasses\n # each set is indexed by a name, called a profile\n # Note that each parameter must also be listed in set_params method in order to get set\n self.profile = profile\n self.params = {\n 'default' : {\n 'chans': n.array(range(5,59)), # channels to read\n 'dmarr' : [44.,88.], # dm values to use for dedispersion (only for some subclasses)\n 'pulsewidth' : 0.0, # width of pulse in time (seconds)\n 'approxuvw' : True, # flag to make template visibility file to speed up writing of dm track data\n 'pathout': './', # place to put output files\n 'beam_params': [0], # flag=0 or list of parameters for twodgaussian parameter definition\n 'long': -107.6177, # longitude of the array center (vla)\n 'lat': 34.07875 # latitude of the array center (vla)\n },\n 'vlacrab' : {\n 'chans': n.array(range(5,59)), # channels to read\n 'dmarr' : [29.,58.], # dm values to use for dedispersion (only for some subclasses)\n 'pulsewidth' : 0.0, # width of pulse in time (seconds)\n 'approxuvw' : True, # flag to make template visibility file to speed up writing of dm track data\n 'pathout': './', # place to put output files\n 'beam_params': [0], # flag=0 or list of parameters for twodgaussian parameter definition\n 'long': -107.6177, # longitude of the array center\n 'lat': 34.07875 # latitude of the array center\n },\n 'psa' : {\n 'chans': n.array(range(140,150)), # channels to read\n 'dmarr' : [0.], # dm values to use for dedispersion (only for some subclasses)\n 'pulsewidth' : 0.0, # width of pulse in time (seconds)\n 'approxuvw' : True, # flag to make template visibility file to speed up writing of dm track data\n 'pathout': './', # place to put output files\n 'beam_params': [0], # flag=0 or list of parameters for twodgaussian parameter definition\n 'long': 21.411, # longitude of the array center\n 'lat': -30.721 # latitude of the array center\n },\n 'pocob0329' : {\n 'chans': n.array(range(5,59)), # channels to read\n 'dmarr' : [0, 13.4, 26.8, 40.2, 53.5], # dm values to use for dedispersion (only for some subclasses)\n 'pulsewidth' : 0.005, # width of pulse in time (seconds)\n 'approxuvw' : True, # flag to make template visibility file to speed up writing of dm track data\n 'pathout': './', # place to put output files\n 'beam_params': [0], # flag=0 or list of parameters for twodgaussian parameter definition\n 'long': -121.470, # longitude of the array center\n 'lat': 40.817 # latitude of the array center\n },\n 'mwa' : {\n 'chans': n.array(n.arange(128)), # channels to read\n 'dmarr' : [0, 50.], # dm values to use for dedispersion (only for some subclasses)\n 'pulsewidth' : 0.0, # width of pulse in time (seconds)\n 'approxuvw' : True, # flag to make template visibility file to speed up writing of dm track data\n 'pathout': './', # place to put output files\n 'beam_params': [0], # flag=0 or list of parameters for twodgaussian parameter definition\n 'long': 116.671, # longitude of the array center\n 'lat': -26.703 # latitude of the array center\n }\n }\n\n \n self.pathout = self.params[self.profile]['pathout']\n self.chans = self.params[self.profile]['chans']\n self.dmarr = self.params[self.profile]['dmarr']\n self.pulsewidth = self.params[self.profile]['pulsewidth'] * n.ones(len(self.chans))\n self.approxuvw = self.params[self.profile]['approxuvw']\n self.beam_params = self.params[self.profile]['beam_params']\n self.long = self.params[self.profile]['long']\n self.lat = self.params[self.profile]['lat']",
"def plt_profile_approx(res_dir, sub_dir):\n for body in gc.get_objects():\n if isinstance(body, ContactBody):\n if body.type_profile == \"File\":\n fname = \"profile-approximation-{}\".format(body.name)\n plt_2d_scatt_line(body.file_x_axis, body.file_x_profile,\n body.x_axis, body.x_profile,\n body.x_label, body.z_label, fname, res_dir,\n sub_dir, fname)",
"def plt_profiles(res_dir):\n print_it(\"plotting profiles\", PrintOpts.lvl1.value)\n num_cbs = list(\n isinstance(obj, ContactBody) for obj in gc.get_objects()).count(True)\n counter = 0\n for body in gc.get_objects():\n if isinstance(body, ContactBody):\n if body.type_profile == \"File\":\n fname = \"profile-approximation-{}\".format(body.name)\n plt_2d_scatt_line(body.file_x_axis, body.file_x_profile,\n body.x_axis, body.x_profile,\n body.x_label, body.z_label, fname, res_dir,\n SubDir.profiles.value, fname)\n\n if body.roughness_mat is not None:\n fname = \"surface-roughness-{}\".format(body.name)\n plt_3d(body.x_axis, body.y_axis, body.roughness_mat,\n body.x_label, body.y_label,\n 'roughness height in mm',\n 'surface roughness {}'.format(body.name), res_dir,\n sub_dir=SubDir.profiles.value, fname=fname)\n\n plt_profile(body, PltOpts.DD.value, res_dir, SubDir.profiles.value)\n plt_profile(body, PltOpts.DDD.value, res_dir, SubDir.profiles.value)\n counter = print_progress(counter, num_cbs)",
"def add_profile(self, skip_divisor=32, ws_in_knots=True, **kwargs):\n\n # I must be a dummy because I can't make\n # this work any other way!!\n if 'bloc' in kwargs:\n bloc = kwargs.pop('bloc')\n else:\n bloc = 0.5\n\n try:\n pres = ma.masked_invalid(self.soundingdata['pres'])\n except KeyError:\n raise KeyError(\"Pres in hPa (PRES) is required!\")\n\n try:\n tc = ma.masked_invalid(self.soundingdata['temp'])\n except KeyError:\n raise KeyError(\"Temperature in C (TEMP) is required!\")\n\n try:\n dwpt = ma.masked_invalid(self.soundingdata['dwpt'])\n except KeyError:\n print(\"Warning: No DWPT available\")\n dwpt = ma.masked_array(zeros(pres.shape), mask=True)\n\n try:\n sknt = self.soundingdata['sknt']\n drct = self.soundingdata['drct']\n rdir = (270.-drct)*(pi/180.)\n\n if ws_in_knots:\n uu = ma.masked_invalid(sknt*cos(rdir))\n vv = ma.masked_invalid(sknt*sin(rdir))\n else:\n uu = ma.masked_invalid(.514444 * sknt * cos(rdir))\n vv = ma.masked_invalid(.514444 * sknt * sin(rdir))\n except KeyError:\n print(\"Warning: No SKNT/DRCT available\")\n uu = ma.masked_array(zeros(pres.shape), mask=True)\n vv = ma.masked_array(zeros(pres.shape), mask=True)\n\n tcprof = self.skewxaxis.plot(tc, pres, zorder=5, **kwargs)\n dpprof = self.skewxaxis.plot(dwpt, pres, zorder=5, ls='--', **kwargs)\n\n # this line should no longer cause an exception\n nbarbs = (~uu.mask).sum()\n\n skip = max(1, int(nbarbs//skip_divisor))\n\n if 'color' in kwargs:\n bcol = kwargs['color']\n else:\n bcol = 'k'\n\n if 'alpha' in kwargs:\n balph = kwargs['alpha']\n else:\n balph = 1.\n\n self.wbax.barbs((zeros(pres.shape)+bloc)[::skip]-0.5, pres[::skip],\n uu[::skip], vv[::skip],\n length=5, color=bcol, alpha=balph, lw=0.5)\n\n self.skewxaxis.other_housekeeping()\n\n return tcprof",
"def msvfw32_DrawDibProfileDisplay(jitter):\n ret_ad, args = jitter.func_args_stdcall([\"lpbi\"])\n raise RuntimeError('API not implemented')\n jitter.func_ret_stdcall(ret_ad, ret_value)",
"def computeBGprofile(self, untfilename, verbal=True, **kwargs):\n \n if self.minreadcoverage is None:\n raise AttributeError('minreadcoverage not set!')\n\n\n bgrate, bgdepth = aFunc.compute1Dprofile(untfilename, self.seqlen, self.minreadcoverage)\n \n\n if self.profile is None:\n self.profile = ReactivityProfile()\n elif verbal:\n print('Overwriting bgrate from values computed from the raw mutation file {}'.format(untfilename))\n \n\n self.profile.backprofile = bgrate\n \n # reset rawprofile as well\n with np.errstate(divide='ignore',invalid='ignore'):\n self.profile.rawprofile = np.sum(self.mutations, axis=0, dtype=float)/np.sum(self.reads, axis=0)\n self.profile.backgroundSubtract(normalize=False)",
"def Profiles(self, ax, p, data, ind, loc='', xloc=[], turb_geo=[],scatter=True):\r\n #TODO找最近的索引值,避免插值\r\n zmax=np.shape(data)[2]\r\n marker=['o','s','v','3','*','h','+','D',\r\n '.','_','|','1','2','3','4','x']\r\n if loc == 'y':\r\n points=np.zeros((len(xloc),p.ny,3))\r\n for m in range(len(xloc)):\r\n points[m,:,0]=xloc[m]\r\n for n in range(p.ny):\r\n points[:,n,1]=p.y[n]\r\n points[:,:,2]=ind\r\n u=interpn([p.x,p.y,p.z_uv[0:zmax]],data,points,bounds_error=False)\r\n for i in range(len(xloc)):\r\n label = \"H:x=\"+str(int(round((xloc[i]-turb_geo[0])/turb_geo[1]))) + 'D'\r\n if scatter==True:\r\n self.Profile(ax, p, u[i, :], loc=loc, \r\n label=label,marker=marker[i])\r\n else:\r\n self.Profile(ax, p, u[i, :], loc=loc, \r\n label=label)\r\n if loc == 'z':\r\n points=np.zeros((len(xloc),zmax,3))\r\n for m in range(len(xloc)):\r\n points[m,:,0]=xloc[m]\r\n points[:,:,1]=ind\r\n for n in range(zmax):\r\n points[:,n,2]=p.z_uv[n]\r\n u=interpn([p.x,p.y,p.z_uv[0:zmax]],data,points,bounds_error=False)\r\n for i in range(len(xloc)):\r\n label = \"V:x=\"+str(int(round((xloc[i]-turb_geo[0])/turb_geo[1]))) + 'D'\r\n if scatter==True:\r\n self.Profile(ax, p, u[i, :], loc=loc, \r\n label=label, linestyle='--',marker=marker[i])\r\n else:\r\n self.Profile(ax, p, u[i, :], loc=loc, \r\n label=label,linestyle='--')",
"def command_apply_profile(self, profile): \n required_arguments = {\n 'profile': profile,\n 'rsync': self.BINARY_PATHS['rsync'],\n 'pwd': os.path.join(self.env['pwd']),\n 'exclude': '',\n 'cwd': '.'\n }\n\n try:\n files_to_exclude_str = self.profile_config[profile:'ignore']\n except (TypeError,KeyError):\n raise RuntimeError('Missing value in apply_profile command: %s' % str(required_arguments))\n else:\n if ',' in files_to_exclude_str:\n files = [f.strip() for f in files_to_exclude_str.split(',')]\n else:\n files = [f.strip() for f in files_to_exclude_str.split()]\n required_arguments['exclude'] = ' '.join(\"--exclude='%s'\" % f for f in files)\n\n self._previous_arguments = required_arguments\n return '%(rsync)s -rlptD --chmod=ug+rw %(exclude)s %(pwd)s/%(profile)s/ %(cwd)s' % required_arguments",
"def assign_profile(self, objProf):\n self.Profile = objProf\n pass",
"def writeProfile(fname,prof):\n t = np.linspace(0,1,prof.shape[0],endpoint=False)\n fh = open(fname,'w')\n for x in range(prof.shape[0]):\n fh.write('%.7e %.7e\\n' % (t[x],prof[x]))\n fh.close()",
"def paintAvatar(self):\n self.paintBody()\n self.paintShoes()\n if self.avatarConfiguration[\"gender\"] == \"boy\":\n self.paintShirt()\n self.paintTrousers()\n else:\n self.paintSkirt()\n self.paintHead()\n self.paintHair()\n self.paintMask()",
"def CreateProfileLikelihoodPlot(model, data, poi):\n\n nll = model.createNLL(data);\n profile = nll.createProfile(ROOT.RooArgSet(poi)); \n\n frame = poi.frame();\n ROOT.RooStats.HistFactory.FormatFrameForLikelihood(frame)\n\n nll.plotOn(frame, ROOT.RooCmdArg(\"ShiftToZero\",True), \n ROOT.RooCmdArg(\"LineColor\",ROOT.kRed), \n ROOT.RooCmdArg(\"LineStyle\",ROOT.kDashed) );\n profile.plotOn(frame);\n frame.SetMinimum(0);\n frame.SetMaximum(2.);\n canvas = ROOT.TCanvas( \"Profile Likelihood\", \"\", 800,600);\n frame.Draw(\"goff\");\n png_string = CanvasToPngString(canvas)\n return png_string"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Take a draft_dict that was already validated by draft_dict_validator, then further sanitize, validate, and transform it. Ultimately return this "further validated" draft dict. It will have a slightly different set of keys, whose values can be used to directly create a Draft object. | def further_validated_draft_dict(
draft_dict: Dict[str, Any], user_profile: UserProfile
) -> Dict[str, Any]:
content = normalize_body(draft_dict["content"])
timestamp = draft_dict.get("timestamp", time.time())
timestamp = round(timestamp, 6)
if timestamp < 0:
# While it's not exactly an invalid timestamp, it's not something
# we want to allow either.
raise JsonableError(_("Timestamp must not be negative."))
last_edit_time = timestamp_to_datetime(timestamp)
topic = ""
recipient_id = None
to = draft_dict["to"]
if draft_dict["type"] == "stream":
topic = truncate_topic(draft_dict["topic"])
if "\0" in topic:
raise JsonableError(_("Topic must not contain null bytes"))
if len(to) != 1:
raise JsonableError(_("Must specify exactly 1 stream ID for stream messages"))
stream, sub = access_stream_by_id(user_profile, to[0])
recipient_id = stream.recipient_id
elif draft_dict["type"] == "private" and len(to) != 0:
to_users = get_user_profiles_by_ids(set(to), user_profile.realm)
try:
recipient_id = recipient_for_user_profiles(to_users, False, None, user_profile).id
except ValidationError as e: # nocoverage
raise JsonableError(e.messages[0])
return {
"recipient_id": recipient_id,
"topic": topic,
"content": content,
"last_edit_time": last_edit_time,
} | [
"def validate_dict(cls, valuedict, exceptioncls, typedict):\n if not isinstance(valuedict, dict):\n raise DraftValidationError('The draft string must contain a json map/object/dict.')\n for key, typecls in typedict.iteritems():\n if not key in valuedict:\n raise exceptioncls('{0} is required.'.format(key))\n if not isinstance(valuedict[key], typecls):\n raise exceptioncls('{0} must be of type: {1}.'.format(key, str(typecls)[7:-2]))",
"def _draft_from_response(data):\n return Draft(\n uuid=UUID(data['uuid']),\n bundle_uuid=UUID(data['bundle_uuid']),\n name=data['name'],\n updated_at=dateutil.parser.parse(data['staged_draft']['updated_at']),\n files={\n path: DraftFile(path=path, **file)\n for path, file in data['staged_draft']['files'].items()\n },\n links={\n name: DraftLinkDetails(\n name=name,\n direct=LinkReference(**link[\"direct\"]),\n indirect=[LinkReference(**ind) for ind in link[\"indirect\"]],\n modified=link[\"modified\"],\n )\n for name, link in data['staged_draft']['links'].items()\n }\n )",
"def do_edit_draft(draft_id: int, draft_dict: Dict[str, Any], user_profile: UserProfile) -> None:\n try:\n draft_object = Draft.objects.get(id=draft_id, user_profile=user_profile)\n except Draft.DoesNotExist:\n raise ResourceNotFoundError(_(\"Draft does not exist\"))\n valid_draft_dict = further_validated_draft_dict(draft_dict, user_profile)\n draft_object.content = valid_draft_dict[\"content\"]\n draft_object.topic = valid_draft_dict[\"topic\"]\n draft_object.recipient_id = valid_draft_dict[\"recipient_id\"]\n draft_object.last_edit_time = valid_draft_dict[\"last_edit_time\"]\n draft_object.save()\n\n event = {\"type\": \"drafts\", \"op\": \"update\", \"draft\": draft_object.to_dict()}\n send_event(user_profile.realm, event, [user_profile.id])",
"def decode_draftstring(cls, draftstring):\n try:\n return json.loads(draftstring)\n except ValueError, e:\n raise DraftValidationError('Could not decode config string as JSON.')",
"def _get_draft(self):\n review_request = self.create_review_request(publish=True)\n return ReviewRequestDraft.create(review_request)",
"def _convert_states_v27_dict_to_v28_dict(cls, draft_change_list):\n for i, change in enumerate(draft_change_list):\n if (change.cmd == exp_domain.CMD_EDIT_STATE_PROPERTY and\n change.property_name ==\n exp_domain.STATE_PROPERTY_CONTENT_IDS_TO_AUDIO_TRANSLATIONS_DEPRECATED): # pylint: disable=line-too-long\n draft_change_list[i] = exp_domain.ExplorationChange({\n 'cmd': exp_domain.CMD_EDIT_STATE_PROPERTY,\n 'property_name': (\n exp_domain.STATE_PROPERTY_RECORDED_VOICEOVERS),\n 'state_name': change.state_name,\n 'new_value': {\n 'voiceovers_mapping': change.new_value\n }\n })\n\n return draft_change_list",
"def _convert_states_v29_dict_to_v30_dict(cls, draft_change_list):\n for i, change in enumerate(draft_change_list):\n if (change.cmd == exp_domain.CMD_EDIT_STATE_PROPERTY and\n change.property_name ==\n exp_domain.STATE_PROPERTY_INTERACTION_ANSWER_GROUPS):\n draft_change_list[i] = exp_domain.ExplorationChange({\n 'cmd': exp_domain.CMD_EDIT_STATE_PROPERTY,\n 'property_name': (\n exp_domain.STATE_PROPERTY_INTERACTION_ANSWER_GROUPS),\n 'state_name': change.state_name,\n 'new_value': {\n 'rule_specs': change.new_value['rule_specs'],\n 'outcome': change.new_value['outcome'],\n 'training_data': change.new_value['training_data'],\n 'tagged_skill_misconception_id': None\n }\n })\n return draft_change_list",
"def _read_draft_from_file(self, src_file):\n\n with open (src_file, 'r') as infile:\n data_in = json.load(infile)\n\n # deserialize 'json' -> 'dict'\n record = self.draft_serializer.load(data_in).data\n\n return record",
"def get(self, oauth, resource_id, draft_id):\n d = Deposition.get(resource_id, user=current_user)\n return d.type.marshal_draft(d.get_draft(draft_id))",
"def standardize(self):\n dict_util.standardize(self)",
"def do_create_drafts(draft_dicts: List[Dict[str, Any]], user_profile: UserProfile) -> List[Draft]:\n draft_objects = []\n for draft_dict in draft_dicts:\n valid_draft_dict = further_validated_draft_dict(draft_dict, user_profile)\n draft_objects.append(\n Draft(\n user_profile=user_profile,\n recipient_id=valid_draft_dict[\"recipient_id\"],\n topic=valid_draft_dict[\"topic\"],\n content=valid_draft_dict[\"content\"],\n last_edit_time=valid_draft_dict[\"last_edit_time\"],\n )\n )\n\n created_draft_objects = Draft.objects.bulk_create(draft_objects)\n\n event = {\n \"type\": \"drafts\",\n \"op\": \"add\",\n \"drafts\": [draft.to_dict() for draft in created_draft_objects],\n }\n send_event(user_profile.realm, event, [user_profile.id])\n\n return created_draft_objects",
"def __get_normalizer(self, schema_key: str, original_validator: Callable):\n\n def normalizator(validator_instance: Callable, property_value: Any, instance: Any, schema: Dict[str, Any]):\n \"\"\"\n Jsonschema validator callable it uses for validating instance. We\n override default Draft7Validator to perform value transformation\n before validation take place. We do not take any action except\n logging warn if object does not conform to json schema, just using\n jsonschema algorithm to traverse through object fields.\n Look\n https://python-jsonschema.readthedocs.io/en/stable/creating/?highlight=validators.create#jsonschema.validators.create\n validators parameter for detailed description.\n :\n \"\"\"\n\n def resolve(subschema):\n if \"$ref\" in subschema:\n _, resolved = validator_instance.resolver.resolve(subschema[\"$ref\"])\n return resolved\n return subschema\n\n # Transform object and array values before running json schema type checking for each element.\n # Recursively normalize every value of the \"instance\" sub-object,\n # if \"instance\" is an incorrect type - skip recursive normalization of \"instance\"\n if schema_key == \"properties\" and isinstance(instance, dict):\n for k, subschema in property_value.items():\n if k in instance:\n subschema = resolve(subschema)\n instance[k] = self.__normalize(instance[k], subschema)\n # Recursively normalize every item of the \"instance\" sub-array,\n # if \"instance\" is an incorrect type - skip recursive normalization of \"instance\"\n elif schema_key == \"items\" and isinstance(instance, list):\n subschema = resolve(property_value)\n for index, item in enumerate(instance):\n instance[index] = self.__normalize(item, subschema)\n\n # Running native jsonschema traverse algorithm after field normalization is done.\n yield from original_validator(validator_instance, property_value, instance, schema)\n\n return normalizator",
"def _transform_post_content_lst(self, post_id, post_dict): \n # TODO: For Gods sake this is the laziest error-catching I have ever written please make this less horrible:\n \n try: \n # Post dict autor dicts:\n author_dicts = {\n \"id\": post_id,\n \"author_gold\": post_dict[\"author\"].is_gold,\n \"mod_status\": post_dict[\"author\"].is_mod,\n \"verified_email_status\": post_dict[\"author\"].has_verified_email,\n \"acc_created_on\": self._format_datetime(post_dict[\"author\"].created_utc),\n \"comment_karma\": post_dict[\"author\"].comment_karma,\n \"author\": post_dict[\"author\"].name\n }\n\n self.logger.info(f\"Author Data Extracted Sucessfully from Post Dict\", \"reddit\", \"pipeline\", 200)\n\n except Exception as e:\n # Post dict autor dicts:\n author_dicts = {\n \"id\": post_id,\n \"author_gold\": None,\n \"mod_status\": None,\n \"verified_email_status\": None,\n \"acc_created_on\": None,\n \"comment_karma\": None,\n \"author\": None\n }\n self.logger.warning(f\"Author Dictionary Data Extraction Failed w/ Error: {e} author_dict set to None\", \"reddit\", \"pipeline\", 301)\n\n\n # Updating the main post_dict with the new author_dict content:\n post_dict.update(author_dicts)\n \n return post_dict",
"def fromDict(d):\n ed = EntryDict()\n for k in d:\n ed[k] = d[k]\n return ed",
"def from_json(cls, json_draft_pick):\n football_player_id = json_draft_pick.get('football_player_id')\n order = json_draft_pick.get('order')\n if football_player_id and order:\n return cls(football_player_id=football_player_id, order=order)\n else:\n raise ValidationError('insufficient data to create DraftPick')",
"def preprocess_id_dict(self, id_dict):\n if self.id_dict_preprocessing is not None:\n return self.id_dict_preprocessing(id_dict)\n return id_dict",
"def _get_schema_dictionary_with_ignored_fields_removed(self, dictionary_descriptor):\n\n in_post_processing_dictionary_descriptor = dictionary_descriptor\n\n for ignored_property in self.ignored_properties:\n if ignored_property in in_post_processing_dictionary_descriptor.keys():\n del in_post_processing_dictionary_descriptor[ignored_property]\n\n for key, value in in_post_processing_dictionary_descriptor.items():\n if isinstance(value, dict):\n post_processed_sub_dictionary_descriptor = \\\n self._get_schema_dictionary_with_ignored_fields_removed(\n value)\n in_post_processing_dictionary_descriptor[key] = post_processed_sub_dictionary_descriptor\n\n return in_post_processing_dictionary_descriptor",
"def validate_insert(cls, document: dict) -> dict:\n if document is None:\n return {\"\": [\"No data provided.\"]}\n\n if not isinstance(document, dict):\n return {\"\": [\"Must be a dictionary.\"]}\n\n new_document = copy.deepcopy(document)\n\n errors = {}\n\n field_names = [field.name for field in cls.__fields__]\n unknown_fields = [\n field_name for field_name in new_document if field_name not in field_names\n ]\n for unknown_field in unknown_fields:\n known_field, field_value = cls._to_known_field(\n unknown_field, new_document[unknown_field]\n )\n if known_field:\n new_document.setdefault(known_field.name, {}).update(field_value)\n elif not cls._skip_unknown_fields:\n errors.update({unknown_field: [\"Unknown field\"]})\n\n for field in cls.__fields__:\n errors.update(field.validate_insert(new_document))\n\n return errors",
"def post():\n try:\n\n # Quick check: must provide an account ID.\n account_id = resource_utils.get_account_id(request)\n if account_id is None:\n return resource_utils.account_required_response()\n\n # Verify request JWT and account ID\n if not authorized(account_id, jwt):\n return resource_utils.unauthorized_error_response(account_id)\n\n request_json = request.get_json(silent=True)\n # Disable schema validation: draft may be partial/incomplele.\n # valid_format, errors = schema_utils.validate(request_json, 'draft', 'ppr')\n # if not valid_format:\n # return validation_error_response(errors, VAL_ERROR)\n\n # Save new draft statement: BusinessException raised if failure.\n token: dict = g.jwt_oidc_token_info\n draft = Draft.create_from_json(request_json, account_id, token.get('username', None))\n try:\n draft.save()\n except Exception as db_exception: # noqa: B902; return nicer default error\n return resource_utils.db_exception_response(db_exception, account_id, 'POST draft')\n\n return draft.json, HTTPStatus.CREATED\n\n except BusinessException as exception:\n return resource_utils.business_exception_response(exception)\n except Exception as default_exception: # noqa: B902; return nicer default error\n return resource_utils.default_exception_response(default_exception)"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Create drafts in bulk for a given user based on the draft dicts. Since the only place this method is currently being used (apart from tests) is the create_draft view, we assume that the draft_dicts are syntactically valid (i.e. they satisfy the draft_dict_validator). | def do_create_drafts(draft_dicts: List[Dict[str, Any]], user_profile: UserProfile) -> List[Draft]:
draft_objects = []
for draft_dict in draft_dicts:
valid_draft_dict = further_validated_draft_dict(draft_dict, user_profile)
draft_objects.append(
Draft(
user_profile=user_profile,
recipient_id=valid_draft_dict["recipient_id"],
topic=valid_draft_dict["topic"],
content=valid_draft_dict["content"],
last_edit_time=valid_draft_dict["last_edit_time"],
)
)
created_draft_objects = Draft.objects.bulk_create(draft_objects)
event = {
"type": "drafts",
"op": "add",
"drafts": [draft.to_dict() for draft in created_draft_objects],
}
send_event(user_profile.realm, event, [user_profile.id])
return created_draft_objects | [
"def list_drafts(user):\n identity = get_identity_for_user(user)\n service = get_record_service()\n recids = [\n dm.json[\"id\"]\n for dm in service.draft_cls.model_cls.query.all()\n if dm is not None and dm.json is not None\n ]\n\n for recid in recids:\n try:\n draft = service.read_draft(id_=recid, identity=identity)\n click.secho(\n \"{} - {}\".format(draft.id, draft.data[\"metadata\"][\"title\"]), fg=\"green\"\n )\n except:\n pass",
"def create_draft(auth, subject, body, addresses, user_id, cc_addresses=[], attachments_list=None):\r\n data = {}\r\n data['Subject'] = subject\r\n data['Body'] = {}\r\n data['Body']['ContentType'] = 'HTML'\r\n data['Body']['Content'] = body\r\n data['ToRecipients'] = [{'EmailAddress': {'Address': addr}} for addr in addresses]\r\n data['ccRecipients'] = [{'EmailAddress': {'Address': addr}} for addr in cc_addresses]\r\n if attachments_list is not None:\r\n data['Attachments'] = attachments_list\r\n\r\n params = json.dumps(data).encode('utf8')\r\n\r\n url = \"{api_url}/{user_id}/messages\".format(api_url=API_URL, user_id=user_id)\r\n\r\n headers = {\r\n 'Content-Type': 'application/json',\r\n 'Authorization': 'Bearer {}'.format(auth.access_token)\r\n }\r\n req = urllib.request.Request(url, params, headers)\r\n try:\r\n resp = urllib.request.urlopen(req)\r\n resp_data = json.load(resp)\r\n\r\n logging.getLogger(__name__).info(\"Draft created\")\r\n\r\n return resp_data['id']\r\n except urllib.error.HTTPError as err:\r\n raise AzureError(err)",
"def test_create_draft(app, service, identity_simple, input_data):\n draft = service.create(identity_simple, input_data)\n draft_dict = draft.to_dict()\n\n assert draft.id\n\n for key, value in input_data.items():\n assert draft[key] == value\n\n # Check for pid and parent pid\n assert draft['id']\n assert draft['parent']['id']\n assert draft['is_published'] is False\n assert draft['versions']['is_latest_draft'] is True\n assert draft['versions']['is_latest'] is False\n assert 'errors' not in draft_dict",
"def further_validated_draft_dict(\n draft_dict: Dict[str, Any], user_profile: UserProfile\n) -> Dict[str, Any]:\n\n content = normalize_body(draft_dict[\"content\"])\n\n timestamp = draft_dict.get(\"timestamp\", time.time())\n timestamp = round(timestamp, 6)\n if timestamp < 0:\n # While it's not exactly an invalid timestamp, it's not something\n # we want to allow either.\n raise JsonableError(_(\"Timestamp must not be negative.\"))\n last_edit_time = timestamp_to_datetime(timestamp)\n\n topic = \"\"\n recipient_id = None\n to = draft_dict[\"to\"]\n if draft_dict[\"type\"] == \"stream\":\n topic = truncate_topic(draft_dict[\"topic\"])\n if \"\\0\" in topic:\n raise JsonableError(_(\"Topic must not contain null bytes\"))\n if len(to) != 1:\n raise JsonableError(_(\"Must specify exactly 1 stream ID for stream messages\"))\n stream, sub = access_stream_by_id(user_profile, to[0])\n recipient_id = stream.recipient_id\n elif draft_dict[\"type\"] == \"private\" and len(to) != 0:\n to_users = get_user_profiles_by_ids(set(to), user_profile.realm)\n try:\n recipient_id = recipient_for_user_profiles(to_users, False, None, user_profile).id\n except ValidationError as e: # nocoverage\n raise JsonableError(e.messages[0])\n\n return {\n \"recipient_id\": recipient_id,\n \"topic\": topic,\n \"content\": content,\n \"last_edit_time\": last_edit_time,\n }",
"def drafts():\n pass",
"def draft_registrations(auth, **kwargs):\n campaign = kwargs.get('campaign', None)\n drafts = utils.drafts_for_user(auth.user, campaign)\n return {\n 'draftRegistrations': [\n {\n 'dateUpdated': iso8601format(draft.datetime_updated),\n 'dateInitiated': iso8601format(draft.datetime_initiated),\n 'node': {\n 'title': draft.branched_from.title,\n },\n 'initiator': {\n 'name': draft.initiator.fullname,\n },\n 'url': draft.branched_from.web_url_for(\n 'edit_draft_registration_page',\n draft_id=draft._id,\n ),\n }\n for draft in drafts\n ],\n }",
"def create_sprint_stories(self, board_id, sprint_id, user_stories):\n project_id = self._get_board_project_id(board_id)\n self.logger.info(\"Creating user stories in project {} - sprint {}\".format(project_id, sprint_id))\n created_stories = []\n for user_story in user_stories:\n self.logger.info(\"Creating user story {}\".format(user_story.get(\"subject\")))\n created_story = self._create_user_story(project_id, user_story.get(\"subject\"),\n user_story.get(\"description\"),\n user_story.get(\"tags\"),\n user_story.get(\"total_points\"))\n self.logger.info(\"Adding user story {} to sprint {}\".format(created_story.key, sprint_id))\n self.jira.add_issues_to_sprint(sprint_id, [created_story.key])\n for task in user_story.get(\"tasks\"):\n created_task = self._create_user_story_task(project_id, created_story.id, task.get(\"subject\"),\n task.get(\"description\"), task.get(\"finished_date\"))\n # Add as comment user story finished date\n self._add_comment(created_task.id, \"Finished date: '{}'\".format(task.get('finished_date')))\n self._add_comment(created_task.id, \"Taiga status: '{}'\".format(task.get(\"status\")))\n # Update task status\n self._update_task_status(created_task.key, task.get(\"status\"))\n created_stories.append({\"key\": created_story.key,\n \"is_closed\": user_story.get(\"is_closed\"),\n \"status\": user_story.get(\"status\")})\n # Add as comment user story finished date\n self._add_comment(created_story.id, \"Finished date: '{}'\".format(user_story.get('finish_date')))\n self._add_comment(created_story.id, \"Taiga status: '{}'\".format(user_story.get(\"status\")))\n return created_stories",
"def action_set_draft(self, cr, uid, ids, context=None):\n for letter in self.browse(cr, uid, ids, context=context):\n self.write(cr, uid, [letter.id], {'state': 'draft'}, context=context)\n return True",
"def draft(self, user=None):\r\n self.status = self.STATUS.draft\r\n self.save(force_update=True, user=user)",
"def user_draft_id(self, user_draft_id):\n\n self._user_draft_id = user_draft_id",
"def drafts(self):\n if self._drafts is None:\n if self._initialize_drafts():\n self._save_drafts()\n return self._drafts",
"def _initialize_drafts(self):\n drafts = memcache.get('user_drafts:' + self.email)\n if drafts is not None:\n self._drafts = drafts\n ##logging.info('HIT: %s -> %s', self.email, self._drafts)\n return False\n # We're looking for the Issue key id. The ancestry of comments goes:\n # Issue -> PatchSet -> Patch -> Comment.\n issue_ids = set(comment.key().parent().parent().parent().id()\n for comment in gql(Comment,\n 'WHERE author = :1 AND draft = TRUE',\n self.user))\n self._drafts = list(issue_ids)\n ##logging.info('INITIALIZED: %s -> %s', self.email, self._drafts)\n return True",
"def _ResolveContacts(self, contact_dicts, contact_ids, reason=None):\r\n for contact_dict, (user_exists, user_id, webapp_dev_id) in zip(contact_dicts, contact_ids):\r\n if not user_exists:\r\n # Check if previous invocation of this operation already created the user.\r\n user = yield gen.Task(User.Query, self._client, user_id, None, must_exist=False)\r\n if user is None:\r\n # Create prospective user.\r\n request = {'user_id': user_id,\r\n 'webapp_dev_id': webapp_dev_id,\r\n 'identity_key': contact_dict['identity'],\r\n 'reason': reason}\r\n yield Operation.CreateNested(self._client, 'CreateProspectiveOperation.Execute', request)",
"def send_drafts(drafts, progress):\n msg_prefix = progress.GetMessage()\n gmail_service = get_gmail_service()\n for n, draft in enumerate(drafts):\n draft_id = draft['draft_id']\n if progress.WasCancelled():\n break\n msg = f\"{msg_prefix} {n}/{len(drafts)}...\"\n progress.Update(n, newmsg=msg)\n family_name = draft['family']['last_name']\n logger.info(f'Sending draft ID {draft_id} for family {family_name}')\n send_draft(gmail_service, 'me', draft_id)",
"def create_draft(convo_ID, template_ID):\n # Get response template through helper function.\n # Make an API request to reply to a conversation with the content in that template\n response_template = get_canned_response(template_ID)\n url = \"https://api2.frontapp.com/conversations/\" + convo_ID + \"/drafts\"\n payload = {\n \"body\": response_template[\"body\"],\n \"subject\": response_template[\"subject\"],\n \"author_id\": \"tea_188ud\", # [needs to change later on]\n \"channel_id\": \"cha_14tfp\", # [also will need to be changed for team based settings]\n }\n files = []\n headers = {\"Authorization\": BEARER_TOKEN}\n requests.request(\"POST\", url, headers=headers, json=payload, files=files)",
"def post():\n try:\n\n # Quick check: must provide an account ID.\n account_id = resource_utils.get_account_id(request)\n if account_id is None:\n return resource_utils.account_required_response()\n\n # Verify request JWT and account ID\n if not authorized(account_id, jwt):\n return resource_utils.unauthorized_error_response(account_id)\n\n request_json = request.get_json(silent=True)\n # Disable schema validation: draft may be partial/incomplele.\n # valid_format, errors = schema_utils.validate(request_json, 'draft', 'ppr')\n # if not valid_format:\n # return validation_error_response(errors, VAL_ERROR)\n\n # Save new draft statement: BusinessException raised if failure.\n token: dict = g.jwt_oidc_token_info\n draft = Draft.create_from_json(request_json, account_id, token.get('username', None))\n try:\n draft.save()\n except Exception as db_exception: # noqa: B902; return nicer default error\n return resource_utils.db_exception_response(db_exception, account_id, 'POST draft')\n\n return draft.json, HTTPStatus.CREATED\n\n except BusinessException as exception:\n return resource_utils.business_exception_response(exception)\n except Exception as default_exception: # noqa: B902; return nicer default error\n return resource_utils.default_exception_response(default_exception)",
"def test_draft_by_user(self):\r\n r = self.F.RunFactory.create(status=\"active\")\r\n u = self.F.UserFactory.create()\r\n\r\n r.draft(user=u)\r\n\r\n self.assertEqual(self.refresh(r).modified_by, u)",
"def do_edit_draft(draft_id: int, draft_dict: Dict[str, Any], user_profile: UserProfile) -> None:\n try:\n draft_object = Draft.objects.get(id=draft_id, user_profile=user_profile)\n except Draft.DoesNotExist:\n raise ResourceNotFoundError(_(\"Draft does not exist\"))\n valid_draft_dict = further_validated_draft_dict(draft_dict, user_profile)\n draft_object.content = valid_draft_dict[\"content\"]\n draft_object.topic = valid_draft_dict[\"topic\"]\n draft_object.recipient_id = valid_draft_dict[\"recipient_id\"]\n draft_object.last_edit_time = valid_draft_dict[\"last_edit_time\"]\n draft_object.save()\n\n event = {\"type\": \"drafts\", \"op\": \"update\", \"draft\": draft_object.to_dict()}\n send_event(user_profile.realm, event, [user_profile.id])",
"def create(self, dictionaries):\n \n return self.ep.post(self.endpoint, params=dictionaries)"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
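A minimal sketch of the draft_dicts payload that do_create_drafts above expects. The key names (type, to, topic, content, timestamp) come from further_validated_draft_dict, which this row calls and whose body appears among its negatives; the stream ID, user IDs, and text values are made-up placeholders, and the final call is left commented because it needs a real UserProfile and database.

import time

draft_dicts = [
    {
        "type": "stream",             # stream-message draft
        "to": [17],                   # exactly one stream ID (placeholder value)
        "topic": "release planning",
        "content": "Draft of the 2.0 announcement",
        "timestamp": time.time(),     # optional; validation falls back to "now"
    },
    {
        "type": "private",
        "to": [4, 9],                 # recipient user IDs (placeholder values)
        "topic": "",
        "content": "Reminder about the retro",
        "timestamp": time.time(),
    },
]

# With a real UserProfile and database these would be passed straight through:
# do_create_drafts(draft_dicts, user_profile)
print(len(draft_dicts), "draft dicts ready")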
Edit/update a single draft for a given user. Since the only place this method is being used from (apart from tests) is the edit_draft view, we assume that the drafts_dict is syntactically valid (i.e. it satisfies the draft_dict_validator). | def do_edit_draft(draft_id: int, draft_dict: Dict[str, Any], user_profile: UserProfile) -> None:
    try:
        draft_object = Draft.objects.get(id=draft_id, user_profile=user_profile)
    except Draft.DoesNotExist:
        raise ResourceNotFoundError(_("Draft does not exist"))
    valid_draft_dict = further_validated_draft_dict(draft_dict, user_profile)
    draft_object.content = valid_draft_dict["content"]
    draft_object.topic = valid_draft_dict["topic"]
    draft_object.recipient_id = valid_draft_dict["recipient_id"]
    draft_object.last_edit_time = valid_draft_dict["last_edit_time"]
    draft_object.save()
    event = {"type": "drafts", "op": "update", "draft": draft_object.to_dict()}
    send_event(user_profile.realm, event, [user_profile.id]) | [
"def user_draft_id(self, user_draft_id):\n\n self._user_draft_id = user_draft_id",
"def draft(self, user=None):\r\n self.status = self.STATUS.draft\r\n self.save(force_update=True, user=user)",
"def put(self, request):\r\n user = request.user\r\n check_user_status(user)\r\n\r\n user_id = user.id\r\n validate(instance=request.data,\r\n schema=schemas.restaurant_edit_draft_schema)\r\n body = request.data\r\n PendingRestaurant.field_validate_draft(body)\r\n restaurant = PendingRestaurant.edit_draft(user_id, body)\r\n return JsonResponse(model_to_json(restaurant))",
"def edit_draft(self):\r\n EmptyPromise(\r\n lambda: self.q(css='.create-draft').present,\r\n 'Wait for edit draft link to be present'\r\n ).fulfill()\r\n\r\n self.q(css='.create-draft').first.click()\r\n\r\n EmptyPromise(\r\n lambda: self.q(css='.editing-draft-alert').present,\r\n 'Wait for draft mode to be activated'\r\n ).fulfill()",
"def edit_user(user_id):\n\n db_user = User.query.get_or_404(user_id)\n\n return render_template(\"edit_user.html\",\n headline=f\"Edit Blogly {db_user.get_full_name()}\",\n user=db_user)",
"def edit_user(user_id):\n\n user = User.query.get_or_404(user_id)\n return render_template('users/edit.html', user=user)",
"def further_validated_draft_dict(\n draft_dict: Dict[str, Any], user_profile: UserProfile\n) -> Dict[str, Any]:\n\n content = normalize_body(draft_dict[\"content\"])\n\n timestamp = draft_dict.get(\"timestamp\", time.time())\n timestamp = round(timestamp, 6)\n if timestamp < 0:\n # While it's not exactly an invalid timestamp, it's not something\n # we want to allow either.\n raise JsonableError(_(\"Timestamp must not be negative.\"))\n last_edit_time = timestamp_to_datetime(timestamp)\n\n topic = \"\"\n recipient_id = None\n to = draft_dict[\"to\"]\n if draft_dict[\"type\"] == \"stream\":\n topic = truncate_topic(draft_dict[\"topic\"])\n if \"\\0\" in topic:\n raise JsonableError(_(\"Topic must not contain null bytes\"))\n if len(to) != 1:\n raise JsonableError(_(\"Must specify exactly 1 stream ID for stream messages\"))\n stream, sub = access_stream_by_id(user_profile, to[0])\n recipient_id = stream.recipient_id\n elif draft_dict[\"type\"] == \"private\" and len(to) != 0:\n to_users = get_user_profiles_by_ids(set(to), user_profile.realm)\n try:\n recipient_id = recipient_for_user_profiles(to_users, False, None, user_profile).id\n except ValidationError as e: # nocoverage\n raise JsonableError(e.messages[0])\n\n return {\n \"recipient_id\": recipient_id,\n \"topic\": topic,\n \"content\": content,\n \"last_edit_time\": last_edit_time,\n }",
"def edit_draft(self, message_id):\n return Draft(self, message_id).fetch()",
"def show_edit_form(user_id):\n user = User.query.get_or_404(user_id)\n return render_template('edit.html', user=user)",
"def userEdit(self):\n # check that we actually have json\n if hasattr(cherrypy.request, 'json'):\n data = cherrypy.request.json\n else:\n raise cherrypy.HTTPError(400, 'No data was given')\n\n # the newly changed fields\n myData = dict()\n\n # the MongoDB _id of the record to change\n myID = checkValidID(data)\n\n # the optional projectNumbers of the user\n if 'projectNumbers' in data:\n myData['projectNumbers'] = checkProjectNumbers(data)\n\n # optional keys, string\n for key in (\"firstName\", \"lastName\", \"netID\", \"course\"):\n if key in data:\n myData[key] = checkValidData(key, data, str)\n\n # update the document\n updateQuery = {'_id': ObjectId(myID)}\n updateRule = {'$set': myData}\n self._updateDocument(updateQuery, updateQuery, updateRule, collection=self.colUsers)\n\n myUser = self.colUsers.find_one({'_id': ObjectId(myID)})\n\n\n # TODO what if user doesn't have netID or course?\n # TODO separate templates for notifying students or managers or admin?\n self.email_handler.notifyUserEdit(**{\n 'email': myUser['email'],\n 'projectNumbers': myUser['projectNumbers'],\n 'firstName': myUser['firstName'],\n 'lastName': myUser['lastName'],\n 'netID': myUser['netID'],\n 'course': myUser['course']\n\n })",
"def user_edit(self):\n\n a = authenticated_userid(self.request)\n id = int(self.request.matchdict.get('id'))\n\n \"\"\" User one (1) is a bit special...\"\"\"\n if id is 1 and a is not 1:\n return HTTPNotFound()\n\n u = User.by_id(id)\n if not u:\n return HTTPNotFound()\n\n form = UserEditForm(self.request.POST, u,\n csrf_context=self.request.session)\n\n if self.request.method == 'POST' and form.validate():\n form.populate_obj(u)\n if u.password:\n u.password = u.pm.encode(form.password.data)\n else:\n del u.password\n self.request.session.flash('User %s updated' %\n (u.email), 'status')\n return HTTPFound(location=self.request.route_url('users'))\n return {'title': 'Edit user',\n 'form': form,\n 'id': id,\n 'myid': a,\n 'action': 'user_edit'}",
"def test_draft_by_user(self):\r\n r = self.F.RunFactory.create(status=\"active\")\r\n u = self.F.UserFactory.create()\r\n\r\n r.draft(user=u)\r\n\r\n self.assertEqual(self.refresh(r).modified_by, u)",
"def show_user_edit(user_id):\n\n user = User.query.get_or_404(user_id)\n return render_template(\"edit-user.html\", user=user)",
"def editUser(user_id):\n\n # user = User.query.order_by(User.last_name, User.first_name).all()[int(id) - 1]\n user = User.query.get_or_404(user_id)\n\n return render_template('editUser.html', user=user)",
"def show_edit_form(user_id):\n\n user = User.query.get_or_404(user_id)\n\n return render_template(\"users/edit_user.html\", user=user)",
"def put(self, user_id):\r\n return update_user(request, user_id)",
"def show_edit_user_page(user_id):\n user = User.query.get_or_404(user_id)\n return render_template('edit_user.html', user=user)",
"def put(document_id):\n try:\n if document_id is None:\n return resource_utils.path_param_error_response('document ID')\n\n # Quick check: must provide an account ID.\n account_id = resource_utils.get_account_id(request)\n if account_id is None:\n return resource_utils.account_required_response()\n\n # Verify request JWT and account ID\n if not authorized(account_id, jwt):\n return resource_utils.unauthorized_error_response(account_id)\n\n request_json = request.get_json(silent=True)\n # Disable schema validation: draft may be partial/incomplele.\n # valid_format, errors = schema_utils.validate(request_json, 'draft', 'ppr')\n # if not valid_format:\n # return validation_error_response(errors, VAL_ERROR)\n\n # Save draft statement update: BusinessException raised if failure.\n try:\n draft = Draft.update(request_json, document_id)\n draft.save()\n return draft.json, HTTPStatus.OK\n except BusinessException as exception:\n return resource_utils.business_exception_response(exception)\n except Exception as db_exception: # noqa: B902; return nicer default error\n return resource_utils.db_exception_response(db_exception, account_id, 'PUT draft id=' + document_id)\n\n except Exception as default_exception: # noqa: B902; return nicer default error\n return resource_utils.default_exception_response(default_exception)",
"def update( self, trans, id, payload, **kwd ):\n current_user = trans.user\n user_to_update = self.user_manager.by_id( self.decode_id( id ) )\n\n # only allow updating other users if they're admin\n editing_someone_else = current_user != user_to_update\n is_admin = trans.api_inherit_admin or self.user_manager.is_admin( current_user )\n if editing_someone_else and not is_admin:\n raise exceptions.InsufficientPermissionsException( 'you are not allowed to update that user', id=id )\n\n self.user_deserializer.deserialize( user_to_update, payload, user=current_user, trans=trans )\n return self.user_serializer.serialize_to_view( user_to_update, view='detailed' )"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
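For reference, the shapes of the three client events emitted by the draft functions in these neighboring rows (add, update, remove), copied from the event dicts in their code. The example_draft payload is only a stand-in for Draft.to_dict(); its inner fields are not taken from the source.

example_draft = {"id": 42, "topic": "release planning", "content": "..."}  # stand-in for Draft.to_dict()

add_event = {"type": "drafts", "op": "add", "drafts": [example_draft]}
update_event = {"type": "drafts", "op": "update", "draft": example_draft}
remove_event = {"type": "drafts", "op": "remove", "draft_id": example_draft["id"]}

for event in (add_event, update_event, remove_event):
    print(event["op"], "->", sorted(event))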
Delete a draft belonging to a particular user. | def do_delete_draft(draft_id: int, user_profile: UserProfile) -> None:
    try:
        draft_object = Draft.objects.get(id=draft_id, user_profile=user_profile)
    except Draft.DoesNotExist:
        raise ResourceNotFoundError(_("Draft does not exist"))
    draft_id = draft_object.id
    draft_object.delete()
    event = {"type": "drafts", "op": "remove", "draft_id": draft_id}
    send_event(user_profile.realm, event, [user_profile.id]) | [
"def delete_draft(draft_uuid):\n api_request('delete', api_url('drafts', str(draft_uuid)))",
"def delete(self,\n draft_id,\n ):\n return self._invoke('delete',\n {\n 'draft_id': draft_id,\n })",
"def _delete_draft_message(draft):\n if draft is not None:\n draft.key.delete()\n return HttpTextResponse('OK')",
"def test_delete_draft_unsubmitted(\n draft, running_app, service, requests_service):\n # Delete the draft\n req_id = draft.data['parent']['review']['id']\n res = service.delete_draft(running_app.superuser_identity, draft.id)\n\n # Request was also deleted\n with pytest.raises(NoResultFound):\n requests_service.read(running_app.superuser_identity, req_id)",
"def test_delete_draft_when_submitted(\n draft, running_app, service):\n service.review.submit(running_app.superuser_identity, draft.id).to_dict()\n\n # Delete the draft\n with pytest.raises(ReviewStateError):\n service.delete_draft(running_app.superuser_identity, draft.id)",
"def delete_drafts(request):\n query = models.Comment.query(\n models.Comment.author == request.user, models.Comment.draft == True,\n ancestor=request.issue.key)\n keys = query.fetch(keys_only=True)\n ndb.delete_multi(keys)\n request.issue.calculate_draft_count_by_user()\n request.issue.put()\n return HttpResponseRedirect(\n reverse(publish, args=[request.issue.key.id()]))",
"def delete_user(user):\n logging.debug('{CRUD_operations} BEGIN function delete_user()')\n logging.debug('{CRUD_operations} Data received: user: %s', user)\n user.is_deleted = True\n logging.debug('{CRUD_operations} END function delete_user()')",
"def delete_user(self, user):\n # noinspection PyUnresolvedReferences\n self.delete(user)",
"def delete_user(request, project_id, project_user_id):\n projectuser = get_object_or_404(\n ProjectUser, project_user=project_user_id)\n\n projectuser.delete()\n\n messages.success(\n request, f'{projectuser.project_user} '\n f'deleted and will no longer have access to the Project')\n\n return redirect(reverse('project_admin', args=[project_id]))",
"def delete_user():",
"def DeleteDraft(host, change):\n path = _GetChangePath(change)\n try:\n FetchUrl(host, path, reqtype='DELETE', ignore_204=True, ignore_404=False)\n except GOBError as e:\n # On success, gerrit returns status 204; anything else is an error.\n if e.http_status != 204:\n raise\n else:\n raise GOBError(\n 200, 'Unexpectedly received a 200 http status while deleting draft %r'\n % change)",
"def delete_user(self, user_id):\r\n self._client.DarkBot.Orders.delete_one({\"_id\": user_id})",
"def delete_item(self, id: str, user: User) -> bool:",
"def delete_user(session, user):\n ret = session.query(Log).filter(Log.pseudo == user).delete()\n session.commit()\n\n LOGGER.debug(str(ret) + \" messages deleted\")\n return ret",
"def user_draft_id(self, user_draft_id):\n\n self._user_draft_id = user_draft_id",
"def deleteUser(self, query):\n result = self.collection.find_one_and_delete(query)\n\n return result",
"def delete(self, version: int, user: unicode) -> None:\n ...",
"def test_delete_draft_application_as_valid_user_success(self, application_type, user):\n draft = self.applications[application_type]\n headers = self.users[user]\n number_of_applications = BaseApplication.objects.all().count()\n url = reverse(\"applications:application\", kwargs={\"pk\": draft.id})\n\n response = self.client.delete(url, **headers)\n\n self.assertEqual(response.status_code, HTTP_200_OK)\n self.assertEqual(response.json()[\"status\"], strings.Applications.Generic.DELETE_DRAFT_APPLICATION)\n self.assertEqual(number_of_applications - 1, BaseApplication.objects.all().count())\n self.assertTrue(draft not in BaseApplication.objects.all())",
"def delete(self, request, *args, **kwargs):\n thread = self.get_thread()\n if not thread:\n raise NotFound(code=status.HTTP_404_NOT_FOUND)\n\n thread.userthread_set.filter(user=request.user).update(deleted=True)\n return Response(status=status.HTTP_200_OK)"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
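A runnable stand-in for the lookup/delete/notify flow of do_delete_draft above, using a plain dict in place of Draft.objects and a KeyError in place of ResourceNotFoundError; the draft id 7 and its content are placeholders.

# Plain-dict stand-in for the ORM flow: look up by id, fail if missing, delete, notify.
drafts_by_id = {7: {"id": 7, "content": "old draft"}}  # placeholder store

def delete_draft_sketch(draft_id, drafts=drafts_by_id):
    if draft_id not in drafts:
        raise KeyError("Draft does not exist")   # stands in for ResourceNotFoundError
    del drafts[draft_id]
    return {"type": "drafts", "op": "remove", "draft_id": draft_id}

print(delete_draft_sketch(7))  # {'type': 'drafts', 'op': 'remove', 'draft_id': 7}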
Get zero version `0.0.0` | def zero(cls: Type[_R]) -> _R:
    return cls("0.0.0") | [
"def get_initial_version(self):\n if self.config.semver:\n return \"0.0.0\"\n else:\n return \"0\"",
"def get_version():\n ver = '0.0.0'\n req = restcall(0, 'config', 10.0)\n if req['text'] is not None:\n try: \n tree = ET.fromstring(req['text'])\n ver = tree.findall('app_version')[0].text\n if ver is None:\n ver = '0.0.0'\n _LOGGER.info(\"ISY: firmware version: %s\", ver)\n except ET.ParseError:\n _LOGGER.error(\"No version information found on ISY.\")\n return ver",
"def make_zero_filled_version(self, value):\n match = re.search(self.firmware_pattern, value)\n try:\n if match:\n # value = 'esp8266_0.0.6.bin'\n # firmware_pattern = r'^(.*?)_([0-9]+)\\.([0-9]+)\\.([0-9]+)\\.bin'\n major = match.group(2).zfill(3)\n minor = match.group(3).zfill(3)\n patch = match.group(4).zfill(5)\n return '%s%s%s' % (major, minor, patch)\n except IndexError:\n pass\n return value",
"def test_parse_version(self):\n version = VersionNumberScaleMeasurement.parse_version(None)\n self.assertEqual(Version(\"0\"), version)",
"def get_simple_version(version_string):\n if not version_string:\n version_string = ''\n return VersionString(re.sub('[<=>]', '', version_string))",
"def __getNullVersion(self):\n print(\"Can't get version\")\n return \"unknownVendor\", \"unknownRelease\"",
"def test_empty_version_string(self):\n self.assertEqual(selectors._convert_tpr(\"\"), Version(\"0\"))",
"def looseversion(cls):\n ver = cls.version()\n if ver is None:\n return LooseVersion(\"0.0.0\")\n\n vinfo = ver.rstrip().split(\"-\")\n try:\n int(vinfo[-1], 16)\n except ValueError:\n githash = \"\"\n else:\n githash = \".\" + vinfo[-1]\n\n # As of FreeSurfer v6.0.0, the final component is a githash\n if githash:\n if vinfo[3] == \"dev\":\n # This will need updating when v6.0.1 comes out\n vstr = \"6.0.0-dev\" + githash\n elif vinfo[5][0] == \"v\":\n vstr = vinfo[5][1:]\n elif len([1 for val in vinfo[3] if val == \".\"]) == 2:\n \"version string: freesurfer-linux-centos7_x86_64-7.1.0-20200511-813297b\"\n vstr = vinfo[3]\n else:\n raise RuntimeError(\"Unknown version string: \" + ver)\n # Retain pre-6.0.0 heuristics\n elif \"dev\" in ver:\n vstr = vinfo[-1] + \"-dev\"\n else:\n vstr = ver.rstrip().split(\"-v\")[-1]\n\n return LooseVersion(vstr)",
"def get_version_number():\n return [0, 1, 0]",
"def delete_closing_zero(model_version: str) -> str:\r\n if model_version[-2:] == \".0\":\r\n return model_version[:-2]\r\n return model_version",
"def test_get_version(self):\n pass",
"def get_version(version=None):\n parts = 2 if version[2] == 0 else 3\n return '.'.join(str(x) for x in version[:parts])",
"def version_info(): \n return VERSION_s",
"def get_version(self):\n url = '{}/version'.format(self.url)\n try:\n r = requests.get(url)\n if r.status_code == 200:\n return r.json()['orionld version']\n except Exception as e:\n pass\n return ''",
"def Version(self) -> _n_0_t_12:",
"def get_version():\n return \".\".join(map(str, VERSION))",
"def get_version_number():\n return [0, 2, 0]",
"def test_get_short_version(self):\n pass",
"def _get_version_string(self):\n vstring = \"0.0.0\"\n if len(self.lines) == 0:\n self.lines.append(\"# Copyright %d %s\\n\" % (YEAR, AUTHOR))\n self.lines/append(\"\\n\")\n self.lines.append(\"version = %s\\n\" % vstring)\n\n # locate version string\n lc = 0\n for l in self.lines:\n m = version_re.match(l)\n if m:\n vstring = m.group(1)\n break\n else:\n lc+=1\n\n self.lc = lc\n self.vstring = vstring\n print(\"%s found on line %d\" % (vstring, lc))"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
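The zero() constructor above simply parses the literal "0.0.0". Assuming the class wraps a PEP 440 version (the BaseVersion fields used elsewhere in these rows match packaging's internals), the result can be approximated with packaging.version.Version, provided the packaging library is installed.

from packaging.version import Version

zero = Version("0.0.0")
print(zero.release)        # (0, 0, 0)
print(zero.is_prerelease)  # False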
Get next micro version. | def bump_micro(self: _R, inc: int = 1) -> _R:
    if not self.is_stable:
        return self.get_stable().bump_micro(inc - 1)
    return self._replace(
        BaseVersion(
            epoch=0,
            release=(self.major, self.minor, self.micro + inc),
            pre=None,
            post=None,
            dev=None,
            local=None,
        )
    ) | [
"def next_version(major=False, minor=False, patch=True):\n try:\n r = Release.objects.latest()\n except Release.DoesNotExist:\n return Version('0.0.0')\n\n v = r.version\n if major:\n v = v.next_major()\n elif minor:\n v = v.next_minor()\n else:\n v = v.next_patch()\n return v",
"def next_version(self):\n try:\n release = self.release_set.order_by('-created')[0]\n except IndexError:\n return \"0.1.0\"\n\n major, minor, bugfix = release.version.split(\".\")\n\n return \"{}.{}.{}\".format(major, int(minor) + 1, bugfix)",
"def _get_next_version(self):\n current_head = self.entity.get_change()\n if current_head:\n return current_head.version + 1\n return 1",
"def test_get_next_version(self):\n ver = self.u.get_next_version(version=U.UCS_Version((MAJOR, MINOR, PATCH)))\n self.assertEqual(None, ver)",
"def next_version(v: str) -> str:\n vobj = Version(v)\n if vobj.is_prerelease:\n return str(vobj.base_version)\n vs = list(vobj.release)\n vs[1] += 1\n vs[2:] = [0] * len(vs[2:])\n s = \".\".join(map(str, vs))\n if vobj.epoch:\n s = f\"{vobj.epoch}!{s}\"\n return s",
"def test_get_next_version_MINOR(self):\n self._uri({\n '%d.%d/maintained/%d.%d-%d/' % (MAJOR, MINOR + 1, MAJOR, MINOR + 1, 0): '',\n })\n ver = self.u.get_next_version(version=U.UCS_Version((MAJOR, MINOR, PATCH)))\n self.assertEqual('%d.%d-%d' % (MAJOR, MINOR + 1, 0), ver)",
"def next_version(self, name):\n return self.current_version(name) + 1",
"def test_get_next_version_PATCH(self):\n self._uri({\n '%d.%d/maintained/%d.%d-%d/' % (MAJOR, MINOR, MAJOR, MINOR, PATCH + 1): '',\n })\n ver = self.u.get_next_version(version=U.UCS_Version((MAJOR, MINOR, PATCH)))\n self.assertEqual('%d.%d-%d' % (MAJOR, MINOR, PATCH + 1), ver)",
"def test_get_next_version_MAJOR(self):\n self._uri({\n '%d.%d/maintained/%d.%d-%d/' % (MAJOR + 1, 0, MAJOR + 1, 0, 0): '',\n })\n ver = self.u.get_next_version(version=U.UCS_Version((MAJOR, MINOR, PATCH)))\n self.assertEqual('%d.%d-%d' % (MAJOR + 1, 0, 0), ver)",
"def microversion(self, microversion):\n\n self._microversion = microversion",
"def get_stable(self: _R) -> _R:\n return self._replace(\n BaseVersion(\n epoch=0,\n release=(self.major, self.minor, self.micro),\n pre=None,\n post=None,\n dev=None,\n local=None,\n )\n )",
"def test_get_next_version_PATCH99(self):\n self._uri({\n '%d.%d/maintained/%d.%d-%d/' % (MAJOR, MINOR, MAJOR, MINOR, 100): '',\n '%d.%d/maintained/%d.%d-%d/' % (MAJOR, MINOR + 1, MAJOR, MINOR + 1, 0): '',\n })\n ver = self.u.get_next_version(version=U.UCS_Version((MAJOR, MINOR, 99)))\n self.assertEqual('%d.%d-%d' % (MAJOR, MINOR + 1, 0), ver)",
"def get_prerelease_package_version(self, production: bool = False) -> str:\n rc = 1\n if describe := get_git_describe(CONFIG.mpy_path.as_posix()):\n ver, rc, _ = describe.split(\"-\")\n base = bump_version(Version(ver), minor_bump=True)\n rc = int(rc)\n return str(bump_version(base, rc=rc))\n else:\n raise ValueError(\"cannot determine next version number micropython\")",
"def get_next_version(self, sid):\n return self.file_system.get_next_version(sid)",
"def test_get_next_version_MAJOR99(self):\n self._uri({\n '%d.%d/maintained/%d.%d-%d/' % (100, 0, 100, 0, 0): '',\n })\n ver = self.u.get_next_version(version=U.UCS_Version((99, MINOR, PATCH)))\n self.assertEqual(None, ver)",
"def test_most_recent_micro_release_considered(self):\n today = datetime.date.today()\n day = datetime.timedelta(1)\n r = Release.objects.create(version='1.8',\n date=today - 15 * day)\n d = DocumentRelease.objects.create(release=r)\n r2 = Release.objects.create(version='1.8.1',\n date=today - 5 * day)\n\n # The EOL date of the first release is set automatically.\n r.refresh_from_db()\n self.assertEqual(r.eol_date, r2.date)\n\n # Since 1.8.1 is still supported, docs show up as supported.\n self.assertTrue(d.is_supported)\n self.assertFalse(d.is_dev)",
"def get_version_by_number(version_manager, version_number, request):\n return version_manager.versions[version_number - 1]",
"def get_next_package_version(self, prod: bool = False) -> str:\n base = Version(self.pkg_version)\n if pypi_versions := get_pypi_versions(self.package_name, production=prod, base=base):\n self.pkg_version = str(pypi_versions[-1])\n return self.bump()",
"def test_get_next_version_MINOR99(self):\n self._uri({\n '%d.%d/maintained/%d.%d-%d/' % (MAJOR, 100, MAJOR, 100, 0): '',\n '%d.%d/maintained/%d.%d-%d/' % (MAJOR + 1, 0, MAJOR + 1, 0, 0): '',\n })\n ver = self.u.get_next_version(version=U.UCS_Version((MAJOR, 99, 0)))\n self.assertEqual('%d.%d-%d' % (MAJOR + 1, 0, 0), ver)"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
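A self-contained sketch of the bump_micro rule above, written against plain (major, minor, micro) tuples instead of the row's BaseVersion/_replace machinery. The is_stable flag is assumed to mean "no pre/post/dev marker", which is why an unstable version bumps by inc - 1 from its stable base.

def bump_micro_sketch(release, is_stable, inc=1):
    # release is a (major, minor, micro) tuple standing in for the real version object
    major, minor, micro = release
    if not is_stable:
        # dropping the pre/post/dev marker already counts as one step forward
        return bump_micro_sketch((major, minor, micro), True, inc - 1)
    return (major, minor, micro + inc)

print(bump_micro_sketch((1, 2, 3), is_stable=True))   # (1, 2, 4)
print(bump_micro_sketch((1, 2, 3), is_stable=False))  # (1, 2, 3): the stable base is the bump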
Get stable version from pre or post release. | def get_stable(self: _R) -> _R:
    return self._replace(
        BaseVersion(
            epoch=0,
            release=(self.major, self.minor, self.micro),
            pre=None,
            post=None,
            dev=None,
            local=None,
        )
    ) | [
"def latest_stable_release_version(self):\n\n result = requests.get(constants.CHROMEDRIVER_CHROMIUM_URL)\n soup = BeautifulSoup(result.content, 'html.parser')\n ul = soup.select_one(constants.UL_RELEASES)\n for li in ul:\n text = li.text.replace(u'\\u00A0', ' ')\n if text[:len(constants.LATEST_STABLE_RELEASE_STR)].lower() == constants.LATEST_STABLE_RELEASE_STR.lower():\n try:\n release = li.a['href'].split('path=')[-1:][0][:-1]\n except TypeError:\n return\n self.__check_release(release)\n return release",
"def release_version(self):\n if self.tag_distance == 0:\n return self.latest_tag\n return None",
"def next_version(major=False, minor=False, patch=True):\n try:\n r = Release.objects.latest()\n except Release.DoesNotExist:\n return Version('0.0.0')\n\n v = r.version\n if major:\n v = v.next_major()\n elif minor:\n v = v.next_minor()\n else:\n v = v.next_patch()\n return v",
"def get_latest(self, pre_ok=False, major=None):\n\n if self.having_asset:\n # only formal releases which we enumerated above already, have assets\n # so there is no point looking in the tags/graphql below\n # return whatever we got\n return None\n\n # formal release may not exist at all, or be \"late/old\" in case\n # actual release is only a simple tag so let's try /tags\n ret = self.find_in_tags(pre_ok, major)\n\n return ret",
"def is_release():\n return VERSION[-1]",
"def get_prerelease_package_version(self, production: bool = False) -> str:\n rc = 1\n if describe := get_git_describe(CONFIG.mpy_path.as_posix()):\n ver, rc, _ = describe.split(\"-\")\n base = bump_version(Version(ver), minor_bump=True)\n rc = int(rc)\n return str(bump_version(base, rc=rc))\n else:\n raise ValueError(\"cannot determine next version number micropython\")",
"def latest_beta_release_version(self):\n\n result = requests.get(constants.CHROMEDRIVER_CHROMIUM_URL)\n soup = BeautifulSoup(result.content, 'html.parser')\n ul = soup.select_one(constants.UL_RELEASES)\n for li in ul:\n text = li.text.replace(u'\\u00A0', ' ')\n if text[:len(constants.LATEST_BETA_RELEASE_STR)].lower() == constants.LATEST_BETA_RELEASE_STR.lower():\n try:\n release = li.a['href'].split('path=')[-1:][0][:-1]\n except TypeError:\n return\n self.__check_release(release)\n return release",
"def stable():\n env.branch = 'stable'",
"def tbb_stable_version():\n response = requests.get(TBB_VERSION_URL)\n return response.json()['stable']['latest_version']",
"def stable():\r\n env.branch = 'stable'",
"def get_initial_version(self):\n if self.config.semver:\n return \"0.0.0\"\n else:\n return \"0\"",
"def getEarliestAppropriateStepVersion(self, codeVers=None):\n if codeVers is None:\n codeVers = VersionBase.parse('Zenoss %s' % VERSION)\n if codeVers.micro >= 70:\n # We are in a dev/beta release. Anything back through the start\n # of this dev/beta cycle is appropriate.\n earliestAppropriate = Version(codeVers.major, codeVers.minor, 70)\n elif codeVers.minor > 0:\n # We are in a regular release that is not a N.0 version.\n # Anything back through the previous dev/beta cycle is\n # appropriate\n earliestAppropriate = Version(codeVers.major, codeVers.minor-1, 70)\n else:\n # This is a X.0.Y release. This is tough because we don't know\n # what the minor version was for the last release of version X-1.\n # We take the reasonable guess that the last version of X-1 that\n # we see migrate steps for was indeed the last minor release\n # of X-1.\n beforeThis = Version(codeVers.major)\n # self.allSteps is ordered by version, so just look back through\n # all steps for the first one that predates beforeThis.\n for s in reversed(self.allSteps):\n if s.version < beforeThis:\n lastPrevious = s.version\n break\n else:\n # We couldn't find any migrate step that predates codeVers.\n # Something is wrong, this should never happen.\n raise MigrationFailed('Unable to determine the appropriate '\n 'migrate script versions.')\n if lastPrevious.micro >= 70:\n earliestAppropriate = Version(lastPrevious.major,\n lastPrevious.minor, 70)\n else:\n earliestAppropriate = Version(lastPrevious.major,\n lastPrevious.minor-1, 70)\n return earliestAppropriate",
"def next_version(self):\n try:\n release = self.release_set.order_by('-created')[0]\n except IndexError:\n return \"0.1.0\"\n\n major, minor, bugfix = release.version.split(\".\")\n\n return \"{}.{}.{}\".format(major, int(minor) + 1, bugfix)",
"def get_version():\n return 'DEV'",
"def test_head_is_post_release(self):\n silent_call('git', 'init')\n self._set_author()\n silent_call('git', 'commit', '--allow-empty', '-m', 'Initial Commit')\n silent_call('git', 'tag', 'r1.2.3')\n silent_call('git', 'commit', '--allow-empty', '-m', 'another commit')\n self.assertTrue( versiontag.get_version().startswith('r1.2.3-1-') )\n self.assertEqual(versiontag.get_version(pypi=True), '1.2.3.post1')\n\n silent_call('git', 'commit', '--allow-empty', '-m', 'another commit')\n self.assertTrue(versiontag.get_version().startswith('r1.2.3-2-'))\n self.assertEqual(versiontag.get_version(pypi=True), '1.2.3.post2')\n\n silent_call('git', 'tag', 'r1.2.4')\n self.assertTrue( versiontag.get_version().startswith('r1.2.4') )\n self.assertEqual(versiontag.get_version(pypi=True), '1.2.4')",
"def test_request_estable_version(self):\n current_stable_version = get_stable_version()\n self.assertIsNotNone(current_stable_version)",
"def get_next_package_version(self, prod: bool = False) -> str:\n base = Version(self.pkg_version)\n if pypi_versions := get_pypi_versions(self.package_name, production=prod, base=base):\n self.pkg_version = str(pypi_versions[-1])\n return self.bump()",
"def _get_sonic_release(self):\n\n output = self.command(\"sonic-cfggen -y /etc/sonic/sonic_version.yml -v release\")\n if len(output['stdout_lines']) == 0:\n # get release from OS version\n if self.os_version:\n return self.os_version.split('.')[0][0:6]\n return 'none'\n return output[\"stdout_lines\"][0].strip()",
"def get_release():\n # Try to get the current release from the version.json file generated by the\n # CI during the Docker image build\n try:\n with open(os.path.join(BASE_DIR, \"version.json\"), encoding=\"utf8\") as version:\n return json.load(version)[\"version\"]\n except FileNotFoundError:\n return \"NA\" # Default: not available"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
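A rough equivalent of get_stable above using packaging.version.Version (assumed available): rebuild the version from its release tuple only, which drops any pre, post, dev, or local segment.

from packaging.version import Version

def get_stable_sketch(v: Version) -> Version:
    # keep only the release tuple; pre/post/dev/local are dropped by re-parsing
    return Version(".".join(str(part) for part in v.release))

print(get_stable_sketch(Version("1.4.0rc2")))     # 1.4.0
print(get_stable_sketch(Version("2.0.1.post3")))  # 2.0.1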
Helper function for checkPass: returns the first element of charList that works for the password at index i. If no character matches at index i, prints i and returns an empty string instead of returning i. | def findChar(username, url, charList, i):
    for ch in charList:
        if(checkPasswordCharacter(ch, username, url, index = i)):
            return ch
    #only runs if no ch in charList match:
    # return i #oof, there's no match if i is out of bounds, e.g. len(password) < i
    print("Missing: " + str(i)) #so I know when it's not a match
    return "" #return an empty string instead
    # Note to self: should not return an _ because it'll match an _ if wildCards are true (default).
    # If wildCards is false, this will just skip characters that don't match anything! | [
"def validate(password):\n length = len(password)\n invalid_chars = [\" \", \"_\", \"-\"]\n spec_chars = list(\"!#$%&'()*+,./:;<=>?@[]^`{|}~\")\n\n # Checks for validitity of the password\n if length < 8:\n return \"Invalid\"\n else:\n for i in password:\n for j in invalid_chars:\n if i == j:\n return \"Invalid\"\n\n # Checks for a letter in the string\n i = 0\n is_alpha = False\n while i < len(password) and is_alpha is False:\n if password[i].isalpha() is True:\n is_alpha = True\n else:\n is_alpha = False\n i += 1\n\n # Checks for a lower case letter in the string\n i = 0\n is_lower = False\n while i < len(password) and is_lower is False:\n if password[i].islower() is True:\n is_lower = True\n else:\n is_lower = False\n i += 1\n\n # Checks for a upper case letter in the string\n i = 0\n is_upper = False\n while i < len(password) and is_upper is False:\n if password[i].isupper() is True:\n is_upper = True\n else:\n is_upper = False\n i += 1\n\n # Checks for an integer in the string\n i = 0\n is_numeric = False\n while i < len(password) and is_numeric is False:\n if password[i].isnumeric() is True:\n is_numeric = True\n else:\n is_numeric = False\n i += 1\n\n # If the first four conditions (above) are true,\n # it will check for a special character in the string\n spec_ch = False\n for i in password:\n for j in spec_chars:\n leave_loop = False\n while spec_ch is False and leave_loop is False:\n if i == j:\n spec_ch = True\n else:\n spec_ch = False\n leave_loop = True\n\n if is_alpha and is_lower and is_upper and is_numeric and spec_ch is True:\n return \"Secure\"\n else:\n return \"Insecure\"\n\n pass",
"def valid_password(lower, upper, letter, password):\n # Note the -1 to turn 1 indexing into 0 indexing\n matches = [idx for idx in (lower, upper) if password[idx - 1] == letter]\n return len(matches) == 1",
"def check_pass(password):\n # big_chain : length of longest chain of repeated symbols\n # c_start : index at which big_chain starts\n big_chain = 0\n cur_loc = 0\n for symb in password:\n if big_chain == 0:\n l_symb = symb\n cur_chain = 1\n big_chain = 1\n c_start = 0\n cur_c = cur_loc\n cur_loc += 1\n continue\n if symb == l_symb:\n cur_chain += 1\n else:\n cur_chain = 1\n cur_c = cur_loc\n if cur_chain > big_chain:\n big_chain = cur_chain\n c_start = cur_c\n cur_loc += 1\n l_symb = symb\n\n # return or repeat, need big_chain, c_start\n if big_chain < 2:\n return False\n if big_chain == 2:\n return True\n return (check_pass(password[:c_start])\n or check_pass(password[c_start+big_chain:]))",
"def rule_1(password):\n characters = list(password)\n for index in range(len(characters) - 2):\n a, b, c = characters[index:index + 3]\n if ord(a) + 1 == ord(b) == ord(c) - 1:\n return True\n return False",
"def find_pass(pass_list, service):\r\n for pass_info in pass_list:\r\n if pass_info[1] == service:\r\n return pass_info[2]",
"def legal_password(s):\n pass",
"def find_password( door_id ):\n\n\tpassword = [ '', '', '', '', '', '', '', '' ]\n\tincrementor = 0\n\t\n\tfor _i in range( 8 ):\n\t\tchar = ''\n\t\twhile not char:\n\t\t\t#_do_stupid_movie_password_animation( password, _i )\n\n\t\t\tinput = door_id + str( incrementor )\n\t\t\tm = hashlib.md5( )\n\t\t\tm.update( input.encode( 'utf-8' ) )\n\t\t\thash = m.hexdigest( )\n\n\t\t\tif hash.startswith( '00000' ):\n\t\t\t\tloc = hash[ 5 ]\n\t\t\t\tchar = hash[ 6 ]\n\t\t\t\tif loc.isdigit( ):\n\t\t\t\t\tloc = int( loc )\n\t\t\t\t\tif 0 <= loc <= ( len( password ) - 1 ) and not password[ loc ]:\n\t\t\t\t\t\tpassword[ loc ] = char\t\n\t\t\t\t\telse:\n\t\t\t\t\t\tchar = ''\n\t\t\t\telse:\n\t\t\t\t\tchar = ''\n\t\t\t\n\t\t\tincrementor += 1\n\n\tpassword = ''.join( password )\n\treturn password",
"def is_valid_corporate_password(password, letter, i, j):\n has_letter_at_i = password[i - 1] == letter\n has_letter_at_j = password[j - 1] == letter\n if has_letter_at_i and has_letter_at_j:\n return False\n return has_letter_at_i or has_letter_at_j",
"def passwd_prompt():\n\n print(\"Passwords MUST contain AT LEAST: one lower-case letter,\" \n \"one number, one symbol, and be a MINIMUM of 8 characters in length,\"\n \"e.g. r!ght2oE\")\n\n while True:\n\n passy = getpass.getpass(prompt=\"Enter password for user: \")\n confirm_passy = getpass.getpass(prompt=\"To confirm, \" \\\n \"re-enter password: \")\n\n # check for the following conditions: \n # user input matches\n # length of input is at least 8 characters\n # input contains at least 1 number \n # input contains at least 1 letter \n # input contains at least 1 symbol \n \n if passy != confirm_passy \\\n or len(passy) <8 \\\n or not re.search('\\d', passy) \\\n or not re.search(r\"[a-z]\",passy) \\\n or not re.search(r\"[ !#$%&'()*+,-./[\\\\\\]^_`{|}~\"+r'\"]', passy): \n \n print(TRY_AGAIN)\n continue \n \n else:\n print(\"Password meets complexity requirement. Continuing...\") \n return passy",
"def find_valid_passwords(values: List[str]) -> int:\n search_reg = re.compile(\n r\"\\b(?P<first>[0-9]+)-(?P<second>[0-9]+)\\s(?P<letter>[a-z]):\\s(?P<password>[a-z]+)\")\n valid_password_count = 0\n\n for value in values:\n results = search_reg.search(value)\n target_char = results.group(\"letter\")\n password = results.group(\"password\")\n first_index = int(results.group(\"first\")) - 1\n second_index = int(results.group(\"second\")) - 1\n\n if (target_char == password[first_index]) != (target_char == password[second_index]):\n valid_password_count += 1\n\n return valid_password_count",
"def check_password_level(password: str) -> int:\n\n contain_digit = any(c.isdigit() for c in password)\n contain_lower = any(c.islower() for c in password)\n contain_upper = any(c.isupper() for c in password)\n contain_punctuation = any(c in string.punctuation for c in password)\n longer_than_eight = len(password) >= 8\n\n if contain_lower and contain_upper and contain_digit and contain_punctuation:\n return 4\n elif contain_lower and contain_digit and (contain_upper or longer_than_eight):\n return 3\n elif contain_lower and (contain_digit or longer_than_eight):\n return 2\n elif contain_lower:\n return 1\n else:\n return 1",
"def method2(password):\n score = 0\n pattern1 = \"^[a-zA-Z].*[a-zA-Z]$\"\n pattern2 = \"[0-9]\"\n pattern3 = \"[\\S]{5,20}\"\n if re.search(pattern3, password):\n score += 1\n if re.search(pattern1, password):\n score += 1\n if re.search(pattern2, password):\n score +=1\n if \".\" in password:\n score +=1\n if \"-\" in password:\n score +=1\n return passRank[score]",
"def validate2(positions, letter, passowrd):\n return sum(map(lambda a: a == letter, [\n passowrd[i - 1] for i in positions if i - 1 < len(passowrd)])) == 1",
"def num_pw_found(byte_string):\n hasher = hashlib.sha1()\n hasher.update(byte_string)\n digest = hasher.hexdigest().upper()\n pw_list = requests.get('https://api.pwnedpasswords.com/range/{}'.format(digest[:5]))\n for line in pw_list.text.split('\\n'):\n info = line.split(':')\n if info[0] == digest[5:]:\n return int(info[1])\n return 0",
"def password_validator(password):\n letters = [char for char in password if char.isalpha()]\n digits = [char for char in password if char.isdigit()]\n special = [char for char in password if char in \"!@#$%^&*()_-+=?<>\"]\n\n if \" \" in password:\n return None\n\n strength = 0\n strength += len(letters) > 0\n strength += len(digits) > 0\n strength += len(special) > 0\n strength += len(password) >= 8\n\n if len(password) < 8:\n if len(password) == len(letters):\n return 2\n return min(strength, 2)\n\n return strength",
"def testPassword(cryptPass, dictionaryFile):\n #salt = cryptPass[0:2]\n salt = crypt.mksalt(crypt.METHOD_SHA512) # Updated for SHA512 encrypted passwords\n dictFile = open(dictionaryFile, 'r')\n for word in dictFile.readlines():\n word = word.strip('\\n')\n cryptWord = crypt.crypt(word, salt)\n \n if cryptWord == cryptPass:\n print('[+] Found Password: ' + word + '\\n')\n return\n print('[-] Password Not Found.\\n')\n return",
"def matchpassword(username, password): # create function called matchpassword\n \n List = [] # Initialize list\n\n try:\n f = open(\"C:\\Portable Python 3.2.5.1\\password.txt\",\"r\") # opens password.txt\n List = f.readlines() # Reads password.txt into a list\n f.close() # Closes password.txt file\n except IOError:\n print(\"I/O error: Unable to read in File f\") # Exception if I/O Error\n\n for x in range(0,len(List),2): # Loop thru list to determine if match\n Listlower = List[x].lower()\n if((username.lower() + '\\n' == Listlower) and (password + '\\n' == List[x + 1])):\n return 'True'\n else:\n continue\n return 'False'",
"def password_generator(password_lenght):\r\n password = \"\"\r\n\r\n try:\r\n if password_lenght >=1:\r\n for i in range(password_lenght):\r\n choice = random.choice(symbols)\r\n password += str(choice)\r\n print(f\"Your password is: {password} \\nTnank you!\")\r\n return password\r\n else:\r\n return 0\r\n except Exception:\r\n pass",
"def extremely_stupid_naive_brute_force_crap():\n keystrokes = [l.strip() for l in open(\"keylog.txt\")]\n for i in range(1000, 10000000):\n if i % 10000 == 0:\n print i\n password = str(i)\n if all(is_subsequence(password, keys) for keys in keystrokes):\n print password\n break"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
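A runnable sketch of the loop findChar is written for: recovering a secret one position at a time from a boolean oracle. The real oracle is checkPasswordCharacter, a blind-injection check against url; here it is replaced by a local stub (SECRET, CHAR_LIST, and check_password_character_stub are all invented for the demo), and the 1-based index mirrors SQL SUBSTRING.

SECRET = "hunter2"                                  # invented secret for the demo
CHAR_LIST = "abcdefghijklmnopqrstuvwxyz0123456789"  # invented candidate alphabet

def check_password_character_stub(ch, index):
    # offline replacement for checkPasswordCharacter(ch, username, url, index=index)
    return index <= len(SECRET) and SECRET[index - 1] == ch

def recover_secret():
    recovered = ""
    i = 1  # SQL SUBSTRING-style 1-based position
    while True:
        match = next((ch for ch in CHAR_LIST if check_password_character_stub(ch, i)), "")
        if match == "":      # same empty-string convention findChar uses on a miss
            return recovered
        recovered += match
        i += 1

print(recover_secret())  # hunter2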
List of characters in database names | def makeDatabaseList():
    charList = []
    for ch in lower:
        # ch = str(ch)
        if(characterInDatabaseName(ch, url)):
            charList.append(ch)
    for ch in numbers:
        ch = str(ch)
        if(characterInDatabaseName(ch, url)):
            charList.append(ch)
    for ch in special:
        ch = str(ch)
        if(characterInDatabaseName(ch, url)):
            charList.append(ch)
    for ch in other:
        ch = str(ch)
        if(characterInDatabaseName(ch, url)):
            charList.append(ch)
    if(caseSensitive):
        for ch in upper:
            # ch = str(ch)
            if(characterInDatabaseName(ch, url)):
                charList.append(ch)
    if(wildCards):
        for ch in wildcards:
            # ch = str(ch)
            if(characterInDatabaseName(ch, url)):
                charList.append(ch)
    return charList | [
"def list_database_names(self) -> List[str]:\n \n ret: List[str] = []\n if (resp := self.conn.get(path='_all_dbs')).status == 200:\n ret = resp.get_data()\n return ret",
"def get_DBList(self):\n db_list = []\n ori_db_list = self.DBClient.get_list_database()\n for num in range(1,len(ori_db_list)):\n db_list.append(ori_db_list[num]['name'])\n return db_list",
"def _database_titles(self):\n self.db.cursor.execute(\"SELECT title FROM anime\")\n db_titles = map(lambda x: x[0], self.db.cursor.fetchall())\n return db_titles",
"def list_databases(self):\n lines = output_lines(self.exec_psql('\\\\list'))\n return [line.split('|') for line in lines]",
"def column_names(self):\n try:\n return [column[0].decode().lower() for column in self.cursor.description]\n except:\n return [column[0].lower() for column in self.cursor.description]",
"def list_all_database():\n try:\n conn=psycopg2.connect(\"dbname='postgres'\")\n except:\n print \"Can not connect to database.\"\n cur = conn.cursor()\n try:\n cur.execute(\"\"\"SELECT datname FROM pg_database\"\"\")\n except:\n print \"Can not select databases.\"\n\n rows = cur.fetchall()\n for row in rows:\n db_name = row[0]\n print(db_name)\n\n conn.close()",
"def _get_database_name(database):\n # make sure the return is only one data type\n filenames = []\n if database is not None:\n if not isinstance(database, list):\n database = [database]\n for db in database:\n filenames += glob.glob(db)\n\n return filenames",
"def sql_identifiers() -> SearchStrategy[str]:\n return text(\n min_size=1,\n alphabet=characters(\n # Control characters are largely illegal.\n # Names are case insensitive so don't even generate uppercase.\n blacklist_categories=(\"Cs\", \"Lu\"),\n # Maybe ] should be allowed but I don't know how to quote it. '\n # certainly should be but Python sqlite3 module has lots of\n # problems with it.\n # ; is disallowed because sqlparse can't parse statements using it\n # in an identifier.\n blacklist_characters=(\"\\x00\", \"]\", \"'\", \";\"),\n ),\n )",
"def _get_db_names(self, dbs, strict=True):\n dbs = utils.coerce_to_list(dbs)\n db_names = [utils.get_name(db) for db in dbs]\n if strict:\n good_dbs = self.instance.list_databases()\n good_names = [utils.get_name(good_db) for good_db in good_dbs]\n bad_names = [db_name for db_name in db_names\n if db_name not in good_names]\n if bad_names:\n bad = \", \".join(bad_names)\n raise exc.NoSuchDatabase(\"The following database(s) were not \"\n \"found: %s\" % bad)\n return db_names",
"def get_sql_name(text):\n # Normalize identifier\n text = ''.join(c.lower() if c.isalnum() else ' ' for c in text)\n text = '_'.join(text.split())\n return text",
"def rdb_names(names):\n r = []\n for name in names:\n if name.lower() in ('rv', 'vrad', 'radvel'):\n r.append('vrad')\n elif name.lower() in ('rve', 'svrad', 'error', 'err'):\n r.append('svrad')\n elif name.lower() in ('fwhm'):\n r.append('fwhm')\n elif name.lower() in ('bis', 'bisspan', 'bis_span'):\n r.append('bis_span')\n elif name.lower() in ('contrast'):\n r.append('contrast')\n else:\n r.append(name)\n return r",
"def _cmd_db_list(self):\n print \"Available DBs:\"\n pass #TODO",
"def getSchemataNames():",
"def get_all_collection_names(self):\n select_list = [SQLBinaryExpr(SQLColumnExpr(SQLTableExpr(TABLE_NAME_COLL), COL_NAME_COLL_NAME),\n OP_AS, COL_NAME_COLL_NAME)]\n\n entries = self.select_generic_data(select_list=select_list, table_list=[TABLE_NAME_COLL])\n return [entrie[COL_NAME_COLL_NAME] for entrie in entries]",
"def ConcatDBNames(self,dblist):\r\n log = Log('ConcatDBNames')\r\n outstr = ''\r\n first = True\r\n for db in dblist:\r\n if first:\r\n outstr = db\r\n first = False\r\n else:\r\n outstr = outstr + ',' + db\r\n return outstr",
"def __getDBList__():\n\tall_tables = connection.introspection.table_names()\n\ttables_to_use = []\n\tfor i in all_tables:\n\t\tif \"InscripcionPostgrado\" in i:\n\t\t\ttables_to_use.append(i.split(\"_\",1)[1])\n\treturn tables_to_use",
"def db_collations_choices(self):\n # To avoid pre-mature initialization of db-context.\n from django.db import connection\n\n with connection.cursor() as cursor:\n cursor.execute(\"SELECT collname, collcollate FROM pg_collation\")\n rows = cursor.fetchall()\n return ((name, \"{} ({})\".format(name, collate)) for name, collate in rows)",
"def get_list_database(self):\n ret = yield self.query(\"SHOW DATABASES\")\n raise Return(list(ret.get_points()))",
"def __retrieve_string_specialities(self) -> list:\n\n cur = self.con.cursor()\n cur.execute('SELECT id, name, color_id FROM public.app_string_specialtyact')\n result = cur.fetchall()\n return result"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
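The near-identical probe loops in makeDatabaseList can be collapsed into a single pass over one combined alphabet. The sketch below does that with a stubbed characterInDatabaseName so it runs offline; the character sets and the stand-in database name are assumptions, not values from the source.

lower = "abcdefghijklmnopqrstuvwxyz"   # assumed character sets; the real module
numbers = "0123456789"                 # defines lower/numbers/special/other itself
special = "_-"
other = " "
DB_NAME = "gisq_db"                    # stand-in for the remote database name

def character_in_database_name_stub(ch):
    # offline replacement for characterInDatabaseName(ch, url)
    return ch in DB_NAME

def make_database_list_sketch(case_sensitive=False):
    alphabet = list(lower) + list(numbers) + list(special) + list(other)
    if case_sensitive:
        alphabet += list(lower.upper())
    return [ch for ch in alphabet if character_in_database_name_stub(ch)]

print(make_database_list_sketch())  # every probed character that occurs in the name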
returns list of characters that appear in any username | def userNameCharacters(url, tableName, caseSensitive = False, wildCards = True):
"""
sqlzoo characters
['a', 'c', 'd', 'e', 'h', 'i', 'j', 'k', 'n', 'o', 'p', 'r', 't', 'w', '_', '%']
"""
lst = []
for ch in special:
if(checkUsernameCharacter(ch, url, tableName, notLike = False, notLikeName = "", index = "no index")):
lst.append(ch)
for ch in lower:
if(checkUsernameCharacter(ch, url, tableName, notLike = False, notLikeName = "", index = "no index")):
lst.append(ch)
for ch in numbers:
if(checkUsernameCharacter(ch, url, tableName, notLike = False, notLikeName = "", index = "no index")):
lst.append(ch)
for ch in other:
if(checkUsernameCharacter(ch, url, tableName, notLike = False, notLikeName = "", index = "no index")):
lst.append(ch)
if(caseSensitive):
for ch in upper:
if(checkUsernameCharacter(ch, url, tableName, notLike = False, notLikeName = "", index = "no index")):
lst.append(ch)
if(wildCards):
for ch in wildcards:
lst.append(ch) #it'll match if there's users
return lst | [
"def all_users():\n\treturn [unicode(name[:-4]).lower() for name in os.listdir(os.path.join(WORLD_DIR, 'players'))]",
"def check_username(username):\n if username:\n if not re.match('[a-z]', username[0]):\n return ['username_error_badfirstchar']\n # Technically both these conditions might hold. However, the common\n # case seems to be that somebody starts typing their name beginning\n # with an upper-case letter, and it's probably sufficient to just\n # issue the first error in that case.\n elif not re.match('^[-a-z0-9_]+$', username):\n return ['username_error_badchar']\n return []",
"def valid(text, chars=\"ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789\"):\n li = []\n\n for c in text:\n if c in chars:\n li.append(c);\n\n return \"\".join(li)",
"def get_jyutping(characters):\n result = []\n for ch in characters:\n result.append(search_single(ch))\n return result",
"def getUserNames():\r\n out = []\r\n fl = glob.glob(\"./tweets/*\")\r\n regex = re.compile(\"user=\\\"(.+?)\\\"\")\r\n for f in fl:\r\n fin = codecs.open(f, \"r\", \"utf-8\")\r\n xml = fin.read()\r\n fin.close()\r\n out.extend( regex.findall(xml) )\r\n return set(out)",
"def containing(letter, text):\n return([word for word in text if word.count(letter) >= 1])",
"def _check_char_classes(self, password):\n return [key for key, val in self.CHARACTER_CLASSES.items() if not re.match(val, password)]",
"def wikitext_to_usernames(wikitext):\n return filter(first, ((line_to_username(l), l) for l in wikitext.splitlines()))",
"def check_username_acceptable_characters(cls, value):\n assert bool(re.search('^[a-zA-Z0-9_-]*$', value)), \\\n f'username = {value}, must contain only letters digits, \\\ndashes or underscores'\n return value",
"def find_single_letters(question):\n if re.findall(r\"\\bletter\\b|\\bletters\\b\", question):\n matches = re.findall(r\"\\b[A-Za-z]\\b\", question)\n\n return [m for m in matches]\n\n return []",
"def is_valid_username(username):\n invalid_chars = [\" \", \";\", \"'\", '\"', '\\\\']\n for ch in invalid_chars:\n if ch in username:\n return False\n return True",
"def invalid_character(username: str) -> bool:\n\n invalid_chars = string.punctuation\n\n if any(char in invalid_chars for char in username):\n\n return True\n\n return False",
"def clean_username(username):\n bad_characters = \" !#$%^&*()[]'\\\"\"\n\n for char in bad_characters:\n if char in username:\n username = username.replace(char, '')\n\n return username",
"def get_blocked_usernames_list():\n return []",
"def get_pure_user_words(user_words: List[str], letters: List[str], words_from_dict: List[str]) -> List[str]:\r\n unknown_words = []\r\n for wordd in user_words:\r\n if wordd not in words_from_dict:\r\n unknown_words.append(wordd)\r\n forbidden_letters = [i for i in string.ascii_lowercase]\r\n for i in letters:\r\n try:\r\n forbidden_letters.remove(i)\r\n except:\r\n pass\r\n word_list = []\r\n letstr = \"\"\r\n for i in letters:\r\n letstr += i\r\n for word in unknown_words:\r\n if len(word) >= 4 and len(word) <= 9:\r\n count = 0\r\n for let in word:\r\n if let in forbidden_letters:\r\n count += 1\r\n if word.count(let) > letstr.count(let):\r\n count += 1\r\n if letters[4] not in word:\r\n count += 1\r\n if count == 0:\r\n word_list.append(word)\r\n return word_list",
"def get_usernames(self):\n users = self.get_users()\n return [user.username for user in users]",
"def get_characters_by_letter(self, letter):\n letter_list = []\n for character in self.get_characters():\n if character[0].lower() == letter.lower():\n letter_list.append(character)\n return letter_list",
"def _get_allowed_letters(self) -> list:\n\n dictionary = self._load_dictionary()\n allowed_letters = []\n for word in dictionary:\n for letter in word:\n if letter not in allowed_letters:\n allowed_letters.append(letter)\n\n return sorted(allowed_letters)",
"def get_unique_characters(text):\n return sorted(list(set(text)))"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
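The record above enumerates which characters occur in stored usernames by probing each lowercase (and, when case-sensitive, uppercase) candidate through checkUsernameCharacter and appending wildcards unconditionally. A minimal standalone sketch of that enumeration skeleton follows; checkUsernameCharacter, its url/tableName arguments, and the wildcard set are not defined in this excerpt, so the probe is stubbed out as a hypothetical predicate and the wildcards are assumed to be the SQL ones.

import string

def enumerate_username_characters(is_character_present, case_sensitive=False, wild_cards=False):
    # Build the candidate alphabet the same way the snippet above does:
    # lowercase always, uppercase only when case-sensitive.
    candidates = list(string.ascii_lowercase)
    if case_sensitive:
        candidates += list(string.ascii_uppercase)
    found = [ch for ch in candidates if is_character_present(ch)]
    if wild_cards:
        # Assumed SQL wildcards; they match whenever any users exist,
        # mirroring the unconditional append in the snippet above.
        found += ["%", "_"]
    return found

# Hypothetical predicate that only "finds" vowels.
print(enumerate_username_characters(lambda ch: ch in "aeiou"))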
generates all subsequences of ch with length k | def generateSubSequences(k, ch):
seq = ["".join(c) for c in itertools.product(ch, repeat = k)]
# discussion about the best way to do this:
# https://stackoverflow.com/questions/7074051/what-is-the-best-way-to-generate-all-possible-three-letter-strings
return seq | [
"def gc_content_subseq(dna_seq, k=100): #testar\n # complete\n #DONE\n list=[] \n for s in range(0,len(dna_seq)-(k-1),k):\n list.append(gc_content(dna_seq[s:s+k])) \n return list",
"def get_all_substrings(s, k):\n return [s[ptr:ptr + k] for ptr in range(len(s) - k + 1)]",
"def generate_kmers(k):\n len_k = int(k)\n if len_k < 0:\n raise ValueError(\"Must be a positive integer\")\n combos = list(itertools.product('ACTG', repeat=len_k))\n seqs = []\n for seq in combos:\n seqs.append(''.join(seq))\n return seqs",
"def all_k_grams(k, alph=nucleobases):\n return list(product(alph, repeat=k))",
"def subset_permutations(n, k):\n if k <= 0 or k > n:\n return []\n return list(generate_subset_permutations(list(range(n)), k))",
"def kmer_list(s, k):\n kmerslist = []\n strlength = len(s)\n tempstr = strlength - k + 1\n for element in range(0, tempstr):\n kmerslist.append(s[element:element + k])\n return kmerslist",
"def KmerComposition(dnastring, k):\n kmers = []\n for n in range(len(dnastring)-k +1):\n kmer = dnastring[n:n+k]\n kmers.append(kmer)\n return kmers",
"def split_kmer(sequence,k):\n seg=len(sequence) // k\n i=0\n kmers=[]\n while i < seg:\n kmer=sequence[i*k:k*(i+1)]\n i+=1\n kmers.append(kmer)\n return kmers",
"def generate_random_subset_permutation(n, k):\n return generate_random_permutation(n)[0:k]",
"def k_gram(words: list, k=5) -> list:\n\n temp = \"$$\".join(words)\n ris = []\n for i in range(len(temp) - k + 1):\n ris.append(temp[i:i + k])\n return ris",
"def increasing_subset_permutations(n, k):\n if k <= 0 or k > n:\n return []\n return list(generate_increasing_subset_permutations(list(range(n)), k))",
"def iter_kmers(alphabet, k):\n\n\talphabets = [alphabet for i in xrange(k)]\n\n\tfor kmer in itertools.product(*alphabets):\n\t\tyield ''.join(kmer)",
"def get_all_possible_words(k):\n\n return list(itertools.product([0, 1], repeat=k))",
"def kmer_set(s, k):\n kmerset = set()\n strlength = len(s)\n tempstr = strlength - k + 1\n for element in range(0, tempstr):\n kmerset.add(s[element:element + k])\n return kmerset",
"def gen_ksub1_subsets(s):\n\n index = 0\n k = len(s)\n\n result = []\n while index < k:\n\n exceptEle = s[index]\n temp = copy.deepcopy(s)\n temp.remove(exceptEle)\n\n result.append(temp)\n\n index += 1\n\n return result",
"def get_kmers(seq, k):\n\n return [seq[i:i+k] for i in range(len(seq)-k+1)]",
"def generate_subset_permutations(array, k):\n if k == 0:\n yield tuple()\n else:\n for i in range(len(array)):\n for perm in generate_subset_permutations(array[0:i] + array[i + 1:len(array)], k - 1):\n yield (array[i],) + perm",
"def kmers(s, k=3):\n for i in range(len(s)-k+1):\n yield s[i:i+k]",
"def getPermutation(self, n: int, k: int) -> str:\n ans = []\n self.generate(ans, [], [i + 1 for i in range(n)])\n return ans[k - 1]"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
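generateSubSequences above delegates entirely to itertools.product, so it actually yields every length-k string over the alphabet ch (repetition allowed), not subsequences in the strict sense. A short runnable illustration of that call, using only the standard library:

import itertools

def generate_sequences(k, alphabet):
    # product(alphabet, repeat=k) yields every k-tuple over the alphabet,
    # including repeats; join each tuple back into a string.
    return ["".join(chars) for chars in itertools.product(alphabet, repeat=k)]

print(generate_sequences(2, "ab"))        # ['aa', 'ab', 'ba', 'bb']
print(len(generate_sequences(3, "abc")))  # 3**3 == 27 strings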
Adds a user's mysql tables back into the OCF database. | def _add_mysql(user, options, dump = None):
# Access the new username with user["username"]
pass | [
"def insert_db():\n populate_tables()",
"def create_tables_and_apply_patches(self):\n\n if self.authorized and not self.db_tables_initiated:\n with self.connection.cursor() as cursor:\n for statement in self.parse_mysql_sql_file():\n cursor.execute(statement)\n\n PyFunceble.LOGGER.info(\n \"Created the missing tables. Applied all patched\"\n )\n\n self.db_tables_initiated = True",
"def drop_tables(self):\n con = self.connect()\n cursor = con.cursor()\n cursor.execute(\"\"\"DROP TABLE IF EXISTS users CASCADE\"\"\")\n cursor.close()\n con.commit()\n con.close()",
"def init_userGroupsDB():\n \n db = connect_db()#Connect to the database\n with open('userGroupSchema.sql', mode='r') as f:#Open the schema file\n db.cursor().executescript(f.read())#Apply the schema to the database\n db.commit()#Actually make the changes\n db.close()#Close the database connection",
"def add_tables(engine):\n db.BASE.metadata.create_all(engine)",
"def initialize():\n db.connect()\n db.create_tables([TimeSheets, Users], safe=True)",
"def init_tables():\n # drop_table_m_candidates()\n # drop_table_m_qiita_users()\n create_table_m_candidates()\n create_table_m_qiita_users()",
"def upgrade():\n if context.config.get_main_option('db_type') != 'mysql+mysqlconnector':\n return\n\n conn = op.get_bind()\n conn.execute('alter table data engine=myisam')",
"def create_db_tables(self):\n tables = (\n \"\"\"CREATE TABLE IF NOT EXISTS users(\n id SERIAL PRIMARY KEY,\n firstname VARCHAR(50) NOT NULL,\n lastname VARCHAR(50) NOT NULL,\n isAdmin BOOLEAN NOT NULL,\n email VARCHAR(50) NOT NULL,\n phonenumber VARCHAR(10) NOT NULL,\n username VARCHAR(50) NOT NULL,\n passwd TEXT NOT NULL)\n \"\"\",\n )\n try:\n con_values = self.connect_questioner_db()\n cur = con_values[0]\n con = con_values[1]\n \"\"\"\n This creates tables one after the other and\n then commits the changes.\n \"\"\"\n for table in tables:\n cur.execute(table)\n print('TABLE CREATED')\n con.commit()\n except (Exception, psycopg2.DatabaseError) as error:\n print(error)",
"def unlockTables(self):\n if self.dbType=='mysql':\n query = \"UNLOCK TABLES\" \n\t self.updateDBAndLog(query)\n\telif self.dbType=='sqlite':\n\t self.db.commit()",
"def create_tables():\n db.create_all()",
"def create_tables():\n db.create_all()",
"def create_schema(self):\n\t\tself.pg_eng.create_schema()\n\t\tself.logger.info(\"Importing mysql schema\")\n\t\tself.pg_eng.build_tab_ddl()\n\t\tself.pg_eng.create_tables()",
"def initdb(self):\n DBASE.init(self.db)\n DBASE.connect()\n\n if not Users.table_exists():\n Users.create_table()\n Artists.create_table()\n Tags.create_table()\n Friends.create_table()\n WeeklyArtistChart.create_table()\n ArtistTags.create_table()\n Artists.create(name='')\n\n DBASE.set_autocommit(False)\n return",
"def create_users_tables(cls):\n cursor = Database.connect_to_db()\n sql_command = \"\"\"CREATE TABLE IF NOT EXISTS \"public\".\"users\" (\n id SERIAL ,\n firstname VARCHAR(255) NOT NULL,\n lastname VARCHAR(255) NOT NULL,\n othername VARCHAR(255) NOT NULL,\n email VARCHAR(255) NOT NULL,\n phonenumber VARCHAR(255) NOT NULL,\n passporturl TEXT NOT NULL,\n roles VARCHAR(255) NOT NULL,\n nationalid VARCHAR(255) NOT NULL,\n county VARCHAR(255) NOT NULL,\n password VARCHAR(255) NOT NULL,\n date_created VARCHAR(80),\n date_modified VARCHAR(80),\n PRIMARY KEY (id)\n )\n \"\"\"\n cursor.execute(sql_command)",
"def init_tables(self):\n\n settings.Base.metadata.tables[\n 'session_master'].drop(bind=settings.engine)\n settings.Base.metadata.tables['uurl'].drop(bind=settings.engine)\n\n settings.Base.metadata.tables[\n 'session_master'].create(bind=settings.engine)\n settings.Base.metadata.tables['uurl'].create(bind=settings.engine)\n\n logging.info(\"Sessionization Tables created\")",
"def populate_table(\n user, created_at, tweet, retweet_count, id_str, my_database=DATABASE):\n\n dbconnect = connect_db(DATABASE)\n\n cursor = dbconnect.cursor()\n cursor.execute(\"USE airflowdb\")\n\n # add content here\n\n try:\n query=\"INSERT INTO tweets (user, created_at, tweet, retweet_count, id_str) VALUES (%s, %s, %s, %s, %s)\"\n \n cursor.execute(query, (user, created_at, tweet, retweet_count, id_str))\n \n dbconnect.commit()\n print(\"commited\")\n\n except mysql.Error as e:\n print(e)\n dbconnect.rollback()\n\n cursor.close()\n dbconnect.close()\n\n return",
"def sync_tables():\n sync_table(ShoppingList)\n sync_table(User)\n sync_table(Category)\n sync_table(Feed)\n sync_table(News)\n sync_table(Photo)\n sync_table(Profile)\n sync_table(Video)\n sync_type(FeedPhoto)\n sync_type(NewsPhoto)",
"def migrations(database: 'Session'):\n rows = []\n for row in database.execute(f'pragma table_info({PacuSession.__tablename__})').fetchall():\n rows.append(row[1])\n\n # If more rows are added here change this to update the db dynamically. Not worrying about this for now since it\n # doesn't happen often, and we'll likely rewrite things before then.\n if \"user_agent_suffix\" not in rows:\n column_type = PacuSession.user_agent_suffix.type.compile(engine.dialect)\n database.execute('ALTER TABLE %s ADD COLUMN user_agent_suffix %s' % (PacuSession.__tablename__, column_type))\n\n for svc in PacuSession.aws_data_field_names:\n if svc not in rows:\n column = getattr(PacuSession, svc)\n column_type = column.type.compile(engine.dialect)\n database.execute(\n 'ALTER TABLE %s ADD COLUMN %s %s DEFAULT \"{}\" NOT NULL'\n % (PacuSession.__tablename__, svc, column_type)\n )"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
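_add_mysql above is only a stub (its body is pass), and nothing in the record specifies how the restore into the OCF database is actually performed. The sketch below shows one plausible shape for it, assuming the mysql command-line client is installed, the dump is available as a SQL string, and each user gets a database named after user["username"]; all three are illustrative assumptions, not the real implementation.

import subprocess

def restore_mysql_dump(username, dump_sql, host="localhost"):
    # Pipe the SQL dump into the mysql CLI for the user's database.
    # Database naming and authentication are assumptions for this sketch;
    # a real implementation would reuse the site's credential handling.
    database = username  # hypothetical: one database per user
    subprocess.run(
        ["mysql", "--host", host, database],
        input=dump_sql.encode("utf-8"),
        check=True,
    )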
Class for handling all minidump symbolizing code on Android. | def __init__(self, dump_finder, build_dir, symbols_dir=None):
# Map from minidump path (string) to minidump_dump output (string).
self._minidump_dump_output = {}
# Map from minidump path (string) to the directory that should be used when
# looking for symbol binaries (string).
self._minidump_symbol_binaries_directories = {}
# We use the OS/arch of the host, not the device.
super(AndroidMinidumpSymbolizer, self).__init__(
platform.system().lower(), platform.machine(), dump_finder, build_dir,
symbols_dir=symbols_dir) | [
"def __init__(self, dump_finder, build_dir, symbols_dir=None):\n # Map from minidump path (string) to minidump_dump output (string).\n self._minidump_dump_output = {}\n # We use the OS/arch of the host, not the device.\n super(AndroidMinidumpSymbolizer, self).__init__(\n platform.system().lower(), platform.machine(), dump_finder, build_dir,\n symbols_dir=symbols_dir)",
"def _binaries_to_symbolize(self):\n raise NotImplementedError()",
"def debugger_disassemble():",
"def obfuscate():\n smali_file_list = u.load_smali_file() # Load smali files\n change_all_field(set(find_all_field(smali_file_list)), smali_file_list, set(find_all_landroid_ljava_over(smali_file_list)))",
"def GetSymbolBinaries(self, minidump):\n libraries = self._ExtractLibraryNamesFromDump(minidump)\n # The main lib.unstripped directory is usually the correct one, but in the\n # case of mixed bitness (e.g. with Monochrome), a different directory might\n # be necessary.\n # We can pretty easily check the architecture the minidump came from.\n # However, trying to use that to only use a single directory is prone to\n # causing symbolization to fail due to using stale symbols, e.g. if the\n # bitness for a build directory changes without `gn clean` being run in\n # between. So, return all valid symbol locations. This could result in a\n # bit of unnecessary symbol dumping, but guaranteeing a symbolized minidump\n # is worth the few extra seconds.\n default_path = os.path.join(self._build_dir, 'lib.unstripped')\n arm_path = os.path.join(\n self._build_dir, 'android_clang_arm', 'lib.unstripped')\n arm64_path = os.path.join(\n self._build_dir, 'android_clang_arm64', 'lib.unstripped')\n binary_paths = [os.path.join(default_path, lib) for lib in libraries]\n binary_paths.extend([os.path.join(arm_path, lib) for lib in libraries])\n binary_paths.extend([os.path.join(arm64_path, lib) for lib in libraries])\n return binary_paths",
"def test_llvm_strip(self):\n self.assertEqual(\n self.ndk.llvm_strip,\n f\"/opt/android/android-ndk/toolchains/llvm/prebuilt/{self.ndk.host_tag}/bin/llvm-strip\",\n )",
"def main():\n\n try:\n # Check the Python version before importing anything heavier than\n # the util functions. This lets us provide a nice message about\n # incompatibility rather than having the interpreter crash if it\n # reaches unsupported syntax from a newer Python version.\n check_python_version()\n\n from pythonforandroid.toolchain import ToolchainCL\n ToolchainCL()\n except BuildInterruptingException as exc:\n handle_build_exception(exc)",
"def test_llvm_objdump(self):\n self.assertEqual(\n self.ndk.llvm_objdump,\n f\"/opt/android/android-ndk/toolchains/llvm/prebuilt/{self.ndk.host_tag}/bin/llvm-objdump\",\n )",
"def backdoor(self):\n if sys.platform in ('darwin','ios'):\n result = self._backdoor_darwin()\n elif os.name is 'nt':\n result = self._backdoor_windows()\n elif 'linux' in sys.platform or 'bsd' in sys.platform:\n result = self._backdoor_linux()\n else:\n result = \"Platform '{}' is not supported for this module\".format(sys.platform)\n return result",
"def _boilerplate_to_python(indent):\n indent_str = \" \" * indent\n boilerplate = indent_str + \"import core.vba_library\\n\"\n boilerplate = indent_str + \"import core.vba_context\\n\"\n boilerplate += indent_str + \"from core.utils import safe_print\\n\"\n boilerplate += indent_str + \"from core.utils import safe_str_convert\\n\"\n boilerplate += indent_str + \"from core.utils import plus\\n\"\n boilerplate += indent_str + \"from core.utils import eq\\n\"\n boilerplate += indent_str + \"from core.utils import neq\\n\"\n boilerplate += indent_str + \"from core.utils import lt\\n\"\n boilerplate += indent_str + \"from core.utils import lte\\n\"\n boilerplate += indent_str + \"from core.utils import gt\\n\"\n boilerplate += indent_str + \"from core.utils import gte\\n\"\n boilerplate += indent_str + \"import core.utils\\n\"\n boilerplate += indent_str + \"from core.python_jit import update_array\\n\"\n boilerplate += indent_str + \"from core.vba_conversion import coerce_to_num\\n\"\n boilerplate += indent_str + \"from core.vba_conversion import coerce_to_int\\n\"\n boilerplate += indent_str + \"from core.vba_conversion import coerce_to_str\\n\"\n boilerplate += indent_str + \"from core.vba_conversion import coerce_to_int_list\\n\\n\"\n boilerplate += indent_str + \"try:\\n\"\n boilerplate += indent_str + \" \" * 4 + \"vm_context\\n\"\n boilerplate += indent_str + \"except (NameError, UnboundLocalError):\\n\"\n boilerplate += indent_str + \" \" * 4 + \"vm_context = context\\n\"\n return boilerplate",
"def debugger_add_sw_breakpoint():",
"def activate_pragmas_per_connection(sender, connection, **kwargs):\n\n if connection.vendor == \"sqlite\":\n if connection.alias == NOTIFICATIONS:\n broken_db = False\n try:\n cursor = connection.cursor()\n quick_check = cursor.execute(\"PRAGMA quick_check\").fetchone()[0]\n broken_db = quick_check != \"ok\"\n except DatabaseError:\n broken_db = True\n if broken_db:\n repair_sqlite_db(connection)\n cursor = connection.cursor()\n\n # Shorten the default WAL autocheckpoint from 1000 pages to 500\n cursor.executescript(CONNECTION_PRAGMAS)\n\n # We don't turn on the following pragmas, because they have negligible\n # performance impact. For reference, here's what we've tested:\n\n # Don't ensure that the OS has fully flushed\n # our data to disk.\n # cursor.execute(\"PRAGMA synchronous=OFF;\")\n\n # Store cross-database JOINs in memory.\n # cursor.execute(\"PRAGMA temp_store=MEMORY;\")",
"def _switch_arch(\n lineno: int, line: Text, context: Context, arch: Text,) -> Context:\n try:\n module = importlib.import_module('.' + arch, 'tsasm.codegen')\n context = context._replace(\n arch=arch,\n codegen=getattr(module, 'get_codegen')(),\n encode_str=getattr(module, 'encode_str'))\n except (ModuleNotFoundError, AttributeError):\n raise Error(lineno, line,\n 'Failed to load a code-generation library for architecture '\n '{!r}'.format(arch))\n return context",
"def __init__(self, static_lib):\n # TODO: Support dump for reading symbols from static libraries\n assert not static_lib and \"static libs not yet supported with dump\"\n self.tool = self.find_tool()\n if self.tool is None:\n print(\"ERROR: Could not find dump\")\n sys.exit(1)\n self.flags = ['-n', '-v']\n object_mode = environ.get('OBJECT_MODE')\n if object_mode == '32':\n self.flags += ['-X32']\n elif object_mode == '64':\n self.flags += ['-X64']\n else:\n self.flags += ['-X32_64']",
"def available_symbologies():\n consts = [d[8:] for d in dir(zint) if d.startswith('BARCODE_')]\n\n return [d for d in consts if d not in IGNORE_ZINT_CONSTS]",
"def check_scram_arch():\n scram_arch = os.environ['SCRAM_ARCH']\n if not scram_arch.startswith('slc6'):\n logger.warning(\n 'Detected SCRAM_ARCH = {0}; '\n 'There might be incompatibility issues later on by not '\n 'using slc6!!'\n .format(scram_arch)\n )",
"def strip_debug_commands(data):\n\n # strip block comments\n strippedCode = re.sub(re.compile('<#.*?#>', re.DOTALL), '', data)\n\n # strip debug statements\n # noinspection PyPep8,PyTypeChecker\n strippedCode = \"\\n\".join([line for line in strippedCode.split('\\n')\n if not line.strip().lower().startswith(\"print-debug \")])\n\n return strippedCode",
"def _GetSymbolBinaryDirectory(self, minidump, libraries):\n if minidump in self._minidump_symbol_binaries_directories:\n return self._minidump_symbol_binaries_directories[minidump]\n\n # Get the processor architecture reported by the minidump.\n arch = None\n matcher = re.compile(_PROCESSOR_ARCH_REGEX)\n for line in self._GetMinidumpDumpOutput(minidump).splitlines():\n match = matcher.match(line)\n if match:\n arch = match.groupdict()['arch'].lower()\n break\n if not arch:\n logging.error('Unable to find processor architecture for minidump %s',\n minidump)\n self._minidump_symbol_binaries_directories[minidump] = None\n return None\n if arch not in _BREAKPAD_ARCH_TO_FILE_REGEX:\n logging.error(\n 'Unsupported processor architecture %s for minidump %s. This is '\n 'likely fixable by adding the correct mapping for the architecture '\n 'in android_minidump_symbolizer._BREAKPAD_ARCH_TO_FILE_REGEX.',\n arch, minidump)\n self._minidump_symbol_binaries_directories[minidump] = None\n return None\n\n # Look for a directory that contains binaries with the correct architecture.\n matcher = re.compile(_BREAKPAD_ARCH_TO_FILE_REGEX[arch])\n symbol_dir = None\n for symbol_subdir in _POSSIBLE_SYMBOL_BINARY_DIRECTORIES:\n possible_symbol_dir = os.path.join(self._build_dir, symbol_subdir)\n if not os.path.exists(possible_symbol_dir):\n continue\n for f in os.listdir(possible_symbol_dir):\n if f not in libraries:\n continue\n binary_path = os.path.join(possible_symbol_dir, f)\n stdout = subprocess.check_output(\n ['file', binary_path], stderr=subprocess.STDOUT)\n if matcher.match(stdout):\n symbol_dir = possible_symbol_dir\n break\n\n if not symbol_dir:\n logging.error(\n 'Unable to find suitable symbol binary directory for architecture %s.'\n 'This is likely fixable by adding the correct directory to '\n 'android_minidump_symbolizer._POSSIBLE_SYMBOL_BINARY_DIRECTORIES.',\n arch)\n self._minidump_symbol_binaries_directories[minidump] = symbol_dir\n return symbol_dir",
"def activate_pragmas_on_start():\n from django.db import connection\n\n if connection.vendor == \"sqlite\":\n cursor = connection.cursor()\n\n # http://www.sqlite.org/wal.html\n # WAL's main advantage allows simultaneous reads\n # and writes (vs. the default exclusive write lock)\n # at the cost of a slight penalty to all reads.\n cursor.execute(START_PRAGMAS)\n connection.close()"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
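The constructor above sets up two dictionaries keyed by minidump path so that minidump_dump output and the chosen symbol directory are each computed at most once per dump. The same memoization pattern in isolation, with a hypothetical expensive_lookup callable standing in for the real minidump_dump invocation:

class CachedLookup(object):
    def __init__(self):
        # Map from minidump path (string) to a previously computed result.
        self._cache = {}

    def get(self, minidump_path, expensive_lookup):
        if minidump_path in self._cache:
            return self._cache[minidump_path]
        result = expensive_lookup(minidump_path)
        if result:
            # Only cache non-empty results, as the symbolizer above does.
            self._cache[minidump_path] = result
        return result

lookup = CachedLookup()
print(lookup.get("/tmp/dump1", lambda path: "output for %s" % path))
print(lookup.get("/tmp/dump1", lambda path: "not evaluated; cached value wins"))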
Returns a list of paths to binaries where symbols may be located. | def GetSymbolBinaries(self, minidump):
libraries = self._ExtractLibraryNamesFromDump(minidump)
symbol_binary_dir = self._GetSymbolBinaryDirectory(minidump, libraries)
if not symbol_binary_dir:
return []
return [os.path.join(symbol_binary_dir, lib) for lib in libraries] | [
"def find_binaries():\n\n builddir = Path(__file__).parent.parent / \"builddir\"\n\n bins = []\n\n for folder in [\"examples\", \"tests\", \"tools\"]:\n for path in sorted((builddir / folder).rglob(\"*\")):\n if path.stem.startswith(\"xnvme_single\"):\n continue\n if path.stem.startswith(\"xnvme_dev\"):\n continue\n if path.stem.startswith(\"xnvme_enum\"):\n continue\n if path.is_file() and path.stat().st_mode & os.X_OK:\n bins.append(path.name)\n\n return bins",
"def GetSymbolBinaries(self, minidump):\n libraries = self._ExtractLibraryNamesFromDump(minidump)\n # The main lib.unstripped directory is usually the correct one, but in the\n # case of mixed bitness (e.g. with Monochrome), a different directory might\n # be necessary.\n # We can pretty easily check the architecture the minidump came from.\n # However, trying to use that to only use a single directory is prone to\n # causing symbolization to fail due to using stale symbols, e.g. if the\n # bitness for a build directory changes without `gn clean` being run in\n # between. So, return all valid symbol locations. This could result in a\n # bit of unnecessary symbol dumping, but guaranteeing a symbolized minidump\n # is worth the few extra seconds.\n default_path = os.path.join(self._build_dir, 'lib.unstripped')\n arm_path = os.path.join(\n self._build_dir, 'android_clang_arm', 'lib.unstripped')\n arm64_path = os.path.join(\n self._build_dir, 'android_clang_arm64', 'lib.unstripped')\n binary_paths = [os.path.join(default_path, lib) for lib in libraries]\n binary_paths.extend([os.path.join(arm_path, lib) for lib in libraries])\n binary_paths.extend([os.path.join(arm64_path, lib) for lib in libraries])\n return binary_paths",
"def LookupSymbols(path, arch, base, addresses):\r\n atos = subprocess.Popen(\r\n ['xcrun', 'atos', '-arch', arch, '-l', base, '-o', path ] + addresses,\r\n stdout=subprocess.PIPE, stderr=subprocess.PIPE)\r\n symbols = []\r\n for line in atos.stdout:\r\n symbols.append(kSymbolRE.sub('', line.strip()))\r\n return symbols",
"def executables(self):\n return []",
"def _GetDefaultBinPathExcludes(self):\n if sys.platform == \"win32\":\n import cx_Freeze.util\n systemDir = cx_Freeze.util.GetSystemDir()\n windowsDir = cx_Freeze.util.GetWindowsDir()\n return [windowsDir, systemDir, os.path.join(windowsDir, \"WinSxS\")]\n elif sys.platform == \"darwin\":\n return [\"/lib\", \"/usr/lib\", \"/System/Library/Frameworks\"]\n else:\n return [\"/lib\", \"/lib32\", \"/lib64\", \"/usr/lib\", \"/usr/lib32\",\n \"/usr/lib64\"]",
"def _GetDefaultBinPathExcludes(self):\r\n if sys.platform == \"win32\":\r\n import cx_Freeze.util\r\n systemDir = cx_Freeze.util.GetSystemDir()\r\n windowsDir = cx_Freeze.util.GetWindowsDir()\r\n return [windowsDir, systemDir, os.path.join(windowsDir, \"WinSxS\")]\r\n elif sys.platform == \"darwin\":\r\n return [\"/lib\", \"/usr/lib\", \"/System/Library/Frameworks\"]\r\n else:\r\n return [\"/lib\", \"/lib32\", \"/lib64\", \"/usr/lib\", \"/usr/lib32\",\r\n \"/usr/lib64\"]",
"def get_bin_locations():\n from plugincode.location_provider import get_location\n\n cmd_loc = get_location(EXTRACTCODE_7ZIP_EXE)\n libdir = get_location(EXTRACTCODE_7ZIP_LIBDIR)\n if not (cmd_loc and libdir) or not os.path.isfile(cmd_loc) or not os.path.isdir(libdir):\n raise Exception(\n 'CRITICAL: 7zip executable is not installed. '\n 'Unable to continue: you need to install a valid extractcode-7z '\n 'plugin with a valid executable available.'\n )\n\n return libdir, cmd_loc",
"def which_all(name):\n rv = []\n path = os.getenv('PATH')\n for p in path.split(':'):\n pathname = pathjoin(p, name)\n if is_executable(pathname):\n rv.append(pathname)\n return rv",
"def discover_new_binaries(self):\n\n bins = []\n\n self._log.debug(\"Discovering new binaries.. this might take a while.. take a coffee.\")\n for role, data_key, name_file in zip(self._roles, self._data_keys, self._name_files):\n if not name_file or not data_key:\n continue\n\n if role == Role.SETTER:\n try:\n cmd = f\"grep -r '\" + name_file + \"' \" + self._fw_path + \" | grep Binary | awk '{print $3}'\"\n except:\n fp = open('/mnt/shared/eccolo_il_', 'w')\n fp.write(f'namefile {str(name_file)}\\n')\n fp.write(f'fw_path {str(self._fw_path)}\\n')\n fp.close()\n continue\n\n o, e = run_command(cmd)\n candidate_bins = list(set([x for x in o.decode().split('\\n') if x]))\n for b in candidate_bins:\n if LIB_KEYWORD in b or b in bins:\n continue\n self._log.debug(f\"Adding {os.path.basename(b)}\")\n bins.append(b)\n\n return list(set(bins))",
"def get_scripts():\n paths = ['bin/addartobj',\n 'bin/convgauss',\n 'bin/ds9reg2fits',\n 'bin/fitshead',\n 'bin/mcmcangcorr',\n 'bin/radprof',\n 'bin/sexcat2fits',\n 'bin/simbgim',\n 'bin/stackmasks',\n 'bin/xyonds9']\n #paths.extend(glob.glob('bin/*.py')]\n return paths",
"def dir_bin():\n return abspath('bin')",
"def get_executables():\n global _CACHED_EXECUTABLES # pylint: disable=global-statement\n if _CACHED_EXECUTABLES is not None:\n return _CACHED_EXECUTABLES\n\n if 'PATH' in os.environ:\n paths = os.environ['PATH'].split(':')\n else:\n paths = ['/usr/bin', '/bin']\n\n from stat import S_IXOTH, S_IFREG\n paths_seen = set()\n _CACHED_EXECUTABLES = set()\n for path in paths:\n if path in paths_seen:\n continue\n paths_seen.add(path)\n try:\n content = os.listdir(path)\n except OSError:\n continue\n for item in content:\n abspath = path + '/' + item\n try:\n filestat = os.stat(abspath)\n except OSError:\n continue\n if filestat.st_mode & (S_IXOTH | S_IFREG):\n _CACHED_EXECUTABLES.add(item)\n return _CACHED_EXECUTABLES",
"def get_executables(files):\n exec_files = []\n for file in files:\n if \"executable\" in magic.from_file(file):\n exec_files.append(file)\n return exec_files",
"def get_python_paths(cfg: defs.Config) -> List[pathlib.Path]:\n\n def query_program(prog: str) -> List[pathlib.Path]:\n \"\"\"Query a Python interpreter for its search paths.\"\"\"\n cfg.diag(lambda: f\"Querying {prog} for its library search paths\")\n cmd = [\n prog,\n \"-c\",\n \"import sys; print('\\\\n'.join(path for path in sys.path if path))\",\n ]\n cfg.diag(lambda: f\"- about to execute {cmd!r}\")\n try:\n return [\n pathlib.Path(line)\n for line in subprocess.check_output(\n cmd, encoding=\"UTF-8\", env=cfg.utf8_env\n ).splitlines()\n ]\n except FileNotFoundError:\n cfg.diag(lambda: f\"Apparently there is no {prog} on this system\")\n return []\n except (OSError, subprocess.CalledProcessError) as err:\n raise defs.OSIEnvError(f\"Could not execute {cmd!r}: {err}\") from err\n\n return list(itertools.chain(*(query_program(prog) for prog in (\"python3\", \"python2\"))))",
"def _GetSymbolBinaryDirectory(self, minidump, libraries):\n if minidump in self._minidump_symbol_binaries_directories:\n return self._minidump_symbol_binaries_directories[minidump]\n\n # Get the processor architecture reported by the minidump.\n arch = None\n matcher = re.compile(_PROCESSOR_ARCH_REGEX)\n for line in self._GetMinidumpDumpOutput(minidump).splitlines():\n match = matcher.match(line)\n if match:\n arch = match.groupdict()['arch'].lower()\n break\n if not arch:\n logging.error('Unable to find processor architecture for minidump %s',\n minidump)\n self._minidump_symbol_binaries_directories[minidump] = None\n return None\n if arch not in _BREAKPAD_ARCH_TO_FILE_REGEX:\n logging.error(\n 'Unsupported processor architecture %s for minidump %s. This is '\n 'likely fixable by adding the correct mapping for the architecture '\n 'in android_minidump_symbolizer._BREAKPAD_ARCH_TO_FILE_REGEX.',\n arch, minidump)\n self._minidump_symbol_binaries_directories[minidump] = None\n return None\n\n # Look for a directory that contains binaries with the correct architecture.\n matcher = re.compile(_BREAKPAD_ARCH_TO_FILE_REGEX[arch])\n symbol_dir = None\n for symbol_subdir in _POSSIBLE_SYMBOL_BINARY_DIRECTORIES:\n possible_symbol_dir = os.path.join(self._build_dir, symbol_subdir)\n if not os.path.exists(possible_symbol_dir):\n continue\n for f in os.listdir(possible_symbol_dir):\n if f not in libraries:\n continue\n binary_path = os.path.join(possible_symbol_dir, f)\n stdout = subprocess.check_output(\n ['file', binary_path], stderr=subprocess.STDOUT)\n if matcher.match(stdout):\n symbol_dir = possible_symbol_dir\n break\n\n if not symbol_dir:\n logging.error(\n 'Unable to find suitable symbol binary directory for architecture %s.'\n 'This is likely fixable by adding the correct directory to '\n 'android_minidump_symbolizer._POSSIBLE_SYMBOL_BINARY_DIRECTORIES.',\n arch)\n self._minidump_symbol_binaries_directories[minidump] = symbol_dir\n return symbol_dir",
"def get_symbols(obj_path):\n cmd = ['nm', obj_path]\n res = subprocess.run(cmd, stdout=subprocess.PIPE,\n stderr=subprocess.PIPE, check=True)\n\n return res.stdout.decode()",
"def locate_scripts():\n scripts = []\n bin_dir = os.path.join(os.getcwd(), 'bin')\n if not os.path.isdir(bin_dir):\n return scripts\n for item in os.listdir(bin_dir):\n full_path = os.path.join(bin_dir, item)\n if os.path.isfile(full_path):\n with open(full_path) as f:\n first_line = next(f)\n if first_line.startswith('#!'):\n scripts.append(full_path)\n return scripts",
"def getpaths(self,libname):\r\n if os.path.isabs(libname):\r\n yield libname\r\n else:\r\n # FIXME / TODO return '.' and os.path.dirname(__file__)\r\n for path in self.getplatformpaths(libname):\r\n yield path\r\n\r\n path = ctypes.util.find_library(libname)\r\n if path: yield path",
"def _binaries_to_symbolize(self):\n raise NotImplementedError()"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
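Once the right directory is known, GetSymbolBinaries above reduces to one os.path.join per library. A tiny standalone version of that final step, with made-up directory and library names:

import os.path

def build_symbol_binary_paths(symbol_binary_dir, libraries):
    # Mirrors the method above: no directory means no symbol binaries.
    if not symbol_binary_dir:
        return []
    return [os.path.join(symbol_binary_dir, lib) for lib in libraries]

print(build_symbol_binary_paths("out/Release/lib.unstripped",
                                ["libmonochrome.so", "libchrome.so"]))
print(build_symbol_binary_paths(None, ["libmonochrome.so"]))  # []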
Extracts library names that may contain symbols from the minidump. This is a duplicate of the logic in Chromium's //build/android/stacktrace/crashpad_stackwalker.py. | def _ExtractLibraryNamesFromDump(self, minidump):
default_library_name = 'libmonochrome.so'
minidump_dump_output = self._GetMinidumpDumpOutput(minidump)
if not minidump_dump_output:
logging.warning(
'Could not get minidump_dump output, defaulting to library %s',
default_library_name)
return [default_library_name]
library_names = []
module_library_line_re = re.compile(r'[(]code_file[)]\s+= '
r'"(?P<library_name>lib[^. ]+.so)"')
in_module = False
for line in minidump_dump_output.splitlines():
line = line.lstrip().rstrip('\n')
if line == 'MDRawModule':
in_module = True
continue
if line == '':
in_module = False
continue
if in_module:
m = module_library_line_re.match(line)
if m:
library_names.append(m.group('library_name'))
if not library_names:
logging.warning(
'Could not find any library name in the dump, '
'default to: %s', default_library_name)
return [default_library_name]
return library_names | [
"def GetSymbolBinaries(self, minidump):\n libraries = self._ExtractLibraryNamesFromDump(minidump)\n # The main lib.unstripped directory is usually the correct one, but in the\n # case of mixed bitness (e.g. with Monochrome), a different directory might\n # be necessary.\n # We can pretty easily check the architecture the minidump came from.\n # However, trying to use that to only use a single directory is prone to\n # causing symbolization to fail due to using stale symbols, e.g. if the\n # bitness for a build directory changes without `gn clean` being run in\n # between. So, return all valid symbol locations. This could result in a\n # bit of unnecessary symbol dumping, but guaranteeing a symbolized minidump\n # is worth the few extra seconds.\n default_path = os.path.join(self._build_dir, 'lib.unstripped')\n arm_path = os.path.join(\n self._build_dir, 'android_clang_arm', 'lib.unstripped')\n arm64_path = os.path.join(\n self._build_dir, 'android_clang_arm64', 'lib.unstripped')\n binary_paths = [os.path.join(default_path, lib) for lib in libraries]\n binary_paths.extend([os.path.join(arm_path, lib) for lib in libraries])\n binary_paths.extend([os.path.join(arm64_path, lib) for lib in libraries])\n return binary_paths",
"def GetSymbolBinaries(self, minidump):\n libraries = self._ExtractLibraryNamesFromDump(minidump)\n symbol_binary_dir = self._GetSymbolBinaryDirectory(minidump, libraries)\n if not symbol_binary_dir:\n return []\n\n return [os.path.join(symbol_binary_dir, lib) for lib in libraries]",
"def get_platform_und_symbols():\n ret = None\n if osname_is_freebsd():\n ret = sorted([\"environ\", \"__progname\"])\n if is_verbose():\n print(\"Checking for required UND symbols... \" + str(ret))\n return ret",
"def extract_symbols(lib_file, static_lib=None):\n if static_lib is None:\n static_lib = is_static_library(lib_file)\n if sys.platform.startswith('aix'):\n extractor = AIXDumpExtractor(static_lib=static_lib)\n elif ReadElfExtractor.find_tool() and not static_lib:\n extractor = ReadElfExtractor(static_lib=static_lib)\n else:\n extractor = NMExtractor(static_lib=static_lib)\n return extractor.extract(lib_file)",
"def get_uuids_from_symbol_paths(symbol_path_lines_in_leaks):\n symbol_uuids = []\n\n for path in symbol_path_lines_in_leaks:\n # Parse this kind of line\n # 0x2421a8000 - 0x24221dffb com.apple.SocialLayer arm64e \\\n # <1e69ad92900f3f188e4860ac71bcf18f> /System/Library/PrivateFrameworks/SocialLayer.framework/SocialLayer\n tokens = path.split()\n\n # Sixth word is UUID\n uuid = tokens[5]\n binary_name = tokens[6]\n\n # Are we interested in downloading Dsym for this binary\n should_download_dsym = False\n for f in FILTERED_DSYMS:\n if binary_name.find(f) != -1:\n should_download_dsym = True\n break\n\n if not should_download_dsym:\n continue\n\n # UUID should have < and >\n if uuid.find(\"<\") == -1 or uuid.find(\">\") == -1:\n print(f\"Format of leaks has changed. Can't find UUID in line: {path}\")\n sys.exit(-1)\n\n # Remove < and > to get uuid\n uuid = uuid.replace(\"<\", \"\")\n uuid = uuid.replace(\">\", \"\")\n\n symbol_uuids.append(uuid)\n\n return symbol_uuids",
"def _GetSymbolBinaryDirectory(self, minidump, libraries):\n if minidump in self._minidump_symbol_binaries_directories:\n return self._minidump_symbol_binaries_directories[minidump]\n\n # Get the processor architecture reported by the minidump.\n arch = None\n matcher = re.compile(_PROCESSOR_ARCH_REGEX)\n for line in self._GetMinidumpDumpOutput(minidump).splitlines():\n match = matcher.match(line)\n if match:\n arch = match.groupdict()['arch'].lower()\n break\n if not arch:\n logging.error('Unable to find processor architecture for minidump %s',\n minidump)\n self._minidump_symbol_binaries_directories[minidump] = None\n return None\n if arch not in _BREAKPAD_ARCH_TO_FILE_REGEX:\n logging.error(\n 'Unsupported processor architecture %s for minidump %s. This is '\n 'likely fixable by adding the correct mapping for the architecture '\n 'in android_minidump_symbolizer._BREAKPAD_ARCH_TO_FILE_REGEX.',\n arch, minidump)\n self._minidump_symbol_binaries_directories[minidump] = None\n return None\n\n # Look for a directory that contains binaries with the correct architecture.\n matcher = re.compile(_BREAKPAD_ARCH_TO_FILE_REGEX[arch])\n symbol_dir = None\n for symbol_subdir in _POSSIBLE_SYMBOL_BINARY_DIRECTORIES:\n possible_symbol_dir = os.path.join(self._build_dir, symbol_subdir)\n if not os.path.exists(possible_symbol_dir):\n continue\n for f in os.listdir(possible_symbol_dir):\n if f not in libraries:\n continue\n binary_path = os.path.join(possible_symbol_dir, f)\n stdout = subprocess.check_output(\n ['file', binary_path], stderr=subprocess.STDOUT)\n if matcher.match(stdout):\n symbol_dir = possible_symbol_dir\n break\n\n if not symbol_dir:\n logging.error(\n 'Unable to find suitable symbol binary directory for architecture %s.'\n 'This is likely fixable by adding the correct directory to '\n 'android_minidump_symbolizer._POSSIBLE_SYMBOL_BINARY_DIRECTORIES.',\n arch)\n self._minidump_symbol_binaries_directories[minidump] = symbol_dir\n return symbol_dir",
"def extract_mangling(sym):\n\n # Walks over ocurrences of sym in the file\n for i in (mangled for mangled in extract_mangling.exports if mangled.find(sym) >= 0):\n match = MANGLING_RE.search(i)\n if match is not None:\n sigString = match.group('signature')\n colonPos = sigString.rfind('::')\n pos = sigString.find(sym)\n symLen = pos + len(sym)\n if colonPos != -1 and colonPos != pos-2: # Skips inexact substring matches like StartItem when search for Item\n continue\n if pos > 0 and sigString[symLen] == '(': # Given a string 'funcName()...' - checks for '(' after name\n yield DecoratedSymbol(match.group('decorated'), sigString)",
"def extractFunctionNamesFromELF(pathOrElfObject):\r\n funcs = extractFunctionsFromELF(pathOrElfObject)\r\n names = {}\r\n for func in funcs.values():\r\n if func.name not in names:\r\n names[func.name] = []\r\n \r\n names[func.name].append(func.addr)\r\n \r\n return names",
"def __init__(self, dump_finder, build_dir, symbols_dir=None):\n # Map from minidump path (string) to minidump_dump output (string).\n self._minidump_dump_output = {}\n # Map from minidump path (string) to the directory that should be used when\n # looking for symbol binaries (string).\n self._minidump_symbol_binaries_directories = {}\n # We use the OS/arch of the host, not the device.\n super(AndroidMinidumpSymbolizer, self).__init__(\n platform.system().lower(), platform.machine(), dump_finder, build_dir,\n symbols_dir=symbols_dir)",
"def LookupSymbols(path, arch, base, addresses):\r\n atos = subprocess.Popen(\r\n ['xcrun', 'atos', '-arch', arch, '-l', base, '-o', path ] + addresses,\r\n stdout=subprocess.PIPE, stderr=subprocess.PIPE)\r\n symbols = []\r\n for line in atos.stdout:\r\n symbols.append(kSymbolRE.sub('', line.strip()))\r\n return symbols",
"def __init__(self, dump_finder, build_dir, symbols_dir=None):\n # Map from minidump path (string) to minidump_dump output (string).\n self._minidump_dump_output = {}\n # We use the OS/arch of the host, not the device.\n super(AndroidMinidumpSymbolizer, self).__init__(\n platform.system().lower(), platform.machine(), dump_finder, build_dir,\n symbols_dir=symbols_dir)",
"def _ExtractLibraryLoadAddressesFromLogcat(logs):\n browser_libs = LibraryLoadMap()\n renderer_libs = LibraryLoadMap()\n for m in re_library_address.finditer(logs):\n process_type, lib_name, lib_address = m.groups()\n lib_address = int(lib_address, 16)\n if process_type == 'BROWSER':\n browser_libs[lib_name] = lib_address\n elif process_type == 'RENDERER':\n renderer_libs[lib_name] = lib_address\n else:\n assert False, 'Invalid process type'\n\n return browser_libs, renderer_libs",
"def get_libraries_names():\n rpm_packages_path = path.join(PMDK_PATH, 'rpm', SYSTEM_ARCHITECTURE)\n libraries_names = [elem.split('-')[0] for elem in listdir(rpm_packages_path)\n if PMDK_VERSION in elem]\n return set(libraries_names)",
"def _LoadGlobalSymbolsFromDump(dump_obj):\n symbols = set()\n for key in (\"elf_functions\", \"elf_objects\"):\n symbols.update(\n symbol.get(\"name\", \"\") for symbol in dump_obj.get(key, []) if\n symbol.get(\"binding\", \"global\") == \"global\")\n return symbols",
"def _func_addrs_from_prologues(self):\n\n # Pre-compile all regexes\n regexes = []\n for ins_regex in self.project.arch.function_prologs:\n r = re.compile(ins_regex)\n regexes.append(r)\n # EDG says: I challenge anyone bothering to read this to come up with a better\n # way to handle CPU modes that affect instruction decoding.\n # Since the only one we care about is ARM/Thumb right now\n # we have this gross hack. Sorry about that.\n thumb_regexes = []\n if hasattr(self.project.arch, \"thumb_prologs\"):\n for ins_regex in self.project.arch.thumb_prologs:\n # Thumb prologues are found at even addrs, but their actual addr is odd!\n # Isn't that great?\n r = re.compile(ins_regex)\n thumb_regexes.append(r)\n\n # Construct the binary blob first\n unassured_functions = []\n\n is_arm = is_arm_arch(self.project.arch)\n\n for start_, bytes_ in self._binary.memory.backers():\n for regex in regexes:\n # Match them!\n for mo in regex.finditer(bytes_):\n position = mo.start() + start_\n if (not is_arm and position % self.project.arch.instruction_alignment == 0) or (\n is_arm and position % 4 == 0\n ):\n mapped_position = AT.from_rva(position, self._binary).to_mva()\n if self._addr_in_exec_memory_regions(mapped_position):\n unassured_functions.append(mapped_position)\n # HACK part 2: Yes, i really have to do this\n for regex in thumb_regexes:\n # Match them!\n for mo in regex.finditer(bytes_):\n position = mo.start() + start_\n if position % self.project.arch.instruction_alignment == 0:\n mapped_position = AT.from_rva(position, self._binary).to_mva()\n if self._addr_in_exec_memory_regions(mapped_position):\n unassured_functions.append(mapped_position + 1)\n\n l.info(\"Found %d functions with prologue scanning.\", len(unassured_functions))\n return unassured_functions",
"def getAuxiliarySymbols(self) -> List[ghidra.app.util.bin.format.pe.debug.DebugCOFFSymbolAux]:\n ...",
"def get_memory_tool_labels(stacktrace):\n # Remove stack frames and paths to source code files. This helps to avoid\n # confusion when function names or source paths contain a memory tool token.\n data = ''\n for line in stacktrace.split('\\n'):\n if STACKFRAME_LINE_REGEX.match(line):\n continue\n data += line + '\\n'\n\n labels = [t['label'] for t in MEMORY_TOOLS_LABELS if t['token'] in data]\n return labels",
"def get_functions_from_so_files(pmdk_path):\n functions_from_so_files = []\n path_to_so_files = path.join(pmdk_path, 'src', 'nondebug')\n for elem in listdir(path_to_so_files):\n # Exclude libvmmalloc.so, because does not include names of functions\n # of PMDK library.\n if elem.endswith('.so') and elem != 'libvmmalloc.so':\n process = check_output(\n 'nm ' + elem + ' | grep \" T \"', cwd=path_to_so_files,\n shell=True)\n out = process.decode('UTF-8')\n for line in out.split(linesep):\n if line:\n name = line.split(' ')[2].strip()\n # Exclude'_pobj_debug_notice', because it is a name of\n # a function which is not described in the man page\n # by design.\n if name != '_pobj_debug_notice':\n functions_from_so_files.append(name)\n return functions_from_so_files",
"def get_symbols(doc, lib):\n\n basename = lib.replace(\".dll\", \"\").lower()\n filename = os.path.join(get_hopper_script_dir(), basename + \".txt\")\n if not os.path.exists(filename):\n doc.log(\"Symbol file not found: %s\" % filename)\n return None\n\n symbols = {}\n with open(filename, \"r\") as fp:\n for i, line in enumerate(fp, 1):\n match = symbol_line.match(line)\n if not match:\n doc.log(\"Skipping line %d: Malformed\" % i)\n continue\n\n ordinal, name = match.group(1), match.group(2)\n if ordinal and name:\n symbols[ordinal] = name\n\n return symbols"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
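The parser above walks minidump_dump output line by line, tracking whether it is inside an MDRawModule block and pulling library names out of code_file lines with a regular expression. The same regex and state machine can be exercised on a small fabricated excerpt (the sample text below is invented for illustration and only loosely imitates real minidump_dump output):

import re

module_library_line_re = re.compile(r'[(]code_file[)]\s+= '
                                    r'"(?P<library_name>lib[^. ]+.so)"')

sample_output = """MDRawModule
  (base_of_image)  = 0x7000000000
  (code_file)      = "libmonochrome.so"

MDRawModule
  (code_file)      = "libEGL.so"
"""

library_names = []
in_module = False
for line in sample_output.splitlines():
    line = line.lstrip().rstrip('\n')
    if line == 'MDRawModule':
        in_module = True
        continue
    if line == '':
        in_module = False
        continue
    if in_module:
        m = module_library_line_re.match(line)
        if m:
            library_names.append(m.group('library_name'))

print(library_names)  # ['libmonochrome.so', 'libEGL.so']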
Gets the directory that should contain symbol binaries for |minidump|. | def _GetSymbolBinaryDirectory(self, minidump, libraries):
if minidump in self._minidump_symbol_binaries_directories:
return self._minidump_symbol_binaries_directories[minidump]
# Get the processor architecture reported by the minidump.
arch = None
matcher = re.compile(_PROCESSOR_ARCH_REGEX)
for line in self._GetMinidumpDumpOutput(minidump).splitlines():
match = matcher.match(line)
if match:
arch = match.groupdict()['arch'].lower()
break
if not arch:
logging.error('Unable to find processor architecture for minidump %s',
minidump)
self._minidump_symbol_binaries_directories[minidump] = None
return None
if arch not in _BREAKPAD_ARCH_TO_FILE_REGEX:
logging.error(
'Unsupported processor architecture %s for minidump %s. This is '
'likely fixable by adding the correct mapping for the architecture '
'in android_minidump_symbolizer._BREAKPAD_ARCH_TO_FILE_REGEX.',
arch, minidump)
self._minidump_symbol_binaries_directories[minidump] = None
return None
# Look for a directory that contains binaries with the correct architecture.
matcher = re.compile(_BREAKPAD_ARCH_TO_FILE_REGEX[arch])
symbol_dir = None
for symbol_subdir in _POSSIBLE_SYMBOL_BINARY_DIRECTORIES:
possible_symbol_dir = os.path.join(self._build_dir, symbol_subdir)
if not os.path.exists(possible_symbol_dir):
continue
for f in os.listdir(possible_symbol_dir):
if f not in libraries:
continue
binary_path = os.path.join(possible_symbol_dir, f)
stdout = subprocess.check_output(
['file', binary_path], stderr=subprocess.STDOUT)
if matcher.match(stdout):
symbol_dir = possible_symbol_dir
break
if not symbol_dir:
logging.error(
          'Unable to find suitable symbol binary directory for architecture %s. '
'This is likely fixable by adding the correct directory to '
'android_minidump_symbolizer._POSSIBLE_SYMBOL_BINARY_DIRECTORIES.',
arch)
self._minidump_symbol_binaries_directories[minidump] = symbol_dir
return symbol_dir | [
"def GetSymbolBinaries(self, minidump):\n libraries = self._ExtractLibraryNamesFromDump(minidump)\n # The main lib.unstripped directory is usually the correct one, but in the\n # case of mixed bitness (e.g. with Monochrome), a different directory might\n # be necessary.\n # We can pretty easily check the architecture the minidump came from.\n # However, trying to use that to only use a single directory is prone to\n # causing symbolization to fail due to using stale symbols, e.g. if the\n # bitness for a build directory changes without `gn clean` being run in\n # between. So, return all valid symbol locations. This could result in a\n # bit of unnecessary symbol dumping, but guaranteeing a symbolized minidump\n # is worth the few extra seconds.\n default_path = os.path.join(self._build_dir, 'lib.unstripped')\n arm_path = os.path.join(\n self._build_dir, 'android_clang_arm', 'lib.unstripped')\n arm64_path = os.path.join(\n self._build_dir, 'android_clang_arm64', 'lib.unstripped')\n binary_paths = [os.path.join(default_path, lib) for lib in libraries]\n binary_paths.extend([os.path.join(arm_path, lib) for lib in libraries])\n binary_paths.extend([os.path.join(arm64_path, lib) for lib in libraries])\n return binary_paths",
"def get_bin_dir():\n return os.path.abspath(os.path.join(get_root_dir(), 'bin/'))",
"def GetSymbolBinaries(self, minidump):\n libraries = self._ExtractLibraryNamesFromDump(minidump)\n symbol_binary_dir = self._GetSymbolBinaryDirectory(minidump, libraries)\n if not symbol_binary_dir:\n return []\n\n return [os.path.join(symbol_binary_dir, lib) for lib in libraries]",
"def dir_bin():\n return abspath('bin')",
"def _get_debug_file_directory():\n result = gdb.execute(\"show debug-file-directory\", to_string=True, from_tty=False)\n expr = r'The directory where separate debug symbols are searched for is \"(.*)\".\\n'\n\n match = re.search(expr, result)\n\n if match:\n return match.group(1)\n return \"\"",
"def glance_bin_root():\n return str(values.get(\"glance_code_root\")) + \"/bin/\"",
"def find_dolphin_dir():\n\n \n if platform == \"darwin\": # macOS\n candidates = ['~/Library/Application Support/dolphin']\n elif platform == \"linux\" or platform == \"linux2\": # Linux\n candidates = ['~/.local/share/dolphin-emu']\n\n for candidate in candidates:\n path = os.path.expanduser(candidate)\n if os.path.isdir(path):\n return path\n return None",
"def binpath (self):\n return self._basepath + '.bin'",
"def pathToBaseNanoporeDir():\n import marginAlign\n i = absSymPath(__file__)\n return os.path.split(os.path.split(os.path.split(i)[0])[0])[0]",
"def BinaryPath(name):\n return os.path.join(OLDISIM_DIR, BINARY_BASE, name)",
"def find_dolphin_dir():\n candidates = ['~/.dolphin-emu', '~/.local/share/dolphin-emu']\n for candidate in candidates:\n path = os.path.expanduser(candidate)\n if os.path.isdir(path):\n return path\n return None",
"def get_local_bin_path(appname):\n check_appname(appname)\n return os.path.join(SAMPLES_BIN_PATH, APPS[appname].bin_path)",
"def getDebugDirectory(self) -> ghidra.app.util.bin.format.pe.debug.DebugDirectory:\n ...",
"def archbin(self):\n return join_path(\"platforms\", self.foam_arch, \"bin\")",
"def get_crash_dumps_path(self):\n\t\treturn call_sdk_function('PrlApi_GetCrashDumpsPath')",
"def get_exec_path():\n if hasattr(sys, \"frozen\"): # compiled by py2exe\n return os.path.dirname(sys.executable)\n else:\n return os.path.dirname(sys.path[0]) # should be path to /fpdb",
"def find_tool():\n return shutil.which('dump')",
"def find_bin(root, bin_name, debug=False):\n if pathlib.Path(bin_name).exists():\n return pathlib.Path(bin_name).resolve()\n build_dir = root / \"build\"\n for sub_dir in [\"debug\" if debug else \"release\", \"\"]:\n bin = build_dir / sub_dir / bin_name\n if bin.is_file():\n return bin.resolve()\n return None",
"def get_condor_bin_dir(config):\n condor_root = config['condor-root']\n if condor_root:\n return osp.join(condor_root, 'bin')\n else:\n return ''"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
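The directory search above decides whether a candidate .so matches the minidump's architecture by running the file command on it and matching the output against a per-architecture regex. A reduced sketch of just that check, assuming a POSIX host where file is available; the regexes below are hypothetical stand-ins in the spirit of _BREAKPAD_ARCH_TO_FILE_REGEX, whose real values do not appear in this excerpt:

import re
import subprocess

# Hypothetical stand-ins for _BREAKPAD_ARCH_TO_FILE_REGEX entries.
ARCH_TO_FILE_REGEX = {
    'arm': re.compile(r'.*32-bit.*ARM.*'),
    'arm64': re.compile(r'.*64-bit.*ARM aarch64.*'),
}

def binary_matches_arch(binary_path, arch):
    # `file` prints a one-line description such as
    # "ELF 64-bit LSB shared object, ARM aarch64, ...".
    if arch not in ARCH_TO_FILE_REGEX:
        return False
    output = subprocess.check_output(['file', binary_path],
                                     stderr=subprocess.STDOUT).decode('utf-8')
    return bool(ARCH_TO_FILE_REGEX[arch].match(output))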
Runs minidump_dump on the given minidump. Caches the result for reuse. | def _GetMinidumpDumpOutput(self, minidump):
if minidump in self._minidump_dump_output:
logging.debug('Returning cached minidump_dump output for %s', minidump)
return self._minidump_dump_output[minidump]
dumper_path = local_first_binary_manager.GetInstance().FetchPath(
'minidump_dump')
if not os.access(dumper_path, os.X_OK):
logging.warning('Cannot run minidump_dump because %s is not found.',
dumper_path)
return None
# Using subprocess.check_output with stdout/stderr mixed can result in
# errors due to log messages showing up in the minidump_dump output. So,
# use Popen and combine into a single string afterwards.
p = subprocess.Popen(
[dumper_path, minidump], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = p.communicate()
stdout = stdout + '\n' + stderr
if p.returncode != 0:
# Dumper errors often do not affect stack walkability, just a warning.
# It's possible for the same stack to be symbolized multiple times, so
# add a timestamp suffix to prevent artifact collisions.
now = datetime.datetime.now()
suffix = now.strftime('%Y-%m-%d-%H-%M-%S')
artifact_name = 'dumper_errors/%s-%s' % (
os.path.basename(minidump), suffix)
logging.warning(
'Reading minidump failed, but likely not actually an issue. Saving '
'output to artifact %s', artifact_name)
artifact_logger.CreateArtifact(artifact_name, stdout)
if stdout:
self._minidump_dump_output[minidump] = stdout
return stdout | [
"def _GetMinidumpDumpOutput(self, minidump):\n if minidump in self._minidump_dump_output:\n logging.debug('Returning cached minidump_dump output for %s', minidump)\n return self._minidump_dump_output[minidump]\n\n dumper_path = os.path.join(self._build_dir, 'minidump_dump')\n if not os.access(dumper_path, os.X_OK):\n logging.warning('Cannot run minidump_dump because %s is not found.',\n dumper_path)\n return None\n\n stdout = None\n try:\n stdout = subprocess.check_output(\n [dumper_path, minidump], stderr=subprocess.STDOUT)\n except subprocess.CalledProcessError as e:\n stdout = e.output\n # Dumper errors often do not affect stack walkability, just a warning.\n # It's possible for the same stack to be symbolized multiple times, so\n # add a timestamp suffix to prevent artifact collisions.\n now = datetime.datetime.now()\n suffix = now.strftime('%Y-%m-%d-%H-%M-%S')\n artifact_name = 'dumper_errors/%s-%s' % (\n os.path.basename(minidump), suffix)\n logging.warning(\n 'Reading minidump failed, but likely not actually an issue. Saving '\n 'output to artifact %s', artifact_name)\n artifact_logger.CreateArtifact(artifact_name, stdout)\n if stdout:\n self._minidump_dump_output[minidump] = stdout\n return stdout",
"def testPullMinidumps(self):\n def GetDumpLocation(_=None):\n return '/sdcard/dumps/'\n\n platform_backend = self._browser_backend.platform_backend\n time_offset = platform_backend.GetDeviceHostClockOffset()\n platform_backend.GetDumpLocation = GetDumpLocation\n remote_path = posixpath.join(GetDumpLocation(), 'Crashpad', 'pending')\n self._browser_backend.device.RunShellCommand(['mkdir', '-p', remote_path])\n # Android's implementation of \"touch\" doesn't support setting time via\n # Unix timestamps, only via dates, which are affected by timezones. So,\n # figure out what the device's timestamp for January 2nd, 1970 is and use\n # that to calculate the expected local timestamp. January 2nd is used\n # instead of January 1st so that we can't get accidentally get a negative\n # timestamp if the host-device clock offset is negative.\n remote_dump_file = posixpath.join(remote_path, 'test_dump')\n self._browser_backend.device.RunShellCommand(\n ['touch', '-d', '1970-01-02T00:00:00', remote_dump_file])\n device_mtime = self._browser_backend.device.RunShellCommand(\n ['stat', '-c', '%Y', remote_dump_file], single_line=True)\n device_mtime = int(device_mtime.strip())\n try:\n self._browser_backend.PullMinidumps()\n finally:\n self._browser_backend.device.RemovePath(GetDumpLocation(), recursive=True)\n\n local_path = os.path.join(\n self._browser_backend._tmp_minidump_dir, 'test_dump')\n self.assertTrue(os.path.exists(local_path))\n self.assertEqual(os.path.getmtime(local_path), device_mtime - time_offset)",
"def test_save_linux_mini_dump(self):\n self.build()\n exe = self.getBuildArtifact(\"a.out\")\n core = self.getBuildArtifact(\"core.dmp\")\n core_sb = self.getBuildArtifact(\"core_sb.dmp\")\n try:\n target = self.dbg.CreateTarget(exe)\n process = target.LaunchSimple(\n None, None, self.get_process_working_directory()\n )\n self.assertState(process.GetState(), lldb.eStateStopped)\n\n # get neccessary data for the verification phase\n process_info = process.GetProcessInfo()\n expected_pid = process_info.GetProcessID() if process_info.IsValid() else -1\n expected_number_of_modules = target.GetNumModules()\n expected_modules = target.modules\n expected_number_of_threads = process.GetNumThreads()\n expected_threads = []\n\n for thread_idx in range(process.GetNumThreads()):\n thread = process.GetThreadAtIndex(thread_idx)\n thread_id = thread.GetThreadID()\n expected_threads.append(thread_id)\n\n # save core and, kill process and verify corefile existence\n self.runCmd(\n \"process save-core --plugin-name=minidump --style=stack \" + core\n )\n self.assertTrue(os.path.isfile(core))\n\n # validate savinig via SBProcess\n error = process.SaveCore(core_sb, \"minidump\", lldb.eSaveCoreStackOnly)\n self.assertTrue(error.Success())\n self.assertTrue(os.path.isfile(core_sb))\n\n error = process.SaveCore(core_sb, \"minidump\", lldb.eSaveCoreFull)\n self.assertTrue(error.Fail())\n error = process.SaveCore(core_sb, \"minidump\", lldb.eSaveCoreDirtyOnly)\n self.assertTrue(error.Fail())\n\n self.assertSuccess(process.Kill())\n\n # To verify, we'll launch with the mini dump\n target = self.dbg.CreateTarget(None)\n process = target.LoadCore(core)\n\n # check if the core is in desired state\n self.assertTrue(process, PROCESS_IS_VALID)\n self.assertTrue(process.GetProcessInfo().IsValid())\n self.assertEqual(process.GetProcessInfo().GetProcessID(), expected_pid)\n self.assertTrue(target.GetTriple().find(\"linux\") != -1)\n self.assertTrue(target.GetNumModules(), expected_number_of_modules)\n self.assertEqual(process.GetNumThreads(), expected_number_of_threads)\n\n for module, expected in zip(target.modules, expected_modules):\n self.assertTrue(module.IsValid())\n module_file_name = module.GetFileSpec().GetFilename()\n expected_file_name = expected.GetFileSpec().GetFilename()\n # skip kernel virtual dynamic shared objects\n if \"vdso\" in expected_file_name:\n continue\n self.assertEqual(module_file_name, expected_file_name)\n self.assertEqual(module.GetUUIDString(), expected.GetUUIDString())\n\n for thread_idx in range(process.GetNumThreads()):\n thread = process.GetThreadAtIndex(thread_idx)\n self.assertTrue(thread.IsValid())\n thread_id = thread.GetThreadID()\n self.assertTrue(thread_id in expected_threads)\n finally:\n # Clean up the mini dump file.\n self.assertTrue(self.dbg.DeleteTarget(target))\n if os.path.isfile(core):\n os.unlink(core)\n if os.path.isfile(core_sb):\n os.unlink(core_sb)",
"def dumbcache_dump(self, cache_dir=r'data\\cache'):\n obj = self\n\n DUMBCACHE = os.path.join(r'..', cache_dir, r'br_store.dmp')\n with open(DUMBCACHE, 'wb') as f:\n pkl.dump(obj, f)",
"def load_dmidump(self):\n\n if not self.file:\n return self.dmidecode_output()\n\n with open(self.file, 'r') as fp:\n return fp.read()",
"def dump_memory(self):\n if not self.pid:\n log.warning(\"No vaild pid specified memory dump aborted\")\n return False\n if not self.is_alive():\n log.warning(\"The process with pid %d not alive , memory dump aborted\", self.pid)\n return False\n bin_path = os.path.join(\"bin\", \"procmem\")\n dump_path = tempfile.mktemp()\n idx = self.dumpmem[self.pid] = self.dumpmem.get(self.pid, 0) + 1\n file_name = os.path.join(\"memory\", \"%s-%s.dmp\" % (self.pid, idx))\n cmd = [bin_path, \"--pid\", self.pid, \"--ouput\", file_name]\n log.info(\"linux process dump memory \")\n try:\n procmem = subprocess.Popen(cmd, stdout=subprocess.PIPE,stderr=subprocess.PIPE) \n except Exception: \n log.error(\"Failed to dump process %s and process_name %s\", self.pid, self.process_name)\n upload_to_host(dump_path, file_name)\n os.unlink(dump_path)\n log.info(\"Memory dump of process with pid %d completed\", self.pid)\n return True",
"def PullDumps(self, host_dir):\n # The device/emulator's clock might be off from the host, so calculate an\n # offset that can be added to the host time to get the corresponding device\n # time.\n # The offset is (device_time - host_time), so a positive value means that\n # the device clock is ahead.\n time_offset = self.GetDeviceHostClockOffset()\n\n stdout, _ = self.RunCmdOnDevice([\n 'ls', '-1',\n cmd_helper.SingleQuote(self.ExpandUser(self.MINIDUMP_DIR))\n ])\n device_dumps = stdout.splitlines()\n for dump_filename in device_dumps:\n host_path = os.path.join(host_dir, dump_filename)\n # Skip any ignored files since they're not useful and could be deleted by\n # the time we try to pull them.\n if _IsIgnoredFileType(dump_filename):\n continue\n if os.path.exists(host_path):\n continue\n device_path = cmd_helper.SingleQuote(\n posixpath.join(self.MINIDUMP_DIR, dump_filename))\n\n # Skip any directories that happen to be in the list.\n if self.IsDir(device_path):\n continue\n\n # Skip any files that have a corresponding .lock file, as that implies the\n # file hasn't been fully written to disk yet.\n device_lock_path = cmd_helper.SingleQuote(\n posixpath.join(self.MINIDUMP_DIR, dump_filename + '.lock'))\n if self.FileExistsOnDevice(device_lock_path):\n logging.debug('Not pulling file %s because a .lock file exists for it',\n device_path)\n continue\n try:\n self.GetFile(device_path, host_path)\n except Exception as e: # pylint: disable=broad-except\n logging.error('Failed to get file %s: %s', device_path, e)\n continue\n # Set the local version's modification time to the device's.\n stdout, _ = self.RunCmdOnDevice(\n ['ls', '--time-style', '+%s', '-l', device_path])\n stdout = stdout.strip()\n # We expect whitespace-separated fields in this order:\n # mode, links, owner, group, size, mtime, filename.\n # Offset by the difference of the device and host clocks.\n device_mtime = int(stdout.split()[5])\n host_mtime = device_mtime - time_offset\n os.utime(host_path, (host_mtime, host_mtime))",
"def test_dump(self):\n self._run_tests(\"dump\")",
"def run_dump(self, expanded, unexpanded) : \n\t\tif len(expanded) < 2 :\n\t\t\treturn self.errormessage(\"Needs at least a destination directory and one object id to dump\")\n\t\tdestination = os.path.normpath(os.path.expanduser(expanded[0])) # in case there's a ~username\n\t\tif not os.path.isdir(destination) :\n\t\t\treturn self.errormessage(\"%s is not a directory\" % destination)\n\t\tstatus = 0\n\t\tfor arg in expanded[1:] :\n\t\t\tobject = self.toObject(self.__context, arg)\n\t\t\tif object is None :\n\t\t\t\tstatus = status + self.errormessage(\"Object %s doesn't exist\" % arg)\n\t\t\telif not self.HasPerms(object, 'View management screens') :\n\t\t\t\tstatus = status - 1\n\t\t\telif not hasattr(object, \"document_src\") or not callable(object.document_src) :\n\t\t\t\tstatus = status + self.errormessage(\"Doesn't know how to dump object %s\" % arg)\n\t\t\telse :\n\t\t\t\tfname = os.path.join(destination, object.getId())\n\t\t\t\ttry :\n\t\t\t\t\tfout = open(fname, \"wb\")\n\t\t\t\t\tfout.write(object.document_src())\n\t\t\t\t\tfout.close()\n\t\t\t\t\tself.htmlmessage(\"Object %s dumped to server as %s\" % (self.ObjectPath(object), fname))\n\t\t\t\texcept IOError, msg :\n\t\t\t\t\tstatus = status + self.errormessage('Error %s, occured while dumping %s' % (msg, arg))\n\t\treturn status",
"def testPullMinidumpsLockFilesIgnored(self):\n def GetDumpLocation(_=None):\n return '/sdcard/dumps/'\n\n platform_backend = self._browser_backend.platform_backend\n platform_backend.GetDumpLocation = GetDumpLocation\n remote_path = posixpath.join(GetDumpLocation(), 'Crashpad', 'pending')\n self._browser_backend.device.RunShellCommand(['mkdir', '-p', remote_path])\n remote_dump_file = posixpath.join(remote_path, 'test_dump')\n remote_lock_file = posixpath.join(remote_path, 'test_file.lock')\n self._browser_backend.device.RunShellCommand(\n ['touch', remote_dump_file])\n self._browser_backend.device.RunShellCommand(\n ['touch', remote_lock_file])\n try:\n self._browser_backend.PullMinidumps()\n finally:\n self._browser_backend.device.RemovePath(GetDumpLocation(), recursive=True)\n\n local_path = os.path.join(\n self._browser_backend._tmp_minidump_dir, 'test_dump')\n self.assertTrue(os.path.exists(local_path))\n local_path = os.path.join(\n self._browser_backend._tmp_minidump_dir, 'test_file.lock')\n self.assertFalse(os.path.exists(local_path))",
"def run_massif(source: str, dest: str):\n print(f\"-> Running massif on {source}...\", end='')\n\n subprocess.run(\n [\"valgrind\", \"--tool=massif\", \"--massif-out-file=\" + dest, \"--time-unit=B\", \"--stacks=yes\", source],\n stdout=subprocess.DEVNULL,\n stderr=subprocess.DEVNULL\n )\n\n print(\" Done\")",
"def split_debug(src, objcopy=None, objdump=None):\n if objcopy is None:\n objcopy = \"objcopy\"\n if objdump is None:\n objdump = \"objdump\"\n if not contains_debug_info(src, objdump=objdump):\n ui.info(\"-- Already stripped\", src)\n return\n src_stat = os.stat(src)\n dirname, basename = os.path.split(src)\n debug_dir = os.path.join(dirname, \".debug\")\n qisys.sh.mkdir(debug_dir)\n dest = os.path.join(src, debug_dir, basename)\n to_run = list()\n to_run.append([objcopy, \"--only-keep-debug\", src, dest])\n to_run.append([objcopy,\n \"--strip-debug\",\n \"--strip-unneeded\",\n \"--add-gnu-debuglink=%s\" % dest,\n src])\n try:\n for cmd in to_run:\n qisys.command.check_output(cmd, stderr=subprocess.STDOUT)\n ui.info(\"-- Debug info extracted for\", src)\n except qisys.command.CommandFailedException as e:\n ui.error(\"Error while Extracting package debug for %s\" % src)\n ui.error(str(e))\n # After the commands have run, utime of the file has changed, causing\n # cmake to re-install the libraries. Which is not cool ...\n # So set back mtime to its previous value:\n os.utime(src, (src_stat.st_atime, src_stat.st_mtime))",
"def test_execute_dump_site_transaction(self):\n\n instruction = Instruction(\"dump(3)\")\n\n with std_out() as (out, err):\n self.transaction_manager.execute(instruction)\n\n output = out.getvalue().strip()\n self.assertEqual(output, \"{'x14': { x14: 140 }, 'x18': { x18: 180 }, 'x10': { x10: 100 }, 'x8': { x8: 80 }, 'x16': { x16: 160 }, 'x2': { x2: 20 }, 'x12': { x12: 120 }, 'x6': { x6: 60 }, 'x20': { x20: 200 }, 'x4': { x4: 40 }}\")",
"def backup_dump(self):\n errors = Queue.Queue()\n threads = []\n for host in self.shards:\n t = threading.Thread(target=host.mongodump, args=(errors,))\n threads.append(t)\n if self.config_server is not None:\n t = threading.Thread(target=self.config_server.mongodump, args=(errors,))\n threads.append(t)\n for thread in threads:\n thread.start()\n for thread in threads:\n thread.join()\n if not errors.empty():\n # We don't really care for all errors, so just through the first one\n raise Exception(errors.get())",
"def _flushDumpBuffer(self):\r\n\r\n dumpFiles = []\r\n prefix = os.path.splitext(core.FW_conf['test_result_name'])[0]\r\n for i in range(len(self._tab._dumpBuffer)):\r\n dumpFiles.append(self._tab._TestAutomationBridge__saveDump(self._tab._dumpBuffer.pop(), '%s_%s' % (prefix, i)))\r\n\r\n if dumpFiles:\r\n zipFilePath = os.path.split(dumpFiles[0])[0]\r\n zipf = zipfile.ZipFile(os.path.join(zipFilePath, '%s.zip' % prefix), 'w')\r\n try:\r\n for file in dumpFiles:\r\n zipf.write(file, os.path.split(file)[1])\r\n os.remove(file)\r\n finally:\r\n zipf.close()",
"def PullDumps(self, host_dir):\n # The device/emulator's clock might be off from the host, so calculate an\n # offset that can be added to the host time to get the corresponding device\n # time.\n # The offset is (device_time - host_time), so a positive value means that\n # the device clock is ahead.\n time_offset = self.GetDeviceHostClockOffset()\n\n stdout, _ = self.RunCmdOnDevice(\n ['ls', '-1', cmd_helper.SingleQuote(self.CROS_MINIDUMP_DIR)])\n device_dumps = stdout.splitlines()\n for dump_filename in device_dumps:\n host_path = os.path.join(host_dir, dump_filename)\n # Skip any ignored files since they're not useful and could be deleted by\n # the time we try to pull them.\n if _IsIgnoredFileType(dump_filename):\n continue\n if os.path.exists(host_path):\n continue\n device_path = cmd_helper.SingleQuote(\n posixpath.join(self.CROS_MINIDUMP_DIR, dump_filename))\n # Skip any directories that happen to be in the list.\n stdout, _ = self.RunCmdOnDevice(['test', '-f', device_path, '&&',\n 'echo', 'true', '||', 'echo', 'false'])\n if 'false' in stdout:\n continue\n # Skip any files that have a corresponding .lock file, as that implies the\n # file hasn't been fully written to disk yet.\n device_lock_path = device_path + '.lock'\n if self.FileExistsOnDevice(device_lock_path):\n logging.debug('Not pulling file %s because a .lock file exists for it',\n device_path)\n continue\n try:\n self.GetFile(device_path, host_path)\n except Exception as e: # pylint: disable=broad-except\n logging.error('Failed to get file %s: %s', device_path, e)\n continue\n # Set the local version's modification time to the device's.\n stdout, _ = self.RunCmdOnDevice(\n ['ls', '--time-style', '+%s', '-l', device_path])\n stdout = stdout.strip()\n # We expect whitespace-separated fields in this order:\n # mode, links, owner, group, size, mtime, filename.\n # Offset by the difference of the device and host clocks.\n device_mtime = int(stdout.split()[5])\n host_mtime = device_mtime - time_offset\n os.utime(host_path, (host_mtime, host_mtime))",
"def dump(self):\n self.dumpInit.emit()\n\n worker = DumpThread()\n thread = QtCore.QThread(self)\n self.__thread_maps['dump'] = [thread, worker]\n worker.moveToThread(thread)\n\n worker.dumpSig.connect(self.dumpSig)\n worker.dumpDone.connect(self.dumpDone)\n thread.started.connect(worker.dump)\n\n thread.start()",
"def dump(self):\r\n instr = \"\"\"\r\n mysqldump -u {0} -p{1} {2} {3} | mysql -u {4} -p{5} -h {6} {7}\r\n \"\"\".format(\r\n self.src_user,\r\n self.src_passwd,\r\n self.src_db,\r\n self.tbl_name,\r\n self.dest_user,\r\n self.dest_passwd,\r\n self.dest_host,\r\n self.dest_db)\r\n\r\n system(instr)",
"def blockdump():\n global buffer\n global maps\n\n raw_input(\"Press Enter to dump results...\")\n\n dumptime = datetime.datetime.now().strftime(\"%Y_%m_%d-%H_%M_%S.%f\")\n filename = \"dump-{0}.txt\".format(dumptime)\n\n with open(filename, \"a\") as f:\n\n # write headers\n f.write(\"Time, \")\n for hex, (units, func) in sorted(maps.items()):\n f.write(\"{0}, \".format(units))\n f.write(\"\\n\")\n\n # write buffer\n for line in buffer:\n f.write(line + \"\\n\")\n\n # beep\n print chr(7)\n\n # restart self\n blockdump()"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Red = Disable, Blue = Enable. For any problem, such as a plugin being enabled on the dashboard but showing as disabled here, inform the Owner. | async def plugin(self,ctx):
special_case = {"Anime":"myanimelist","Anti Raid":"antiraid"}
plugin_setting = await self.redis.hgetall("{}:Config:Cogs".format(ctx.message.guild.id))
embed = discord.Embed()
cogs = self.bot.cogs.keys()
for x in cogs:
setting = u"\U0001F534" #red
if x in ("Core", "Remindme", "Tools", "REPL","Events"): # A Owner's thing only.
if ctx.message.author.id != self.bot.owner.id:
continue
setting = u"\U0001F535" #blue
if x.lower() in plugin_setting or special_case.get(x) in plugin_setting:
setting = u"\U0001F535" #blue
embed.add_field(name = x,value = setting)
if ctx.message.guild.me.colour.value:
embed.colour = ctx.message.guild.me.colour
embed.set_footer(text = "{} = Disable | {} = Enable".format(u"\U0001F534",u"\U0001F535"))
await ctx.send(embed=embed) | [
"def __neg__(self):\n _this_module.txt_command('disable {0}'.format(self.fullname))",
"def enabled(self, enable):\n #ic()\n pass",
"def Enabled(self) -> bool:",
"def action_pre_disable(self, software_profile_name, *args, **kwargs):\n pass",
"def disable_feature(self,reason,source=\"gff3_maniger\"):\r\n date = datetime.datetime.now().strftime(\"%Y-%m-%d\")\r\n self.add_history(date,source,reason)\r\n self.active = False\r\n if self._owner_line.type == 'SNP':\r\n self._owner_line._owner_set.all_snp_disabled()",
"def enableDisable(self): \n currentTab = self.tabWidget.currentWidget()\n isHelixTab = currentTab is self.helixTab\n isAtomicTab = currentTab is self.atomicTab\n isLoopTab = currentTab is self.loopTab \n isPositionTab = currentTab is self.positionTab\n \n for index in range( len(self.possibleAtomsList) ):\n atom = self.possibleAtomsList[index]\n atom.setVisible(isAtomicTab)\n \n if(len(self.possibleAtomsList) > 0):\n self.CAlphaViewer.emitModelChanged()\n \n \n #self.undoButton.setEnabled(isAtomicTab or isHelixTab or isPositionTab)\n #self.redoButton.setEnabled(isAtomicTab or isHelixTab or isPositionTab) ",
"def disable_account(self):\n pass",
"def disability_specify(self, instance):\r\n return instance.user.profile.disability_specify",
"def get_shield(self):\r\n # TODO: Implementasi method untuk mengembalikan shield\r\n pass",
"def action_post_disable(self, software_profile_name, *args, **kwargs):\n pass",
"def get_status():\n return 'not-activated'",
"def get_everyone_denied(self):",
"def disable():\n configdb = ConfigDBConnector()\n configdb.connect()\n rif_info = {}\n rif_info['FLEX_COUNTER_STATUS'] = 'disable'\n configdb.mod_entry(\"FLEX_COUNTER_TABLE\", \"RIF\", rif_info)",
"async def update_bypass(self, ctx):\r\n self.bot.cache[ctx.guild.id]['settings']['admin_bypass'] = not self.bot.cache[ctx.guild.id]['settings']['admin_bypass']\r\n embed=discord.Embed(title=\"Guild settings updated\", description=f\"You have changed the **admin bypass** flag for this guild.\\nThe current value is: `{self.bot.cache[ctx.guild.id]['settings']['admin_bypass']}`\", color=self.bot.success_color)\r\n embed.set_author(name=ctx.bot.user.name, icon_url=ctx.bot.user.avatar_url)\r\n embed.set_thumbnail(url=ctx.guild.icon_url)\r\n embed.set_footer(text=f\"A True value means defined admin roles are able to bypass the cooldown. A False value means they cannot bypass the cooldown.\")\r\n await self.bot.write_db(ctx.guild)\r\n await ctx.send(embed=embed)",
"async def toggle(self, ctx: BBContext):\n\n self.code_enabled = not self.code_enabled\n e = 'enabled.' if self.code_enabled else 'disabled.'\n await ctx.send(f\"Bunker code auto reaction has been : **{e}**\")\n self.bot.logger.info('Bunker code listener %s by %s', e, str(ctx.author))",
"def CanAdminister(self):\r\n return Follower.ADMIN in self.labels and Follower.REMOVED not in self.labels",
"def module_update_non_admin_invisible_off(self):\n self.test_runner.run_module_update_non_admin_invisible_off()",
"def help_disable(self):\n print_say(\"sound: Deny Jarvis his voice.\", self)",
"def action_disable(args, cfg):\n ent_cls = entitlements.ENTITLEMENT_CLASS_BY_NAME[args.name]\n entitlement = ent_cls(cfg)\n ret = 0 if entitlement.disable() else 1\n cfg.status() # Update the status cache\n return ret"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Store input into filename using pickle.dump | def store (input, filename) :
cout = open (filename, 'w')
pickle.dump (input, cout)
cout.close () | [
"def pickle(self,data,filename):\n pickle.dump(data, open(filename, 'wb'))",
"def save(self, filename): \n pickle.dump(self, open(filename, 'w'))",
"def storeTree(inputTree,filename): \n fw = open(filename,'w')\n pickle.dump(inputTree, fw)\n fw.close",
"def psave(var, filename):\n pickle.dump(var, open(filename, 'wb'))",
"def save_data_to_pkl(self, out_name):\n with open(out_name, 'xb') as file:\n pickle.dump(self.results_to_save, file)\n print(f'Data wiitten to {out_name}')",
"def write_inputfile():",
"def store_pickles(filename, to_store): \n with open(filename, 'w') as f:\n pickle.dump(to_store, f)",
"def save_input(self):\n if not os.path.exists(self.wdir):\n os.makedirs(self.wdir)\n\n with open(self.filepath, \"w\") as f:\n f.write(self.input_string)\n print(f\"-- Input file [{self.filename}] written successfully.\")",
"def save_var(filename, data, protocol = -1, allow_dill=False):\n if filename.endswith('.gz') :\n open_method = gzip.open\n else:\n open_method = open\n\n output = open_method(filename, 'wb')\n try:\n # Pickle dictionary using given protocol\n std_pickle.dump(data, output, protocol)\n finally:\n output.close()\n\n return",
"def serialize_to_file(self, filename=None):\n\n if not filename:\n filename = self._default_filename + '.pkl'\n pickle.dump(self, open(filename, 'wb+'))\n return filename",
"def _save_data(self, name, data):\r\n\t\t\r\n\t\twith open(os.path.join(self.log_dir, name + '.pkl'), 'wb') as f:\r\n\t\t\tcPickle.dump(data, f, cPickle.HIGHEST_PROTOCOL)",
"def save(self):\n file = open(self.name+'.txt','wb')\n file.write(cPickle.dumps(self.__dict__))\n file.close()",
"def save_pickle(args, item, file_name):\n add_ext = '' if file_name.endswith('.pkl') else '.pkl'\n\n file_name = os.path.join(args.PKL_DIR, file_name) + add_ext\n\n with open(file_name, 'wb') as fh:\n pickle.dump(item, fh)\n return",
"def save(self,fn):\n fn = fn if fn[-4:] == \".pkl\" else fn+\".pkl\"\n with open(fn,\"wb+\") as f:\n pickle.dump(self,f)\n log(\"Saved reader to {}\".format(fn))",
"def save(object, filename, protocol = 0):\n file = gzip.GzipFile(filename, 'wb')\n file.write(pickle.dumps(object, protocol))\n file.close()",
"def save(self, name):\r\n\t\t\r\n\t\twith open(os.path.join(self.log_dir, name + '.pkl'), 'wb') as f:\r\n\t\t\tcPickle.dump(self, f, cPickle.HIGHEST_PROTOCOL)",
"def saveVar(var,name):\n with open(name+'.pickle','wb') as fl:\n pickle.dump(var,fl)",
"def save(self, output, data):\n pass",
"def save(contenu,name):\n with open(name, \"wb\") as fichier:\n mon_pickler = pickle.Pickler(fichier)\n mon_pickler.dump(contenu)\n fichier.close()"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Sends args and kwargs to any configured callbacks. This handles the cases where the 'callbacks' variable is ``None``, a single function, or a list. | def _multiple_callbacks(callbacks, *args, **kwargs):
if isinstance(callbacks, list):
for cb in callbacks:
cb(*args, **kwargs)
return
if callbacks:
callbacks(*args, **kwargs) | [
"def _multiple_callbacks(callbacks, *args, **kwargs):\n if isinstance(callbacks, list):\n for cb in callbacks:\n cb(*args, **kwargs)\n return\n if callbacks:\n callbacks(*args, **kwargs)",
"def call_all_callbacks(callbacks, *args):\n for callback in callbacks:\n callback(*args)",
"def call(self):\n for cb in self.callbacks:\n if len(cb[1]) > 0:\n cb[0](*cb[1])\n else:\n cb[0]()",
"def run_callbacks(self, **kwargs):\n for callback in self.CALLBACKS:\n getattr(self, callback)(**kwargs)",
"def callbackWithArguments(callback,*args,**kwargs):\n\tdef real_callback():\n\t\tcallback(*args,**kwargs)\n\treturn real_callback",
"def _invoke_callbacks(self, *args, **kwargs):\n for callback in self._done_callbacks:\n _helpers.safe_invoke_callback(callback, *args, **kwargs)",
"def trigger(self, callback_type, *args):\n if self.callbacks.has_key(callback_type):\n for cb in self.callbacks[callback_type]:\n cb(*args)",
"def callbacks(self, callbacks):\n self._callbacks = callbacks",
"async def _call_callbacks(self, command: str, arg: dict[str, Any]) -> None:\n callbacks = self._callbacks.get(command, self._default_callback)\n for callback in callbacks:\n if inspect.iscoroutinefunction(callback):\n await cast(Awaitable[None], callback(arg))\n else:\n callback(arg)",
"def execCallback(self, funcList, *args):\n for func in funcList:\n func(*args)",
"def add_callbacks(self, callbacks: Collection[Callable[[], None]]) -> None:\n\n if not callbacks:\n return\n\n if not self.callbacks:\n self.callbacks = []\n\n for callback in callbacks:\n if callback not in self.callbacks:\n self.callbacks.append(callback)",
"def setCallback(self, callback, *args):\n\t\tself.callback = callback\n\t\tself.callback_args = args",
"def _forward_cb(self, *args, **kwargs):\n for callback_function in self.changeCallbacks:\n callback_function(*args, **kwargs)",
"def setEventCallbacks(self, callbacks) :\n for key in callbacks :\n self.proto.onEventCallbacks[key] = callbacks[key]",
"def _trigger_callbacks(self, event_type, **args):\n for event in self._event_callbacks[event_type]:\n event(**args)",
"def _run_callbacks(cls, cb_method, *args):\n global CALLBACKS\n for c in CALLBACKS:\n attr = getattr(c, cb_method)\n attr(*args)",
"def execute_callbacks(self, name, *args, **kwargs):\n callbacks = self.callbacks.get(name, {}).items()\n for order, func in callbacks:\n func(self, *args, **kwargs)\n\n return len(callbacks)",
"def spawn_callback(self, callback, *args, **kwargs):\n ...",
"def validate_callbacks(callbacks):\n if callbacks is None:\n callbacks = []\n elif isinstance(callbacks, Callback):\n callbacks = [callbacks]\n else:\n if not all([isinstance(c, Callback) for c in callbacks]):\n invalid_callbacks = [c for c in callbacks if not isinstance(c, Callback)]\n raise TypeError('Found non-callback object in callbacks: {}'.format(invalid_callbacks))\n\n return callbacks"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Adds and connects attributes from the default encore FKIK switch anim setup to rig nodes in the scene. Imports the default control setup from file, or specify source_ctrl in args to override. | def make_fkikSwitch_connection_attrs(partpre=None, side='Lt', source_ctrl=None, tag_name='switch', snapTo=None,
add_attrs=None):
switch_anim = ''
if source_ctrl is not None:
switch_anim = source_ctrl
partpre = partpre
if partpre == '':
partpre = 'mypart_'
if source_ctrl is None:
# filepath = r'C:/Users/Nicob/Documents/maya/scripts/rigBot/rigBot/config/switcher_anim.mb'
system_base_path = os.path.dirname(utils.__file__)
base_path = os.path.join(system_base_path, 'config')
file_path = os.path.join(base_path, 'switcher_anim.mb')
newnodes = mc.file(file_path, i=1, ignoreVersion=1, rnn=1, mergeNamespacesOnClash=0, rpr=partpre, ra=1,
options="v=0;", pr=1)
switch_anim = partpre + '_CTL'
# pos switcher grpOffset node if snapTo
if snapTo is not None:
utils.snap_to_transform(snapTo, switch_anim.replace('CTL', 'grpOffset'))
mc.setAttr(switch_anim.replace('CTL', 'grpOffset') + '.r', 0, 0, 0)
# get value of tags and sort into ik and fk vis groups
iks = []
fks = []
nodes = mc.ls('*.' + tag_name)
for node in nodes:
if partpre in node and side in node:
mode = mc.getAttr(node)
if mode:
mode = mode.lower()
if 'ik' in mode:
iks.append(node.split('.')[0])
if 'fk' in mode:
fks.append(node.split('.')[0])
for ik in iks:
# ikparpar=utils.get_parent(ik)
ikpar = utils.get_parent(ik)
if ikpar is None:
mc.connectAttr(switch_anim + '.FK_IK', ik + '.visibility', f=1)
else:
mc.connectAttr(switch_anim + '.FK_IK', ikpar + '.visibility', f=1)
rvn = mc.createNode('reverse', name=switch_anim + '_fkik_vis_rv')
mc.connectAttr(switch_anim + '.FK_IK', rvn + '.inputX')
for fk in fks:
fkpar = utils.get_parent(fk)
if fkpar:
mc.connectAttr(rvn + '.outputX', fkpar + '.visibility', f=1)
if add_attrs is not None:
for att in add_attrs:
mc.addAttr(switch_anim, ln=att, min=0, max=1, dv=0, k=1)
nns = []
for nn in reversed(newnodes):
nnn = ''
sn = nn.split("|")
nnn = mc.rename(nn, sn[-1])
nns.append(nnn)
anim = mc.ls(partpre + '_CTL')
# if mc.objExists (partpre+'_skeleton_grp'):
# mc.parent (anim, partpre+'_skeleton_grp' )
return anim | [
"def node_setting_init(self):\n\n # Use nodes\n bpy.context.scene.use_nodes = True\n tree = bpy.context.scene.node_tree\n links = tree.links\n\n # Remove default nodes\n for node in tree.nodes:\n tree.nodes.remove(node)\n \n\n # -- Initialize nodes\n # both\n render_layer_node = tree.nodes.new('CompositorNodeRLayers')\n \n # # depth\n multiply_node = tree.nodes.new('CompositorNodeMath')\n multiply_node.operation = 'MULTIPLY'\n multiply_node.inputs[1].default_value = 1000 # meter to millimeter\n set_alpha_node = tree.nodes.new('CompositorNodeSetAlpha')\n # depth output\n file_output_depth_node = tree.nodes.new('CompositorNodeOutputFile')\n file_output_depth_node.base_path = config.paths['depth_dir']\n file_output_depth_node.format.file_format = \"OPEN_EXR\"\n file_output_depth_node.format.color_mode = \"RGB\"\n file_output_depth_node.format.color_depth = \"16\"\n file_output_depth_node.file_slots[0].path = '######.exr' # blender placeholder #\n \n # rgb\n image_node = tree.nodes.new('CompositorNodeImage')\n alpha_over_node = tree.nodes.new('CompositorNodeAlphaOver')\n # rgb output\n file_output_rgb_node = tree.nodes.new('CompositorNodeOutputFile')\n file_output_rgb_node.base_path = config.paths['rgb_dir']\n file_output_rgb_node.format.file_format = \"PNG\" # default is \"PNG\"\n file_output_rgb_node.format.color_mode = \"RGB\"\t# default is \"BW\"\n file_output_rgb_node.format.color_depth = \"8\" # default is 8\n file_output_rgb_node.format.compression = 0\t # default is 15\n file_output_rgb_node.file_slots[0].path = '######.png' # blender placeholder #\n \n # segmentation mask output\n file_output_mask_node = tree.nodes.new('CompositorNodeOutputFile')\n file_output_mask_node.base_path = config.paths['mask_dir']\n file_output_mask_node.format.color_mode = \"BW\"\t# default is \"BW\"\n file_output_mask_node.file_slots[0].path = '######.png' # blender placeholder #\n divide_node = tree.nodes.new('CompositorNodeMath')\n divide_node.operation = 'DIVIDE'\n divide_node.inputs[1].default_value = 255\n\n # -- Set node links\n # for rgb\n links.new(render_layer_node.outputs[\"Image\"], alpha_over_node.inputs[2])\n links.new(image_node.outputs[\"Image\"], alpha_over_node.inputs[1])\n links.new(alpha_over_node.outputs[\"Image\"], file_output_rgb_node.inputs[\"Image\"])\n # # for depth\n links.new(render_layer_node.outputs[\"Alpha\"], set_alpha_node.inputs[\"Alpha\"])\n links.new(render_layer_node.outputs[\"Depth\"], multiply_node.inputs[0])\n links.new(multiply_node.outputs[\"Value\"], set_alpha_node.inputs[\"Image\"])\n links.new(set_alpha_node.outputs[\"Image\"], file_output_depth_node.inputs[\"Image\"])\n # for mask\n #links.new(render_layer_node.outputs[\"IndexOB\"], file_output_mask_node.inputs[\"Image\"])\n links.new(render_layer_node.outputs[\"IndexOB\"], divide_node.inputs[0])\n links.new(divide_node.outputs[\"Value\"], file_output_mask_node.inputs[\"Image\"])",
"def setup_threeCtrl(lf_lidrails, rt_lidrails):\n # Declare control variables\n lf_up = ['lf_lid01_tp01_ccc', 'lf_lid01_tp02_ccc', 'lf_lid01_tp03_ccc']\n lf_dn = ['lf_lid01_dn01_ccc', 'lf_lid01_dn02_ccc', 'lf_lid01_dn03_ccc']\n rt_up = ['rt_lid01_tp01_ccc', 'rt_lid01_tp02_ccc', 'rt_lid01_tp03_ccc']\n rt_dn = ['rt_lid01_dn01_ccc', 'rt_lid01_dn02_ccc', 'rt_lid01_dn03_ccc']\n\n # Connect lidRails ramps to lid profile controls\n\n # ========\n # lf_up\n\n # inner\n cmds.connectAttr(lf_up[0] + '.tx', lf_lidrails + '.offsettop[0].offsettop_Position', f=True)\n cmds.connectAttr(lf_up[0] + '.ty', lf_lidrails + '.offsettop[0].offsettop_FloatValue', f=True)\n # mid\n lf_lid01_um01_adn = cmds.createNode('addDoubleLinear', n='lf_lid01_um01_addDoubleLinear')\n cmds.connectAttr(lf_up[1] + '.tx', lf_lid01_um01_adn + '.input1', f=True)\n cmds.setAttr(lf_lid01_um01_adn + '.input2', 0.5)\n cmds.connectAttr(lf_lid01_um01_adn + '.output', lf_lidrails + '.offsettop[1].offsettop_Position', f=True)\n cmds.connectAttr(lf_up[1] + '.ty', lf_lidrails + '.offsettop[1].offsettop_FloatValue', f=True)\n # outer\n lf_lid01_uo01_adn = cmds.createNode('addDoubleLinear', n='lf_lid01_uo01_addDoubleLinear')\n cmds.connectAttr(lf_up[2] + '.tx', lf_lid01_uo01_adn + '.input1', f=True)\n cmds.setAttr(lf_lid01_uo01_adn + '.input2', 1.0)\n cmds.connectAttr(lf_lid01_uo01_adn + '.output', lf_lidrails + '.offsettop[2].offsettop_Position', f=True)\n cmds.connectAttr(lf_up[2] + '.ty', lf_lidrails + '.offsettop[2].offsettop_FloatValue', f=True)\n\n # ========\n # lf_dn\n\n # Reverse node\n lf_dn_rvn = cmds.createNode('reverse', n='lf_lid01_dn01_reverse')\n # inner\n cmds.connectAttr(lf_dn[0] + '.tx', lf_lidrails + '.offsetbottom[0].offsetbottom_Position', f=True)\n cmds.connectAttr(lf_dn[0] + '.ty', lf_dn_rvn + '.inputX', f=True)\n cmds.connectAttr(lf_dn_rvn + '.outputX', lf_lidrails + '.offsetbottom[0].offsetbottom_FloatValue', f=True)\n # mid\n lf_lid01_dm01_adn = cmds.createNode('addDoubleLinear', n='lf_lid01_dm01_addDoubleLinear')\n cmds.connectAttr(lf_dn[1] + '.tx', lf_lid01_dm01_adn + '.input1', f=True)\n cmds.setAttr(lf_lid01_dm01_adn + '.input2', 0.5)\n cmds.connectAttr(lf_lid01_dm01_adn + '.output', lf_lidrails + '.offsetbottom[1].offsetbottom_Position', f=True)\n cmds.connectAttr(lf_dn[1] + '.ty', lf_dn_rvn + '.inputY', f=True)\n cmds.connectAttr(lf_dn_rvn + '.outputY', lf_lidrails + '.offsetbottom[1].offsetbottom_FloatValue', f=True)\n # outer\n lf_lid01_do01_adn = cmds.createNode('addDoubleLinear', n='lf_lid01_do01_addDoubleLinear')\n cmds.connectAttr(lf_dn[2] + '.tx', lf_lid01_do01_adn + '.input1', f=True)\n cmds.setAttr(lf_lid01_do01_adn + '.input2', 1.0)\n cmds.connectAttr(lf_lid01_do01_adn + '.output', lf_lidrails + '.offsetbottom[2].offsetbottom_Position', f=True)\n cmds.connectAttr(lf_dn[2] + '.ty', lf_dn_rvn + '.inputZ', f=True)\n cmds.connectAttr(lf_dn_rvn + '.outputZ', lf_lidrails + '.offsetbottom[2].offsetbottom_FloatValue', f=True)\n\n # ========\n # rt_up\n\n # inner\n rt_lid01_ui01_asn = cmds.createNode('plusMinusAverage', n='rt_lid01_ui01_plusMinusAverage')\n cmds.setAttr(rt_lid01_ui01_asn + '.operation', 2)\n cmds.setAttr(rt_lid01_ui01_asn + '.input1D[0]', 1.0)\n cmds.connectAttr(rt_up[0] + '.tx', rt_lid01_ui01_asn + '.input1D[1]', f=True)\n cmds.connectAttr(rt_lid01_ui01_asn + '.output1D', rt_lidrails + '.offsettop[2].offsettop_Position', f=True)\n cmds.connectAttr(rt_up[0] + '.ty', rt_lidrails + '.offsettop[2].offsettop_FloatValue', f=True)\n # mid\n rt_lid01_um01_mdn = cmds.createNode('multDoubleLinear', 
n='rt_lid01_um01_multDoubleLinear')\n rt_lid01_um01_adn = cmds.createNode('addDoubleLinear', n='rt_lid01_um01_addDoubleLinear')\n cmds.connectAttr(rt_up[1] + '.tx', rt_lid01_um01_mdn + '.input1', f=True)\n cmds.setAttr(rt_lid01_um01_mdn + '.input2', -1.0)\n cmds.connectAttr(rt_lid01_um01_mdn + '.output', rt_lid01_um01_adn + '.input1', f=True)\n cmds.setAttr(rt_lid01_um01_adn + '.input2', 0.5)\n cmds.connectAttr(rt_lid01_um01_adn + '.output', rt_lidrails + '.offsettop[1].offsettop_Position', f=True)\n cmds.connectAttr(rt_up[1] + '.ty', rt_lidrails + '.offsettop[1].offsettop_FloatValue', f=True)\n # outer\n rt_lid01_uo_mdn = cmds.createNode('multDoubleLinear', n='rt_lid01_uo_multDoubleLinear')\n cmds.connectAttr(rt_up[2] + '.tx', rt_lid01_uo_mdn + '.input1', f=True)\n cmds.setAttr(rt_lid01_uo_mdn + '.input2', -1.0)\n cmds.connectAttr(rt_lid01_uo_mdn + '.output', rt_lidrails + '.offsettop[0].offsettop_Position', f=True)\n cmds.connectAttr(rt_up[2] + '.ty', rt_lidrails + '.offsettop[0].offsettop_FloatValue', f=True)\n\n # ========\n # rt_dn\n\n # Reverse node\n rt_dn_rvn = cmds.createNode('reverse', n='rt_lid01_dn01_reverse')\n # inner\n rt_lid01_di01_asn = cmds.createNode('plusMinusAverage', n='rt_lid01_di01_plusMinusAverage')\n cmds.setAttr(rt_lid01_di01_asn + '.operation', 2)\n cmds.setAttr(rt_lid01_di01_asn + '.input1D[0]', 1.0)\n cmds.connectAttr(rt_dn[0] + '.tx', rt_lid01_di01_asn + '.input1D[1]', f=True)\n cmds.connectAttr(rt_lid01_di01_asn + '.output1D', rt_lidrails + '.offsetbottom[0].offsetbottom_Position', f=True)\n cmds.connectAttr(rt_dn[0] + '.ty', rt_dn_rvn + '.inputX', f=True)\n cmds.connectAttr(rt_dn_rvn + '.outputX', rt_lidrails + '.offsetbottom[0].offsetbottom_FloatValue', f=True)\n # mid\n rt_lid01_dm01_asn = cmds.createNode('plusMinusAverage', n='rt_lid01_dm01_plusMinusAverage')\n cmds.setAttr(rt_lid01_dm01_asn + '.operation', 2)\n cmds.setAttr(rt_lid01_dm01_asn + '.input1D[0]', 0.5)\n cmds.connectAttr(rt_dn[1] + '.tx', rt_lid01_dm01_asn + '.input1D[1]', f=True)\n cmds.connectAttr(rt_lid01_dm01_asn + '.output1D', rt_lidrails + '.offsetbottom[1].offsetbottom_Position', f=True)\n cmds.connectAttr(rt_dn[1] + '.ty', rt_dn_rvn + '.inputY', f=True)\n cmds.connectAttr(rt_dn_rvn + '.outputY', rt_lidrails + '.offsetbottom[1].offsetbottom_FloatValue', f=True)\n # outer\n rt_lid01_do01_mdn = cmds.createNode('multDoubleLinear', n='rt_lid01_do01_multDoubleLinear')\n cmds.connectAttr(rt_dn[2] + '.tx', rt_lid01_do01_mdn + '.input1', f=True)\n cmds.setAttr(rt_lid01_do01_mdn + '.input2', -1.0)\n cmds.connectAttr(rt_lid01_do01_mdn + '.output', rt_lidrails + '.offsetbottom[2].offsetbottom_Position', f=True)\n cmds.connectAttr(rt_dn[2] + '.ty', rt_dn_rvn + '.inputZ', f=True)\n cmds.connectAttr(rt_dn_rvn + '.outputZ', rt_lidrails + '.offsetbottom[2].offsetbottom_FloatValue', f=True)",
"def importAnim(self, animLayer='', murderKeys=False, dataFile=None):\r\n topNode = cmds.ls(sl=1)[0]\r\n startFrame = int(cmds.playbackOptions(q=1, min=1))\r\n endFrame = int(cmds.playbackOptions(q=1, max=1))\r\n # savePath, startFrame, endFrame, aeDirPath = self.getFilePath(topNode)\r\n if dataFile:\r\n initPos = dataFile[0]\r\n ctlData = dataFile[1]\r\n else:\r\n savePath = cmds.fileDialog2(ds=2, fm=1, ff='MAF Files (*.animMAF)')[0]\r\n with open(savePath, 'r') as file:\r\n data = json.load(file)\r\n\r\n if data.keys()[0] == '_init':\r\n initPos = data[data.keys()[0]]\r\n ctlData = data[data.keys()[1]]\r\n else:\r\n initPos = data[data.keys()[1]]\r\n ctlData = data[data.keys()[0]]\r\n\r\n parList = cmds.listRelatives(cmds.ls(sl=1)[0], ad=1, f=1, type=\"transform\")\r\n # parList = list(set([cmds.listRelatives(i,f=1,p=1)[0] for i in hi]))\r\n for par in parList:\r\n shortPar = par.split(':')[-1]\r\n\r\n if shortPar == 'MASTER_CONTROL':\r\n cmds.setAttr(par + '.t', initPos[0][0], initPos[0][1], initPos[0][2])\r\n cmds.setAttr(par + '.r', initPos[1][0], initPos[1][1], initPos[1][2])\r\n cmds.setAttr(par + '.s', initPos[2][0], initPos[2][1], initPos[2][2])\r\n elif \"tranRot_CTL\" in shortPar:\r\n cmds.setAttr(par + '.t', initPos[0][0], initPos[0][1], initPos[0][2])\r\n cmds.setAttr(par + '.r', initPos[1][0], initPos[1][1], initPos[1][2])\r\n cmds.setAttr(par + '.s', initPos[2][0], initPos[2][1], initPos[2][2])\r\n\r\n # off = cmds.listRelatives(par,p=1)[0]\r\n\r\n if murderKeys:\r\n cmds.cutKey(par, time=(startFrame, endFrame), cl=1, option=\"keys\")\r\n # cmds.cutKey( off, time=(startFrame,endFrame), cl=1, option=\"keys\")\r\n\r\n self.setAnim(par, ctlData, startFrame, endFrame, animLayer)\r\n # self.setAnim(off,ctlData,startFrame,endFrame,animLayer)\r\n print \"IMPORT COMPLETE!\"",
"def setup_fourCtrl(lf_lidrails, rt_lidrails):\n # Declare control variables\n lf_up = ['L_upperLid1_ctrl', 'L_upperLid2_ctrl', 'L_upperLid3_ctrl', 'L_upperLid4_ctrl']\n lf_dn = ['L_lowerLid1_ctrl', 'L_lowerLid2_ctrl', 'L_lowerLid3_ctrl', 'L_lowerLid4_ctrl']\n rt_up = ['R_upperLid1_ctrl', 'R_upperLid2_ctrl', 'R_upperLid3_ctrl', 'R_upperLid4_ctrl']\n rt_dn = ['R_lowerLid1_ctrl', 'R_lowerLid2_ctrl', 'R_lowerLid3_ctrl', 'R_lowerLid4_ctrl']\n\n # Connect lidRails ramps to lid profile controls\n\n # lf_up =========\n\n # inner\n cmds.connectAttr(lf_up[0] + '.tx', lf_lidrails + '.offsettop[0].offsettop_Position', f=True)\n cmds.connectAttr(lf_up[0] + '.ty', lf_lidrails + '.offsettop[0].offsettop_FloatValue', f=True)\n # mid - inner\n lf_lid01_um01_adn = cmds.createNode('addDoubleLinear', n='lf_lid01_um01_addDoubleLinear')\n cmds.connectAttr(lf_up[1] + '.tx', lf_lid01_um01_adn + '.input1', f=True)\n cmds.setAttr(lf_lid01_um01_adn + '.input2', 0.333)\n cmds.connectAttr(lf_lid01_um01_adn + '.output', lf_lidrails + '.offsettop[1].offsettop_Position', f=True)\n cmds.connectAttr(lf_up[1] + '.ty', lf_lidrails + '.offsettop[1].offsettop_FloatValue', f=True)\n # mid - outer\n lf_lid01_um02_adn = cmds.createNode('addDoubleLinear', n='lf_lid01_um02_addDoubleLinear')\n cmds.connectAttr(lf_up[2] + '.tx', lf_lid01_um02_adn + '.input1', f=True)\n cmds.setAttr(lf_lid01_um02_adn + '.input2', 0.666)\n cmds.connectAttr(lf_lid01_um02_adn + '.output', lf_lidrails + '.offsettop[2].offsettop_Position', f=True)\n cmds.connectAttr(lf_up[2] + '.ty', lf_lidrails + '.offsettop[2].offsettop_FloatValue', f=True)\n # outer\n lf_lid01_uo01_adn = cmds.createNode('addDoubleLinear', n='lf_lid01_uo01_addDoubleLinear')\n cmds.connectAttr(lf_up[3] + '.tx', lf_lid01_uo01_adn + '.input1', f=True)\n cmds.setAttr(lf_lid01_uo01_adn + '.input2', 1.0)\n cmds.connectAttr(lf_lid01_uo01_adn + '.output', lf_lidrails + '.offsettop[3].offsettop_Position', f=True)\n cmds.connectAttr(lf_up[3] + '.ty', lf_lidrails + '.offsettop[3].offsettop_FloatValue', f=True)\n\n # lf_dn =========\n\n lf_dn_rvn = cmds.createNode('reverse', n='lf_lid01_dn01_reverse')\n lf_dn02_rvn = cmds.createNode('reverse', n='lf_lid01_dn02_reverse')\n # inner\n cmds.connectAttr(lf_dn[0] + '.tx', lf_lidrails + '.offsetbottom[0].offsetbottom_Position', f=True)\n cmds.connectAttr(lf_dn[0] + '.ty', lf_dn_rvn + '.inputX', f=True)\n cmds.connectAttr(lf_dn_rvn + '.outputX', lf_lidrails + '.offsetbottom[0].offsetbottom_FloatValue', f=True)\n # mid - inner\n lf_lid01_dm01_adn = cmds.createNode('addDoubleLinear', n='lf_lid01_dm01_addDoubleLinear')\n cmds.connectAttr(lf_dn[1] + '.tx', lf_lid01_dm01_adn + '.input1', f=True)\n cmds.setAttr(lf_lid01_dm01_adn + '.input2', 0.333)\n cmds.connectAttr(lf_lid01_dm01_adn + '.output', lf_lidrails + '.offsetbottom[1].offsetbottom_Position', f=True)\n cmds.connectAttr(lf_dn[1] + '.ty', lf_dn_rvn + '.inputY', f=True)\n cmds.connectAttr(lf_dn_rvn + '.outputY', lf_lidrails + '.offsetbottom[1].offsetbottom_FloatValue', f=True)\n # mid - outer\n lf_lid01_dm02_adn = cmds.createNode('addDoubleLinear', n='lf_lid01_dm02_addDoubleLinear')\n cmds.connectAttr(lf_dn[2] + '.tx', lf_lid01_dm02_adn + '.input1', f=True)\n cmds.setAttr(lf_lid01_dm02_adn + '.input2', 0.666)\n cmds.connectAttr(lf_lid01_dm02_adn + '.output', lf_lidrails + '.offsetbottom[2].offsetbottom_Position', f=True)\n cmds.connectAttr(lf_dn[2] + '.ty', lf_dn02_rvn + '.inputX', f=True)\n cmds.connectAttr(lf_dn02_rvn + '.outputX', lf_lidrails + '.offsetbottom[2].offsetbottom_FloatValue', f=True)\n # 
outer\n lf_lid01_do01_adn = cmds.createNode('addDoubleLinear', n='lf_lid01_do01_addDoubleLinear')\n cmds.connectAttr(lf_dn[3] + '.tx', lf_lid01_do01_adn + '.input1', f=True)\n cmds.setAttr(lf_lid01_do01_adn + '.input2', 1.0)\n cmds.connectAttr(lf_lid01_do01_adn + '.output', lf_lidrails + '.offsetbottom[3].offsetbottom_Position', f=True)\n cmds.connectAttr(lf_dn[3] + '.ty', lf_dn02_rvn + '.inputY')\n cmds.connectAttr(lf_dn02_rvn + '.outputY', lf_lidrails + '.offsetbottom[3].offsetbottom_FloatValue', f=True)\n\n # rt_up =========\n\n # inner\n rt_lid01_ui01_asn = cmds.createNode('plusMinusAverage', n='rt_lid01_ui01_plusMinusAverage')\n cmds.setAttr(rt_lid01_ui01_asn + '.input1D[0]', 1.0)\n cmds.connectAttr(rt_up[0] + '.tx', rt_lid01_ui01_asn + '.input1D[1]', f=True)\n cmds.connectAttr(rt_lid01_ui01_asn + '.output1D', rt_lidrails + '.offsettop[3].offsettop_Position', f=True)\n cmds.connectAttr(rt_up[0] + '.ty', rt_lidrails + '.offsettop[3].offsettop_FloatValue', f=True)\n # mid -inner\n rt_lid01_um01_mdn = cmds.createNode('multDoubleLinear', n='rt_lid01_um01_multDoubleLinear')\n rt_lid01_um01_adn = cmds.createNode('addDoubleLinear', n='rt_lid01_um01_addDoubleLinear')\n cmds.connectAttr(rt_up[2] + '.tx', rt_lid01_um01_mdn + '.input1', f=True)\n cmds.setAttr(rt_lid01_um01_mdn + '.input2', -1.0)\n cmds.connectAttr(rt_lid01_um01_mdn + '.output', rt_lid01_um01_adn + '.input1', f=True)\n cmds.setAttr(rt_lid01_um01_adn + '.input2', 0.333)\n cmds.connectAttr(rt_lid01_um01_adn + '.output', rt_lidrails + '.offsettop[2].offsettop_Position', f=True)\n cmds.connectAttr(rt_up[2] + '.ty', rt_lidrails + '.offsettop[2].offsettop_FloatValue', f=True)\n\n # mid - outer\n rt_lid01_um02_mdn = cmds.createNode('multDoubleLinear', n='rt_lid01_um02_multDoubleLinear')\n rt_lid01_um02_adn = cmds.createNode('addDoubleLinear', n='rt_lid01_um02_addDoubleLinear')\n cmds.connectAttr(rt_up[1] + '.tx', rt_lid01_um02_mdn + '.input1', f=True)\n cmds.setAttr(rt_lid01_um02_mdn + '.input2', -1.0)\n cmds.connectAttr(rt_lid01_um02_mdn + '.output', rt_lid01_um02_adn + '.input1', f=True)\n cmds.setAttr(rt_lid01_um02_adn + '.input2', 0.666)\n cmds.connectAttr(rt_lid01_um02_adn + '.output', rt_lidrails + '.offsettop[1].offsettop_Position', f=True)\n cmds.connectAttr(rt_up[1] + '.ty', rt_lidrails + '.offsettop[1].offsettop_FloatValue', f=True)\n\n # outer\n rt_lid01_uo_mdn = cmds.createNode('multDoubleLinear', n='rt_lid01_uo_multDoubleLinear')\n cmds.connectAttr(rt_up[3] + '.tx', rt_lid01_uo_mdn + '.input1', f=True)\n cmds.setAttr(rt_lid01_uo_mdn + '.input2', -1.0)\n cmds.connectAttr(rt_lid01_uo_mdn + '.output', rt_lidrails + '.offsettop[0].offsettop_Position', f=True)\n cmds.connectAttr(rt_up[3] + '.ty', rt_lidrails + '.offsettop[0].offsettop_FloatValue', f=True)\n\n # rt_dn =========\n\n rt_dn_rvn = cmds.createNode('reverse', n='rt_lid01_dn01_reverse')\n rt_dn02_rvn = cmds.createNode('reverse', n='rt_lid01_dn02_reverse')\n # inner\n rt_lid01_di01_asn = cmds.createNode('plusMinusAverage', n='rt_lid01_di01_plusMinusAverage')\n cmds.setAttr(rt_lid01_di01_asn + '.operation', 2)\n cmds.setAttr(rt_lid01_di01_asn + '.input1D[0]', 1.0)\n cmds.connectAttr(rt_dn[0] + '.tx', rt_lid01_di01_asn + '.input1D[1]', f=True)\n cmds.connectAttr(rt_lid01_di01_asn + '.output1D', rt_lidrails + '.offsetbottom[0].offsetbottom_Position', f=True)\n cmds.connectAttr(rt_dn[0] + '.ty', rt_dn_rvn + '.inputX', f=True)\n cmds.connectAttr(rt_dn_rvn + '.outputX', rt_lidrails + '.offsetbottom[0].offsetbottom_FloatValue', f=True)\n # mid - inner\n rt_lid01_dm01_asn = 
cmds.createNode('plusMinusAverage', n='rt_lid01_dm01_plusMinusAverage')\n cmds.setAttr(rt_lid01_dm01_asn + '.operation', 2)\n cmds.setAttr(rt_lid01_dm01_asn + '.input1D[0]', 0.333)\n cmds.connectAttr(rt_dn[2] + '.tx', rt_lid01_dm01_asn + '.input1D[1]', f=True)\n cmds.connectAttr(rt_lid01_dm01_asn + '.output1D', rt_lidrails + '.offsetbottom[1].offsetbottom_Position', f=True)\n cmds.connectAttr(rt_dn[2] + '.ty', rt_dn_rvn + '.inputY', f=True)\n cmds.connectAttr(rt_dn_rvn + '.outputY', rt_lidrails + '.offsetbottom[1].offsetbottom_FloatValue', f=True)\n # mid - outer\n rt_lid01_dm02_asn = cmds.createNode('plusMinusAverage', n='rt_lid01_dm02_plusMinusAverage')\n cmds.setAttr(rt_lid01_dm02_asn + '.operation', 2)\n cmds.setAttr(rt_lid01_dm02_asn + '.input1D[0]', 0.666)\n cmds.connectAttr(rt_dn[1] + '.tx', rt_lid01_dm02_asn + '.input1D[1]', f=True)\n cmds.connectAttr(rt_lid01_dm02_asn + '.output1D', rt_lidrails + '.offsetbottom[2].offsetbottom_Position', f=True)\n cmds.connectAttr(rt_dn[1] + '.ty', rt_dn02_rvn + '.inputX', f=True)\n cmds.connectAttr(rt_dn02_rvn + '.outputX', rt_lidrails + '.offsetbottom[2].offsetbottom_FloatValue', f=True)\n # outer\n rt_lid01_do01_mdn = cmds.createNode('multDoubleLinear', n='rt_lid01_do01_multDoubleLinear')\n cmds.connectAttr(rt_dn[3] + '.tx', rt_lid01_do01_mdn + '.input1', f=True)\n cmds.setAttr(rt_lid01_do01_mdn + '.input2', -1.0)\n cmds.connectAttr(rt_lid01_do01_mdn + '.output', rt_lidrails + '.offsetbottom[3].offsetbottom_Position', f=True)\n cmds.connectAttr(rt_dn[3] + '.ty', rt_dn02_rvn + '.inputY', f=True)\n cmds.connectAttr(rt_dn02_rvn + '.outputY', rt_lidrails + '.offsetbottom[3].offsetbottom_FloatValue')",
"def create_ik_setup(controls, joints):\n\n # Create control offset transforms\n exp_tf_ms = []\n for ctl in controls:\n par = cmds.listRelatives(ctl, parent=True)\n buf = create_offset_transform(ctl, BUF)\n exp = create_offset_transform(ctl, EXP)\n off = create_offset_transform(ctl, OFF)\n cmds.parent(ctl, off)\n cmds.parent(off, exp)\n cmds.parent(exp, buf)\n if par:\n cmds.parent(buf, par[0])\n exp_tf_ms.append(buf)\n\n root_control, pole_control, goal_control = controls\n handle, effector = cmds.ikHandle(sj=joints[0], ee=joints[-1], sol='ikRPsolver')\n cmds.setAttr('{}.hiddenInOutliner'.format(handle), True)\n cmds.orientConstraint(goal_control, joints[-1], mo=True)\n cmds.parent(handle, goal_control)\n cmds.hide(handle)\n\n # Connect root control to ik joint offset group\n ik_joints_offset = cmds.listRelatives(joints[0], p=True)[0]\n cmds.parentConstraint(root_control, ik_joints_offset, mo=True)\n cmds.scaleConstraint(root_control, ik_joints_offset, mo=True)\n\n # Connect twisting and pole vector control\n cmds.addAttr(goal_control, ln='twist', at='float', k=True)\n cmds.connectAttr('{}.twist'.format(goal_control), '{}.twist'.format(handle))\n cmds.poleVectorConstraint(pole_control, handle)\n\n # Add PV visibility attribute\n cmds.addAttr(goal_control, shortName='pv', longName='poleVector', at='bool', k=True)\n cmds.connectAttr('{}.pv'.format(goal_control), '{}.v'.format(pole_control))\n cmds.setAttr('{}.pv'.format(goal_control),1)\n\n # Add curve that points elbow to pole control\n crv = cmds.curve(p=[[0, 0, 0], [0, 1, 0]], d=1)\n cmds.connectAttr('{}.visibility'.format(pole_control), '{}.visibility'.format(crv))\n lock_hide_attrs(crv, attrs=['tx', 'ty', 'tz', 'rx', 'ry', 'rz', 'sx', 'sy', 'sz'])\n cmds.setAttr('{}.overrideEnabled'.format(crv), True)\n cmds.setAttr('{}.overrideDisplayType'.format(crv), 2)\n decomp_joint = cmds.createNode('decomposeMatrix')\n decomp_control = cmds.createNode('decomposeMatrix')\n cmds.connectAttr('{}.worldMatrix'.format(joints[1]), '{}.inputMatrix'.format(decomp_joint))\n cmds.connectAttr('{}.worldMatrix'.format(pole_control), '{}.inputMatrix'.format(decomp_control))\n cmds.connectAttr('{}.outputTranslate'.format(decomp_joint), '{}.controlPoints[0]'.format(crv))\n cmds.connectAttr('{}.outputTranslate'.format(decomp_control), '{}.controlPoints[1]'.format(crv))\n\n return handle, crv, exp_tf_ms",
"def setup(self, trainers):",
"def __init__(self, cfg_index, conditions, pars_dir, step_title, use_defaults, input_cfg_json_data):\n super().__init__(cfg_index, conditions, pars_dir, step_title, use_defaults, input_cfg_json_data)\n self.set_name = \"alignment\"\n if input_cfg_json_data:\n self._read_custom_pars()\n else:\n self._combine_conditions()",
"def init_vars():\n\tda_vinci.base.usepackage(\"pgfkeys\")\n\tda_vinci.base.add_preamble(setup_script)",
"def init_trainers(self, args):\n self.actors_cur = [None for _ in range(self.num_agents)]\n self.critics_cur = [None for _ in range(self.num_agents)]\n self.actors_tar = [None for _ in range(self.num_agents)]\n self.critics_tar = [None for _ in range(self.num_agents)]\n self.optimizers_c = [None for _ in range(self.num_agents)]\n self.optimizers_a = [None for _ in range(self.num_agents)]\n input_size_global = sum(self.obs_shape_n) + sum(self.action_shape_n)\n\n if args.restore == True: # restore the model\n game_step = int(args.old_model_name.split('_')[-1][:-1])\n for idx in range(self.num_agents):\n self.actors_cur[idx] = torch.load(args.old_model_name+'a_c_{}.pt'.format(idx))\n self.actors_tar[idx] = torch.load(args.old_model_name+'a_t_{}.pt'.format(idx))\n self.critics_cur[idx] = torch.load(args.old_model_name+'c_c_{}.pt'.format(idx))\n self.critics_tar[idx] = torch.load(args.old_model_name+'c_t_{}.pt'.format(idx))\n self.optimizers_a[idx] = optim.Adam(self.actors_cur[idx].parameters(), args.lr_a)\n self.optimizers_c[idx] = optim.Adam(self.critics_cur[idx].parameters(), args.lr_c)\n self.var = self.var - (game_step-args.learning_start_episode*args.per_episode_max_len)*args.var_discount\n self.var = self.min_var if self.var < self.min_var else self.var\n old_data = {'game_step':game_step, 'episode_gone_old':int(game_step/args.per_episode_max_len)}\n\n # Note: if you need load old model, there should be a procedure for juding if the trainers[idx] is None\n for i in range(self.num_agents):\n self.actors_cur[i] = actor_agent(self.obs_shape_n[i], self.action_shape_n[i], \\\n args).to(args.device)\n self.critics_cur[i] = critic_agent(sum(self.obs_shape_n), sum(self.action_shape_n), \\\n args).to(args.device)\n self.actors_tar[i] = actor_agent(self.obs_shape_n[i], self.action_shape_n[i], \\\n args).to(args.device)\n self.critics_tar[i] = critic_agent(sum(self.obs_shape_n), sum(self.action_shape_n), \\\n args).to(args.device)\n self.optimizers_a[i] = optim.Adam(self.actors_cur[i].parameters(), args.lr_a)\n self.optimizers_c[i] = optim.Adam(self.critics_cur[i].parameters(), args.lr_c)\n\n # return the old data, no need to update the trainers\n if args.restore == True: return old_data\n\n self.actors_tar = self.update_trainers(self.actors_cur, self.actors_tar, 1.0) # update the target par using the cur\n self.critics_tar = self.update_trainers(self.critics_cur, self.critics_tar, 1.0) # update the target par using the cur",
"def _augment_pipeline_cfg(self):",
"def addAttributes(self):\n\n # Anim -------------------------------------------\n self.blend_att = self.addAnimParam(\"blend\",\n \"Fk/Ik Blend\",\n \"double\",\n self.settings[\"blend\"],\n 0,\n 1)\n self.roll_att = self.addAnimParam(\"roll\",\n \"Roll\",\n \"double\",\n 0,\n -180,\n 180)\n self.armpit_roll_att = self.addAnimParam(\"aproll\",\n \"Armpit Roll\",\n \"double\",\n 0,\n -360,\n 360)\n\n self.scale_att = self.addAnimParam(\"ikscale\",\n \"Scale\",\n \"double\",\n 1,\n .001,\n 99)\n self.maxstretch_att = self.addAnimParam(\"maxstretch\",\n \"Max Stretch\",\n \"double\",\n self.settings[\"maxstretch\"],\n 1,\n 99)\n self.slide_att = self.addAnimParam(\"slide\",\n \"Slide\",\n \"double\",\n .5,\n 0,\n 1)\n self.softness_att = self.addAnimParam(\"softness\",\n \"Softness\",\n \"double\",\n 0,\n 0,\n 1)\n self.reverse_att = self.addAnimParam(\"reverse\",\n \"Reverse\",\n \"double\",\n 0,\n 0,\n 1)\n self.roundness_att = self.addAnimParam(\"roundness\",\n \"Roundness\",\n \"double\",\n 0,\n 0,\n self.size)\n self.volume_att = self.addAnimParam(\"volume\",\n \"Volume\",\n \"double\",\n 1,\n 0,\n 1)\n\n if self.settings[\"extraTweak\"]:\n self.tweakVis_att = self.addAnimParam(\n \"Tweak_vis\", \"Tweak Vis\", \"bool\", False)\n\n # Ref\n if self.settings[\"ikrefarray\"]:\n ref_names = self.get_valid_alias_list(\n self.settings[\"ikrefarray\"].split(\",\"))\n\n if len(ref_names) > 1:\n self.ikref_att = self.addAnimEnumParam(\n \"ikref\",\n \"Ik Ref\",\n 0,\n ref_names)\n\n if self.settings[\"ikTR\"]:\n ref_names = [\"Auto\", \"ik_ctl\"]\n if self.settings[\"ikrefarray\"]:\n ref_names = ref_names + self.get_valid_alias_list(\n self.settings[\"ikrefarray\"].split(\",\"))\n\n self.ikRotRef_att = self.addAnimEnumParam(\"ikRotRef\",\n \"Ik Rot Ref\",\n 0,\n ref_names)\n\n if self.settings[\"upvrefarray\"]:\n ref_names = self.get_valid_alias_list(\n self.settings[\"upvrefarray\"].split(\",\"))\n ref_names = [\"Auto\"] + ref_names\n if len(ref_names) > 1:\n self.upvref_att = self.addAnimEnumParam(\"upvref\",\n \"UpV Ref\",\n 0, ref_names)\n\n if self.settings[\"pinrefarray\"]:\n ref_names = self.get_valid_alias_list(\n self.settings[\"pinrefarray\"].split(\",\"))\n ref_names = [\"Auto\"] + ref_names\n if len(ref_names) > 1:\n self.pin_att = self.addAnimEnumParam(\"elbowref\",\n \"Elbow Ref\",\n 0,\n ref_names)\n\n if self.validProxyChannels:\n attrs_list = [self.blend_att, self.roundness_att]\n if self.settings[\"extraTweak\"]:\n attrs_list += [self.tweakVis_att]\n attribute.addProxyAttribute(\n attrs_list,\n [self.fk0_ctl,\n self.fk1_ctl,\n self.fk2_ctl,\n self.ik_ctl,\n self.upv_ctl,\n self.mid_ctl])\n attribute.addProxyAttribute(self.roll_att,\n [self.ik_ctl, self.upv_ctl])\n\n # Setup ------------------------------------------\n # Eval Fcurve\n if self.guide.paramDefs[\"st_profile\"].value:\n self.st_value = self.guide.paramDefs[\"st_profile\"].value\n self.sq_value = self.guide.paramDefs[\"sq_profile\"].value\n else:\n self.st_value = fcurve.getFCurveValues(self.settings[\"st_profile\"],\n self.divisions)\n self.sq_value = fcurve.getFCurveValues(self.settings[\"sq_profile\"],\n self.divisions)\n\n self.st_att = [self.addSetupParam(\"stretch_%s\" % i,\n \"Stretch %s\" % i,\n \"double\", self.st_value[i],\n -1,\n 0)\n for i in range(self.divisions)]\n\n self.sq_att = [self.addSetupParam(\"squash_%s\" % i,\n \"Squash %s\" % i,\n \"double\",\n self.sq_value[i],\n 0,\n 1)\n for i in range(self.divisions)]\n\n self.resample_att = self.addSetupParam(\"resample\",\n \"Resample\",\n \"bool\",\n 
True)\n self.absolute_att = self.addSetupParam(\"absolute\",\n \"Absolute\",\n \"bool\",\n False)",
"def __init__(self, *args: Module, data_keys: List[str]) -> None:\n super().__init__()\n self.data_keys = data_keys\n\n keys = []\n for key in data_keys:\n if key == \"image\":\n keys.append(\"input\")\n elif key == \"boxes\":\n keys.append(\"bbox\")\n else:\n keys.append(key)\n\n self.augs = K.AugmentationSequential(*args, data_keys=keys)",
"def __init__(self, *args, **kwargs):\n super(MayaScene, self).__init__(*args, **kwargs)",
"def nodeInitializer():\n asset_name_attr = OpenMaya.MFnTypedAttribute()\n DvRootNode.asset_name = asset_name_attr.create(\n \"Asset_Name\", \"asset_name\",\n OpenMaya.MFnData.kString,\n OpenMaya.MFnStringData().create(\"\"))\n DvRootNode.addAttribute(DvRootNode.asset_name)\n\n dpack_id_attr = OpenMaya.MFnNumericAttribute()\n DvRootNode.dpack_id = dpack_id_attr.create(\n \"Deliverable_Package_ID\", \"dpack_id\",\n OpenMaya.MFnNumericData.kInt)\n DvRootNode.addAttribute(DvRootNode.dpack_id)\n\n project_attr = OpenMaya.MFnTypedAttribute()\n DvRootNode.project = project_attr.create(\n \"Project\", \"project\",\n OpenMaya.MFnData.kString,\n OpenMaya.MFnStringData().create(\"\"))\n DvRootNode.addAttribute(DvRootNode.project)\n\n project_id_attr = OpenMaya.MFnNumericAttribute()\n DvRootNode.project_id = project_id_attr.create(\n \"Project_ID\", \"project_id\",\n OpenMaya.MFnNumericData.kInt)\n DvRootNode.addAttribute(DvRootNode.project_id)\n\n task_attr = OpenMaya.MFnTypedAttribute()\n DvRootNode.task = task_attr.create(\n \"Task\", \"task\",\n OpenMaya.MFnData.kString,\n OpenMaya.MFnStringData().create(\"\"))\n DvRootNode.addAttribute(DvRootNode.task)\n\n task_id_attr = OpenMaya.MFnNumericAttribute()\n DvRootNode.task_id = task_id_attr.create(\n \"Task_ID\", \"task_id\",\n OpenMaya.MFnNumericData.kInt)\n DvRootNode.addAttribute(DvRootNode.task_id)\n\n type_attr = OpenMaya.MFnTypedAttribute()\n DvRootNode.asset_type = type_attr.create(\n \"Asset_Type\", \"asset_type\",\n OpenMaya.MFnData.kString,\n OpenMaya.MFnStringData().create(\"\"))\n DvRootNode.addAttribute(DvRootNode.asset_type)\n\n version_attr = OpenMaya.MFnTypedAttribute()\n DvRootNode.version = version_attr.create(\n \"Version\", \"version\",\n OpenMaya.MFnData.kString,\n OpenMaya.MFnStringData().create(\"\"))\n DvRootNode.addAttribute(DvRootNode.version)\n\n file_collection_id_attr = OpenMaya.MFnNumericAttribute()\n DvRootNode.file_collection_id = file_collection_id_attr.create(\n \"File_Collection_ID\", \"fc_id\",\n OpenMaya.MFnNumericData.kInt)\n DvRootNode.addAttribute(DvRootNode.file_collection_id)\n\n status_attr = OpenMaya.MFnTypedAttribute()\n DvRootNode.status = status_attr.create(\n \"Status\", \"status\",\n OpenMaya.MFnData.kString,\n OpenMaya.MFnStringData().create(\"\"))\n DvRootNode.addAttribute(DvRootNode.status)\n\n file_name_attr = OpenMaya.MFnTypedAttribute()\n DvRootNode.file_name = file_name_attr.create(\n \"File_Name\", \"file_name\",\n OpenMaya.MFnData.kString,\n OpenMaya.MFnStringData().create(\"\"))\n DvRootNode.addAttribute(DvRootNode.file_name)\n\n file_type_attr = OpenMaya.MFnTypedAttribute()\n DvRootNode.file_type = file_type_attr.create(\n \"File_Type\", \"file_type\",\n OpenMaya.MFnData.kString,\n OpenMaya.MFnStringData().create(\"\"))\n DvRootNode.addAttribute(DvRootNode.file_type)\n\n user_attr = OpenMaya.MFnTypedAttribute()\n DvRootNode.user = user_attr.create(\n \"User\", \"user\",\n OpenMaya.MFnData.kString,\n OpenMaya.MFnStringData().create(\"\"))\n DvRootNode.addAttribute(DvRootNode.user)\n\n user_id_attr = OpenMaya.MFnNumericAttribute()\n DvRootNode.user_id = user_id_attr.create(\n \"User_ID\", \"user_id\",\n OpenMaya.MFnNumericData.kInt)\n DvRootNode.addAttribute(DvRootNode.user_id)\n\n date_created_attr = OpenMaya.MFnTypedAttribute()\n DvRootNode.date_created = date_created_attr.create(\n \"Date_Created\", \"date_created\",\n OpenMaya.MFnData.kString,\n OpenMaya.MFnStringData().create(\"\"))\n DvRootNode.addAttribute(DvRootNode.date_created)\n\n node_version_attr = OpenMaya.MFnTypedAttribute()\n 
DvRootNode.node_version = node_version_attr.create(\n \"Node_Version\", \"node_version\",\n OpenMaya.MFnData.kString,\n OpenMaya.MFnStringData().create(NODE_VERSION))\n DvRootNode.addAttribute(DvRootNode.node_version)",
"def setupOptionsFrame(self):\n\n # CPU / CUDA options\n self.device.addItems([\"cuda\", \"cpu\"])\n self.scriptedEffect.addLabeledOptionsWidget(\"Device:\", self.device)\n\n self.modality.addItems([\"CT\", \"MRI\"])\n self.scriptedEffect.addLabeledOptionsWidget(\"Modality:\", self.modality)\n\n # Add ROI options\n self.roiSelector.nodeTypes = ['vtkMRMLMarkupsROINode']\n self.roiSelector.noneEnabled = True\n self.roiSelector.setMRMLScene(slicer.mrmlScene)\n self.scriptedEffect.addLabeledOptionsWidget(\"ROI: \", self.roiSelector)\n\n # Toggle ROI visibility button\n toggleROIVisibilityButton = qt.QPushButton(\"Toggle ROI Visibility\")\n toggleROIVisibilityButton.objectName = self.__class__.__name__ + 'ToggleROIVisibility'\n toggleROIVisibilityButton.setToolTip(\"Toggle selected ROI visibility\")\n toggleROIVisibilityButton.connect('clicked()', self.toggleROIVisibility)\n self.scriptedEffect.addOptionsWidget(toggleROIVisibilityButton)\n\n # Apply button\n applyButton = qt.QPushButton(\"Apply\")\n applyButton.objectName = self.__class__.__name__ + 'Apply'\n applyButton.setToolTip(\"Extract liver from input volume\")\n applyButton.connect('clicked()', self.onApply)\n self.scriptedEffect.addOptionsWidget(applyButton)",
"def svbSetup(geometryLevel=1,stage=0):\r\n\tprint \"CALL svbSETUP\"\r\n\tdatasetpath = '/work/01336/carson/intelTACC/data/Whipple300/Whipple_Shield.exo.300.000'\r\n\tif os.path.exists(datasetpath):\r\n\t\tif os.access(datasetpath,os.R_OK):\r\n\t\t\tWhipple_Shield_exo_300_010 = ExodusIIReader(FileName=[datasetpath])\r\n\t\telse:\r\n\t\t\tprint \"Read Permission Denied for: %s\\n\" % datasetpath\r\n\t\t\tsys.exit()\r\n\telse:\r\n\t\tprint \"Dataset %s does not exist\\n\" % datasetpath\r\n\t\tsys.exit()\r\n\r\n\tglobal AnimationScene1 \r\n\tglobal timesteps\r\n\tif stage == 0: #pipeline setup\r\n\t\ttimesteps = Whipple_Shield_exo_300_010.TimestepValues\r\n\t\tAnimationScene1.EndTime = timesteps[len(timesteps)-1]\r\n\t\tAnimationScene1.PlayMode = 'Snap To TimeSteps'\r\n\r\n\t\tWhipple_Shield_exo_300_010.FileRange = [0, 299]\r\n\t\tWhipple_Shield_exo_300_010.XMLFileName = 'Invalid result'\r\n\t\tWhipple_Shield_exo_300_010.FilePrefix = '/work/01336/carson/intelTACC/data/Whipple300/Whipple_Shield.exo.300.'\r\n\t\tWhipple_Shield_exo_300_010.ModeShape = 20\r\n\t\tWhipple_Shield_exo_300_010.FilePattern = '%s%03i'\r\n\r\n\r\n#\t\tRenderView1 = GetRenderView()\r\n#\t\tRenderView1.CenterOfRotation = [0.0, 0.0, 0.018435800448060036]\r\n\r\n\t\tWhipple_Shield_exo_300_010.NodeSetArrayStatus = []\r\n\t\tWhipple_Shield_exo_300_010.ElementVariables = ['VOID_FRC', 'VOLFRC1', 'VOLFRC2', 'DENSITY']\r\n\t\tWhipple_Shield_exo_300_010.ElementBlocks = ['Unnamed block ID: 1 Type: HEX']\r\n\r\n#\t\tDataRepresentation1 = Show()\r\n#\t\tDataRepresentation1.EdgeColor = [0.0, 0.0, 0.5000076295109483]\r\n#\t\tDataRepresentation1.SelectionPointFieldDataArrayName = 'GlobalNodeId'\r\n#\t\tDataRepresentation1.SelectionCellFieldDataArrayName = 'DENSITY'\r\n#\t\tDataRepresentation1.ScalarOpacityUnitDistance = 0.0002821527718911672\r\n#\t\tDataRepresentation1.ExtractedBlockIndex = 2\r\n#\t\tDataRepresentation1.ScaleFactor = 0.005079999938607216\r\n\r\n#\t\tRenderView1.CameraPosition = [0.0, 0.0, 0.18813575352296963]\r\n#\t\tRenderView1.CameraFocalPoint = [0.0, 0.0, 0.018435800448060036]\r\n#\t\tRenderView1.CameraClippingRange = [0.11770729481791534, 0.23555732428522624]\r\n#\t\tRenderView1.CameraParallelScale = 0.043921579808790676\r\n\r\n\t\tCellDatatoPointData1 = CellDatatoPointData()\r\n\r\n#\t\tDataRepresentation2 = Show()\r\n#\t\tDataRepresentation2.EdgeColor = [0.0, 0.0, 0.5000076295109483]\r\n#\t\tDataRepresentation2.SelectionPointFieldDataArrayName = 'DENSITY'\r\n#\t\tDataRepresentation2.SelectionCellFieldDataArrayName = 'DENSITY'\r\n#\t\tDataRepresentation2.ScalarOpacityUnitDistance = 0.0002821527718911672\r\n#\t\tDataRepresentation2.ExtractedBlockIndex = 2\r\n#\t\tDataRepresentation2.ScaleFactor = 0.005079999938607216\r\n\r\n#\t\tDataRepresentation1.Visibility = 0\r\n\r\n\t\tContour1 = Contour( PointMergeMethod=\"Uniform Binning\" )\r\n\r\n\t\tContour1.PointMergeMethod = \"Uniform Binning\"\r\n\t\tContour1.ContourBy = ['POINTS', 'DENSITY']\r\n\t\tContour1.Isosurfaces = [3943.054656982422]\r\n\r\n\t\tContour1.Isosurfaces = [0.5]\r\n\t\tContour1.ContourBy = ['POINTS', 'VOLFRC2']\r\n\r\n\t\tDataRepresentation3 = Show()\r\n\t\tDataRepresentation3.ScaleFactor = 0.005079999938607216\r\n\t\tDataRepresentation3.EdgeColor = [0.0, 0.0, 0.5000076295109483]\r\n\t\tDataRepresentation3.SelectionPointFieldDataArrayName = 'DENSITY'\r\n\t\tDataRepresentation3.SelectionCellFieldDataArrayName = 'DENSITY'\r\n\r\n\t\tSetActiveSource(CellDatatoPointData1)\r\n\r\n\t\tContour2 = Contour( PointMergeMethod=\"Uniform Binning\" )\r\n\r\n 
Contour2.PointMergeMethod = \"Uniform Binning\"\r\n Contour2.ContourBy = ['POINTS', 'DENSITY']\r\n Contour2.Isosurfaces = [3943.054656982422]\r\n\r\n Contour2.Isosurfaces = [0.5]\r\n Contour2.ContourBy = ['POINTS', 'VOLFRC1']\r\n\r\n\t\tDataRepresentation4 = Show()\r\n\t\tDataRepresentation4.ScaleFactor = 0.005079999938607216\r\n\t\tDataRepresentation4.EdgeColor = [0.0, 0.0, 0.5000076295109483]\r\n\t\tDataRepresentation4.SelectionPointFieldDataArrayName = 'DENSITY'\r\n\t\tDataRepresentation4.SelectionCellFieldDataArrayName = 'DENSITY'\r\n#\t\tDataRepresentation2.Visibility = 0\r\n\r\n\tAnimationScene1.AnimationTime = timesteps[stage]\r\n\tRenderView1 = GetRenderView()\r\n\tRenderView1.CenterOfRotation = [0.0, 0.0, 0.018435800448060036]\r\n\tRenderView1.CameraViewUp = [-0.041981806193924054, -0.6484761172246292, -0.7600764786111757]\r\n\tRenderView1.CameraPosition = [-0.012931246084195372, 0.05071249414152018, -0.04261877853747655]\r\n\tRenderView1.CameraClippingRange = [0.007083748934846819, 0.1534122165178119]\r\n\tRenderView1.CameraFocalPoint = [0.0006400393297762241, -0.008990028403178798, 0.0075681618661409275]\r\n\tRenderView1.CameraParallelScale = 0.043921579808790676",
"def load_inputs(mod, switch_data, inputs_dir):\n # Include select in each load() function so that it will check out\n # column names, be indifferent to column order, and throw an error\n # message if some columns are not found.\n switch_data.load_aug(\n filename=os.path.join(inputs_dir, 'generator_info.tab'),\n auto_select=True,\n optional_params=[\n 'g_unit_size', 'g_scheduled_outage_rate', 'g_forced_outage_rate',\n 'g_ccs_capture_efficiency', 'g_ccs_energy_load',\n 'g_storage_efficiency', 'g_store_to_release_ratio'],\n index=mod.GENERATION_TECHNOLOGIES,\n param=(\n mod.g_dbid, mod.g_max_age, mod.g_min_build_capacity,\n mod.g_scheduled_outage_rate, mod.g_forced_outage_rate,\n mod.g_is_variable, mod.g_is_baseload,\n mod.g_is_flexible_baseload, mod.g_is_cogen,\n mod.g_competes_for_space, mod.g_variable_o_m,\n mod.g_energy_source, mod.g_full_load_heat_rate,\n mod.g_unit_size, mod.g_ccs_capture_efficiency,\n mod.g_ccs_energy_load, mod.g_storage_efficiency,\n mod.g_store_to_release_ratio))\n # Construct sets of storage and CCS technologies as well as\n # technologies with discrete unit sizes.\n if 'g_unit_size' in switch_data.data():\n switch_data.data()['GEN_TECH_WITH_UNIT_SIZES'] = {\n None: switch_data.data(name='g_unit_size').keys()\n }\n if 'g_ccs_capture_efficiency' in switch_data.data():\n switch_data.data()['GEN_TECH_CCS'] = {\n None: switch_data.data(name='g_ccs_capture_efficiency').keys()\n }\n if 'g_storage_efficiency' in switch_data.data():\n switch_data.data()['GEN_TECH_STORAGE'] = {\n None: switch_data.data(name='g_storage_efficiency').keys()\n }\n switch_data.load_aug(\n optional=True,\n filename=os.path.join(inputs_dir, 'gen_new_build_costs.tab'),\n auto_select=True,\n index=mod.NEW_GENERATION_BUILDYEARS,\n param=[mod.g_overnight_cost, mod.g_fixed_o_m])\n\n # read G_MULTI_FUELS from gen_multiple_fuels.dat if available\n multi_fuels_path = os.path.join(inputs_dir, 'gen_multiple_fuels.dat')\n if os.path.isfile(multi_fuels_path):\n switch_data.load(filename=multi_fuels_path)",
"def __setSupportedNodeAttributes(self):\n \n self.supportedNodeAttributes={}\n \n #for a fileSet Node\n self.supportedNodeAttributes[\"fileSet\"]=[\"fileRange\",\"timeFile\",\"outputPath\",\"frequency\"]\n \n #for a dataPerFile Node\n self.supportedNodeAttributes[\"radialCutZone\"]=[]\n \n #for variable node\n self.supportedNodeAttributes[\"includeBoundaries\"]=[]\n \n #for interpVar\n self.supportedNodeAttributes[\"numRInterp\"]=[]",
"def __init__(self, parentWdg=None):\n super(DefaultActions, self).__init__()\n self._parent = parentWdg\n self._netManager = None\n self._viewManager = None\n self._scene = None\n self._editor = None\n self._editMode = None\n self._project = None\n self._versionManager = None\n self._inputManager = None\n self._hostManager = None\n self._solverDialog = None\n self._deploymentDialog = None\n\n # Build all actions\n self.openNetAction = self._buildAction(self.tr(\"&Import Network\"), None, lambda: self.openNetDialog())\n self.openSolverAction = self._buildAction(self.tr(\"Import Solver\"), None, lambda: self.openSolverDialog())\n self.saveNetAction = self._buildAction(self.tr(\"&Export Network\"), None, lambda: self.saveNetDialog())\n self.saveSolverAction = self._buildAction(\"&Export Solver\", None, lambda: self.saveSolverDialog())\n self.exitAction = self._buildAction(self.tr(\"&Exit\"), \"Ctrl+Q\", self.quitTriggered, None,\n self.tr(\"Close Application\"))\n\n self.undoAction = self._buildAction(self.tr(\"Undo\"), \"Ctrl+Z\", lambda: self._netManager.undo(),\n icon=\"resources/leftArrow.png\")\n self.redoAction = self._buildAction(self.tr(\"Redo\"), \"Shift+Ctrl+Z\", lambda: self._netManager.redo(),\n icon=\"resources/rightArrow.png\")\n\n self.editNetAction = self._buildAction(self.tr(\"Edit &Network as Prototxt\"), \"Ctrl+E\", self.openNetEditor)\n self.editSolverAction = self._buildAction(self.tr(\"Edit &Solver as Prototxt\"), \"Shift+Ctrl+E\",\n self.openSolverEditor)\n self.viewInputManager = self._buildAction(self.tr(\"&Input Manager\"), \"Ctrl+I\", self.openInputManager, icon=\"resources/input_manager.png\")\n self.viewHostManager = self._buildAction(self.tr(\"&Host Manager\"), \"Ctrl+H\", self.openHostManager, icon=\"resources/host_manager.png\")\n self.viewCaffeVersionManager = self._buildAction(self.tr(\"&Caffe Version Manager\"), \"Ctrl+M\", self.changeCaffeVersion, icon=\"resources/CVM.png\")\n\n self.loadDefaultViewAction = self._buildAction(self.tr(\"&Load Default View\"), None, self._loadDefaultView)\n\n self.sortSceneAction = self._buildAction(self.tr(\"Arrange Layers Horizontally\"), None, self._sortScene)\n self.sortSceneActionVertical = self._buildAction(self.tr(\"Arrange Layers Vertically\"), None, self._sortSceneVertical)\n\n self.newProjectAction = self._buildAction(self.tr(\"New Project\"), \"Ctrl+N\", lambda: self.newProjectDialog(),\n icon=\"resources/newDocument.png\")\n self.loadProjectAction = self._buildAction(self.tr(\"Load Project\"), \"Ctrl+O\", lambda: self.openProjectDialog(),\n icon=\"resources/openDocument.png\")\n self.saveProjectAction = self._buildAction(self.tr(\"Save Project\"), \"Ctrl+S\", lambda: self.saveProject(),\n icon=\"resources/saveProject.png\")\n\n self.deployAction = self._buildAction(self.tr(\"Deploy and export\"), None,\n lambda: self.openDeploymentDialog())\n\n # Build menus\n self.dockMenu = QtWidgets.QMenu(\"Docks\", self._parent)\n self.toolBarMenu = QtWidgets.QMenu(\"Toolbars\", self._parent)\n\n # Recently-Menu\n self.recentNetData = RecentlyData()\n self.recentSolverData = RecentlyData()\n self.recentProjectsData = RecentlyData()\n self._loadActionSettings()\n\n self.recentNetData.recentlyChanged.connect(self._saveActionSettings)\n self.recentSolverData.recentlyChanged.connect(self._saveActionSettings)\n self.recentProjectsData.recentlyChanged.connect(self._saveActionSettings)\n\n self.recentNetMenu = None\n self.recentSolverMenu = None\n\n # Actions for the active session\n self.startSessionAction = 
self._buildAction(\"Start Session\", \"F5\",\n lambda: self._viewManager.getSessionsDock().onStart(),\n icon=QIcon(QPixmap('resources/start.png')))\n self.pauseSessionAction = self._buildAction(\"Pause Session\", \"F6\",\n lambda: self._viewManager.getSessionsDock().onPause(),\n icon=QIcon(QPixmap('resources/pause.png')))\n self.snapshotAction = self._buildAction(\"Create Snapshot\", \"F8\",\n lambda: self._viewManager.getSessionsDock().onSnap(),\n icon=QIcon(QPixmap('resources/snap.png')))\n\n def projectHelper(p):\n if self._versionManager and p:\n self._versionManager.updateProject(p)\n\n self.projectChanged.connect(projectHelper)"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Create an IK attribute on the given ctrl and connect the IK handles to the IK switch. Also connect the fk and ik ctrls' visibility to the switch. This will create an 'IK' attr on the switch ctrl. | def create_fk_ik_switch(switch_ctrl, ik_handles, fk_ctrls, ik_ctrls, vis_ctrl=None, switch_attr_name='IK', vis_attr_name='fkIkCtrlVis'):
fk_ctrls = mc.ls(fk_ctrls)
ik_ctrls = mc.ls(ik_ctrls)
ik_handles = mc.ls(ik_handles)
if not vis_ctrl:
vis_ctrl = switch_ctrl
# Create attributes
if not mc.objExists(switch_ctrl+'.'+switch_attr_name):
mc.addAttr(switch_ctrl, ln=switch_attr_name, min=0, max=1, k=1)
if not mc.objExists(vis_ctrl+'.'+vis_attr_name):
mc.addAttr(vis_ctrl, ln=vis_attr_name, at='enum', en='auto:fkOnly:ikOnly:both', k=1)
# Connect ik handles
for handle in ik_handles:
mc.connectAttr(switch_ctrl+'.'+switch_attr_name, handle+'.ikBlend')
# Create visibility switch for ik ctrls
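# choice node maps the vis enum to ik ctrl visibility: auto follows the IK attr, fkOnly hides, ikOnly and both show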
ik_choice = utils.create_node('choice', n=vis_attr_name+'_ik_choice')
mc.connectAttr(vis_ctrl+'.'+vis_attr_name, ik_choice+'.selector')
mc.connectAttr(switch_ctrl+'.'+switch_attr_name, ik_choice+'.input[0]')
mc.setAttr(ik_choice+'.input[1]', 0)
mc.setAttr(ik_choice+'.input[2]', 1)
mc.setAttr(ik_choice+'.input[3]', 1)
for ctrl in ik_ctrls:
mc.setAttr(ctrl+'.v', l=0)
mc.connectAttr(ik_choice+'.output', ctrl+'.v', f=1)
mc.setAttr(ctrl+'.v', l=1)
# Create visibility switch for fk ctrls
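# reverse node flips the IK attr so fk ctrls show in auto mode when IK is off; fkOnly and both show, ikOnly hides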
fk_choice = utils.create_node('choice', n=vis_attr_name+'_fk_choice')
fk_rv = utils.create_node('reverse', n=vis_attr_name+'_fk_choice')
mc.connectAttr(switch_ctrl+'.'+switch_attr_name, fk_rv+'.inputX')
mc.connectAttr(vis_ctrl+'.'+vis_attr_name, fk_choice+'.selector')
mc.connectAttr(fk_rv+'.outputX', fk_choice+'.input[0]')
mc.setAttr(fk_choice+'.input[1]', 1)
mc.setAttr(fk_choice+'.input[2]', 0)
mc.setAttr(fk_choice+'.input[3]', 1)
for ctrl in fk_ctrls:
mc.setAttr(ctrl+'.v', l=0)
mc.connectAttr(fk_choice+'.output', ctrl+'.v', f=1)
mc.setAttr(ctrl+'.v', l=1)
return True | [
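A minimal usage sketch for create_fk_ik_switch, assuming hypothetical ctrl and handle names (Lt_arm_settings_ctrl and friends) that are not part of this entry and a scene where those nodes and the rigBot-style utils module already exist:

create_fk_ik_switch(switch_ctrl='Lt_arm_settings_ctrl',        # receives the 'IK' attr
                    ik_handles=['Lt_arm_ikHandle'],            # ikBlend driven by the switch
                    fk_ctrls=['Lt_shoulder_fk_ctrl', 'Lt_elbow_fk_ctrl', 'Lt_wrist_fk_ctrl'],
                    ik_ctrls=['Lt_arm_ik_ctrl', 'Lt_arm_pv_ctrl'],
                    vis_ctrl=None)                             # visibility enum defaults onto switch_ctrl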
"def create_soft_ik(ik_ctrl, ik_joints, ik_handle):\n\n # get name and constant variables\n name = ik_handle+'Soft'\n parent = utils.get_parent(ik_joints[0])\n ik_handle_parent = utils.get_parent(ik_handle)\n\n # get total length of joint chain\n chain_length = 0\n for jnt in ik_joints[1:]:\n chain_length += abs(mc.getAttr(jnt+'.tx'))\n\n mc.addAttr(ik_joints[0], ln='softIkChainLength', k=1, dv=chain_length)\n\n #create dist node, (distance between top ik_joint and ik_handle) = X\n soft_ik_root = utils.snap_locator(ik_joints[0], node_type='transform')\n soft_ik_root = mc.rename(soft_ik_root, name+'_root_'+utils.get_suffix('transform'))\n\n dist = utils.create_distance_reader(soft_ik_root, ik_handle_parent)\n\n #create the dSoft and softIK attributes on the controller\n mc.addAttr(ik_ctrl, ln='softIK', min=0, k=1)\n ctrl_clamp = mc.createNode('clamp')\n mc.connectAttr(ik_ctrl+'.softIK', ctrl_clamp+'.inputR')\n mc.setAttr(ctrl_clamp+'.minR', 0.0001)\n mc.setAttr(ctrl_clamp+'.maxR', 10000000)\n\n #create node network for soft IK\n da_pma = mc.createNode('plusMinusAverage', n=name+'_da_pma')\n x_minus_da_pma = mc.createNode('plusMinusAverage', n=name+'_x_minus_da_pma')\n negate_x_minus_md = mc.createNode('multiplyDivide', n=name+'_negate_x_minus_md')\n divBy_dSoft_md = mc.createNode('multiplyDivide', n=name+'_divBy_dSoft_md')\n pow_e_md = mc.createNode('multiplyDivide', n=name+'_pow_e_md')\n one_minus_pow_e_pma = mc.createNode('plusMinusAverage', n=name+'_one_minus_pow_e_pma')\n times_dSoft_md = mc.createNode('multiplyDivide', n=name+'_times_dSoft_md')\n plus_da_pma = mc.createNode('plusMinusAverage', n=name+'_plus_da_pma')\n da_cond = mc.createNode('condition', n=name+'_da_cond')\n dist_diff_pma = mc.createNode('plusMinusAverage', n=name+'_dist_diff_pma')\n defaultPos_pma = mc.createNode('plusMinusAverage', n=name+'_defaultPos_pma')\n\n #set operations\n mc.setAttr(da_pma+'.operation', 2)\n mc.setAttr(x_minus_da_pma+'.operation', 2)\n mc.setAttr(negate_x_minus_md+'.operation', 1)\n mc.setAttr(divBy_dSoft_md+'.operation', 2)\n mc.setAttr(pow_e_md+'.operation', 3)\n mc.setAttr(one_minus_pow_e_pma+'.operation', 2)\n mc.setAttr(times_dSoft_md+'.operation', 1)\n mc.setAttr(plus_da_pma+'.operation', 1)\n mc.setAttr(da_cond+'.operation', 5)\n mc.setAttr(dist_diff_pma+'.operation', 2)\n mc.setAttr(defaultPos_pma+'.operation', 2)\n\n #make connections\n mc.connectAttr(ik_joints[0]+'.softIkChainLength', da_pma+'.input1D[0]')\n mc.connectAttr(ctrl_clamp+'.outputR', da_pma+'.input1D[1]')\n\n mc.connectAttr(dist+'.localDistance', x_minus_da_pma+'.input1D[0]')\n mc.connectAttr(da_pma+'.output1D', x_minus_da_pma+'.input1D[1]')\n\n mc.connectAttr(x_minus_da_pma+'.output1D', negate_x_minus_md+'.input1X')\n mc.setAttr(negate_x_minus_md+'.input2X', -1)\n\n mc.connectAttr(negate_x_minus_md+'.outputX', divBy_dSoft_md+'.input1X')\n mc.connectAttr(ctrl_clamp+'.outputR', divBy_dSoft_md+'.input2X')\n\n mc.setAttr(pow_e_md+'.input1X', 2.718281828)\n mc.connectAttr(divBy_dSoft_md+'.outputX', pow_e_md+'.input2X')\n\n mc.setAttr(one_minus_pow_e_pma+'.input1D[0]', 1)\n mc.connectAttr(pow_e_md+'.outputX' , one_minus_pow_e_pma+'.input1D[1]')\n\n mc.connectAttr(one_minus_pow_e_pma+'.output1D', times_dSoft_md+'.input1X')\n mc.connectAttr(ctrl_clamp+'.outputR', times_dSoft_md+'.input2X')\n\n mc.connectAttr(times_dSoft_md+'.outputX', plus_da_pma+'.input1D[0]')\n mc.connectAttr(da_pma+'.output1D', plus_da_pma+'.input1D[1]')\n\n mc.connectAttr(da_pma+'.output1D', da_cond+'.firstTerm')\n mc.connectAttr(dist+'.localDistance', 
da_cond+'.secondTerm')\n mc.connectAttr(dist+'.localDistance', da_cond+'.colorIfFalseR')\n mc.connectAttr(plus_da_pma+'.output1D', da_cond+'.colorIfTrueR')\n\n mc.connectAttr(da_cond+'.outColorR', dist_diff_pma+'.input1D[0]')\n mc.connectAttr(dist+'.localDistance', dist_diff_pma+'.input1D[1]')\n\n mc.setAttr(defaultPos_pma+'.input1D[0]', 0)\n mc.connectAttr(dist_diff_pma+'.output1D', defaultPos_pma+'.input1D[1]')\n\n # Create new ik aim node\n up = [1,0,0]\n aim = [0,1,0]\n\n grp = mc.createNode('transform', n=name+'_soft_aim_'+utils.get_suffix('transform'), p=ik_handle_parent)\n gAim = mc.createNode('transform', n=name+'_soft_'+utils.get_suffix('transform'), p=grp)\n\n mc.aimConstraint(soft_ik_root,\n grp,\n aim=aim,\n u=up,\n wu=up,\n wut='objectRotation',\n wuo=ik_ctrl,\n n=grp+'_ac')\n\n mc.connectAttr(defaultPos_pma+'.output1D', gAim+'.ty')\n mc.pointConstraint(gAim, ik_handle)\n mc.parent(ik_handle, gAim)\n\n # parent stuff\n if parent:\n mc.parent(soft_ik_root, parent)\n\n return gAim",
"def switch_to_ik(robot):\n\n target_ctrl_path = get_target_ctrl_path(robot)\n tool_ctrl_path = get_tool_ctrl_path(robot)\n fk_ctrls_path = format_path(__FK_CTRLS_PATH, robot)\n\n try:\n # Turn FK control visibility off\n pm.setAttr(fk_ctrls_path + '.v', 0)\n\n # Turn IK control visibility on\n pm.setAttr(target_ctrl_path + '.v', 1)\n pm.setAttr(format_path(__TARGET_CTRL_PATH + '|{1}target_CTRLShape',\n robot) + '.visibility', 1)\n\n if pm.objExists(tool_ctrl_path):\n pm.setAttr(tool_ctrl_path + '.v'.format(robot), 1)\n except:\n # These aren't crucial to the switch as they're just visual, and \n # a connection or locking of any of these attributes might throw\n # an error, so let's just skip it\n pass\n \n try:\n # Snap IK Ctrl to FK location\n _snap_ik_target_to_fk(robot)\n except:\n raise MimicError('Error swithching to IK; could not snap IK CTRL to FK')\n\n ## Find closest IK configuration to current FK pose ##\n # Get FK config and all IK solutions\n ik_sols = find_ik_solutions(robot)\n fk_config = find_fk_config(robot)\n\n # Remove all MFG-specific offsets from the FK config\n solver_params = get_solver_params(robot)\n axis_offsets = solver_params.axis_offsets\n rot_directions = solver_params.rot_directions\n fk_config_norm = _normalize_fk_pose(fk_config, axis_offsets, rot_directions)\n\n ## TO-DO: account for FK config rotations above and below 180 degrees\n # Select the closes IK configuration to the given FK config\n ik_config = find_closest_config(fk_config_norm, ik_sols)\n\n # Match IK config to FK pose\n pm.setAttr(target_ctrl_path + '.ikSolution1', ik_config[0])\n pm.setAttr(target_ctrl_path + '.ikSolution2', ik_config[1])\n pm.setAttr(target_ctrl_path + '.ikSolution3', ik_config[2])\n\n # turn ik solve back on\n pm.setAttr(target_ctrl_path + '.ik', 1)",
"def make_fkikSwitch_connection_attrs(partpre=None, side='Lt', source_ctrl=None, tag_name='switch', snapTo=None,\n add_attrs=None):\n\n switch_anim = ''\n if source_ctrl is not None:\n switch_anim = source_ctrl\n\n partpre = partpre\n if partpre == '':\n partpre = 'mypart_'\n\n if source_ctrl is None:\n # filepath = r'C:/Users/Nicob/Documents/maya/scripts/rigBot/rigBot/config/switcher_anim.mb'\n system_base_path = os.path.dirname(utils.__file__)\n base_path = os.path.join(system_base_path, 'config')\n file_path = os.path.join(base_path, 'switcher_anim.mb')\n newnodes = mc.file(filepath, i=1, ignoreVersion=1, rnn=1, mergeNamespacesOnClash=0, rpr=partpre, ra=1,\n options=\"v=0;\", pr=1)\n\n switch_anim = partpre + '_CTL'\n\n # pos switcher grpOffset node if snapTo\n\n if snapTo is not None:\n utils.snap_to_transform(snapTo, switch_anim.replace('CTL', 'grpOffset'))\n mc.setAttr(switch_anim.replace('CTL', 'grpOffset') + '.r', 0, 0, 0)\n\n # get value of tags and sort into ik and fk vis groups\n\n iks = []\n fks = []\n nodes = mc.ls('*.' + tag_name)\n\n for node in nodes:\n if partpre in node and side in node:\n mode = mc.getAttr(node)\n if mode:\n mode = mode.lower()\n if 'ik' in mode:\n iks.append(node.split('.')[0])\n if 'fk' in mode:\n fks.append(node.split('.')[0])\n for ik in iks:\n # ikparpar=utils.get_parent(ik)\n ikpar = utils.get_parent(ik)\n if ikpar is None:\n mc.connectAttr(switch_anim + '.FK_IK', ik + '.visiblity', f=1)\n else:\n mc.connectAttr(switch_anim + '.FK_IK', ikpar + '.visibility', f=1)\n rvn = mc.createNode('reverse', name=switch_anim + '_fkik_vis_rv')\n mc.connectAttr(switch_anim + '.FK_IK', rvn + '.inputX')\n for fk in fks:\n fkpar = utils.get_parent(fk)\n if fkpar:\n mc.connectAttr(rvn + '.outputX', fkpar + '.visibility', f=1)\n if add_attrs is not None:\n for att in add_attrs:\n mc.addAttr(switch_anim, ln=att, min=0, max=1, dv=0, k=1)\n\n nns = []\n\n for nn in reversed(newnodes):\n nnn = ''\n sn = nn.split(\"|\")\n nnn = mc.rename(nn, sn[-1])\n nns.append(nnn)\n\n anim = mc.ls(partpre + '_CTL')\n\n # if mc.objExists (partpre+'_skeleton_grp'):\n # mc.parent (anim, partpre+'_skeleton_grp' )\n return anim",
"def create_ik_setup(controls, joints):\n\n # Create control offset transforms\n exp_tf_ms = []\n for ctl in controls:\n par = cmds.listRelatives(ctl, parent=True)\n buf = create_offset_transform(ctl, BUF)\n exp = create_offset_transform(ctl, EXP)\n off = create_offset_transform(ctl, OFF)\n cmds.parent(ctl, off)\n cmds.parent(off, exp)\n cmds.parent(exp, buf)\n if par:\n cmds.parent(buf, par[0])\n exp_tf_ms.append(buf)\n\n root_control, pole_control, goal_control = controls\n handle, effector = cmds.ikHandle(sj=joints[0], ee=joints[-1], sol='ikRPsolver')\n cmds.setAttr('{}.hiddenInOutliner'.format(handle), True)\n cmds.orientConstraint(goal_control, joints[-1], mo=True)\n cmds.parent(handle, goal_control)\n cmds.hide(handle)\n\n # Connect root control to ik joint offset group\n ik_joints_offset = cmds.listRelatives(joints[0], p=True)[0]\n cmds.parentConstraint(root_control, ik_joints_offset, mo=True)\n cmds.scaleConstraint(root_control, ik_joints_offset, mo=True)\n\n # Connect twisting and pole vector control\n cmds.addAttr(goal_control, ln='twist', at='float', k=True)\n cmds.connectAttr('{}.twist'.format(goal_control), '{}.twist'.format(handle))\n cmds.poleVectorConstraint(pole_control, handle)\n\n # Add PV visibility attribute\n cmds.addAttr(goal_control, shortName='pv', longName='poleVector', at='bool', k=True)\n cmds.connectAttr('{}.pv'.format(goal_control), '{}.v'.format(pole_control))\n cmds.setAttr('{}.pv'.format(goal_control),1)\n\n # Add curve that points elbow to pole control\n crv = cmds.curve(p=[[0, 0, 0], [0, 1, 0]], d=1)\n cmds.connectAttr('{}.visibility'.format(pole_control), '{}.visibility'.format(crv))\n lock_hide_attrs(crv, attrs=['tx', 'ty', 'tz', 'rx', 'ry', 'rz', 'sx', 'sy', 'sz'])\n cmds.setAttr('{}.overrideEnabled'.format(crv), True)\n cmds.setAttr('{}.overrideDisplayType'.format(crv), 2)\n decomp_joint = cmds.createNode('decomposeMatrix')\n decomp_control = cmds.createNode('decomposeMatrix')\n cmds.connectAttr('{}.worldMatrix'.format(joints[1]), '{}.inputMatrix'.format(decomp_joint))\n cmds.connectAttr('{}.worldMatrix'.format(pole_control), '{}.inputMatrix'.format(decomp_control))\n cmds.connectAttr('{}.outputTranslate'.format(decomp_joint), '{}.controlPoints[0]'.format(crv))\n cmds.connectAttr('{}.outputTranslate'.format(decomp_control), '{}.controlPoints[1]'.format(crv))\n\n return handle, crv, exp_tf_ms",
"def make_ik_fk(blend_root, switch_jnt):\n # unparents the jnt at the end of the chain so it isnt duped\n\n cmds.select(switch_jnt)\n cmds.pickWalk(direction='up')\n to_parent = cmds.ls(selection=True)\n cmds.parent(switch_jnt, world=True)\n\n # get the obj with the ik/fk switch\n switch_obj_temp = switch_jnt.replace(NamingConventionEnums.JOINT_SUFFIX,\n NamingConventionEnums.CONTROL_CURVE_SUFFIX)\n switch_obj = switch_obj_temp.replace('ball', 'foot')\n\n # duplicate IKs\n ik_children = cmds.duplicate(blend_root, renameChildren=True)\n # duplicate FKs\n fk_children = cmds.duplicate(blend_root, renameChildren=True)\n\n # makes a list of the 3 blend joints\n blend_objs = [blend_root]\n blend_children = cmds.listRelatives(blend_root, allDescendents=True)\n blend_children.reverse()\n blend_objs.extend(blend_children)\n\n # get the pole vector obj\n pv_obj = blend_objs[1].replace(NamingConventionEnums.JOINT_SUFFIX,\n '_PV' + NamingConventionEnums.CONTROL_CURVE_SUFFIX)\n\n # calls the ik_fk_switch in gen utils\n gu.ik_fk_switch(blend_objs, ik_children, fk_children, switch_obj, pv_obj)\n\n cmds.setAttr(switch_obj + \".ikFkSwitch\", 1)\n cmds.setAttr(ik_children[0] + '.visibility', 0)\n\n # rename ik_children\n for joint in ik_children:\n if '1' in joint:\n cmds.rename(joint,\n joint.rsplit(NamingConventionEnums.JOINT_SUFFIX)[0]\n + '_IK' + NamingConventionEnums.JOINT_SUFFIX)\n # rename fk_children\n for joint in fk_children:\n if '2' in joint:\n cmds.rename(joint,\n joint.rsplit(NamingConventionEnums.JOINT_SUFFIX)[0]\n + '_FK' + NamingConventionEnums.JOINT_SUFFIX)\n\n # re-parent the switch obj\n cmds.parent(switch_jnt, to_parent)\n\n #rename the bind joints\n for joint in blend_objs:\n bind_jnt = joint.replace(NamingConventionEnums.JOINT_SUFFIX,\n NamingConventionEnums.BIND_JOINT_SUFFIX)\n if joint.find('ankle') == -1:\n cmds.rename(joint, bind_jnt)",
"def _createControl(plg, attrLabel, connectable='True', enabled='True'):\n\n pass",
"def addOperators(self):\n # 1 bone chain Upv ref ==============================================\n self.ikHandleUpvRef = primitive.addIkHandle(\n self.root,\n self.getName(\"ikHandleArmChainUpvRef\"),\n self.armChainUpvRef,\n \"ikSCsolver\")\n pm.pointConstraint(self.ik_ctl,\n self.ikHandleUpvRef)\n pm.parentConstraint(self.armChainUpvRef[0],\n self.upv_cns,\n mo=True)\n\n # Visibilities -------------------------------------\n # fk\n fkvis_node = node.createReverseNode(self.blend_att)\n\n for shp in self.fk0_ctl.getShapes():\n pm.connectAttr(fkvis_node + \".outputX\", shp.attr(\"visibility\"))\n for shp in self.fk1_ctl.getShapes():\n pm.connectAttr(fkvis_node + \".outputX\", shp.attr(\"visibility\"))\n for shp in self.fk2_ctl.getShapes():\n pm.connectAttr(fkvis_node + \".outputX\", shp.attr(\"visibility\"))\n\n # ik\n for shp in self.upv_ctl.getShapes():\n pm.connectAttr(self.blend_att, shp.attr(\"visibility\"))\n for shp in self.ikcns_ctl.getShapes():\n pm.connectAttr(self.blend_att, shp.attr(\"visibility\"))\n for shp in self.ik_ctl.getShapes():\n pm.connectAttr(self.blend_att, shp.attr(\"visibility\"))\n for shp in self.line_ref.getShapes():\n pm.connectAttr(self.blend_att, shp.attr(\"visibility\"))\n if self.settings[\"ikTR\"]:\n for shp in self.ikRot_ctl.getShapes():\n pm.connectAttr(self.blend_att, shp.attr(\"visibility\"))\n\n # Controls ROT order -----------------------------------\n attribute.setRotOrder(self.fk0_ctl, \"XZY\")\n attribute.setRotOrder(self.fk1_ctl, \"XYZ\")\n attribute.setRotOrder(self.fk2_ctl, \"YZX\")\n attribute.setRotOrder(self.ik_ctl, \"XYZ\")\n\n # IK Solver -----------------------------------------\n out = [self.bone0, self.bone1, self.ctrn_loc, self.eff_loc]\n o_node = applyop.gear_ikfk2bone_op(out,\n self.root,\n self.ik_ref,\n self.upv_ctl,\n self.fk_ctl[0],\n self.fk_ctl[1],\n self.fk_ref,\n self.length0,\n self.length1,\n self.negate)\n\n if self.settings[\"ikTR\"]:\n # connect the control inputs\n outEff_dm = o_node.listConnections(c=True)[-1][1]\n\n inAttr = self.ikRot_npo.attr(\"translate\")\n outEff_dm.attr(\"outputTranslate\") >> inAttr\n\n outEff_dm.attr(\"outputScale\") >> self.ikRot_npo.attr(\"scale\")\n dm_node = node.createDecomposeMatrixNode(o_node.attr(\"outB\"))\n dm_node.attr(\"outputRotate\") >> self.ikRot_npo.attr(\"rotate\")\n\n # rotation\n mulM_node = applyop.gear_mulmatrix_op(\n self.ikRot_ctl.attr(\"worldMatrix\"),\n self.eff_loc.attr(\"parentInverseMatrix\"))\n intM_node = applyop.gear_intmatrix_op(o_node.attr(\"outEff\"),\n mulM_node.attr(\"output\"),\n o_node.attr(\"blend\"))\n dm_node = node.createDecomposeMatrixNode(intM_node.attr(\"output\"))\n dm_node.attr(\"outputRotate\") >> self.eff_loc.attr(\"rotate\")\n transform.matchWorldTransform(self.fk2_ctl, self.ikRot_cns)\n\n # scale: this fix the scalin popping issue\n intM_node = applyop.gear_intmatrix_op(\n self.fk2_ctl.attr(\"worldMatrix\"),\n self.ik_ctl_ref.attr(\"worldMatrix\"),\n o_node.attr(\"blend\"))\n mulM_node = applyop.gear_mulmatrix_op(\n intM_node.attr(\"output\"),\n self.eff_loc.attr(\"parentInverseMatrix\"))\n dm_node = node.createDecomposeMatrixNode(mulM_node.attr(\"output\"))\n dm_node.attr(\"outputScale\") >> self.eff_loc.attr(\"scale\")\n\n pm.connectAttr(self.blend_att, o_node + \".blend\")\n if self.negate:\n mulVal = -1\n else:\n mulVal = 1\n node.createMulNode(self.roll_att, mulVal, o_node + \".roll\")\n pm.connectAttr(self.scale_att, o_node + \".scaleA\")\n pm.connectAttr(self.scale_att, o_node + \".scaleB\")\n pm.connectAttr(self.maxstretch_att, o_node 
+ \".maxstretch\")\n pm.connectAttr(self.slide_att, o_node + \".slide\")\n pm.connectAttr(self.softness_att, o_node + \".softness\")\n pm.connectAttr(self.reverse_att, o_node + \".reverse\")\n\n # Twist references ---------------------------------\n\n pm.pointConstraint(self.mid_ctl_twst_ref,\n self.tws1_npo, maintainOffset=False)\n pm.connectAttr(self.mid_ctl.scaleX, self.tws1_loc.scaleX)\n pm.orientConstraint(self.mid_ctl_twst_ref,\n self.tws1_npo, maintainOffset=False)\n\n o_node = applyop.gear_mulmatrix_op(self.eff_loc.attr(\n \"worldMatrix\"), self.root.attr(\"worldInverseMatrix\"))\n dm_node = pm.createNode(\"decomposeMatrix\")\n pm.connectAttr(o_node + \".output\", dm_node + \".inputMatrix\")\n pm.connectAttr(dm_node + \".outputTranslate\",\n self.tws2_npo.attr(\"translate\"))\n\n dm_node = pm.createNode(\"decomposeMatrix\")\n pm.connectAttr(o_node + \".output\", dm_node + \".inputMatrix\")\n pm.connectAttr(dm_node + \".outputRotate\", self.tws2_npo.attr(\"rotate\"))\n\n o_node = applyop.gear_mulmatrix_op(\n self.eff_loc.attr(\"worldMatrix\"),\n self.tws2_rot.attr(\"parentInverseMatrix\"))\n dm_node = pm.createNode(\"decomposeMatrix\")\n pm.connectAttr(o_node + \".output\", dm_node + \".inputMatrix\")\n attribute.setRotOrder(self.tws2_rot, \"XYZ\")\n pm.connectAttr(dm_node + \".outputRotate\", self.tws2_rot + \".rotate\")\n\n self.tws0_rot.setAttr(\"sx\", .001)\n self.tws2_rot.setAttr(\"sx\", .001)\n\n add_node = node.createAddNode(self.roundness_att, .001)\n pm.connectAttr(add_node + \".output\", self.tws1_rot.attr(\"sx\"))\n\n pm.connectAttr(self.armpit_roll_att, self.tws0_rot + \".rotateX\")\n\n # Roll Shoulder\n applyop.splineIK(self.getName(\"rollRef\"), self.rollRef,\n parent=self.root, cParent=self.bone0)\n\n # Volume -------------------------------------------\n distA_node = node.createDistNode(self.tws0_loc, self.tws1_loc)\n distB_node = node.createDistNode(self.tws1_loc, self.tws2_loc)\n add_node = node.createAddNode(distA_node + \".distance\",\n distB_node + \".distance\")\n div_node = node.createDivNode(add_node + \".output\",\n self.root.attr(\"sx\"))\n\n dm_node = pm.createNode(\"decomposeMatrix\")\n pm.connectAttr(self.root.attr(\"worldMatrix\"), dm_node + \".inputMatrix\")\n\n div_node2 = node.createDivNode(div_node + \".outputX\",\n dm_node + \".outputScaleX\")\n self.volDriver_att = div_node2 + \".outputX\"\n\n if self.settings[\"extraTweak\"]:\n for tweak_ctl in self.tweak_ctl:\n for shp in tweak_ctl.getShapes():\n pm.connectAttr(self.tweakVis_att, shp.attr(\"visibility\"))\n\n # Divisions ----------------------------------------\n # at 0 or 1 the division will follow exactly the rotation of the\n # controler.. 
and we wont have this nice tangent + roll\n for i, div_cns in enumerate(self.div_cns):\n\n if self.settings[\"supportJoints\"]:\n if i < (self.settings[\"div0\"] + 1):\n perc = i * .5 / (self.settings[\"div0\"] + 1.0)\n elif i < (self.settings[\"div0\"] + 2):\n perc = .49\n elif i < (self.settings[\"div0\"] + 3):\n perc = .50\n elif i < (self.settings[\"div0\"] + 4):\n perc = .51\n\n else:\n perc = .5 + \\\n (i - self.settings[\"div0\"] - 3.0) * .5 / \\\n (self.settings[\"div1\"] + 1.0)\n else:\n if i < (self.settings[\"div0\"] + 1):\n perc = i * .5 / (self.settings[\"div0\"] + 1.0)\n elif i < (self.settings[\"div0\"] + 2):\n perc = .501\n else:\n perc = .5 + \\\n (i - self.settings[\"div0\"] - 1.0) * .5 / \\\n (self.settings[\"div1\"] + 1.0)\n\n perc = max(.001, min(.990, perc))\n\n # Roll\n if self.negate:\n o_node = applyop.gear_rollsplinekine_op(\n div_cns, [self.tws2_rot, self.tws1_rot, self.tws0_rot],\n 1.0 - perc, 40)\n else:\n o_node = applyop.gear_rollsplinekine_op(\n div_cns, [self.tws0_rot, self.tws1_rot, self.tws2_rot],\n perc, 40)\n\n pm.connectAttr(self.resample_att, o_node + \".resample\")\n pm.connectAttr(self.absolute_att, o_node + \".absolute\")\n\n # Squash n Stretch\n o_node = applyop.gear_squashstretch2_op(\n div_cns, None, pm.getAttr(self.volDriver_att), \"x\")\n pm.connectAttr(self.volume_att, o_node + \".blend\")\n pm.connectAttr(self.volDriver_att, o_node + \".driver\")\n pm.connectAttr(self.st_att[i], o_node + \".stretch\")\n pm.connectAttr(self.sq_att[i], o_node + \".squash\")\n # match IK/FK ref\n pm.parentConstraint(self.bone0, self.match_fk0_off, mo=True)\n pm.parentConstraint(self.bone1, self.match_fk1_off, mo=True)\n if self.settings[\"ikTR\"]:\n transform.matchWorldTransform(self.ikRot_ctl, self.match_ikRot)\n transform.matchWorldTransform(self.fk_ctl[2], self.match_fk2)\n\n # ===================================================\n # WING OPERATORS\n # ===================================================\n # Parent finger start cns groups. 
Doing this earlier results in an offset once IK solver is applied\n self.finger0Start_npo.setParent(self.div_cns[0])\n self.finger1Start_cns.setParent(self.mid_ctl)\n self.finger2Start_npo.setParent(self.eff_loc)\n self.finger3Start_cns.setParent(self.eff_loc)\n self.finger4Start_npo.setParent(self.eff_loc)\n\n # Orientation for finger0Start_cns\n pm.orientConstraint(self.root,\n self.finger0Start_inf, maintainOffset=True)\n pb = pm.createNode('pairBlend')\n self.finger0Start_inf.r.connect(pb.inRotate2)\n pb.weight.set(0.5)\n pb.rotInterpolation.set(1)\n pb.outRotate.connect(self.finger0Start_cns.r)\n\n # Orientation for finger2Start_cns\n pm.orientConstraint(self.bone1,\n self.finger2Start_inf, maintainOffset=True)\n pb = pm.createNode('pairBlend')\n self.finger2Start_inf.r.connect(pb.inRotate2)\n pb.weight.set(0.5)\n pb.rotInterpolation.set(1)\n pb.outRotate.connect(self.finger2Start_cns.r)\n\n # Orientation for finger2Start_cns\n pConst = pm.pointConstraint(self.finger2Start_ctl, self.finger4Start_ctl, self.finger3Start_cns, mo=True)\n oConst = pm.orientConstraint(self.finger2Start_ctl, self.finger4Start_ctl, self.finger3Start_cns, mo=True)\n oConst.interpType.set(2)\n\n def _wingMidConstrainPos(endCns, endCtl, targ):\n '''\n Sets the mid finger ctl to be constrained between start and end (plus initial offset)\n '''\n endPos = pm.createNode('plusMinusAverage')\n endPos.input3D[0].set(endCns.t.get())\n endCtl.t.connect(endPos.input3D[1])\n avgPos = node.createMulNode(endPos.output3D, [.5, .5, .5])\n offsetPos = pm.createNode('plusMinusAverage')\n offsetPos.input3D[0].set(targ.t.get() - avgPos.output.get())\n avgPos.output.connect(offsetPos.input3D[1])\n offsetPos.output3D.connect(targ.t)\n\n _wingMidConstrainPos(self.finger0End_cns, self.finger0End_ctl, self.finger0Mid_cns)\n _wingMidConstrainPos(self.finger1End_cns, self.finger1End_ctl, self.finger1Mid_cns)\n _wingMidConstrainPos(self.finger2End_cns, self.finger2End_ctl, self.finger2Mid_cns)\n _wingMidConstrainPos(self.finger2BEnd_cns, self.finger2BEnd_ctl, self.finger2BMid_cns)\n _wingMidConstrainPos(self.finger3End_cns, self.finger3End_ctl, self.finger3Mid_cns)\n _wingMidConstrainPos(self.finger4End_cns, self.finger4End_ctl, self.finger4Mid_cns)\n\n # Connect wing curves to controls\n for ctl, index in zip([self.finger0Start_ctl, self.finger1Start_ctl, self.finger2Start_ctl], range(3)):\n d = node.createDecomposeMatrixNode(ctl.worldMatrix[0])\n d.outputTranslate.connect(self.wingLeadCrv0.controlPoints[index])\n\n for ctl, index in zip([self.finger0Mid_ctl, self.finger1Mid_ctl, self.finger2Mid_ctl], range(3)):\n d = node.createDecomposeMatrixNode(ctl.worldMatrix[0])\n d.outputTranslate.connect(self.wingMidCrv0.controlPoints[index])\n\n for ctl, index in zip([self.finger0End_ctl, self.finger1End_ctl, self.finger2End_ctl], range(3)):\n d = node.createDecomposeMatrixNode(ctl.worldMatrix[0])\n d.outputTranslate.connect(self.wingTrailCrv0.controlPoints[index])\n\n for ctl, index in zip([self.finger2Start_ctl, self.finger3Start_ctl, self.finger4Start_ctl], range(3)):\n d = node.createDecomposeMatrixNode(ctl.worldMatrix[0])\n d.outputTranslate.connect(self.wingLeadCrv1.controlPoints[index])\n\n for ctl, index in zip([self.finger2BMid_ctl, self.finger3Mid_ctl, self.finger4Mid_ctl], range(3)):\n d = node.createDecomposeMatrixNode(ctl.worldMatrix[0])\n d.outputTranslate.connect(self.wingMidCrv1.controlPoints[index])\n\n for ctl, index in zip([self.finger2BEnd_ctl, self.finger3End_ctl, self.finger4End_ctl], range(3)):\n d = 
node.createDecomposeMatrixNode(ctl.worldMatrix[0])\n d.outputTranslate.connect(self.wingTrailCrv1.controlPoints[index])\n\n # Sample lofted surface and constrain feathers\n def _createFeatherConstraints(crv, joints, surface, name, vParam, startUParam, endUParam, upMps, numJoints):\n # Make point on surface info nodes\n numPoints = pm.getAttr(crv.spans) + crv.degree() - 1\n infList = []\n for i in range(numPoints):\n num = str(i).zfill(2)\n param = (endUParam - startUParam) / (numPoints - 1) * i + startUParam\n inf = pm.createNode('pointOnSurfaceInfo', name='%s_%s_surfaceInfo' % (name, num))\n surface.worldSpace[0].connect(inf.inputSurface)\n inf.parameterV.set(vParam)\n inf.parameterU.set(param)\n infList.append(inf)\n inf.result.position.connect(crv.controlPoints[i])\n\n jointDist = vector.getDistance(infList[0].result.position.get(), infList[-1].result.position.get())\n\n # Buffer zone for crv\n bufferVec = pm.createNode('plusMinusAverage', name='%s_bufferVec_utl' % name)\n infList[-1].result.position.connect(bufferVec.input3D[0])\n infList[-2].result.position.connect(bufferVec.input3D[1])\n bufferVec.operation.set(2)\n bufferVecNorm = pm.createNode('vectorProduct', name='%s_bufferVecNormal_utl' % name)\n bufferVec.output3D.connect(bufferVecNorm.input1)\n bufferVecNorm.operation.set(0)\n bufferVecNorm.normalizeOutput.set(1)\n bufferVecScaled = pm.createNode('multiplyDivide', name='%s_bufferVecScaled_utl' % name)\n bufferVecNorm.output.connect(bufferVecScaled.input1)\n bufferVecScaled.input2.set((jointDist, jointDist, jointDist))\n bufferPos = pm.createNode('plusMinusAverage', name='%s_bufferPos_utl' % name)\n infList[-1].result.position.connect(bufferPos.input3D[0])\n bufferVecScaled.output.connect(bufferPos.input3D[1])\n bufferPos.output3D.connect(crv.controlPoints[numPoints+1])\n\n crvInf = pm.createNode('curveInfo', name='%s_crvInfo_utl' % name)\n crv.worldSpace[0].connect(crvInf.inputCurve)\n\n defaultRange = jointDist / crvInf.arcLength.get()\n stretch = pm.createNode('multiplyDivide', name='%s_stretch_utl' % name)\n crvInf.arcLength.connect(stretch.input2X)\n stretch.input1X.set(crvInf.arcLength.get())\n stretch.operation.set(2)\n\n mps = []\n\n for i in range(numJoints):\n num = str(i).zfill(2)\n param = (defaultRange / (numJoints-1))*i\n paramNode = pm.createNode('multDoubleLinear', name='%s_%s_paramMult_utl' % (name, num))\n paramNode.input1.set(param)\n stretch.outputX.connect(paramNode.input2)\n mp = pm.createNode('motionPath', name='%s_%s_mp_utl' % (name, num))\n crv.worldSpace[0].connect(mp.geometryPath)\n mp.fractionMode.set(1)\n paramNode.output.connect(mp.uValue)\n if upMps:\n mp.frontAxis.set(0)\n mp.upAxis.set(1)\n mp.follow.set(1)\n upVec = pm.createNode('plusMinusAverage', name='%s_%s_upVec_utl' % (name, num))\n upVec.operation.set(2)\n upMps[i].allCoordinates.connect(upVec.input3D[0])\n mp.allCoordinates.connect(upVec.input3D[1])\n upVec.output3D.connect(mp.worldUpVector)\n\n mps.append(mp)\n\n for i in range(len(joints)):\n j = joints[i]\n j.setParent(None)\n j.setParent(self.noXForm)\n j.inheritsTransform.set(0)\n j.jo.set((0, 0, 0))\n mps[i].allCoordinates.connect(j.t)\n mps[i].rotate.connect(j.r)\n\n return mps\n\n covMps = []\n margMps = []\n\n upMps = _createFeatherConstraints(self.wingDict['primaries']['curves'][0],\n [],\n self.wingSurface1,\n self.getName('primary_guide'),\n 0.0,\n 0.1,\n 1.9,\n [],\n self.settings['numJointsPrimary'])\n if self.settings['enablePCoverts']:\n covMps = _createFeatherConstraints(self.wingDict['primaryCoverts']['curves'][0],\n 
[],\n self.wingSurface1,\n self.getName('primaryCoverts_guide'),\n 0.0,\n 0.1,\n 1.9*self.settings['lengthPCoverts'],\n [],\n self.settings['numJointsPCoverts'])\n\n for i in range(self.settings['numPrimaries']):\n num = str(i).zfill(2)\n spacing = (.99 / self.settings['numPrimaries'])\n joints = self.wingDict['primaries']['joints'][i*self.settings['numJointsPrimary']:(i+1)*self.settings['numJointsPrimary']]\n upMps = _createFeatherConstraints(self.wingDict['primaries']['curves'][i+1],\n joints,\n self.wingSurface1,\n self.getName('primary_%s' % num),\n spacing * i + .01,\n 0.1,\n 1.9,\n upMps,\n self.settings['numJointsPrimary'])\n if self.settings['enablePCoverts']:\n joints = self.wingDict['primaryCoverts']['joints'][i*self.settings['numJointsPCoverts']:(i+1)*self.settings['numJointsPCoverts']]\n covMps = _createFeatherConstraints(self.wingDict['primaryCoverts']['curves'][i+1],\n joints,\n self.wingSurface1,\n self.getName('primaryCoverts_%s' % num),\n (spacing * i) + (spacing*0.5) + .01,\n 0.1,\n 1.9*self.settings['lengthPCoverts'],\n covMps,\n self.settings['numJointsPCoverts'])\n\n upMps = _createFeatherConstraints(self.wingDict['secondaries']['curves'][0],\n [],\n self.wingSurface1,\n self.getName('secondary_guide'),\n 0.0,\n 0.1,\n 1.9,\n [],\n self.settings['numJointsSecondary'])\n if self.settings['enableSCoverts']:\n covMps = _createFeatherConstraints(self.wingDict['secondaryCoverts']['curves'][0],\n [],\n self.wingSurface1,\n self.getName('secondaryCoverts_guide'),\n 0.0,\n 0.1,\n 1.9,\n [],\n self.settings['numJointsSCoverts'])\n\n if self.settings['enableSMarginals']:\n covMps = _createFeatherConstraints(self.wingDict['secondaryMarginals']['curves'][0],\n [],\n self.wingSurface1,\n self.getName('secondaryMarginals_guide'),\n 0.0,\n 0.1,\n 1.9,\n [],\n self.settings['numJointsSMarginals'])\n\n for i in range(self.settings['numSecondaries']):\n num = str(i).zfill(2)\n spacing = (.99 / self.settings['numSecondaries'])\n joints = self.wingDict['secondaries']['joints'][i*self.settings['numJointsSecondary']:(i+1)*self.settings['numJointsSecondary']]\n upMps = _createFeatherConstraints(self.wingDict['secondaries']['curves'][i+1],\n joints,\n self.wingSurface0,\n self.getName('secondary_%s' % num),\n spacing * i + .01,\n 0.1,\n 1.9,\n upMps,\n self.settings['numJointsSecondary'])\n\n if self.settings['enableSCoverts']:\n joints = self.wingDict['secondaryCoverts']['joints'][i*self.settings['numJointsSCoverts']:(i+1)*self.settings['numJointsSCoverts']]\n covMps = _createFeatherConstraints(self.wingDict['secondaryCoverts']['curves'][i+1],\n joints,\n self.wingSurface0,\n self.getName('secondaryCoverts_%s' % num),\n (spacing * i) + (spacing*0.5) + .01,\n 0.1,\n 1.9*self.settings['lengthSCoverts'],\n covMps,\n self.settings['numJointsSCoverts'])\n \n if self.settings['enableSMarginals']:\n joints = self.wingDict['secondaryMarginals']['joints'][i*self.settings['numJointsSMarginals']:(i+1)*self.settings['numJointsSMarginals']]\n margMps = _createFeatherConstraints(self.wingDict['secondaryMarginals']['curves'][i+1],\n joints,\n self.wingSurface0,\n self.getName('secondaryMarginals_%s' % num),\n spacing * i + .01,\n 0.1,\n 1.9*self.settings['lengthSMarginals'],\n margMps,\n self.settings['numJointsSMarginals'])\n\n return",
"def SetControlSignals(inst_spec, itype, ctrl):\n\n itype <<= inst_spec.itype\n\n #\n # The Literal() function (see instructions.py) generates an Atlas 'literal'\n # value that can be used on the right-hand side of an assignment (as is done\n # below).\n #\n\n ctrl.ex <<= inst_spec.ex_ctrl.Literal()\n ctrl.mem <<= inst_spec.mem_ctrl.Literal()\n ctrl.wb <<= inst_spec.wb_ctrl.Literal()",
"def ikfkMechanics(module, extraName, jnts, mechSkelGrp, ctrlGrp, moduleType, rig):\n jntSuffix = suffix['joint']\n newJntChains = []\n ## create duplicate chains\n for chain in ['IK', 'FK']:\n newJnts = utils.duplicateJntChain(chain, jnts, parent=mechSkelGrp.name)\n newJntChains.append(newJnts)\n ikJnts = newJntChains[0]\n fkJnts = newJntChains[1]\n for i, each in enumerate(jnts):\n newName = '{}_result{}'.format(each.rsplit('_', 1)[0], jntSuffix)\n jnts[i] = cmds.rename(each, newName)\n # utils.addJntToSkinJnt(jnts[i], rig=rig)\n ## settings control\n module.settingCtrl = ctrlFn.ctrl(name='{}{}Settings'.format(extraName, moduleType),\n guide='{}{}Settings{}'.format(module.moduleName,\n moduleType, suffix['locator']),\n deleteGuide=True, side=module.side, skipNum=True,\n parent=module.rig.settingCtrlsGrp.name,\n scaleOffset=rig.scaleOffset, rig=rig)\n if moduleType == 'arm':\n settingJnt = jnts[3]\n else:\n settingJnt = jnts[2]\n module.settingCtrl.makeSettingCtrl(ikfk=True, parent=settingJnt)\n ## parent constraints\n for jnt, ikJnt, fkJnt in zip(jnts, ikJnts, fkJnts):\n parConstr = cmds.parentConstraint(ikJnt, fkJnt, jnt)\n cmds.connectAttr(module.settingCtrl.ctrl.ikfkSwitch, '{}.{}W1'.format(parConstr[0], fkJnt))\n swRev = utils.newNode('reverse', name='{}{}IKFKSw'.format(extraName, moduleType),\n side=module.side)\n swRev.connect('inputX', module.settingCtrl.ctrl.ikfkSwitch, mode='to')\n swRev.connect('outputX', '{}.{}W0'.format(parConstr[0], ikJnt), mode='from')\n ## control vis groups\n ikCtrlGrp = utils.newNode('group', name='{}{}IKCtrls'.format(extraName, moduleType),\n side=module.side, parent=ctrlGrp.name, skipNum=True)\n fkCtrlGrp = utils.newNode('group', name='{}{}FKCtrls'.format(extraName, moduleType),\n side=module.side, parent=ctrlGrp.name, skipNum=True)\n cmds.setDrivenKeyframe(ikCtrlGrp.name, at='visibility',\n cd=module.settingCtrl.ctrl.ikfkSwitch, dv=0.999, v=1)\n cmds.setDrivenKeyframe(ikCtrlGrp.name, at='visibility',\n cd=module.settingCtrl.ctrl.ikfkSwitch, dv=1, v=0)\n cmds.setDrivenKeyframe(fkCtrlGrp.name, at='visibility',\n cd=module.settingCtrl.ctrl.ikfkSwitch, dv=0.001, v=1)\n cmds.setDrivenKeyframe(fkCtrlGrp.name, at='visibility',\n cd=module.settingCtrl.ctrl.ikfkSwitch, dv=0, v=0)\n return ikJnts, fkJnts, jnts, ikCtrlGrp, fkCtrlGrp",
"def __generateIK(self):\n\n self.__ik = self.__KeyPair.generate()",
"def ik_to_fk(node):\n ik_main_off = get_parent(node.ik_main_conn)\n fk_01_off = get_parent(node.fk_01_conn)\n fk_02_off = get_parent(node.fk_02_conn)\n fk_03_off = get_parent(node.fk_03_conn)\n\n ik_main_world_trans = get_world_trans(node.ik_main_conn)\n fk_01_world_trans = get_world_trans(node.fk_01_conn)\n ik_main_off_world_trans = get_world_trans(ik_main_off)\n fk_01_off_world_trans = get_world_trans(fk_01_off)\n fk_02_off_world_trans = get_world_trans(fk_02_off)\n fk_03_off_world_trans = get_world_trans(fk_03_off)\n\n # calculate base information\n def_len = (ik_main_off_world_trans - fk_01_off_world_trans).length()\n\n # Calculate ik direction\n ik_dir_01 = ik_main_off_world_trans - fk_01_off_world_trans\n ik_dir_02 = ik_main_world_trans - fk_01_world_trans\n\n ik_dir_rot = ik_dir_01.rotateTo(ik_dir_02).asEulerRotation()\n\n # Apply ik direction -> important to calculate correct pole rotations\n fk_01_rot_plugs = get_rot_plugs(node.fk_01_conn)\n for i, plug in enumerate(fk_01_rot_plugs):\n plug.setMAngle(oMa.MAngle(ik_dir_rot[i], oMa.MAngle.kRadians))\n\n # Calculate ik pole rotations\n ik_pole_world_mat = get_world_matrix(node.ik_pole_conn, 0)\n fk_03_world_inv_mat = get_world_inv_matrix(node.fk_01_conn, 0)\n\n ik_pole_rot_mat = ik_pole_world_mat * fk_03_world_inv_mat\n\n ik_pole_vec = oMa.MTransformationMatrix(ik_pole_rot_mat).translation(oMa.MSpace.kWorld)\n ik_pole_vec.y = 0\n\n ik_pole_rot = oMa.MVector.kZaxisVector.rotateTo(ik_pole_vec).asEulerRotation()\n\n # Calculate ik rotations\n tri_a_len = (fk_02_off_world_trans - fk_01_off_world_trans).length()\n tri_b_len = (fk_03_off_world_trans - fk_02_off_world_trans).length()\n tri_c_len = (ik_main_world_trans - fk_01_world_trans).length()\n\n if tri_c_len >= def_len:\n fk_02_angle = 0\n fk_01_angle = 0\n else:\n fk_02_angle = math.pi - solve_triangle(tri_a_len, tri_b_len, tri_c_len, \"C\")\n fk_01_angle = -solve_triangle(tri_a_len, tri_b_len, tri_c_len, \"B\")\n\n # Add rotations together\n fk_01_temp = oMa.MEulerRotation(fk_01_angle, ik_pole_rot.y, 0)\n\n ik_dir_mat = compose_mat(ik_dir_rot)\n fk_01_mat = compose_mat(fk_01_temp)\n rot_mat = fk_01_mat * ik_dir_mat\n\n # Apply everything\n fk_01_rot = get_rot_from_mat(rot_mat)\n fk_02_rot = (fk_02_angle, 0, 0)\n\n fk_01_rot_plugs = get_rot_plugs(node.fk_01_conn)\n for i, plug in enumerate(fk_01_rot_plugs):\n plug.setMAngle(oMa.MAngle(fk_01_rot[i], oMa.MAngle.kRadians))\n\n fk_02_rot_plugs = get_rot_plugs(node.fk_02_conn)\n for i, plug in enumerate(fk_02_rot_plugs):\n if not plug.isLocked:\n plug.setMAngle(oMa.MAngle(fk_02_rot[i], oMa.MAngle.kRadians))\n\n # Calculate ankle rotation\n fk_03_rot = rot_world_space_to_local_space(node.ik_main_conn, get_parent(node.fk_03_conn))\n\n fk_03_rot_plugs = get_rot_plugs(node.fk_03_conn)\n for i, plug in enumerate(fk_03_rot_plugs):\n plug.setMAngle(oMa.MAngle(fk_03_rot[i], oMa.MAngle.kRadians))",
"def ik_system(self):\n # initial setup for roll locators hierarchy\n rolls_locs_data_tmp = self.roll_locs_hierarchy()\n self.ik_system_objs.append(rolls_locs_data_tmp[0][-1])\n\n driver_ctrls_offset_grp = [] \n\n self.ik_chain = joints_utils.related_clean_joint_chain(self.main_chain, self.side, \"ik\", True)\n\n self.ik_system_objs.append(self.ik_chain[0])\n\n # creating ctrl for shoulder/hip and his grps #### circleFourArrows ####\n self.hip_loc = cmds.spaceLocator(name=\"{}_{}_hipPosition_LOC\".format(self.side, self.name))\n hip_loc_off_grp = cmds.group(empty=True, name=\"{}_{}_hipPosition_offset_GRP\".format(self.side, self.name))\n transforms_utils.align_objs(self.main_chain[0], self.hip_loc)\n transforms_utils.align_objs(self.root_jnt, hip_loc_off_grp)\n cmds.parent(self.hip_loc[0], hip_loc_off_grp)\n\n cmds.parentConstraint(self.root_jnt, hip_loc_off_grp, maintainOffset=True)\n for axis in [\"X\", \"Y\", \"Z\"]:\n cmds.connectAttr(\"{}.scale{}\".format(self.root_jnt, axis), \"{}.scale{}\".format(hip_loc_off_grp, axis), force=True)\n\n self.ik_system_objs.append(hip_loc_off_grp)\n\n\n self.main_ctrl = controller.Control(\"{}_main_ik\".format(self.main_chain[2][:len(self.main_chain[2])-4]), 5.0, 'cube', self.main_chain[2], '', '', ['s', 'v'], '', True, True, False)\n self.main_ctrl.make_dynamic_pivot(\"{}_main_ik\".format(self.main_chain[2][:len(self.main_chain[2])-4]), 2.5, self.main_ctrl.get_control(), self.main_ctrl.get_control())\n driver_ctrls_offset_grp.append(self.main_ctrl.get_offset_grp())\n\n transforms_utils.align_objs(self.main_ctrl.get_control(), rolls_locs_data_tmp[0][0][0], True, False)\n\n # the --- rolls_locs_data_tmp[0][2] --- is the heel offset group\n cmds.parentConstraint(self.main_ctrl.get_control(), rolls_locs_data_tmp[0][3][0], maintainOffset=True)\n\n # set-up a foot roll system\n self.foot_roll_system(rolls_locs_data_tmp[0][3][0])\n cmds.parentConstraint(self.ik_chain[2], self.foot_chain[0], maintainOffset=True)\n for axis in [\"X\", \"Y\", \"Z\"]:\n cmds.connectAttr(\"{}.scale{}\".format(self.ik_chain[2], axis), \"{}.scale{}\".format(self.foot_chain[0], axis), force=True)\n\n ik_rotate_plane_handle = cmds.ikHandle(name=\"{}_{}_rotatePlane_IKH\".format(self.side, self.name), solver=\"ikRPsolver\", startJoint=self.ik_chain[0], endEffector=self.ik_chain[2])\n\n self.ik_system_objs.append(ik_rotate_plane_handle[0])\n\n cmds.parentConstraint(self.hip_loc[0], self.ik_chain[0], maintainOffset=True)\n\n cmds.parentConstraint(self.ankle_loc, ik_rotate_plane_handle[0], maintainOffset=True)\n\n cmds.orientConstraint(self.ankle_loc, self.ik_chain[2], maintainOffset=True)\n\n # building pole vector system\n ik_poleVector = self.create_locator_poleVector_system(\"{}_{}_ikPVSystem\".format(self.side, self.name), self.ik_chain[0], self.ik_chain[1], self.ik_chain[2])\n \n cmds.poleVectorConstraint(ik_poleVector[0], ik_rotate_plane_handle[0])\n\n self.poleVector_ctrl = controller.Control(\"{}_{}_poleVector_ik\".format(self.side, self.name), 5.0, 'sphere', '', '', '', ['r', 's', 'v'], '', True, True, False)\n \n driver_ctrls_offset_grp.append(self.poleVector_ctrl.get_offset_grp())\n\n transforms_utils.align_objs(ik_poleVector[0], self.poleVector_ctrl.get_offset_grp(), True, False)\n\n cmds.parent(ik_poleVector[1], self.poleVector_ctrl.get_control())\n\n # no flip Ik ---> pole vector system\n no_flip_fix_grps = polevectors_utils.no_flip_pole_vector(self.name, self.ik_chain[0], self.main_ctrl.get_control(), self.root_jnt, self.poleVector_ctrl.get_control(), 
self.poleVector_ctrl.get_offset_grp(), [self.world_space_loc[0]], [\"world\"], self.side)\n scale_fix_no_flip_off_grp = cmds.group(empty=True, name=\"{}_{}_scaleFix_noFlipIK_aiming_offset_GRP\".format(self.side, self.name))\n transforms_utils.align_objs(self.root_jnt, scale_fix_no_flip_off_grp)\n cmds.parentConstraint(self.root_jnt, scale_fix_no_flip_off_grp, maintainOffset=True)\n cmds.parent(no_flip_fix_grps[0], scale_fix_no_flip_off_grp)\n for axis in [\"X\", \"Y\", \"Z\"]:\n cmds.connectAttr(\"{}.scale{}\".format(self.root_jnt, axis), \"{}.scale{}\".format(scale_fix_no_flip_off_grp, axis), force=True)\n cmds.connectAttr(\"{}.scale{}\".format(self.root_jnt, axis), \"{}.scale{}\".format(no_flip_fix_grps[1], axis), force=True)\n\n self.ik_system_objs.extend([scale_fix_no_flip_off_grp, no_flip_fix_grps[1]])\n \n\n # adding the poleVector arrow\n annotation = polevectors_utils.pole_vector_arrow(self.ik_chain[1], self.poleVector_ctrl.get_control(), name=\"{}_{}_poleVector_ANT\".format(self.side, self.name))\n driver_ctrls_offset_grp.append(annotation)\n \n # parent constraint the foot part of the ik_chain to the reverse chain\n cmds.parentConstraint(self.foot_chain[1], self.ik_chain[3], maintainOffset=True)\n cmds.parentConstraint(self.foot_chain[2], self.ik_chain[4], maintainOffset=True)\n\n # clean the scene\n # self.ik_system_objs.append(self.ik_spring_chain[0])\n self.ik_system_objs.append(self.ik_chain[0])\n \n self.ik_system_grp = cmds.group(empty=True, name=\"{}_{}_ikSystem_GRP\".format(self.side, self.name))\n cmds.parent(self.ik_system_objs, self.ik_system_grp)\n cmds.group(empty=True, name=self.ik_ctrls_main_grp)\n transforms_utils.align_objs(self.root_jnt, self.ik_ctrls_main_grp)\n cmds.parent(driver_ctrls_offset_grp, self.ik_ctrls_main_grp)\n \n cmds.parentConstraint(self.base_control, self.ik_ctrls_main_grp, maintainOffset=True)\n\n self.module_main_grp(self.ik_system_grp) \n \n return True",
"def make_ik_chain(self, org_chain, ik_mstr, pole_target, ik_pole_direction=0):\n\t\tik_chain = []\n\t\tfor i, org_bone in enumerate(org_chain):\n\t\t\tik_bone = self.bone_infos.bone(\n\t\t\t\tname\t\t = org_bone.name.replace(\"ORG\", \"IK\")\n\t\t\t\t,source\t\t = org_bone\n\t\t\t\t,bone_group\t = self.bone_groups[\"IK Mechanism\"]\n\t\t\t\t,layers\t\t = self.bone_layers[\"IK Mechanism\"]\n\t\t\t\t,hide_select = self.mch_disable_select\n\t\t\t)\n\t\t\tik_chain.append(ik_bone)\n\n\t\t\tif i == 0:\n\t\t\t\t# First IK bone special treatment\n\t\t\t\tik_bone.parent = self.limb_root_bone.name\n\t\t\t\tik_bone.custom_shape = self.load_widget(\"IK_Base\")\n\t\t\t\tik_bone.use_custom_shape_bone_size = True\n\t\t\t\tik_bone.bone_group\t = self.bone_groups[\"IK Controls\"]\n\t\t\t\tik_bone.layers\t\t = self.bone_layers[\"IK Controls\"]\n\n\t\t\telse:\n\t\t\t\tik_bone.parent = ik_chain[-2]\n\t\t\t\n\t\t\tif i == self.params.CR_ik_length-1:\n\t\t\t\t# Add the IK constraint to the previous bone, targetting this one.\n\t\t\t\tpole_target_name = pole_target.name if pole_target else \"\"\n\t\t\t\tik_chain[self.params.CR_ik_length-2].add_constraint(self.obj, 'IK', \n\t\t\t\t\tpole_target\t\t= self.obj if pole_target_name!=\"\" else None,\n\t\t\t\t\tpole_subtarget\t= pole_target_name,\n\t\t\t\t\tpole_angle\t\t= self.pole_angle,\n\t\t\t\t\tsubtarget\t\t= ik_bone.name,\n\t\t\t\t\tchain_count\t\t= self.params.CR_ik_length-1\n\t\t\t\t)\n\t\t\t\t# Parent this one to the IK master.\n\t\t\t\tik_bone.parent = ik_mstr\n\n\t\t\t\tif self.params.CR_world_aligned_controls:\n\t\t\t\t\tfk_bone = self.fk_chain[i]\n\t\t\t\t\tfk_name = fk_bone.name\n\t\t\t\t\tfk_bone.name = fk_bone.name.replace(\"FK-\", \"FK-W-\")\t# W for World.\n\t\t\t\t\t# Make child control for the world-aligned control, that will have the original transforms and name.\n\t\t\t\t\t# This is currently just the target of a Copy Transforms constraint on the ORG bone.\n\t\t\t\t\tfk_child_bone = self.bone_infos.bone(\n\t\t\t\t\t\tname\t\t= fk_name\n\t\t\t\t\t\t,source\t\t= fk_bone\n\t\t\t\t\t\t,parent\t\t= fk_bone\n\t\t\t\t\t\t,bone_group\t= self.bone_groups[\"FK Helpers\"]\n\t\t\t\t\t\t,layers\t\t= self.bone_layers[\"FK Helpers\"]\n\t\t\t\t\t)\n\n\t\t\t\t\tfk_bone.flatten()\n\n\t\t\t\t\tik_mstr.flatten()\n\t\t\n\t\t# Add IK/FK Snapping to the UI.\n\t\tself.add_ui_data_ik_fk(self.fk_chain, ik_chain, pole_target)\n\t\treturn ik_chain",
"def __init__(self, controller, parent=None, name=None, live=False,\n hide_labels=False, select_controls=None,\n editable_labels=False):\n # Inheritance\n super(ControllerWidget, self).__init__(parent)\n\n QtCore.QResource.registerResource(os.path.join(os.path.dirname(\n os.path.dirname(__file__)), 'resources', 'widgets_icons.rcc'))\n\n # Class parameters\n self.controller = controller\n self.live = live\n self.hide_labels = hide_labels\n self.select_controls = select_controls\n # Parameter to store the connection status between the\n # controller widget and the controller\n self.connected = False\n # Parameter to store all the controller widget controls:\n # the keys correspond to the control name (a control name is\n # associated to a controller trait with the same name), the\n # dictionary elements are 4-uplets of the form (trait, control_class,\n # control_instance, control_label).\n self._controls = {}\n self._keys_connections = {}\n self.editable_labels = editable_labels\n\n # If possilbe, set the widget name\n if name:\n self.setObjectName(name)\n\n # Create the layout of the controller widget\n # We will add all the controls to this layout\n self._grid_layout = QtGui.QGridLayout()\n self._grid_layout.setAlignment(QtCore.Qt.AlignTop)\n self._grid_layout.setSpacing(3)\n self._grid_layout.setContentsMargins(0, 0, 0, 0)\n self.setLayout(self._grid_layout)\n\n self._groups = OrderedDict()\n\n # Create all the layout controls associated with the controller values\n # we want to tune (ie the user traits)\n self._create_controls()\n self.connect_keys()\n\n # Start the event loop that check for wrong edited fields (usefull\n # when we work off line, otherwise the traits make the job but it is\n # still user friendly).\n self._check()\n\n # Set the synchrinization between this object and the input controller:\n # 1) synchronize the edited values in the widget with the controller\n # values on the fly\n if self.live:\n self.connect()\n\n # 2) initialize the controller widget with the controller values and\n # wait synchronization instructions to update the controller values.\n else:\n self.update_controller_widget()",
"def get_ik_fk_controls_by_role(uiHost, attr_ctl_cnx):\n ik_controls = {\"ik_control\": None,\n \"pole_vector\": None,\n \"ik_rot\": None\n }\n fk_controls = []\n uiHost = pm.PyNode(uiHost)\n if uiHost.hasAttr(attr_ctl_cnx):\n cnxs = uiHost.attr(attr_ctl_cnx).listConnections()\n if cnxs:\n for c in cnxs:\n role = c.ctl_role.get()\n if \"fk\" in role:\n fk_controls.append(c.stripNamespace())\n elif role == \"upv\":\n ik_controls[\"pole_vector\"] = c.stripNamespace()\n elif role == \"ik\":\n ik_controls[\"ik_control\"] = c.stripNamespace()\n elif role == \"ikRot\":\n ik_controls[\"ik_rot\"] = c.stripNamespace()\n\n fk_controls = sorted(fk_controls)\n return ik_controls, fk_controls",
"def addControlActuator(self): \n # THE INDEX OF THE NEXT ACTUATOR\n nextActuator = self.controlForm.rowCount()\n\n labels = self.labelList[nextActuator]\n\n # CREATE WIDGETS ON CONTROL PANEL TAB\n actuatorName = QLabel(labels[0])\n actuatorName.setAlignment(Qt.AlignLeft|Qt.AlignVCenter)\n \n actuatorToggle = QPushButton(labels[1])\n actuatorToggle.setObjectName(\"actuator-button\")\n actuatorToggle.setCheckable(True)\n \n # ADD TO FORM LAYOUT\n self.controlForm.addRow(actuatorName, actuatorToggle)\n\n #actuatorName.setFixedHeight(int(actuatorName.sizeHint().height() * 1.5))\n #actuatorToggle.setFixedHeight(int(actuatorToggle.sizeHint().height() * 1.5))\n\n # LINK WIDGETS\n actuatorToggle.clicked.connect(lambda state, actuator = nextActuator: self.toggleActuator(actuator))",
"def _ik_and_fk_aligned(ik_ctrl, tcp_handle):\n\n # Define some small number to threshold our output\n delta = .0001\n\n # Initialize variables\n # translation_is_aligned = False\n # rotation_is_aligned = False\n ik_fk_are_aligned = False\n\n # Find the translation of each object and compare them\n ik_trans = pm.xform(ik_ctrl, q=True, rp=True, ws=True)\n tcp_trans = pm.xform(tcp_handle, q=True, rp=True, ws=True)\n\n # Find the distance between the ik controller and the tcp handle\n trans_diff = math.sqrt((ik_trans[0] - tcp_trans[0]) ** 2\n + (ik_trans[1] - tcp_trans[1]) ** 2\n + (ik_trans[2] - tcp_trans[2]) ** 2)\n\n if round(trans_diff, 6) < delta:\n ik_fk_are_aligned = True\n\n return ik_fk_are_aligned",
"def make_knode(self,i,path_len=0):\n return Knode(path_len=path_len,\\\n ident=self.nodes[i].ident,\\\n lindex=i)",
"def _set_control(self, key, value):\n \n self.client.session.set_device_controls(\n self.device.id,\n {key: value},\n )"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Create a soft IK constraint on an ikHandle. | def create_soft_ik(ik_ctrl, ik_joints, ik_handle):
# get name and constant variables
name = ik_handle+'Soft'
parent = utils.get_parent(ik_joints[0])
ik_handle_parent = utils.get_parent(ik_handle)
# get total length of joint chain
chain_length = 0
for jnt in ik_joints[1:]:
chain_length += abs(mc.getAttr(jnt+'.tx'))
mc.addAttr(ik_joints[0], ln='softIkChainLength', k=1, dv=chain_length)
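# Soft IK overview: as the root-to-handle distance approaches the full chain
# length, the handle is eased toward its target along an exponential falloff
# instead of letting the chain snap straight, removing the usual IK pop.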
#create dist node, (distance between top ik_joint and ik_handle) = X
soft_ik_root = utils.snap_locator(ik_joints[0], node_type='transform')
soft_ik_root = mc.rename(soft_ik_root, name+'_root_'+utils.get_suffix('transform'))
dist = utils.create_distance_reader(soft_ik_root, ik_handle_parent)
#create the dSoft and softIK attributes on the controller
mc.addAttr(ik_ctrl, ln='softIK', min=0, k=1)
ctrl_clamp = mc.createNode('clamp')
mc.connectAttr(ik_ctrl+'.softIK', ctrl_clamp+'.inputR')
mc.setAttr(ctrl_clamp+'.minR', 0.0001)
mc.setAttr(ctrl_clamp+'.maxR', 10000000)
#create node network for soft IK
da_pma = mc.createNode('plusMinusAverage', n=name+'_da_pma')
x_minus_da_pma = mc.createNode('plusMinusAverage', n=name+'_x_minus_da_pma')
negate_x_minus_md = mc.createNode('multiplyDivide', n=name+'_negate_x_minus_md')
divBy_dSoft_md = mc.createNode('multiplyDivide', n=name+'_divBy_dSoft_md')
pow_e_md = mc.createNode('multiplyDivide', n=name+'_pow_e_md')
one_minus_pow_e_pma = mc.createNode('plusMinusAverage', n=name+'_one_minus_pow_e_pma')
times_dSoft_md = mc.createNode('multiplyDivide', n=name+'_times_dSoft_md')
plus_da_pma = mc.createNode('plusMinusAverage', n=name+'_plus_da_pma')
da_cond = mc.createNode('condition', n=name+'_da_cond')
dist_diff_pma = mc.createNode('plusMinusAverage', n=name+'_dist_diff_pma')
defaultPos_pma = mc.createNode('plusMinusAverage', n=name+'_defaultPos_pma')
#set operations
mc.setAttr(da_pma+'.operation', 2)
mc.setAttr(x_minus_da_pma+'.operation', 2)
mc.setAttr(negate_x_minus_md+'.operation', 1)
mc.setAttr(divBy_dSoft_md+'.operation', 2)
mc.setAttr(pow_e_md+'.operation', 3)
mc.setAttr(one_minus_pow_e_pma+'.operation', 2)
mc.setAttr(times_dSoft_md+'.operation', 1)
mc.setAttr(plus_da_pma+'.operation', 1)
mc.setAttr(da_cond+'.operation', 5)
mc.setAttr(dist_diff_pma+'.operation', 2)
mc.setAttr(defaultPos_pma+'.operation', 2)
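# The network below evaluates the standard soft IK curve:
#   da = chainLength - softIK
#   softDist = da + softIK * (1 - e^(-(x - da) / softIK))   when x >= da
#   softDist = x                                            otherwise
# where x is the live root-to-handle distance read from the distance node.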
#make connections
mc.connectAttr(ik_joints[0]+'.softIkChainLength', da_pma+'.input1D[0]')
mc.connectAttr(ctrl_clamp+'.outputR', da_pma+'.input1D[1]')
mc.connectAttr(dist+'.localDistance', x_minus_da_pma+'.input1D[0]')
mc.connectAttr(da_pma+'.output1D', x_minus_da_pma+'.input1D[1]')
mc.connectAttr(x_minus_da_pma+'.output1D', negate_x_minus_md+'.input1X')
mc.setAttr(negate_x_minus_md+'.input2X', -1)
mc.connectAttr(negate_x_minus_md+'.outputX', divBy_dSoft_md+'.input1X')
mc.connectAttr(ctrl_clamp+'.outputR', divBy_dSoft_md+'.input2X')
mc.setAttr(pow_e_md+'.input1X', 2.718281828)
mc.connectAttr(divBy_dSoft_md+'.outputX', pow_e_md+'.input2X')
mc.setAttr(one_minus_pow_e_pma+'.input1D[0]', 1)
mc.connectAttr(pow_e_md+'.outputX' , one_minus_pow_e_pma+'.input1D[1]')
mc.connectAttr(one_minus_pow_e_pma+'.output1D', times_dSoft_md+'.input1X')
mc.connectAttr(ctrl_clamp+'.outputR', times_dSoft_md+'.input2X')
mc.connectAttr(times_dSoft_md+'.outputX', plus_da_pma+'.input1D[0]')
mc.connectAttr(da_pma+'.output1D', plus_da_pma+'.input1D[1]')
mc.connectAttr(da_pma+'.output1D', da_cond+'.firstTerm')
mc.connectAttr(dist+'.localDistance', da_cond+'.secondTerm')
mc.connectAttr(dist+'.localDistance', da_cond+'.colorIfFalseR')
mc.connectAttr(plus_da_pma+'.output1D', da_cond+'.colorIfTrueR')
mc.connectAttr(da_cond+'.outColorR', dist_diff_pma+'.input1D[0]')
mc.connectAttr(dist+'.localDistance', dist_diff_pma+'.input1D[1]')
mc.setAttr(defaultPos_pma+'.input1D[0]', 0)
mc.connectAttr(dist_diff_pma+'.output1D', defaultPos_pma+'.input1D[1]')
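# defaultPos_pma ends up holding (x - softDist): how far the handle must be
# pulled back toward the chain root once the target passes the soft zone.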
# Create new ik aim node
up = [1,0,0]
aim = [0,1,0]
grp = mc.createNode('transform', n=name+'_soft_aim_'+utils.get_suffix('transform'), p=ik_handle_parent)
gAim = mc.createNode('transform', n=name+'_soft_'+utils.get_suffix('transform'), p=grp)
mc.aimConstraint(soft_ik_root,
grp,
aim=aim,
u=up,
wu=up,
wut='objectRotation',
wuo=ik_ctrl,
n=grp+'_ac')
mc.connectAttr(defaultPos_pma+'.output1D', gAim+'.ty')
mc.pointConstraint(gAim, ik_handle)
mc.parent(ik_handle, gAim)
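# grp is aim constrained so its +Y axis points from the handle's parent back at
# the chain root; driving gAim.ty with (x - softDist) slides the parented
# ikHandle along that axis, which produces the soft ease-out.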
# parent the soft IK root back under the original chain parent
if parent:
mc.parent(soft_ik_root, parent)
return gAim | [
"def add_soft(self, fmla, weight):\n self._variables.update(fmla.get_variables())\n self._soft_constraints.append((fmla, weight))\n if self._handle is None:\n self._handle = _Handle()\n return self._handle",
"def solve_constraint_generic(problem, enqueue_condition=None) :\n raise NotImplementedError",
"def create_fk_ik_switch(switch_ctrl, ik_handles, fk_ctrls, ik_ctrls, vis_ctrl=None, switch_attr_name='IK', vis_attr_name='fkIkCtrlVis'):\n\n fk_ctrls = mc.ls(fk_ctrls)\n ik_ctrls = mc.ls(ik_ctrls)\n ik_handles = mc.ls(ik_handles)\n\n if not vis_ctrl:\n vis_ctrl = switch_ctrl\n\n # Create attributes\n if not mc.objExists(switch_ctrl+'.'+switch_attr_name):\n mc.addAttr(switch_ctrl, ln=switch_attr_name, min=0, max=1, k=1)\n\n if not mc.objExists(vis_ctrl+'.'+vis_attr_name):\n mc.addAttr(vis_ctrl, ln=vis_attr_name, at='enum', en='auto:fkOnly:ikOnly:both', k=1)\n\n # Connect ik handles\n for handle in ik_handles:\n mc.connectAttr(switch_ctrl+'.'+switch_attr_name, handle+'.ikBlend')\n\n # Create swicth for ik ctrl\n ik_choice = utils.create_node('choice', n=vis_attr_name+'_ik_choice')\n mc.connectAttr(vis_ctrl+'.'+vis_attr_name, ik_choice+'.selector')\n mc.connectAttr(switch_ctrl+'.'+switch_attr_name, ik_choice+'.input[0]')\n mc.setAttr(ik_choice+'.input[1]', 0)\n mc.setAttr(ik_choice+'.input[2]', 1)\n mc.setAttr(ik_choice+'.input[3]', 1)\n\n for ctrl in ik_ctrls:\n mc.setAttr(ctrl+'.v', l=0)\n mc.connectAttr(ik_choice+'.output', ctrl+'.v', f=1)\n mc.setAttr(ctrl+'.v', l=1)\n\n # Create swicth for ik ctrl\n fk_choice = utils.create_node('choice', n=vis_attr_name+'_fk_choice')\n fk_rv = utils.create_node('reverse', n=vis_attr_name+'_fk_choice')\n mc.connectAttr(switch_ctrl+'.'+switch_attr_name, fk_rv+'.inputX')\n mc.connectAttr(vis_ctrl+'.'+vis_attr_name, fk_choice+'.selector')\n mc.connectAttr(fk_rv+'.outputX', fk_choice+'.input[0]')\n mc.setAttr(fk_choice+'.input[1]', 1)\n mc.setAttr(fk_choice+'.input[2]', 0)\n mc.setAttr(fk_choice+'.input[3]', 1)\n\n for ctrl in fk_ctrls:\n mc.setAttr(ctrl+'.v', l=0)\n mc.connectAttr(fk_choice+'.output', ctrl+'.v', f=1)\n mc.setAttr(ctrl+'.v', l=1)\n\n return True",
"def createConstraint(self):\n return _libsbml.Model_createConstraint(self)",
"def createConstraint(*argv):",
"def ikSolver(self,robot,obj_pt,obj_axis):\n q = robot.getConfig()\n obj = IKObjective()\n obj.setFixedPoints(self.link,[self.localPosition1,self.localPosition2],[vectorops.madd(obj_pt,obj_axis,-0.03),vectorops.madd(obj_pt,obj_axis,0.03)])\n solver = IKSolver(robot)\n solver.add(obj)\n solver.setActiveDofs(self.armIndices)\n return solver",
"def add_constraint(self, constraint):",
"def _set_constraint(self):\n pass",
"def _parse_initbound(self) :\n\t\tlogging.debug(\"Parsing initbound soft constraints\")",
"def hard_sigmoid(x):\n return K.hard_sigmoid(x)",
"def soft_constraint ( self , var , value , name = '' , title = '' ) :\n \n assert isinstance ( var , ROOT.RooAbsReal ) ,\\\n \"Invalid ``v'': %s/%s\" % ( var , type ( var ) ) \n assert isinstance ( value , VE ),\\\n \"Invalid ``value'': %s/%s\" % ( value , type ( value ) )\n\n assert 0 < value.cov2() , 'Invalid error for %s' % value\n \n name = name if name else 'Gauss_%s_%s' % ( var.GetName() , self.name ) \n title = title if title else 'Gaussian Constraint(%s,%s) at %s' % ( var.GetName() , self.name , value )\n \n # value & error as RooFit objects: \n val = ROOT.RooFit.RooConst ( value.value () )\n err = ROOT.RooFit.RooConst ( value.error () )\n \n # Gaussian constrains \n gauss = ROOT.RooGaussian ( self.var_name ( name ) , title , var , val , err )\n \n # keep all the created technical stuff \n self.aux_keep.append ( val )\n self.aux_keep.append ( err )\n self.aux_keep.append ( gauss )\n\n self.info ('Constraint is created %s=%s' % ( var.name , value ) )\n return gauss",
"def _create_hardsigmoid(cls, op, op_t):\n node = cls._common_singa_tensor_to_onnx_node(op, op_t)\n\n node.attribute.extend([\n helper.make_attribute('alpha', op.alpha),\n helper.make_attribute('beta', op.gamma),\n ])\n return node",
"def create_activation_solver_constraint(\n self, constraint: relaxation.RelaxActivationConstraint, act_index: int,\n slope: float, bias: float):\n outvar = self.solver_variables[constraint.outvar.name]\n invar = self.solver_variables[constraint.invar.name]\n expression = invar[act_index] * slope + bias - outvar[act_index]\n if constraint.sense == 0:\n self.constraints += [expression == 0]\n if constraint.sense == 1:\n self.constraints += [expression <= 0]\n if constraint.sense == -1:\n self.constraints += [expression >= 0]",
"def declare_physical_budget(model, k):\n\n m = model\n\n m.budget = pe.Constraint(expr=sum(1*m.delta_gen[g] for g in m.delta_gen.index_set()) +\\\n sum(1*m.delta_branch[k] for k in m.delta_branch.index_set()) +\\\n sum(1*m.delta_load[b] for b in m.delta_load.index_set()) +\\\n sum(1*m.delta_bus[b] for b in m.delta_bus.index_set()) == k)",
"def test_create_hyperflex_auto_support_policy(self):\n pass",
"def constraintsolver(self):\n listconst = self.createconstraint() # get the constraint equations for the cells in the board\n if listconst:\n listconst = self.trivialcase(listconst)\n listconst = self.subtractconstraint(listconst, 0) # subtract the constraint equations to get a hint\n return self.generatehint()",
"def declare_physical_budget(model, k):\n\n m = model\n\n m.budget = pe.Constraint(expr=sum(5*m.delta_gen[g] for g in m.delta_gen.index_set()) +\\\n sum(1*m.delta_branch[k] for k in m.delta_branch.index_set()) +\\\n sum(3*m.delta_load[b] for b in m.delta_load.index_set()) +\\\n sum(1*m.delta_bus[b] for b in m.delta_bus.index_set()) == k)",
"def _create_hardsigmoid(cls, onnx_node, inputs, opset_version):\n alpha = onnx_node.getattr(\"alpha\", 0.2)\n beta = onnx_node.getattr(\"beta\", 0.5)\n _, forward = cls._common_onnx_node_to_singa_op(onnx_node, inputs,\n opset_version)\n return _, forward(alpha, beta)",
"def createConstraints(self):\n if isinstance(super().createConstraints(), pg.SparseMapMatrix):\n self.C1 = pg.SparseMapMatrix(self.constraintsRef())\n # make a copy because it will be overwritten\n else:\n self.C1 = self.constraints()\n\n self.C = pg.matrix.FrameConstraintMatrix(self.C1,\n len(self.fops),\n self.scalef)\n self.setConstraints(self.C)\n # cw = self.regionManager().constraintWeights()\n # self.regionManager().setConstraintsWeights(np.tile(cw, self.nf))\n # switch off automagic inside core.inversion which checks for\n # local modeltransform of the regionManager\n self.regionManager().setLocalTransFlag(False)"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Quaternion / matrix based twist for upper arms and legs. | def upper_twist(shoulder_jnt, up_arm_ik_jnt, lo_arm_ik_jnt, up_arm_jnt, lo_arm_jnt, up_arm_twist_jnts):
# Create a group that does not rotate and parent under the ik arm parent (shoulder)
stable_reader_grp = utils.create_node('transform', n=up_arm_ik_jnt+'_stable_reader', p=up_arm_ik_jnt)
# Create a grp that will rotate with ik arm
twist_reader_grp = utils.create_node('transform', n=up_arm_ik_jnt+'_twist_reader', p=up_arm_ik_jnt)
twist_driver_grp = utils.create_node('transform', n=up_arm_ik_jnt+'_twist', p=twist_reader_grp)
mc.parent(stable_reader_grp, shoulder_jnt)
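# stable_reader_grp was built at the IK upper arm but lives under the shoulder,
# so it follows the shoulder without the arm's roll; twist_reader_grp stays on
# the IK upper arm, so the offset between the two is exactly the roll to extract.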
mc.addAttr(twist_reader_grp, ln='twist', k=1)
# Now set up mult matrix and decomp nodes to extract the twist between the two nodes
mult_mtx = mc.createNode('multMatrix')
decomp_mtx = mc.createNode('decomposeMatrix')
quat_to_euler = mc.createNode('quatToEuler')
mc.connectAttr(stable_reader_grp+'.worldInverseMatrix', mult_mtx+'.matrixIn[1]')
mc.connectAttr(twist_reader_grp+'.worldMatrix', mult_mtx+'.matrixIn[0]')
mc.connectAttr(mult_mtx+'.matrixSum', decomp_mtx+'.inputMatrix')
mc.connectAttr(decomp_mtx+'.outputQuatX', quat_to_euler+'.inputQuatX')
mc.connectAttr(decomp_mtx+'.outputQuatW', quat_to_euler+'.inputQuatW')
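# Using only quatX/quatW from the decomposed offset matrix and converting back
# to euler isolates the rotation about the bone's X axis (the twist) while
# discarding swing, which keeps the value stable through gimbal poses.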
utils.connect_negative(quat_to_euler+'.outputRotateX', twist_reader_grp+'.twist')
mc.connectAttr(twist_reader_grp+'.twist', twist_driver_grp+'.rx')
# Connect joints
mc.parentConstraint(twist_driver_grp, up_arm_jnt, mo=1)
mc.parentConstraint(lo_arm_ik_jnt, lo_arm_jnt, mo=1)
div = 1.0 / (len(up_arm_twist_jnts))
mdl = mc.createNode('multDoubleLinear')
mc.setAttr(mdl+'.input1', div)
mc.connectAttr(quat_to_euler+'.outputRotateX', mdl+'.input2')
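# Each twist joint except the last receives an equal 1/N share of the extracted
# twist; the last twist joint simply follows the IK upper arm via orient constraint.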
for i, joint in enumerate(up_arm_twist_jnts[:-1]):
mc.connectAttr(mdl+'.output', joint+'.rx')
mc.orientConstraint(up_arm_ik_jnt, up_arm_twist_jnts[-1], mo=1) | [
"def lap_mat(self):",
"def compute_twist(rbt):\n #YOUR CODE HERE\n R = rbt[:3,:3]\n orientation = eqf.find_omega_theta(R)# omega/theta\n v = eqf.find_v(orientation[0], orientation[1], trans).reshape(3,)\n return (v, orientation[0])",
"def a2t2(t, g, nu1, c1, temp, nu2, c2, wc, phi1, phim1):\n \n w1w2t2 = w_w.w1_w2(t, g, temp, nu1, c1, nu1, c1, wc, phi1, phi1)\n w1mw2mt2 = w_w.w1_w2(t, g, temp, nu2, c2, nu2, c2, wc, phim1, phim1) \n w1mw2t2 = w_w.w1_w2(t, g, temp, nu2, c2, nu1, c1, wc, phim1, phi1)\n w1w2mt2 = w_w.w1_w2(t, g, temp, nu1, c1, nu2, c2, wc, phi1, phim1) \n \n a11 = w1w2t2-w1w2mt2-w1mw2t2+w1mw2mt2\n a12 = w1w2t2-w1mw2t2+w1w2mt2-w1mw2mt2\n a21 = w1w2t2+w1mw2t2-w1w2mt2-w1mw2mt2\n a22 = w1w2t2+w1w2mt2+w1mw2t2+w1mw2mt2\n \n return 1/2 * np.array([[a11, a12], [a21, a22]])",
"def gyration(self):\n A = self.parent()\n hf = list(self.height_function())\n k = len(hf) - 1\n for i in range(1,k):\n for j in range(1,k):\n if (i+j) % 2 == 0 \\\n and hf[i-1][j] == hf[i+1][j] == hf[i][j+1] == hf[i][j-1]:\n if hf[i][j] < hf[i+1][j]:\n hf[i][j] += 2\n else:\n hf[i][j] -= 2\n for i in range(1,k):\n for j in range(1,k):\n if (i+j) % 2 == 1 \\\n and hf[i-1][j] == hf[i+1][j] == hf[i][j+1] == hf[i][j-1]:\n if hf[i][j] < hf[i+1][j]:\n hf[i][j] += 2\n else:\n hf[i][j] -= 2\n return A.from_height_function(matrix(hf))",
"def solve_upper_triangular(amat, b):\n return solve_triangular_base(amat, b, lower=False)",
"def fkine(robot, q):\n\n q = mat(q)\n n = robot.n\n if numrows(q)==1 and numcols(q)==n:\n t = robot.base\n for i in range(0,n):\n t = t * robot.links[i].tr(q[0,i])\n t = t * robot.tool\n return t\n else:\n if numcols(q) != n:\n raise Exception('bad data')\n t = []\n for qv in q: # for each trajectory point\n tt = robot.base\n for i in range(0,n):\n tt = tt * robot.links[i].tr(qv[0,i])\n t.append(tt*robot.tool)\n return t",
"def inv_kin(self, xy):\n\n def distance_to_default(q, *args): \n \"\"\"Objective function to minimize\n Calculates the euclidean distance through joint space to the default\n arm configuration. The weight list allows the penalty of each joint \n being away from the resting position to be scaled differently, such\n that the arm tries to stay closer to resting state more for higher \n weighted joints than those with a lower weight.\n \n :param list q: the list of current joint angles\n :returns scalar: euclidean distance to the default arm position\n \"\"\"\n # weights found with trial and error, get some wrist bend, but not much\n weight = [1, 1, 1.3, 1] \n return np.sqrt(np.sum([(qi - q0i)**2 * wi\n for qi,q0i,wi in zip(q, self.q0, weight)]))\n\n def x_constraint(q, xy):\n \"\"\"Returns the corresponding hand xy coordinates for \n a given set of joint angle values [shoulder, elbow, wrist], \n and the above defined arm segment lengths, L\n \n :param list q: the list of current joint angles\n :returns: the difference between current and desired x position\n \"\"\"\n x = ( self.L[0]*np.cos(q[0]) + self.L[1]*np.cos(q[0]+q[1]) + \n self.L[2]*np.cos(q[0]+q[1]+q[2]) + self.L[3]*np.cos(np.sum(q)) ) - xy[0]\n return x\n\n def y_constraint(q, xy): \n \"\"\"Returns the corresponding hand xy coordinates for \n a given set of joint angle values [shoulder, elbow, wrist], \n and the above defined arm segment lengths, L\n \n :param list q: the list of current joint angles\n :returns: the difference between current and desired y position\n \"\"\"\n y = ( self.L[0]*np.sin(q[0]) + self.L[1]*np.sin(q[0]+q[1]) + \n self.L[2]*np.sin(q[0]+q[1]+q[2]) + self.L[3]*np.sin(np.sum(q)) ) - xy[1]\n return y\n\n return scipy.optimize.fmin_slsqp( func=distance_to_default, \n x0=self.q, eqcons=[x_constraint, y_constraint], \n args=(xy,), iprint=0) # iprint=0 suppresses output",
"def walsh_matrix(self,scale):\n \n hadamard = spl.hadamard(scale,dtype=int)\n sequencies = np.apply_along_axis(self._get_sequency, 1, hadamard)\n return np.sort(sequencies),hadamard[np.argsort(sequencies)]",
"def test_linalg2() :\r\n \r\n logger.info('Test Linaear Algebra: ')\r\n \r\n LA3 = Ostap.Vector(3)\r\n l1 = LA3(0,1,2)\r\n l2 = LA3(3,4,5)\r\n \r\n logger.info ( 'l1 , l2 : %s %s ' % ( l1 , l2 ) )\r\n logger.info ( 'l1 + l2 : %s ' % ( l1 + l2 ) )\r\n \r\n logger.info ( 'l1 - l2 : %s ' % ( l1 - l2 ) )\r\n logger.info ( 'l1 * l2 : %s ' % ( l1 * l2 ) )\r\n logger.info ( 'l1 * 2 : %s ' % ( l1 * 2 ) )\r\n logger.info ( ' 2 * l2 : %s ' % ( 2 * l2 ) )\r\n logger.info ( 'l1 / 2 : %s ' % ( l1 / 2 ) )\r\n \r\n l1 /= 2 \r\n logger.info ( 'l1 /= 2 : %s ' % l1 )\r\n \r\n l1 *= 2 \r\n logger.info ( 'l1 *= 2 : %s ' % l1 )\r\n\r\n ## if ( 3 , 5 ) <= python_version :\r\n \r\n ## logger.info ( 'l1 @ l2 : %s ' % ( l1 @ l2 ) )\r\n ## logger.info ( 'l1 @ 2 : %s ' % ( l1 @ 2 ) )\r\n ## logger.info ( ' 2 @ l2 : %s ' % ( 2 @ l2 ) )\r\n \r\n logger.info('TEST matrices: ')\r\n \r\n m22 = Ostap.Math.Matrix(2,2) ()\r\n m23 = Ostap.Math.Matrix(2,3) ()\r\n s22 = Ostap.Math.SymMatrix(2)()\r\n \r\n l2 = Ostap.Math.Vector(2)()\r\n l3 = Ostap.Math.Vector(3)()\r\n \r\n l2[0] = 1\r\n l2[1] = 2\r\n \r\n l3[0] = 1\r\n l3[1] = 2\r\n l3[1] = 3\r\n \r\n logger.info ( 'l2 , l3 : %s %s ' % ( l2 , l3 ) )\r\n \r\n m22[0,0] = 1\r\n m22[0,1] = 1\r\n m22[1,1] = 1\r\n \r\n m23[0,0] = 1\r\n m23[1,1] = 1\r\n m23[0,2] = 1\r\n \r\n s22[0,0] = 2\r\n s22[1,0] = 1\r\n s22[1,1] = 3\r\n \r\n logger.info ( 'm22\\n%s' % m22 ) \r\n logger.info ( 's22\\n%s' % s22 ) \r\n logger.info ( 'm23\\n%s' % m23 ) \r\n logger.info ( 'm22/3\\n%s' % (m22/3) )\r\n \r\n logger.info ( 'm23*3\\n%s' % (m23*3) ) \r\n\r\n logger.info ( 'm22**3\\n%s' % m22**3 ) \r\n logger.info ( 's22**4\\n%s' % s22**4 ) \r\n\r\n logger.info ( 'm22 * m23 :\\n%s' % ( m22 * m23 ) ) \r\n logger.info ( 'm22 * l2 : %s ' % ( m22 * l2 ) ) \r\n logger.info ( 'l2 * m22 : %s ' % ( l2 * m22 ) ) \r\n logger.info ( 'm23 * l3 : %s ' % ( m23 * l3 ) ) \r\n logger.info ( 'l2 * m23 : %s ' % ( l2 * m23 ) )\r\n \r\n logger.info ( 'm22 * s22 + 2 * m22 :\\n%s ' % ( m22*s22 + 2*m22 ) )\r\n logger.info ( 'm22 == m22*1.0 : %s ' % ( m22 == m22 * 1.0 ) )\r\n logger.info ( 'm22 != m22*1.1 : %s ' % ( m22 != m22 * 1.1 ) )\r\n logger.info ( 'm23 == m23*1.0 : %s ' % ( m23 == m23 * 1.0 ) )\r\n logger.info ( 'm23 != m23*1.1 : %s ' % ( m23 != m23 * 1.1 ) )\r\n logger.info ( 'l1 == l1 *1.0 : %s ' % ( l1 == l1 * 1.0 ) )\r\n logger.info ( 'l1 != l1 *1.1 : %s ' % ( l1 != l1 * 1.1 ) )\r\n logger.info ( 's22 == s22*1.0 : %s ' % ( s22 == s22 * 1.0 ) )\r\n logger.info ( 's22 != s22*1.1 : %s ' % ( s22 != s22 * 1.1 ) )\r\n \r\n logger.info ( ' l1 == (0,1,2) : %s ' % ( l1 == ( 0 , 1 , 2 ) ) )\r\n logger.info ( ' l1 == [0,1,2] : %s ' % ( l1 == [ 0 , 1 , 2 ] ) )\r\n\r\n ## if ( 3 , 5 ) <= python_version :\r\n \r\n ## logger.info ( 'm23 @ 3 :\\n%s' % ( m23 @ 3 ) ) \r\n ## logger.info ( 'm22 @ m23 :\\n%s' % ( m22 @ m23 ) ) \r\n ## logger.info ( 'm22 @ l2 : %s ' % ( m22 @ l2 ) ) \r\n ## logger.info ( 'm23 @ l3 : %s ' % ( m23 @ l3 ) ) \r\n \r\n m22[0,0] = 1\r\n m22[0,1] = 2\r\n m22[1,0] = 2\r\n m22[1,1] = 3\r\n \r\n s22[0,0] = 1\r\n s22[0,1] = 2\r\n s22[1,1] = 3\r\n \r\n logger.info ( ' m22 == s22 : %s ' % ( m22 == s22 ) )\r\n logger.info ( ' m22 == s22*1.0 : %s ' % ( m22 == s22 * 1.0 ) )\r\n logger.info ( ' m22 != s22*1.1 : %s ' % ( m22 != s22 * 1.1 ) )\r\n\r\n ## ok \r\n m22 + m22\r\n\r\n ## crash \r\n m22 += m22\r\n\r\n ## crash\r\n m22 += Ostap.Math.Matrix(2,2) ()\r\n\r\n logger.info ( ' m22 += m22 :\\n%s ' % m22 )\r\n\r\n m22 -= m22*2\r\n\r\n logger.info ( ' m22 += m22*2 :\\n%s ' % m22 )\r\n\r\n\r\n m22 += s22*0\r\n m22 += s22\r\n 
m22 = m22 + s22\r\n\r\n\r\n logger.info ( ' m22 += s22*0 :\\n%s ' % m22 )\r\n\r\n m22 -= s22*2\r\n logger.info ( ' m22 -= s22*2 :\\n%s ' % m22 )\r\n\r\n s22 += s22*2\r\n logger.info ( ' s22 += s22*2 :\\n%s ' % s22 )\r\n\r\n s22 -= s22*2\r\n logger.info ( ' s22 -= s22*2 :\\n%s ' % s22 )\r\n \r\n if np :\r\n logger.info ( 'Operations with numpy objects')\r\n \r\n v2 = np.array ( [1.0,2.0] )\r\n v3 = np.array ( [1.0,2.0,3.0 ] )\r\n \r\n logger.info ( 'v2 * l2 : %s' % ( v2 * l2 ) )\r\n logger.info ( 'l3 * v3 : %s' % ( l3 * v3 ) )\r\n logger.info ( 's22 * v2 : %s' % ( s22 * v2 ) )\r\n logger.info ( 'm22 * v2 : %s' % ( m22 * v2 ) )\r\n logger.info ( 'm23 * v3 : %s' % ( m23 * v3 ) )\r\n\r\n logger.info ( 'm22 as np : %s' % ( m22.to_numpy() ) )\r\n logger.info ( 's22 as np : %s' % ( s22.to_numpy() ) )\r\n logger.info ( 'm23 as np : %s' % ( m23.to_numpy() ) )\r\n \r\n \r\n logger.info ( 'm22 + m22(np) :\\n%s' % ( m22 + m22.to_numpy () ) )\r\n logger.info ( 'm22 + s22(np) :\\n%s' % ( m22 + s22.to_numpy () ) )\r\n logger.info ( 's22 + s22(np) :\\n%s' % ( s22 + s22.to_numpy () ) )\r\n logger.info ( 's22 + m22(np) :\\n%s' % ( s22 + s22.to_numpy () ) )\r\n \r\n logger.info ( 'm22 * m22(np) :\\n%s' % ( m22 * m22.to_numpy () ) )\r\n logger.info ( 's22 * s22(np) :\\n%s' % ( s22 * s22.to_numpy () ) )\r\n logger.info ( 's22 * m23(np) :\\n%s' % ( s22 * m23.to_numpy () ) ) \r\n logger.info ( 'l2 * m22(np) :\\n%s' % ( l2 * m22.to_numpy () ) )\r\n\r\n logger.info ( 'SVector with errors')\r\n\r\n v2 = Ostap.Math.VectorE (2)()\r\n\r\n v2 [ 0 ] = 3\r\n v2 [ 1 ] = 4\r\n \r\n v2 . cov2 () [ 0 , 0 ] = 0.10\r\n v2 . cov2 () [ 0 , 1 ] = 0.05\r\n v2 . cov2 () [ 1 , 1 ] = 0.20\r\n\r\n rho = lambda x,y : ( x * x + y * y ) ** 0.5\r\n phi = lambda x,y : math.atan2 ( y , x ) \r\n \r\n\r\n r1 = v2.transform ( rho , phi )\r\n logger.info ( \" -> rho, phi %s \" % r1 )\r\n\r\n r2 = v2.transform ( rho )\r\n logger.info ( \" -> rho %s \" % r2 )",
"def reverse_quad(q):\n return [q[1], q[0], q[3], q[2]]",
"def test_quaternion_hamilton():\n q_ij = pr.concatenate_quaternions(pr.q_i, pr.q_j)\n assert_array_equal(pr.q_k, q_ij)\n q_ijk = pr.concatenate_quaternions(q_ij, pr.q_k)\n assert_array_equal(-pr.q_id, q_ijk)",
"def lie_bracket(self, matrix_a, matrix_b):\n return gs.matmul(matrix_a, matrix_b) - gs.matmul(matrix_b, matrix_a)",
"def _le_square(self, annot, p1, p2, lr, fill_color):\n m, im, L, R, w, scol, fcol, opacity = self._le_annot_parms(annot, p1, p2, fill_color)\n shift = 2.5 # 2*shift*width = length of square edge\n d = shift * max(1, w)\n M = R - (d/2., 0) if lr else L + (d/2., 0)\n r = Rect(M, M) + (-d, -d, d, d) # the square\n # the square makes line longer by (2*shift - 1)*width\n p = r.tl * im\n ap = \"q\\n%s%f %f m\\n\" % (opacity, p.x, p.y)\n p = r.tr * im\n ap += \"%f %f l\\n\" % (p.x, p.y)\n p = r.br * im\n ap += \"%f %f l\\n\" % (p.x, p.y)\n p = r.bl * im\n ap += \"%f %f l\\n\" % (p.x, p.y)\n ap += \"%g w\\n\" % w\n ap += scol + fcol + \"b\\nQ\\n\"\n return ap",
"def q2u(q):\n return np.tensordot(quat.as_float_array(np.asarray(q)), sigma, axes = (-1,-3))",
"def TG(self,T, axis, L_R):\r\n rank = np.ndim(A)\r\n ttt = Tensor_type(self, T, args)\r\n p_a = axis\r\n def m(a,b, out=None):\r\n return np.matmul(a,b, out=None)\r\n\r\n TG_primer = self.TG_primer\r\n\r\n def Lower_Raise(T, p_a, L_R, ttt=ttt):\r\n def G_G(self, L_R):\r\n if L_R == [-1,1]:\r\n return self.metric_tensor\r\n elif L_R == [1,-1]:\r\n return self.inv_metric_tensor\r\n\r\n if L_R == [-1,1] or L_R == [1,-1]: #----> lowering & raising indices respectivelly\r\n if ttt[1] == 0 and L_R[1] == -1 or ttt[0] == 0 and L_R[0] == -1:\r\n raise Exception(f\"you cant raise/lower an already raised/lowered tensor index. Your tensortype {ttt}\")\r\n else:\r\n T = TG_primer(T, p_a) #------> this is the part where p_a is used\r\n\r\n if rank == 1:\r\n L = m(G_G(L_R), T)\r\n elif rank == 2:\r\n L = np.array([m(G_G(L_R), T)])\r\n elif rank == 3:\r\n L = np.array([m(G_G(L_R), T[i]) for i in range(T.shape[0])])\r\n\r\n ttt = [ttt[0] +L_R[0], ttt[1] +L_R[1]]\r\n return L,ttt\r\n else:\r\n raise Excperion(\"the following must hold: Lower_raise == [-1,1] or Lower_Riase == [1,-1]\")\r\n return Lower_Raise(T, p_a, L_R)",
"def getQ(m, t):\n\n Q = []\n for r in range(len(t)):\n qrow = []\n for c in range(len(t)):\n qrow.append(m[t[r]][t[c]])\n Q.append(qrow) \n return Q",
"def get_stain_matrix(self, I, *args):",
"def l2(self):\n return math.sqrt(self.quadrance)",
"def leg_tr(tr_leg, tens_list, legs_list, ent_list):\n q_index = [legs.__contains__(tr_leg) for legs in legs_list].index(True,0)\n ax1 = legs_list[q_index].index(tr_leg,0)\n ax2 = legs_list[q_index].index(tr_leg,ax1+1)\n tens_list[q_index] = np.trace(tens_list[q_index], offset=0, axis1=ax1, axis2=ax2)\n legs_list[q_index].remove(tr_leg)\n legs_list[q_index].remove(tr_leg)\n ent_list[tr_leg] = np.array([0])"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Stretch setup for biped (2-joint chain) arms and legs. | def biped_stretch(ik_ctrl,
ik_last_node,
pv_ctrl,
switch_ctrl,
up_arm_fk_ctrl,
lo_arm_fk_ctrl,
wrist_fk_ctrl,
up_arm_ik_jnt,
lo_arm_ik_jnt,
wrist_ik_jnt,
ik_handle,
pin_attr_name='pinElbow',
shift_attr_name='shiftElbow'):
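# Overview: build four length sources - FK stretch, IK elbow pin, elbow shift
# plus manual up/lo stretch, and distance based auto stretch - then blend them
# through the IK/FK switch into one pair of outputs driving the lower arm and
# wrist translateX.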
# add the stretch, pin, and shift attrs to the ctrls
mc.addAttr(ik_ctrl, ln=pin_attr_name, at='double', min=0, max=1, k=1)
mc.addAttr(ik_ctrl, ln=shift_attr_name, at='double', min=-1, max=1, k=1)
mc.addAttr(ik_ctrl, ln='autoStretch', at='double', min=0, max=1, k=1)
mc.addAttr(ik_ctrl, ln='upStretch', at='double', dv=1, min=0.001, k=1)
mc.addAttr(ik_ctrl, ln='loStretch', at='double', dv=1, min=0.001, k=1)
mc.addAttr(up_arm_fk_ctrl, ln='stretch', at='double', dv=1, min=0.001, k=1)
mc.addAttr(lo_arm_fk_ctrl, ln='stretch', at='double', dv=1, min=0.001, k=1)
# store the initial lengths of the joints
lo_init_length = mc.getAttr(lo_arm_ik_jnt+'.tx')
wrist_init_length = mc.getAttr(wrist_ik_jnt+'.tx')
max_init_length = mc.getAttr(lo_arm_ik_jnt+'.tx')+mc.getAttr(wrist_ik_jnt+'.tx')
lo_abs_init_length = abs(mc.getAttr(lo_arm_ik_jnt+'.tx'))
wrist_abs_length = abs(mc.getAttr(wrist_ik_jnt+'.tx'))
# Get the parent of the arm root joint
arm_root_grp = utils.get_parent(up_arm_ik_jnt)
# Create distance nodes between the base, end, and pv ctrl to get the length of each side of the triangle
root_to_end_dist = utils.create_distance_reader(arm_root_grp, ik_last_node)
root_to_pv_dist = utils.create_distance_reader(arm_root_grp, pv_ctrl)
pv_to_end_dist = utils.create_distance_reader(pv_ctrl, ik_last_node)
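# These three distances form the stretch triangle: root->end drives auto
# stretch, while root->poleVector and poleVector->end become the segment
# lengths when the elbow is pinned.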
# easy stuff first - create fk stretch nodes
lo_arm_fk_mdl = mc.createNode('multDoubleLinear')
wrist_fk_mdl = mc.createNode('multDoubleLinear')
mc.setAttr(lo_arm_fk_mdl+'.input1', mc.getAttr(lo_arm_ik_jnt+'.tx'))
mc.setAttr(wrist_fk_mdl+'.input1', mc.getAttr(wrist_ik_jnt+'.tx'))
mc.connectAttr(up_arm_fk_ctrl+'.stretch', lo_arm_fk_mdl+'.input2')
mc.connectAttr(lo_arm_fk_ctrl+'.stretch', wrist_fk_mdl+'.input2')
utils.connect_abs(lo_arm_fk_mdl+'.output', lo_arm_fk_ctrl+'_ZERO.tx')
if wrist_fk_ctrl and mc.objExists(wrist_fk_ctrl):
utils.connect_abs(wrist_fk_mdl+'.output', wrist_fk_ctrl+'_ZERO.tx')
# These are the final FK stretch outputs to connect to the joints
fk_stretch_final_output = [lo_arm_fk_mdl+'.output', wrist_fk_mdl+'.output']
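# FK stretch scales each segment's rest translateX by the FK ctrl's stretch
# attr, and the same value is pushed (as an absolute) into the child FK ctrl's
# ZERO group so the controls ride along with the stretched joints.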
# Now create nodes for the elbow pin
lo_arm_pin_mdl = mc.createNode('multDoubleLinear')
wrist_pin_mdl = mc.createNode('multDoubleLinear')
mc.setAttr(lo_arm_pin_mdl+'.input1', 1)
mc.setAttr(wrist_pin_mdl+'.input1', 1)
if lo_init_length < 0.0:
mc.setAttr(lo_arm_pin_mdl+'.input1', -1)
if wrist_init_length < 0.0:
mc.setAttr(wrist_pin_mdl+'.input1', -1)
mc.connectAttr(root_to_pv_dist+'.localDistance', lo_arm_pin_mdl+'.input2')
mc.connectAttr(pv_to_end_dist+'.localDistance', wrist_pin_mdl+'.input2')
# These are the final elbow pin stretch outputs to connect to the joints
pin_final_output = [lo_arm_pin_mdl+'.output', wrist_pin_mdl+'.output']
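# Elbow pin drives each segment length straight from the measured distances to
# the pole vector ctrl (sign corrected by the +/-1 in input1), so the elbow
# locks exactly onto the pole vector.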
# create shift nodes
mc.addAttr(lo_arm_ik_jnt, ln='shiftLength', k=1)
mc.addAttr(wrist_ik_jnt, ln='shiftLength', k=1)
tt = 'linear'
mc.setDrivenKeyframe(lo_arm_ik_jnt+'.shiftLength', cd=ik_ctrl+'.'+shift_attr_name, dv=0, v=lo_init_length, itt=tt, ott=tt)
mc.setDrivenKeyframe(lo_arm_ik_jnt+'.shiftLength', cd=ik_ctrl+'.'+shift_attr_name, dv=1, v=0, itt=tt, ott=tt)
mc.setDrivenKeyframe(lo_arm_ik_jnt+'.shiftLength', cd=ik_ctrl+'.'+shift_attr_name, dv=-1, v=max_init_length, itt=tt, ott=tt)
mc.setDrivenKeyframe(wrist_ik_jnt+'.shiftLength', cd=ik_ctrl+'.'+shift_attr_name, dv=0, v=wrist_init_length, itt=tt, ott=tt)
mc.setDrivenKeyframe(wrist_ik_jnt+'.shiftLength', cd=ik_ctrl+'.'+shift_attr_name, dv=1, v=max_init_length, itt=tt, ott=tt)
mc.setDrivenKeyframe(wrist_ik_jnt+'.shiftLength', cd=ik_ctrl+'.'+shift_attr_name, dv=-1, v=0, itt=tt, ott=tt)
shift_final_output = [ lo_arm_ik_jnt+'.shiftLength', wrist_ik_jnt+'.shiftLength']
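# The shift driven keys redistribute the total length between the two segments:
# at +1 the upper segment collapses to 0 and the lower takes the full chain
# length, at -1 the reverse, at 0 the rest lengths are kept.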
# Create the individual IK stretch nodes
lo_arm_ik_scale_mdl = mc.createNode('multDoubleLinear')
wrist_ik_scale_mdl = mc.createNode('multDoubleLinear')
mc.connectAttr(shift_final_output[0], lo_arm_ik_scale_mdl+'.input1')
mc.connectAttr(shift_final_output[1], wrist_ik_scale_mdl+'.input1')
mc.connectAttr(ik_ctrl+'.upStretch', lo_arm_ik_scale_mdl+'.input2')
mc.connectAttr(ik_ctrl+'.loStretch', wrist_ik_scale_mdl+'.input2')
# This is the final output for scale and shift
ik_stretch_final_output = [lo_arm_ik_scale_mdl+'.output', wrist_ik_scale_mdl+'.output']
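# Manual IK stretch then multiplies the shifted lengths by the per segment
# upStretch/loStretch attrs.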
# Now create the IK auto stretch nodes
lo_auto_stretch_mdl = mc.createNode('multDoubleLinear')
wrist_auto_stretch_mdl = mc.createNode('multDoubleLinear')
auto_stretch_clamp = mc.createNode('clamp')
mc.setAttr(auto_stretch_clamp+'.minR', 1)
mc.setAttr(auto_stretch_clamp+'.maxR', 10000000)
mc.connectAttr(ik_stretch_final_output[0], lo_auto_stretch_mdl+'.input1', f=1)
mc.connectAttr(ik_stretch_final_output[1], wrist_auto_stretch_mdl+'.input1', f=1)
mc.connectAttr(root_to_end_dist+'.stretchFactor', auto_stretch_clamp+'.inputR')
mc.connectAttr(auto_stretch_clamp+'.outputR', lo_auto_stretch_mdl+'.input2', f=1)
mc.connectAttr(auto_stretch_clamp+'.outputR', wrist_auto_stretch_mdl+'.input2', f=1)
adl = mc.createNode('addDoubleLinear')
mc.connectAttr(lo_arm_ik_scale_mdl+'.output', adl+'.input1')
mc.connectAttr(wrist_ik_scale_mdl+'.output', adl+'.input2')
utils.connect_abs(adl+'.output', root_to_end_dist+'.jointChainLength')
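# Auto stretch multiplies the shifted/scaled lengths by the clamped
# stretchFactor of the root-to-end reader (clamped to >= 1 so the chain only
# ever lengthens); summing both segment lengths back into jointChainLength
# keeps that factor valid when the manual attrs change.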
# handle the soft IK constraint override on the ik handle
pc = mc.pointConstraint(ik_last_node, ik_handle)[0]
if mc.objExists(up_arm_ik_jnt+'.softIkChainLength'):
# compensate: feed the new chain length into the soft IK chain length
utils.connect_abs(adl+'.output', up_arm_ik_jnt+'.softIkChainLength')
# blend off the soft IK constraint if we are in auto stretch or pin mode
mdl = mc.createNode('multDoubleLinear')
utils.connect_reverse(ik_ctrl+'.'+pin_attr_name, mdl+'.input1')
utils.connect_reverse(ik_ctrl+'.autoStretch', mdl+'.input2')
mc.connectAttr(mdl+'.output', pc+'.w0')
utils.connect_reverse(pc+'.w0', pc+'.w1')
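# Assuming create_soft_ik already point constrained the handle (its soft aim
# target sitting at w0), the ik_last_node target added above lands on w1, and
# this weighting fades soft IK out whenever pin or auto stretch is active,
# since those modes manage the chain length themselves.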
ik_auto_stretch_final_output = [lo_auto_stretch_mdl+'.output', wrist_auto_stretch_mdl+'.output']
# now create all the blends
# first blend between FK and an empty IK input
# (this IK input will take another blend node for blending all the IK options)
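# Blend hierarchy: switch.IK picks FK vs IK; inside the IK branch, autoStretch
# picks auto stretch vs the pin blend, and the pin blend picks elbow pin vs the
# shift/scale lengths.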
fk_to_ik_blend = mc.createNode('blendColors')
mc.connectAttr(switch_ctrl+'.IK', fk_to_ik_blend+'.blender')
mc.connectAttr(fk_stretch_final_output[0], fk_to_ik_blend+'.color2R')
mc.connectAttr(fk_stretch_final_output[1], fk_to_ik_blend+'.color2G')
# now create a blender between auto stretch and the rest of the IK options
auto_ik_blend = mc.createNode('blendColors')
mc.connectAttr(ik_ctrl+'.autoStretch', auto_ik_blend+'.blender')
mc.connectAttr(ik_auto_stretch_final_output[0], auto_ik_blend+'.color1R')
mc.connectAttr(ik_auto_stretch_final_output[1], auto_ik_blend+'.color1G')
# Now connect it to the FK blend
mc.connectAttr(auto_ik_blend+'.outputR', fk_to_ik_blend+'.color1R')
mc.connectAttr(auto_ik_blend+'.outputG', fk_to_ik_blend+'.color1G')
# now create a blender between pin elbow and the rest of the ik options
pin_ik_blend = mc.createNode('blendColors')
mc.connectAttr(ik_ctrl+'.'+pin_attr_name, pin_ik_blend+'.blender')
mc.connectAttr(pin_final_output[0], pin_ik_blend+'.color1R')
mc.connectAttr(pin_final_output[1], pin_ik_blend+'.color1G')
# Now connect it to the auto stretch blend
mc.connectAttr(pin_ik_blend+'.outputR', auto_ik_blend+'.color2R')
mc.connectAttr(pin_ik_blend+'.outputG', auto_ik_blend+'.color2G')
# now connect the shift and scale
mc.connectAttr(ik_stretch_final_output[0], pin_ik_blend+'.color2R')
mc.connectAttr(ik_stretch_final_output[1], pin_ik_blend+'.color2G')
# now for the magic! Connect the blend network to the joints
mc.connectAttr(fk_to_ik_blend+'.outputR', lo_arm_ik_jnt+'.tx')
mc.connectAttr(fk_to_ik_blend+'.outputG', wrist_ik_jnt+'.tx') | [
"def main_str_transform_setup(self, stretch_bone, chain_length):\n\n\t\tcum_length = self.org_chain[0].length\n\t\tfor i, main_str_bone in enumerate(self.main_str_bones):\n\t\t\tif i == 0: continue\n\t\t\tif i == len(self.main_str_bones)-1: continue\n\t\t\tmain_str_helper = self.bone_infos.bone(\n\t\t\t\tname\t\t = main_str_bone.name.replace(\"STR-\", \"STR-S-\")\n\t\t\t\t,source\t\t = main_str_bone\n\t\t\t\t,bbone_width = 1/10\n\t\t\t\t,bone_group\t = self.bone_groups[\"IK Mechanism\"]\n\t\t\t\t,layers\t\t = self.bone_layers[\"IK Mechanism\"]\n\t\t\t\t,parent\t\t = main_str_bone.parent\n\t\t\t\t,hide_select = self.mch_disable_select\n\t\t\t)\n\t\t\tmain_str_bone.parent = main_str_helper\n\n\t\t\tcon_name = 'CopyLoc_IK_Stretch'\n\t\t\tmain_str_helper.add_constraint(self.obj, 'COPY_LOCATION'\n\t\t\t\t,true_defaults\t= True\n\t\t\t\t,target\t\t\t= self.obj\n\t\t\t\t,subtarget\t\t= stretch_bone.name\n\t\t\t\t,name\t\t\t= con_name\n\t\t\t\t,head_tail\t\t= cum_length/chain_length\t# How far this bone is along the total chain length\n\t\t\t)\n\t\t\tcum_length += self.org_chain[i].length\n\n\t\t\tstretchy_drv = Driver()\t\t# Influence driver\n\t\t\tstretchy_drv.expression = f\"ik * stretch * (distance > {chain_length} * scale)\"\n\t\t\tvar_stretch = stretchy_drv.make_var(\"stretch\")\n\t\t\tvar_stretch.type = 'SINGLE_PROP'\n\t\t\tvar_stretch.targets[0].id_type = 'OBJECT'\n\t\t\tvar_stretch.targets[0].id = self.obj\n\t\t\tvar_stretch.targets[0].data_path = f'pose.bones[\"{self.prop_bone.name}\"][\"{self.ik_stretch_name}\"]'\n\n\t\t\tvar_ik = stretchy_drv.make_var(\"ik\")\n\t\t\tvar_ik.type = 'SINGLE_PROP'\n\t\t\tvar_ik.targets[0].id_type = 'OBJECT'\n\t\t\tvar_ik.targets[0].id = self.obj\n\t\t\tvar_ik.targets[0].data_path = f'pose.bones[\"{self.prop_bone.name}\"][\"{self.ikfk_name}\"]'\n\n\t\t\tvar_dist = stretchy_drv.make_var(\"distance\")\n\t\t\tvar_dist.type = 'LOC_DIFF'\n\t\t\tvar_dist.targets[0].id = self.obj\n\t\t\tvar_dist.targets[0].bone_target = self.ik_tgt_bone.name\n\t\t\tvar_dist.targets[0].transform_space = 'WORLD_SPACE'\n\t\t\tvar_dist.targets[1].id = self.obj\n\t\t\tvar_dist.targets[1].bone_target = self.ik_chain[0].name\n\t\t\tvar_dist.targets[1].transform_space = 'WORLD_SPACE'\n\n\t\t\tvar_scale = stretchy_drv.make_var(\"scale\")\n\t\t\tvar_scale.type = 'TRANSFORMS'\n\t\t\tvar_scale.targets[0].id = self.obj\n\t\t\tvar_scale.targets[0].transform_type = 'SCALE_Y'\n\t\t\tvar_scale.targets[0].transform_space = 'WORLD_SPACE'\n\t\t\tvar_scale.targets[0].bone_target = self.ik_chain[0].name\n\n\t\t\tdata_path = f'constraints[\"{con_name}\"].influence'\n\n\t\t\tmain_str_helper.drivers[data_path] = stretchy_drv",
"def testStretchModeBondLength(self):\n\n # Measure the distance\n ac = self.m[0]\n # The 0, 0, z atom\n atop = self.m[1]\n # The 0, 0, -z atom\n abot = self.m[6]\n d0 = GetBondLength(atop, abot)\n dc0 = GetBondLength(ac, atop)\n\n # Now create a stretch mode with just these two\n sm = StretchModeBondLength(atop, abot, None)\n sm.AddAtom(abot)\n\n self.assertEquals(sm.mpAtom0.GetName(), atop.GetName())\n self.assertEquals(sm.mpAtom1.GetName(), abot.GetName())\n\n # Stretch the bond by 5%\n delta = 0.05 * d0\n sm.Stretch(delta)\n\n # Make sure this does what we expected\n d1 = GetBondLength(atop, abot)\n self.assertAlmostEquals(d0+delta, d1, 6)\n\n # Note that only the second atom has moved\n dc1 = GetBondLength(ac, atop)\n self.assertAlmostEquals(dc0, dc1)\n\n return",
"def _calcStretchFactors(self):\n self.stretchX = math.log10(self.x2 - self.x1 + 1.0)\n self.stretchY = math.log10(self.y2 - self.y1 + 1.0)",
"def make_blinds(self):\n \n def create_miter(Vertex1, Vertex2, Vertex3):\n miter = Part.makePolygon([ Vertex1,Vertex2,Vertex3,Vertex1]) \n miter = Part.Face(miter)\n miter = miter.extrude(Base.Vector(0,0,self.thickness))\n \n return miter\n \n \n # left side\n blind_front = Part.makeBox(self.left_width, self.blind_depth, self.thickness)\n blind_front = blind_front.common(self.parts_left['top'])\n \n V1 = blind_front.Vertex2.Point\n V2 = blind_front.Vertex6.Point\n V3 = V2 + Base.Vector(-self.blind_depth,0,0)\n \n \n blind_front = blind_front.cut(create_miter(V1,V2,V3))\n \n self.parts_left['blind front'] = blind_front\n \n blind_side = Part.makeBox(self.blind_depth, self.depth, self.thickness)\n blind_side.Placement.Base = Base.Vector(self.left_width-self.blind_depth, 0,0)\n blind_side = blind_side.cut(blind_front)\n \n self.parts_left['blind side'] = blind_side\n \n # right side\n blind_front = Part.makeBox(self.right_width, self.blind_depth, self.thickness)\n self.parts_right['blind front'] = blind_front\n \n V1 = blind_front.Vertex2.Point\n V2 = blind_front.Vertex4.Point\n V3 = V2 + Base.Vector(self.blind_depth,0,0)\n \n blind_front = blind_front.cut(create_miter(V1,V2,V3))\n self.parts_right['blind front'] = blind_front\n \n blind_side = Part.makeBox(self.blind_depth, self.depth, self.thickness)\n blind_side = blind_side.cut(blind_front)\n \n self.parts_right['blind side'] = blind_side",
"def gripStretchQgsLineStringGeometry(geom, basePt, ptListToStretch, offSetX, offSetY, tolerance2ApproxCurve):\n obj = qad_utils.whatGeomIs(0, geom)\n if (type(obj) != list and type(obj) != tuple):\n objType = obj.whatIs()\n if objType == \"CIRCLE\": # se é cerchio\n newCircle = gripStretchCircle(obj, basePt, ptListToStretch, offSetX, offSetY, tolerance2ApproxCurve)\n if newCircle is not None:\n return QgsGeometry.fromPolyline(newCircle.asPolyline(tolerance2ApproxCurve))\n elif objType == \"ARC\": # se é arco\n newArc = gripStretchArc(obj, ptListToStretch, offSetX, offSetY, tolerance2ApproxCurve)\n if newArc is not None:\n return QgsGeometry.fromPolyline(newArc.asPolyline(tolerance2ApproxCurve))\n return None\n \n linearObjectListToStretch = qad_utils.QadLinearObjectList()\n linearObjectListToStretch.fromPolyline(geom.asPolyline())\n \n atPart = 0\n while atPart < linearObjectListToStretch.qty():\n linearObject = linearObjectListToStretch.getLinearObjectAt(atPart) \n if linearObject.isSegment():\n pt = linearObject.getStartPt()\n if isPtContainedForStretch(pt, ptListToStretch): # se il punto è contenuto in ptListToStretch\n # cambio punto iniziale \n pt.setX(pt.x() + offSetX)\n pt.setY(pt.y() + offSetY)\n linearObject.setStartPt(pt)\n \n pt = linearObject.getEndPt()\n if isPtContainedForStretch(pt, ptListToStretch): # se il punto è contenuto in ptListToStretch\n # cambio punto finale\n pt.setX(pt.x() + offSetX)\n pt.setY(pt.y() + offSetY)\n linearObject.setEndPt(pt)\n else: # se è arco\n newArc, newInverseFlag = gripStretchArc(linearObject.getArc(), ptListToStretch, offSetX, offSetY, tolerance2ApproxCurve, linearObject.isInverseArc())\n if newArc is None:\n return None\n linearObject.setArc(newArc, newInverseFlag)\n\n atPart = atPart + 1\n \n pt = linearObjectListToStretch.getCentroid(tolerance2ApproxCurve) # verifico se polilinea ha un centroide\n if pt is not None:\n if isPtContainedForStretch(pt, ptListToStretch): # se il punto è contenuto in ptListToStretch\n linearObjectListToStretch.move(offSetX, offSetY)\n \n pts = linearObjectListToStretch.asPolyline(tolerance2ApproxCurve)\n stretchedGeom = QgsGeometry.fromPolyline(pts) \n \n return stretchedGeom",
"def stretch_audio(x1, x2, sr, path, hop_length):\n from alignmenttools import refine_warping_path\n print(\"Stretching...\")\n path_final = [(row[0], row[1]) for row in path if row[0] < x1.size and row[1] < x2.size]\n path_final.append((x1.size, x2.size))\n path_final = hop_length*np.array(path_final, dtype=int)\n x3 = np.zeros((x2.size, 2))\n x3[:, 1] = x2\n x1_stretch = timemap_stretch(x1, sr, path_final)\n print(\"x1.shape = \", x1.shape)\n print(\"x2.shape = \", x2.shape)\n print(\"x1_stretch.shape = \", x1_stretch.shape)\n x1_stretch = x1_stretch[0:min(x1_stretch.size, x3.shape[0])]\n x3 = x3[0:min(x3.shape[0], x1_stretch.size), :]\n x3[:, 0] = x1_stretch\n return x3",
"def rubber(widget):\n widget.layout().addStretch(100)",
"def msvfw32_StretchDIB(jitter):\n ret_ad, args = jitter.func_args_stdcall([\"biDst\", \"lpvDst\", \"DstX\", \"DstY\", \"DstXE\", \"DstYE\", \"biSrc\", \"lpvSrc\", \"SrcX\", \"SrcY\", \"SrcXE\", \"SrcYE\"])\n raise RuntimeError('API not implemented')\n jitter.func_ret_stdcall(ret_ad, ret_value)",
"def route_bitlines(self):\n # adds the BL on metal 2\n offset = vector(self.bitcell.get_pin(self.bitcell_bl).cx(),0) - vector(0.5 * self.m2_width,0)\n self.add_layout_pin(text=\"bl\",\n layer=\"metal2\",\n offset=offset,\n width=drc['minwidth_metal2'],\n height=self.height)\n\n # adds the BR on metal 2\n offset = vector(self.bitcell.get_pin(self.bitcell_br).cx(),0) - vector(0.5 * self.m2_width,0)\n self.add_layout_pin(text=\"br\",\n layer=\"metal2\",\n offset=offset,\n width=drc['minwidth_metal2'],\n height=self.height)",
"def __init__(self, workplane, measures):\n\n cq.Workplane.bracket = utilities.bracket\n cq.Workplane.transformedWorkplane = utilities.transformedWorkplane\n cq.Workplane.bolt = utilities.bolt\n cq.Workplane.cutEachAdaptive = utilities.cutEachAdaptive\n\n self.model = workplane\n self.debug = False\n self.measures = measures\n m = self.measures\n\n # The bracket lengths are measured at the outside, but the construction actually uses a \n # central cuboid block with two attached brackets. Adapting the measures accordingly.\n m.center_block = Measures(\n # Naming is as seen from the horizontal leg.\n width = max(m.horizontal_leg.width, m.vertical_leg.width),\n depth = m.vertical_leg.height,\n height = m.horizontal_leg.height\n )\n m.horizontal_leg.depth -= m.center_block.depth\n m.vertical_leg.depth -= m.center_block.height\n\n # Create hole specs which combine the other hole measures in the format expected by bolthole().\n m.horizontal_leg.hole_specs = [\n {\n \"diameter\": m.horizontal_leg.hole_diameters[i] if isinstance(m.horizontal_leg.hole_diameters, list) else m.horizontal_leg.hole_diameters,\n \"clamp_length\": m.horizontal_leg.clamp_lengths[i] if isinstance(m.horizontal_leg.clamp_lengths, list) else m.horizontal_leg.clamp_lengths, \n \"nuthole_size\": m.horizontal_leg.nuthole_sizes[i] if isinstance(m.horizontal_leg.nuthole_sizes, list) else m.horizontal_leg.nuthole_sizes, \n \"nuthole_depth\": 1.1 * m.vertical_leg.depth # Just choose something large enough for cutting. \n }\n for i in range(m.horizontal_leg.hole_count)\n ]\n m.vertical_leg.hole_specs = [\n {\n \"diameter\": m.vertical_leg.hole_diameters[i] if isinstance(m.vertical_leg.hole_diameters, list) else m.vertical_leg.hole_diameters,\n \"clamp_length\": m.vertical_leg.clamp_lengths[i] if isinstance(m.vertical_leg.clamp_lengths, list) else m.vertical_leg.clamp_lengths, \n \"nuthole_size\": m.vertical_leg.nuthole_sizes[i] if isinstance(m.vertical_leg.nuthole_sizes, list) else m.vertical_leg.nuthole_sizes, \n \"nuthole_depth\": 1.1 * m.horizontal_leg.depth # Just choose something large enough for cutting. \n }\n for i in range(m.vertical_leg.hole_count)\n ]\n\n # TODO: Initialize missing measures with defaults.\n\n self.build()",
"def vizualize_stretch_and_squash(self, chain, where=None):\n\n # if where is not None:\n # parent = where\n # else:\n # parent = chain[0].parent()\n #\n # viz_subnet = parent.createNode(\"subnet\")\n # viz_subnet.setName(\"subnet_viz_chain\")\n # viz_subnet.parm(\"tdisplay\").set(1)\n # viz_subnet.parm(\"display\").set(0)\n # vizer_list = []\n # for i in chain:\n # fetcher = viz_subnet.createNode(\"fetch\", \"fetch_\"+i.name())\n # fetcher.setColor(hou.Color((1.0, 0.4, 0.0)))\n # fetcher.parm(\"useinputoffetched\").set(1)\n # fetcher.parm(\"fetchsubnet\").set(1)\n # fetcher.parm(\"fetchobjpath\").set(fetcher.relativePathTo(i))\n # fetcher.setDisplayFlag(0)\n #\n # vizer = viz_subnet.createNode(\"geo\", \"visualize_\"+i.name())\n # vizer.node(\"file1\").destroy()\n # vizer.createNode(\"sphere\")\n #\n # vizer.parm(\"scale\").set(i.parm(\"crscalex\") )\n # vizer.parm(\"sx\").set(0.025)\n # vizer.parm(\"sy\").set(0.025)\n # vizer.parm(\"sz\").set(0.025)\n #\n # vizer.setInput(0, fetcher)\n # vizer_list.append(vizer)\n # viz_subnet.layoutChildren()\n #\n # return vizer_list",
"def gripStretchQgsGeometry(geom, basePt, ptListToStretch, offSetX, offSetY, tolerance2ApproxCurve):\n wkbType = geom.wkbType()\n if wkbType == QGis.WKBPoint or wkbType == QGis.WKBPoint25D:\n pt = stretchPoint(geom.asPoint(), ptListToStretch, offSetX, offSetY)\n if pt is not None:\n return QgsGeometry.fromPoint(pt)\n \n if wkbType == QGis.WKBMultiPoint:\n stretchedGeom = QgsGeometry(geom)\n points = stretchedGeom.asMultiPoint() # vettore di punti\n atSubGeom = 0\n for pt in points:\n subGeom = QgsGeometry.fromPoint(pt)\n stretchedSubGeom = gripStretchQgsGeometry(subGeom, basePt, ptListToStretch, offSetX, offSetY, tolerance2ApproxCurve)\n stretchedGeom = qad_utils.setSubGeom(stretchedGeom, stretchedSubGeom, [atSubGeom]) \n atSubGeom = atSubGeom + 1\n return stretchedGeom\n\n if wkbType == QGis.WKBLineString:\n return gripStretchQgsLineStringGeometry(geom, basePt, ptListToStretch, offSetX, offSetY, tolerance2ApproxCurve)\n \n if wkbType == QGis.WKBMultiLineString:\n stretchedGeom = QgsGeometry(geom)\n lines = stretchedGeom.asMultiPolyline() # lista di linee\n atSubGeom = 0\n for line in lines: \n subGeom = QgsGeometry.fromPolyline(line)\n stretchedSubGeom = gripStretchQgsGeometry(subGeom, basePt, ptListToStretch, offSetX, offSetY, tolerance2ApproxCurve)\n stretchedGeom = qad_utils.setSubGeom(stretchedGeom, stretchedSubGeom, [atSubGeom]) \n atSubGeom = atSubGeom + 1\n return stretchedGeom\n \n if wkbType == QGis.WKBPolygon:\n stretchedGeom = QgsGeometry(geom)\n lines = stretchedGeom.asPolygon() # lista di linee\n atSubGeom = 0\n for line in lines: \n subGeom = QgsGeometry.fromPolyline(line)\n stretchedSubGeom = gripStretchQgsGeometry(subGeom, basePt, ptListToStretch, offSetX, offSetY, tolerance2ApproxCurve)\n stretchedGeom = qad_utils.setSubGeom(stretchedGeom, stretchedSubGeom, [atSubGeom]) \n atSubGeom = atSubGeom + 1\n return stretchedGeom\n \n if wkbType == QGis.WKBMultiPolygon:\n stretchedGeom = QgsGeometry(geom)\n polygons = geom.asMultiPolygon() # vettore di poligoni\n atSubGeom = 0\n for polygon in polygons:\n subGeom = QgsGeometry.fromPolygon(polygon)\n stretchedSubGeom = gripStretchQgsGeometry(subGeom, basePt, ptListToStretch, offSetX, offSetY, tolerance2ApproxCurve)\n stretchedGeom = qad_utils.setSubGeom(stretchedGeom, stretchedSubGeom, [atSubGeom]) \n atSubGeom = atSubGeom + 1\n return stretchedGeom\n \n return None",
"def sizing(self, thrust, rho, Rmax, wing_groups, clearance, bfus):\n\n# case 1: calculate radius from disk loading\n if self.set_DL:\n self.area = thrust/self.diskloading \n self.radius = sqrt(self.area/pi)\n\n# case 2: radius given directly\n elif self.set_radius:\n R = self.radius\n\n# case 3: span-driven sizing\n# loop over all wings this rotor appears on, find the multiplier\n# for rotor radius that identifies span required\n# (span - fuselage)/2 = available length along which rotors can be placed\n# this value is equal to multiplier * radius, hence find radius\n elif self.span_driven:\n wgids = self.wing_group_ids\n size = 1.0 + clearance*0.5 # clearance for rotor on each side\n Rmin = 1.0e9\n for wgid in wgids:\n group = wing_groups[wgid]\n nr = group.nrotors/group.nwings\n\n multiplier = size*float(nr) - 1.0\n radius = (group.span - bfus)*0.5/multiplier\n Rmin = min(radius, Rmin)\n #print('group',wgid,'SPAN = ',group.span,multiplier,radius,Rmin)\n #print('rotor radius from span driven sizing is ',Rmin)\n #x1=input('?')\n self.radius = Rmin \n\n# error message\n else:\n quit('CRITICAL ERROR: EITHER SET RADIUS OR DL, or enable span-driven rotor sizing')\n\n# cap max rotor size\n if(self.radius > Rmax):\n self.radius = Rmax \n print('capping rotor radius to max available')\n\n R = self.radius\n# print('rotor radius after sizing is ',R)\n\n#set diameter, area and disk loading\n self.diameter = 2.0*self.radius\n A = pi * self.radius * self.radius \n self.area = A\n self.diskloading = thrust/A\n\n# rotor blade loading or tip speed in hover \n if self.set_Vtip:\n CT = thrust/(rho*A*self.tipspeed*self.tipspeed) \n else:\n quit('CRITICAL ERROR: need to know tip speed for sizing')\n\n# need blade loading or solidity\n if self.set_BL:\n self.solidity = CT/self.ctsigma \n elif self.set_sigma:\n self.ctsigma = CT/self.solidity\n else:\n quit('SET EITHER BLADE LOADING or ROTOR SOLIDITY')\n\n# Main rotor chord, SI (m)\n self.aspectratio = self.nblade/(pi*self.solidity)\n self.chord = self.radius / self.aspectratio\n self.ctsigma = CT/self.solidity\n\n# 15% interference penalty for induced power of coaxial\n if self.type == 'coaxial':\n self.kint = 1.16 \n\n return None",
"def gripStretchQgsLinearObjectList(linearObjectList, ptListToStretch, offSetX, offSetY, tolerance2ApproxCurve):\n linearObjectListToStretch = qad_utils.QadLinearObjectList(linearObjectList)\n \n atPart = 0\n while atPart < linearObjectListToStretch.qty():\n linearObject = linearObjectListToStretch.getLinearObjectAt(atPart) \n if linearObject.isSegment():\n pt = linearObject.getStartPt()\n if isPtContainedForStretch(pt, ptListToStretch): # se il punto è contenuto in ptListToStretch\n # cambio punto iniziale \n pt.setX(pt.x() + offSetX)\n pt.setY(pt.y() + offSetY)\n linearObject.setStartPt(pt)\n \n pt = linearObject.getEndPt()\n if isPtContainedForStretch(pt, ptListToStretch): # se il punto è contenuto in ptListToStretch\n # cambio punto finale\n pt.setX(pt.x() + offSetX)\n pt.setY(pt.y() + offSetY)\n linearObject.setEndPt(pt)\n else: # se è arco\n newArc, newInverseFlag = gripStretchArc(linearObject.getArc(), ptListToStretch, offSetX, offSetY, tolerance2ApproxCurve, linearObject.isInverseArc())\n if newArc is None:\n return None\n linearObject.setArc(newArc, newInverseFlag)\n\n atPart = atPart + 1\n \n pt = linearObjectListToStretch.getCentroid(tolerance2ApproxCurve) # verifico se polilinea ha un centroide\n if pt is not None:\n if isPtContainedForStretch(pt, ptListToStretch): # se il punto è contenuto in ptListToStretch\n linearObjectListToStretch.move(offSetX, offSetY)\n\n return linearObjectListToStretch",
"def setup_resistant_layer(self, \n thickness,\n block_height,\n nrocks,\n dip,\n release_relief_threshold,\n starting_point,\n initial_elevation,\n top_of_hard_layer,\n bottom_of_hard_layer,\n x_coordinate_array,\n bedrock_elevation, \n node_spacing, \n core_nodes,\n channel_nodes,\n dx):\n #setup hillslope vs. channel nodes\n self.hillslope_nodes = np.copy(core_nodes)\n self.blocks_to_channel_size = np.array([], dtype = np.float64)\n self.blocks_to_channel_location = np.array([], dtype = np.int64)\n for fish in range(len(self.hillslope_nodes)):\n if self.hillslope_nodes[fish] in channel_nodes:\n self.hillslope_nodes[fish] = -9999\n hillslope_nodes_delete = np.where(self.hillslope_nodes == -9999)[0]\n self.hillslope_nodes = np.delete(self.hillslope_nodes, hillslope_nodes_delete)\n \n \n \n #nrocks = int((np.power(node_spacing, 2) * block_height) / (block_height * np.power(block_width, 2)))\n slope_threshold = release_relief_threshold / node_spacing\n \n #convert slope to radians\n slope_rad = np.radians(dip)\n \n block_width = dx\n \n #define resistant layer location\n thickness = thickness * block_height * np.cos(slope_rad) * block_width\n #starting_point = ncols*dx/2 #x values where layer begins\n intercept = initial_elevation - (np.tan(slope_rad) * starting_point)\n top_of_hard_layer[:] = np.tan(slope_rad) * x_coordinate_array + intercept\n bottom_of_hard_layer[:] = top_of_hard_layer[:] - \\\n (thickness / np.cos(slope_rad))\n #hard_layer = np.where((bedrock_elevation <= top_of_hard_layer) & \\\n # (bedrock_elevation >= bottom_of_hard_layer))[0]\n\n return (slope_threshold, slope_rad) #hard_layer",
"def build_rig(self):\n\n # create rig part top nodes\n self.create_part_master()\n\n # Get all the relevant part info\n prefix = self.prefix\n options = self.options\n anim_ctrls = self.anim_ctrls\n bind_jnts = self.bind_joints\n hooks = self.hooks\n ctrl_grps = self.ctrl_grps\n jnt_grps = self.jnt_grps\n\n mirror = self.mirror_value\n\n parent = options.get('parent')\n squash_stretch = options.get('squashStretch')\n aimDownBone = options.get('aimDownBone')\n single_joint = options.get('singleJoint')\n number_joints = options.get('numberJoints')\n pickWalk_parent = options.get('pickWalkParent')\n\n # Create ctrls\n zeros, ctrls, offsets, last_nodes = [], [], [], []\n\n for i, ctrl_name in enumerate(anim_ctrls):\n zero, ctrl, offCtrls, last_node = self.anim_ctrl(ctrl_name)\n zeros.append(zero)\n ctrls.append(ctrl)\n offsets.append(offCtrls)\n last_nodes.append(last_node)\n\n #Setup pickwaliking attributes for the fingers\n i = 0\n ctrls.reverse()\n for ctrl in ctrls:\n\n if i+1 < len(ctrls):\n\n pickWalk.attribute_tag(ctrls[i],ctrls[i+1])\n else:\n pickWalk.attribute_tag(ctrls[i],pickWalk_parent)\n break\n\n i+=1\n ctrls.reverse()\n\n if len(ctrls) > 1:\n for i in range(1, len(ctrls), 1):\n mc.parent(zeros[i], last_nodes[i-1])\n\n # constraint jnts\n if len(bind_jnts) > 2:\n\n # point and aim/orient contraint all joints down the chain based on the\n for i in range(len(last_nodes)-1):\n mc.pointConstraint(last_nodes[i], bind_jnts[i], mo=1, n=bind_jnts[i]+'_pc')\n if not squash_stretch:\n mc.scaleConstraint(last_nodes[i], bind_jnts[i], mo=1, n=bind_jnts[i]+'_sc')\n\n if i < len(last_nodes)-1:\n print aimDownBone\n if aimDownBone:\n mc.aimConstraint(last_nodes[i+1],\n bind_jnts[i],\n aim=[mirror,0,0],\n u=[0,1,0],\n wu=[0,1,0],\n wut='objectRotation',\n wuo=last_nodes[i],\n mo=1, n=bind_jnts[i]+'_ac')\n if aimDownBone == False:\n mc.orientConstraint(last_nodes[i],bind_jnts[i],n=bind_jnts[i]+'_oc')\n\n #parent constrain the last joint ot the last ctrl\n # mc.parentConstraint(last_nodes[-1], bind_jnts[-2], mo=1, n=bind_jnts[-2]+'_prc')\n # mc.parentConstraint(last_nodes[-1], bind_jnts[-1], mo=1, n=bind_jnts[-1]+'_prc')\n\n # if not squash_stretch:\n # mc.scaleConstraint(last_nodes[-1], bind_jnts[-2], mo=1, n=bind_jnts[-2]+'_sc')\n # mc.scaleConstraint(last_nodes[-1], bind_jnts[-1], mo=1, n=bind_jnts[-1]+'_sc')\n\n elif single_joint or number_joints == 1:\n mc.parentConstraint(last_nodes[0], bind_jnts[0], mo=1, n=bind_jnts[0]+'_prc')\n mc.scaleConstraint(last_nodes[0], bind_jnts[0], mo=1, n=bind_jnts[0]+'_sc')\n\n else:\n if squash_stretch:\n spline.preserve_volume(ctrls, bind_jnts[:-1], ctrls[0], attrs=['sy','sz'])\n\n mc.parentConstraint(bind_jnts[-2], bind_jnts[-1], mo=1, n=bind_jnts[-1]+'_prc')\n mc.scaleConstraint(bind_jnts[-2], bind_jnts[-1], mo=1, n=bind_jnts[-1]+'_sc')\n\n mc.parent(zeros[0], ctrl_grps[0])\n mc.parent(bind_jnts, jnt_grps[0])\n\n if not single_joint and number_joints == 1:\n mc.parent(bind_jnts[-1], bind_jnts[0])\n\n #utils.create_cfx_curves(self.bind_joints, self.prefix+'_'+self.part_type)\n\n if len(ctrls) > 1:\n spaces.tag(ctrls, arg='partParent:'+self.options.get('parent'))\n else:\n spaces.tag(ctrls)\n\n self.finalize_part()",
"def gripStretchQadGeometry(geom, basePt, ptListToStretch, offSetX, offSetY, tolerance2ApproxCurve):\n if type(geom) == list: # entità composta da più geometrie\n res = []\n for subGeom in geom:\n res.append(gripStretchQadGeometry(subGeom, basePt, ptListToStretch, offSetX, offSetY, tolerance2ApproxCurve))\n return res\n else:\n if type(geom) == QgsPoint:\n return stretchPoint(geom, ptListToStretch, offSetX, offSetY)\n elif geom.whatIs() == \"ARC\":\n return gripStretchArc(geom, ptListToStretch, offSetX, offSetY, tolerance2ApproxCurve) \n elif geom.whatIs() == \"CIRCLE\":\n return gripStretchCircle(geom, basePt, ptListToStretch, offSetX, offSetY, tolerance2ApproxCurve)\n elif geom.whatIs() == \"LINEAROBJS\":\n return gripStretchQgsLinearObjectList(geom, ptListToStretch, offSetX, offSetY, tolerance2ApproxCurve)\n\n return None",
"def setup(self):\n # Establish super block definition\n self.nxsup = min(self.nx, self.MAXSB[0])\n self.nysup = min(self.ny, self.MAXSB[1])\n self.nzsup = min(self.nz, self.MAXSB[2])\n\n self.xsizsup = self.nx * self.xsiz / self.nxsup\n self.ysizsup = self.ny * self.ysiz / self.nysup\n self.zsizsup = self.nz * self.zsiz / self.nzsup\n\n self.xmnsup = (self.xmn - 0.5 * self.xsiz) + 0.5 * self.xsizsup\n self.ymnsup = (self.ymn - 0.5 * self.ysiz) + 0.5 * self.ysizsup\n self.zmnsup = (self.zmn - 0.5 * self.zsiz) + 0.5 * self.zsizsup\n\n # partition data into each super block\n x_block = np.arange(self.xmnsup - 0.5 * self.xsizsup,\n self.xmnsup + (self.nxsup + 1) * self.xsizsup + 1,\n self.xsizsup)\n x_index = np.searchsorted(x_block, self.vr['x']) - 1\n\n y_block = np.arange(self.ymnsup - 0.5 * self.ysizsup,\n self.ymnsup + (self.nysup + 1) * self.ysizsup + 1,\n self.ysizsup)\n y_index = np.searchsorted(y_block, self.vr['y']) - 1\n\n z_block = np.arange(self.zmnsup - 0.5 * self.zsizsup,\n self.zmnsup + (self.nzsup + 1) * self.zsizsup + 1,\n self.zsizsup)\n z_index = np.searchsorted(z_block, self.vr['z']) - 1\n\n # self.super_block = np.full((self.nxsup, self.nysup, self.nzsup), [])\n temp = np.zeros_like(self.vr['x'])\n self.nisb = np.zeros((self.nxsup*self.nysup*self.nzsup,))\n for idx, (ix, iy, iz) in enumerate(zip(x_index, y_index, z_index)):\n ii = super_flat_index(ix, iy, iz, self.nxsup, self.nysup)\n temp[idx] = ii\n self.nisb[ii] += 1\n\n # sort data by asceding super block number:\n self.sort_index = np.argsort(temp)\n self.vr = self.vr[self.sort_index]\n # set up nisb\n self.nisb = np.cumsum(self.nisb, dtype=np.int)",
"def bndy_plasma(self):\n self.ne[0], self.ne[-1] = 1e11, 1e11\n self.ni[0], self.ni[-1] = 1e11, 1e11\n self.nn[0], self.nn[-1] = 1e11, 1e11\n self.Te[0], self.Te[-1] = 0.1, 0.1\n self.Ti[0], self.Ti[-1] = 0.01, 0.01\n # self.coll_em[0], self.coll_em[-1] = 1e5, 1e5\n # self.coll_im[0], self.coll_im[-1] = 1e5, 1e5"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Create stretch point constraints on a chain of stretch joints. | def stretch_twist_jnts(start_jnt, end_jnt, twist_jnts):
div = 1.0 / (len(twist_jnts)+1)
for i, joint in enumerate(twist_jnts):
weight = div*(i+1)
mc.pointConstraint(start_jnt, joint, weight=1.0-weight)
mc.pointConstraint(end_jnt, joint, weight=weight) | [
"def main_str_transform_setup(self, stretch_bone, chain_length):\n\n\t\tcum_length = self.org_chain[0].length\n\t\tfor i, main_str_bone in enumerate(self.main_str_bones):\n\t\t\tif i == 0: continue\n\t\t\tif i == len(self.main_str_bones)-1: continue\n\t\t\tmain_str_helper = self.bone_infos.bone(\n\t\t\t\tname\t\t = main_str_bone.name.replace(\"STR-\", \"STR-S-\")\n\t\t\t\t,source\t\t = main_str_bone\n\t\t\t\t,bbone_width = 1/10\n\t\t\t\t,bone_group\t = self.bone_groups[\"IK Mechanism\"]\n\t\t\t\t,layers\t\t = self.bone_layers[\"IK Mechanism\"]\n\t\t\t\t,parent\t\t = main_str_bone.parent\n\t\t\t\t,hide_select = self.mch_disable_select\n\t\t\t)\n\t\t\tmain_str_bone.parent = main_str_helper\n\n\t\t\tcon_name = 'CopyLoc_IK_Stretch'\n\t\t\tmain_str_helper.add_constraint(self.obj, 'COPY_LOCATION'\n\t\t\t\t,true_defaults\t= True\n\t\t\t\t,target\t\t\t= self.obj\n\t\t\t\t,subtarget\t\t= stretch_bone.name\n\t\t\t\t,name\t\t\t= con_name\n\t\t\t\t,head_tail\t\t= cum_length/chain_length\t# How far this bone is along the total chain length\n\t\t\t)\n\t\t\tcum_length += self.org_chain[i].length\n\n\t\t\tstretchy_drv = Driver()\t\t# Influence driver\n\t\t\tstretchy_drv.expression = f\"ik * stretch * (distance > {chain_length} * scale)\"\n\t\t\tvar_stretch = stretchy_drv.make_var(\"stretch\")\n\t\t\tvar_stretch.type = 'SINGLE_PROP'\n\t\t\tvar_stretch.targets[0].id_type = 'OBJECT'\n\t\t\tvar_stretch.targets[0].id = self.obj\n\t\t\tvar_stretch.targets[0].data_path = f'pose.bones[\"{self.prop_bone.name}\"][\"{self.ik_stretch_name}\"]'\n\n\t\t\tvar_ik = stretchy_drv.make_var(\"ik\")\n\t\t\tvar_ik.type = 'SINGLE_PROP'\n\t\t\tvar_ik.targets[0].id_type = 'OBJECT'\n\t\t\tvar_ik.targets[0].id = self.obj\n\t\t\tvar_ik.targets[0].data_path = f'pose.bones[\"{self.prop_bone.name}\"][\"{self.ikfk_name}\"]'\n\n\t\t\tvar_dist = stretchy_drv.make_var(\"distance\")\n\t\t\tvar_dist.type = 'LOC_DIFF'\n\t\t\tvar_dist.targets[0].id = self.obj\n\t\t\tvar_dist.targets[0].bone_target = self.ik_tgt_bone.name\n\t\t\tvar_dist.targets[0].transform_space = 'WORLD_SPACE'\n\t\t\tvar_dist.targets[1].id = self.obj\n\t\t\tvar_dist.targets[1].bone_target = self.ik_chain[0].name\n\t\t\tvar_dist.targets[1].transform_space = 'WORLD_SPACE'\n\n\t\t\tvar_scale = stretchy_drv.make_var(\"scale\")\n\t\t\tvar_scale.type = 'TRANSFORMS'\n\t\t\tvar_scale.targets[0].id = self.obj\n\t\t\tvar_scale.targets[0].transform_type = 'SCALE_Y'\n\t\t\tvar_scale.targets[0].transform_space = 'WORLD_SPACE'\n\t\t\tvar_scale.targets[0].bone_target = self.ik_chain[0].name\n\n\t\t\tdata_path = f'constraints[\"{con_name}\"].influence'\n\n\t\t\tmain_str_helper.drivers[data_path] = stretchy_drv",
"def gripStretchQgsLineStringGeometry(geom, basePt, ptListToStretch, offSetX, offSetY, tolerance2ApproxCurve):\n obj = qad_utils.whatGeomIs(0, geom)\n if (type(obj) != list and type(obj) != tuple):\n objType = obj.whatIs()\n if objType == \"CIRCLE\": # se é cerchio\n newCircle = gripStretchCircle(obj, basePt, ptListToStretch, offSetX, offSetY, tolerance2ApproxCurve)\n if newCircle is not None:\n return QgsGeometry.fromPolyline(newCircle.asPolyline(tolerance2ApproxCurve))\n elif objType == \"ARC\": # se é arco\n newArc = gripStretchArc(obj, ptListToStretch, offSetX, offSetY, tolerance2ApproxCurve)\n if newArc is not None:\n return QgsGeometry.fromPolyline(newArc.asPolyline(tolerance2ApproxCurve))\n return None\n \n linearObjectListToStretch = qad_utils.QadLinearObjectList()\n linearObjectListToStretch.fromPolyline(geom.asPolyline())\n \n atPart = 0\n while atPart < linearObjectListToStretch.qty():\n linearObject = linearObjectListToStretch.getLinearObjectAt(atPart) \n if linearObject.isSegment():\n pt = linearObject.getStartPt()\n if isPtContainedForStretch(pt, ptListToStretch): # se il punto è contenuto in ptListToStretch\n # cambio punto iniziale \n pt.setX(pt.x() + offSetX)\n pt.setY(pt.y() + offSetY)\n linearObject.setStartPt(pt)\n \n pt = linearObject.getEndPt()\n if isPtContainedForStretch(pt, ptListToStretch): # se il punto è contenuto in ptListToStretch\n # cambio punto finale\n pt.setX(pt.x() + offSetX)\n pt.setY(pt.y() + offSetY)\n linearObject.setEndPt(pt)\n else: # se è arco\n newArc, newInverseFlag = gripStretchArc(linearObject.getArc(), ptListToStretch, offSetX, offSetY, tolerance2ApproxCurve, linearObject.isInverseArc())\n if newArc is None:\n return None\n linearObject.setArc(newArc, newInverseFlag)\n\n atPart = atPart + 1\n \n pt = linearObjectListToStretch.getCentroid(tolerance2ApproxCurve) # verifico se polilinea ha un centroide\n if pt is not None:\n if isPtContainedForStretch(pt, ptListToStretch): # se il punto è contenuto in ptListToStretch\n linearObjectListToStretch.move(offSetX, offSetY)\n \n pts = linearObjectListToStretch.asPolyline(tolerance2ApproxCurve)\n stretchedGeom = QgsGeometry.fromPolyline(pts) \n \n return stretchedGeom",
"def biped_stretch(ik_ctrl,\n ik_last_node,\n pv_ctrl,\n switch_ctrl,\n up_arm_fk_ctrl,\n lo_arm_fk_ctrl,\n wrist_fk_ctrl,\n up_arm_ik_jnt,\n lo_arm_ik_jnt,\n wrist_ik_jnt,\n ik_handle,\n pin_attr_name='pinElbow',\n shift_attr_name='shiftElbow'):\n\n # add all my attrs on ctrls\n mc.addAttr(ik_ctrl, ln=pin_attr_name, at='double', min=0, max=1, k=1)\n mc.addAttr(ik_ctrl, ln=shift_attr_name, at='double', min=-1, max=1, k=1)\n\n mc.addAttr(ik_ctrl, ln='autoStretch', at='double', min=0, max=1, k=1)\n mc.addAttr(ik_ctrl, ln='upStretch', at='double', dv=1, min=0.001, k=1)\n mc.addAttr(ik_ctrl, ln='loStretch', at='double', dv=1, min=0.001, k=1)\n\n mc.addAttr(up_arm_fk_ctrl, ln='stretch', at='double', dv=1, min=0.001, k=1)\n mc.addAttr(lo_arm_fk_ctrl, ln='stretch', at='double', dv=1, min=0.001, k=1)\n\n # store initial length of joint\n lo_init_length = mc.getAttr(lo_arm_ik_jnt+'.tx')\n wrist_init_length = mc.getAttr(wrist_ik_jnt+'.tx')\n max_init_length = mc.getAttr(lo_arm_ik_jnt+'.tx')+mc.getAttr(wrist_ik_jnt+'.tx')\n\n lo_abs_init_length = abs(mc.getAttr(lo_arm_ik_jnt+'.tx'))\n wrist_abs_length = abs(mc.getAttr(wrist_ik_jnt+'.tx'))\n\n # Get parents for ik handle and root of the parm\n arm_root_grp = utils.get_parent(up_arm_ik_jnt)\n\n # Create distance nodes between base, end, and pv ctrl to get the length of side of the triangle\n root_to_end_dist = utils.create_distance_reader(arm_root_grp, ik_last_node)\n root_to_pv_dist = utils.create_distance_reader(arm_root_grp, pv_ctrl)\n pv_to_end_dist = utils.create_distance_reader(pv_ctrl, ik_last_node)\n\n # easy stuff first - create fk stretch nodes\n lo_arm_fk_mdl = mc.createNode('multDoubleLinear')\n wrist_fk_mdl = mc.createNode('multDoubleLinear')\n\n mc.setAttr(lo_arm_fk_mdl+'.input1', mc.getAttr(lo_arm_ik_jnt+'.tx'))\n mc.setAttr(wrist_fk_mdl+'.input1', mc.getAttr(wrist_ik_jnt+'.tx'))\n mc.connectAttr(up_arm_fk_ctrl+'.stretch', lo_arm_fk_mdl+'.input2')\n mc.connectAttr(lo_arm_fk_ctrl+'.stretch', wrist_fk_mdl+'.input2')\n\n utils.connect_abs(lo_arm_fk_mdl+'.output', lo_arm_fk_ctrl+'_ZERO.tx')\n if wrist_fk_ctrl and mc.objExists(wrist_fk_ctrl):\n utils.connect_abs(wrist_fk_mdl+'.output', wrist_fk_ctrl+'_ZERO.tx')\n\n # These arethe final fk stretch outputs to connect to joints\n fk_stretch_final_output = [lo_arm_fk_mdl+'.output', wrist_fk_mdl+'.output']\n\n # NOW creates node s for thew elbow pin\n lo_arm_pin_mdl = mc.createNode('multDoubleLinear')\n wrist_pin_mdl = mc.createNode('multDoubleLinear')\n\n mc.setAttr(lo_arm_pin_mdl+'.input1', 1)\n mc.setAttr(wrist_pin_mdl+'.input1', 1)\n\n if lo_init_length < 0.0:\n mc.setAttr(lo_arm_pin_mdl+'.input1', -1)\n\n if wrist_init_length < 0.0:\n mc.setAttr(wrist_pin_mdl+'.input1', -1)\n\n mc.connectAttr(root_to_pv_dist+'.localDistance', lo_arm_pin_mdl+'.input2')\n mc.connectAttr(pv_to_end_dist+'.localDistance', wrist_pin_mdl+'.input2')\n\n # These arethe final elbow pin stretch outputs to connect to joints\n pin_final_output = [lo_arm_pin_mdl+'.output', wrist_pin_mdl+'.output']\n\n # create shift nodes\n mc.addAttr(lo_arm_ik_jnt, ln='shiftLength', k=1)\n mc.addAttr(wrist_ik_jnt, ln='shiftLength', k=1)\n\n tt = 'linear'\n mc.setDrivenKeyframe(lo_arm_ik_jnt+'.shiftLength', cd=ik_ctrl+'.'+shift_attr_name, dv=0, v=lo_init_length, itt=tt, ott=tt)\n mc.setDrivenKeyframe(lo_arm_ik_jnt+'.shiftLength', cd=ik_ctrl+'.'+shift_attr_name, dv=1, v=0, itt=tt, ott=tt)\n mc.setDrivenKeyframe(lo_arm_ik_jnt+'.shiftLength', cd=ik_ctrl+'.'+shift_attr_name, dv=-1, v=max_init_length, itt=tt, ott=tt)\n\n 
mc.setDrivenKeyframe(wrist_ik_jnt+'.shiftLength', cd=ik_ctrl+'.'+shift_attr_name, dv=0, v=wrist_init_length, itt=tt, ott=tt)\n mc.setDrivenKeyframe(wrist_ik_jnt+'.shiftLength', cd=ik_ctrl+'.'+shift_attr_name, dv=1, v=max_init_length, itt=tt, ott=tt)\n mc.setDrivenKeyframe(wrist_ik_jnt+'.shiftLength', cd=ik_ctrl+'.'+shift_attr_name, dv=-1, v=0, itt=tt, ott=tt)\n\n shift_final_output = [ lo_arm_ik_jnt+'.shiftLength', wrist_ik_jnt+'.shiftLength']\n\n # Create ik indivisual stretch nodes\n lo_arm_ik_scale_mdl = mc.createNode('multDoubleLinear')\n wrist_ik_scale_mdl = mc.createNode('multDoubleLinear')\n\n mc.connectAttr(shift_final_output[0], lo_arm_ik_scale_mdl+'.input1')\n mc.connectAttr(shift_final_output[1], wrist_ik_scale_mdl+'.input1')\n mc.connectAttr(ik_ctrl+'.upStretch', lo_arm_ik_scale_mdl+'.input2')\n mc.connectAttr(ik_ctrl+'.loStretch', wrist_ik_scale_mdl+'.input2')\n\n # This is the final output for scale and shift\n ik_stretch_final_output = [lo_arm_ik_scale_mdl+'.output', wrist_ik_scale_mdl+'.output']\n\n # Now create the IK auto stretch nodes\n lo_auto_stretch_mdl = mc.createNode('multDoubleLinear')\n wrist_auto_stretch_mdl = mc.createNode('multDoubleLinear')\n\n auto_stretch_clamp = mc.createNode('clamp')\n mc.setAttr(auto_stretch_clamp+'.minR', 1)\n mc.setAttr(auto_stretch_clamp+'.maxR', 10000000)\n\n mc.connectAttr(ik_stretch_final_output[0], lo_auto_stretch_mdl+'.input1', f=1)\n mc.connectAttr(ik_stretch_final_output[1], wrist_auto_stretch_mdl+'.input1', f=1)\n mc.connectAttr(root_to_end_dist+'.stretchFactor', auto_stretch_clamp+'.inputR')\n\n mc.connectAttr(auto_stretch_clamp+'.outputR', lo_auto_stretch_mdl+'.input2', f=1)\n mc.connectAttr(auto_stretch_clamp+'.outputR', wrist_auto_stretch_mdl+'.input2', f=1)\n\n adl = mc.createNode('addDoubleLinear')\n mc.connectAttr(lo_arm_ik_scale_mdl+'.output', adl+'.input1')\n mc.connectAttr(wrist_ik_scale_mdl+'.output', adl+'.input2')\n utils.connect_abs(adl+'.output', root_to_end_dist+'.jointChainLength')\n\n # handle soft ik handle constraint override\n pc = mc.pointConstraint(ik_last_node, ik_handle)[0]\n if mc.objExists(up_arm_ik_jnt+'.softIkChainLength'):\n\n # compensate feed in new chain length for soft ik chain length\n utils.connect_abs(adl+'.output', up_arm_ik_jnt+'.softIkChainLength')\n\n # blend off the soft ik constraint IF im in auto s tretch or pin mode\n mdl = mc.createNode('multDoubleLinear')\n utils.connect_reverse(ik_ctrl+'.'+pin_attr_name, mdl+'.input1')\n utils.connect_reverse(ik_ctrl+'.autoStretch', mdl+'.input2')\n mc.connectAttr(mdl+'.output', pc+'.w0')\n utils.connect_reverse(pc+'.w0', pc+'.w1')\n\n ik_auto_stretch_final_output = [lo_auto_stretch_mdl+'.output', wrist_auto_stretch_mdl+'.output']\n\n # now create all my blends\n\n # first blend btween FK and an empty ik input\n # (this ikl input will take another blend node for blending oall the IK options )\n fk_to_ik_blend = mc.createNode('blendColors')\n\n mc.connectAttr(switch_ctrl+'.IK', fk_to_ik_blend+'.blender')\n mc.connectAttr(fk_stretch_final_output[0], fk_to_ik_blend+'.color2R')\n mc.connectAttr(fk_stretch_final_output[1], fk_to_ik_blend+'.color2G')\n\n # now create a blender between pin elbow and the rest of the ik options\n auto_ik_blend = mc.createNode('blendColors')\n\n mc.connectAttr(ik_ctrl+'.autoStretch', auto_ik_blend+'.blender')\n mc.connectAttr(ik_auto_stretch_final_output[0], auto_ik_blend+'.color1R')\n mc.connectAttr(ik_auto_stretch_final_output[1], auto_ik_blend+'.color1G')\n\n # Now connect it toth fk blend\n 
mc.connectAttr(auto_ik_blend+'.outputR', fk_to_ik_blend+'.color1R')\n mc.connectAttr(auto_ik_blend+'.outputG', fk_to_ik_blend+'.color1G')\n\n # now create a blender between pin elbow and the rest of the ik options\n pin_ik_blend = mc.createNode('blendColors')\n\n mc.connectAttr(ik_ctrl+'.'+pin_attr_name, pin_ik_blend+'.blender')\n mc.connectAttr(pin_final_output[0], pin_ik_blend+'.color1R')\n mc.connectAttr(pin_final_output[1], pin_ik_blend+'.color1G')\n\n # Now connect it toth fk blend\n mc.connectAttr(pin_ik_blend+'.outputR', auto_ik_blend+'.color2R')\n mc.connectAttr(pin_ik_blend+'.outputG', auto_ik_blend+'.color2G')\n\n # now connect the shift and scale\n mc.connectAttr(ik_stretch_final_output[0], pin_ik_blend+'.color2R')\n mc.connectAttr(ik_stretch_final_output[1], pin_ik_blend+'.color2G')\n\n # now for the magic! Connect the blend networll to joints\n mc.connectAttr(fk_to_ik_blend+'.outputR', lo_arm_ik_jnt+'.tx')\n mc.connectAttr(fk_to_ik_blend+'.outputG', wrist_ik_jnt+'.tx')",
"def gripStretchQgsGeometry(geom, basePt, ptListToStretch, offSetX, offSetY, tolerance2ApproxCurve):\n wkbType = geom.wkbType()\n if wkbType == QGis.WKBPoint or wkbType == QGis.WKBPoint25D:\n pt = stretchPoint(geom.asPoint(), ptListToStretch, offSetX, offSetY)\n if pt is not None:\n return QgsGeometry.fromPoint(pt)\n \n if wkbType == QGis.WKBMultiPoint:\n stretchedGeom = QgsGeometry(geom)\n points = stretchedGeom.asMultiPoint() # vettore di punti\n atSubGeom = 0\n for pt in points:\n subGeom = QgsGeometry.fromPoint(pt)\n stretchedSubGeom = gripStretchQgsGeometry(subGeom, basePt, ptListToStretch, offSetX, offSetY, tolerance2ApproxCurve)\n stretchedGeom = qad_utils.setSubGeom(stretchedGeom, stretchedSubGeom, [atSubGeom]) \n atSubGeom = atSubGeom + 1\n return stretchedGeom\n\n if wkbType == QGis.WKBLineString:\n return gripStretchQgsLineStringGeometry(geom, basePt, ptListToStretch, offSetX, offSetY, tolerance2ApproxCurve)\n \n if wkbType == QGis.WKBMultiLineString:\n stretchedGeom = QgsGeometry(geom)\n lines = stretchedGeom.asMultiPolyline() # lista di linee\n atSubGeom = 0\n for line in lines: \n subGeom = QgsGeometry.fromPolyline(line)\n stretchedSubGeom = gripStretchQgsGeometry(subGeom, basePt, ptListToStretch, offSetX, offSetY, tolerance2ApproxCurve)\n stretchedGeom = qad_utils.setSubGeom(stretchedGeom, stretchedSubGeom, [atSubGeom]) \n atSubGeom = atSubGeom + 1\n return stretchedGeom\n \n if wkbType == QGis.WKBPolygon:\n stretchedGeom = QgsGeometry(geom)\n lines = stretchedGeom.asPolygon() # lista di linee\n atSubGeom = 0\n for line in lines: \n subGeom = QgsGeometry.fromPolyline(line)\n stretchedSubGeom = gripStretchQgsGeometry(subGeom, basePt, ptListToStretch, offSetX, offSetY, tolerance2ApproxCurve)\n stretchedGeom = qad_utils.setSubGeom(stretchedGeom, stretchedSubGeom, [atSubGeom]) \n atSubGeom = atSubGeom + 1\n return stretchedGeom\n \n if wkbType == QGis.WKBMultiPolygon:\n stretchedGeom = QgsGeometry(geom)\n polygons = geom.asMultiPolygon() # vettore di poligoni\n atSubGeom = 0\n for polygon in polygons:\n subGeom = QgsGeometry.fromPolygon(polygon)\n stretchedSubGeom = gripStretchQgsGeometry(subGeom, basePt, ptListToStretch, offSetX, offSetY, tolerance2ApproxCurve)\n stretchedGeom = qad_utils.setSubGeom(stretchedGeom, stretchedSubGeom, [atSubGeom]) \n atSubGeom = atSubGeom + 1\n return stretchedGeom\n \n return None",
"def gripStretchCircle(circle, basePt, ptListToStretch, offSetX, offSetY, tolerance2ApproxCurve):\n newCenter = QgsPoint(circle.center)\n newRadius = circle.radius\n \n for ptToStretch in ptListToStretch:\n if qad_utils.ptNear(ptToStretch, circle.center): # se i punti sono sufficientemente vicini\n newCenter.set(circle.center.x() + offSetX, circle.center.y() + offSetY)\n elif circle.isPtOnCircle(ptToStretch):\n newPt = QgsPoint(basePt.x() + offSetX, basePt.y() + offSetY)\n newRadius = qad_utils.getDistance(circle.center, newPt)\n\n newCircle = qad_circle.QadCircle()\n if newCircle.set(newCenter, newRadius) == False:\n return None\n \n return newCircle",
"def generate_constraints_between_chains(self):\n node_to_chain_mapping = defaultdict(set)\n # collect all places where each node is used and at what subchain index\n for chain_idx in range(len(self.chains)):\n chain = self.chains[chain_idx]\n for subchain_idx in range(len(chain)):\n parent, child = chain[subchain_idx]\n node_to_chain_mapping[parent].add(\n AbstractConstraint(chain_idx, subchain_idx)\n )\n # don't forget about the final child in the chain (parents are already accounted for)\n final_parent, final_child = chain[-1]\n node_to_chain_mapping[final_child].add(\n AbstractConstraint(chain_idx, len(chain))\n )\n # our final mapping correlates constraints on a per-chain basis\n # e.g. for chain index 0 at subchain index 1, we have a constraint (shared node) in chain 2\n chain_constraints = list()\n for chain_idx in range(len(self.chains)):\n chain = self.chains[chain_idx]\n chain_constraint = [set() for i in range(len(chain) + 1)]\n for subchain_idx in range(len(chain)):\n parent, child = chain[subchain_idx]\n node_constraints = node_to_chain_mapping[parent]\n for constraint in node_constraints:\n if constraint.chain_index != chain_idx:\n chain_constraint[subchain_idx].add(constraint)\n # don't forget about the final child in the chain (parents are already accounted for)\n final_parent, final_child = chain[-1]\n node_constraints = node_to_chain_mapping[final_child]\n for constraint in node_constraints:\n if constraint.chain_index != chain_idx:\n chain_constraint[len(chain)].add(constraint)\n chain_constraints.append(chain_constraint)\n return chain_constraints",
"def prepare_constraints():\n # 1. Demand Constraints\n create_demand_constraints()\n\n # 2. Capacity Constraints (source node to transit node)\n create_capacity_constraints_st()\n\n # 3. Capacity Constraints (transit node to destination node)\n create_capacity_constraints_td()\n\n # 4. Split Paths Constraints\n create_split_paths_constraints()\n\n # 5. Equal Split Flow Constraints\n create_equal_split_flow_constraints()\n\n # 6. Transit node's balance load Constraints\n create_balance_load_constraints()\n\n # 7. Bounds\n create_bounds()\n\n # 8. Binary\n create_binary()",
"def stretchQgsLineStringGeometry(geom, containerGeom, offSetX, offSetY, tolerance2ApproxCurve):\n obj = qad_utils.whatGeomIs(0, geom)\n if (type(obj) != list and type(obj) != tuple):\n objType = obj.whatIs()\n if objType == \"CIRCLE\": # se é cerchio\n if isPtContainedForStretch(obj.center, containerGeom): # se il punto è contenuto in containerGeom\n obj.center.setX(obj.center.x() + offSetX)\n obj.center.setY(obj.center.y() + offSetY)\n return QgsGeometry.fromPolyline(obj.asPolyline(tolerance2ApproxCurve)) \n\n stretchedGeom = QgsGeometry(geom)\n snapper = QadSnapper()\n points = snapper.getEndPoints(stretchedGeom)\n del snapper\n\n linearObjectListToStretch = qad_utils.QadLinearObjectList()\n linearObjectListToStretch.fromPolyline(geom.asPolyline())\n \n for point in points:\n if isPtContainedForStretch(point, containerGeom): # se il punto è contenuto in containerGeom\n atPart = linearObjectListToStretch.containsPt(point)\n while atPart >= 0:\n linearObject = linearObjectListToStretch.getLinearObjectAt(atPart)\n pt = linearObject.getStartPt() \n if qad_utils.ptNear(pt, point): # cambio punto iniziale\n pt.setX(pt.x() + offSetX)\n pt.setY(pt.y() + offSetY)\n if linearObject.isSegment():\n linearObject.setStartPt(pt)\n else:\n oldArc = linearObject.getArc()\n middlePt = oldArc.getMiddlePt()\n distFromMiddleChord = qad_utils.getDistance(middlePt, qad_utils.getPerpendicularPointOnInfinityLine(oldArc.getStartPt(), oldArc.getEndPt(), middlePt))\n \n newArc = QadArc()\n if linearObject.isInverseArc(): \n middlePt = qad_utils.getMiddlePoint(pt, oldArc.getStartPt())\n middlePt = qad_utils.getPolarPointByPtAngle(middlePt, \\\n qad_utils.getAngleBy2Pts(pt, oldArc.getStartPt()) + math.pi / 2, \\\n distFromMiddleChord) \n if newArc.fromStartSecondEndPts(oldArc.getStartPt(), middlePt, pt) == False:\n return None\n else:\n middlePt = qad_utils.getMiddlePoint(pt, oldArc.getEndPt())\n middlePt = qad_utils.getPolarPointByPtAngle(middlePt, \\\n qad_utils.getAngleBy2Pts(pt, oldArc.getEndPt()) - math.pi / 2, \\\n distFromMiddleChord) \n if newArc.fromStartSecondEndPts(pt, middlePt, oldArc.getEndPt()) == False:\n return None\n linearObject.setArc(newArc, linearObject.isInverseArc()) \n else:\n pt = linearObject.getEndPt()\n if qad_utils.ptNear(pt, point): # cambio punto finale\n pt.setX(pt.x() + offSetX)\n pt.setY(pt.y() + offSetY)\n if linearObject.isSegment():\n linearObject.setEndPt(pt)\n else:\n oldArc = linearObject.getArc()\n middlePt = oldArc.getMiddlePt()\n distFromMiddleChord = qad_utils.getDistance(middlePt, qad_utils.getPerpendicularPointOnInfinityLine(oldArc.getStartPt(), oldArc.getEndPt(), middlePt))\n \n newArc = QadArc()\n if linearObject.isInverseArc():\n middlePt = qad_utils.getMiddlePoint(pt, oldArc.getEndPt())\n middlePt = qad_utils.getPolarPointByPtAngle(middlePt, \\\n qad_utils.getAngleBy2Pts(pt, oldArc.getEndPt()) - math.pi / 2, \\\n distFromMiddleChord) \n if newArc.fromStartSecondEndPts(pt, middlePt, oldArc.getEndPt()) == False:\n return None\n else:\n middlePt = qad_utils.getMiddlePoint(pt, oldArc.getStartPt())\n middlePt = qad_utils.getPolarPointByPtAngle(middlePt, \\\n qad_utils.getAngleBy2Pts(pt, oldArc.getStartPt()) + math.pi / 2, \\\n distFromMiddleChord) \n if newArc.fromStartSecondEndPts(oldArc.getStartPt(), middlePt, pt) == False:\n return None\n linearObject.setArc(newArc, linearObject.isInverseArc()) \n \n atPart = linearObjectListToStretch.containsPt(point, atPart + 1)\n \n pts = linearObjectListToStretch.asPolyline(tolerance2ApproxCurve)\n stretchedGeom = 
QgsGeometry.fromPolyline(pts) \n \n return stretchedGeom",
"def bind(group_data, skeleton_data):\n # point constraint hip joint between left and right\n cmds.pointConstraint(\n [group_data[\"l_hip\"], group_data[\"r_hip\"]],\n group_data[\"hip\"],\n maintainOffset=False\n )\n\n # parent constraint spine joint between hip and neck\n constraint = cmds.parentConstraint(\n [group_data[\"hip\"], group_data[\"neck\"]],\n group_data[\"spine\"],\n maintainOffset=False,\n skipRotate=\"none\"\n )[0]\n\n # set aliases to position spine closer to the hip\n aliases = cmds.parentConstraint(\n constraint,\n query=True,\n weightAliasList=True\n )\n cmds.setAttr(\"{}.{}\".format(constraint, aliases[0]), 0.8)\n cmds.setAttr(\"{}.{}\".format(constraint, aliases[1]), 0.2)\n\n # create point constraints\n for key, group in group_data.iteritems():\n joint = skeleton_data.get(key)\n if not joint:\n continue\n\n cmds.pointConstraint(group, joint, maintainOffset=False)\n\n # aim constraints\n # body\n cmds.aimConstraint(\n group_data.get(\"neck\"),\n skeleton_data.get(\"hip\"),\n aimVector=[0, 1, 0],\n upVector=[1, 0, 0],\n worldUpVector=[0, 0, 1],\n worldUpType=\"object\",\n worldUpObject=group_data.get(\"l_hip\")\n )\n cmds.aimConstraint(\n group_data.get(\"head\"),\n skeleton_data.get(\"neck\"),\n aimVector=[0, 1, 0],\n upVector=[0, 0, 1],\n worldUpVector=[0, 0, -1],\n worldUpType=\"objectrotation\",\n worldUpObject=group_data.get(\"neck\")\n )\n\n # limbs\n for side, aimDirection in zip([\"l\", \"r\"], [1, -1]):\n # leg\n for leg1, leg2 in zip(LEG_HIERARCHY[:-1], LEG_HIERARCHY[1:]):\n joint = skeleton_data.get(\"{}_{}\".format(side, leg1))\n target = group_data.get(\"{}_{}\".format(side, leg2))\n\n up_name = LEG_HIERARCHY[:]\n up_name.remove(leg1)\n up_name.remove(leg2)\n up = group_data.get(\"{}_{}\".format(side, up_name[0]))\n\n cmds.aimConstraint(\n target,\n joint,\n aimVector=[0, -1, 0],\n upVector=[0, 0, -1],\n worldUpVector=[0, 0, 1],\n worldUpType=\"object\",\n worldUpObject=up\n )\n\n # arm\n for arm1, arm2 in zip(ARM_HIERARCHY[:-1], ARM_HIERARCHY[1:]):\n joint = skeleton_data.get(\"{}_{}\".format(side, arm1))\n target = group_data.get(\"{}_{}\".format(side, arm2))\n\n up_name = ARM_HIERARCHY[:]\n up_name.remove(arm1)\n up_name.remove(arm2)\n up = group_data.get(\"{}_{}\".format(side, up_name[0]))\n\n cmds.aimConstraint(\n target,\n joint,\n aimVector=[aimDirection, 0, 0],\n upVector=[0, 0, 1],\n worldUpVector=[0, 0, 1],\n worldUpType=\"object\",\n worldUpObject=up\n )\n\n # need to do more testing to confidently bind the wrist, ankles and head.\n # At the moment the results seem inaccurate.",
"def apply_constraints(pose):\r\n\t# Enzdes constraints\r\n\tcstm = AddOrRemoveMatchCsts()\r\n\tcstm.set_cst_action(ADD_NEW)\r\n\tcstm.apply(pose)\r\n\r\n\t# Determine peptide residues to constrain, preserving\r\n\t# H-bonding residues of original peptide, P3-P1\r\n\tfirst_pep_cst_res = pose.pdb_info().pdb2pose('B', 4)\r\n\tlast_pep_cst_res = pose.pdb_info().pdb2pose('B', 6)\r\n\tcst_range = '{}-{}'.format(first_pep_cst_res, last_pep_cst_res)\r\n\r\n\t# Coordinate constraints\r\n\tcg = CoordinateConstraintGenerator()\r\n\tors = OrResidueSelector()\r\n\tors.add_residue_selector(ChainSelector('A')) # Constrain main backbone\r\n\tors.add_residue_selector(ResidueIndexSelector(cst_range)) \r\n\tcg.set_residue_selector(ors)\r\n\r\n\tac = AddConstraints()\r\n\tac.add_generator(cg)\r\n\tac.apply(pose)\r\n\r\n\treturn",
"def makeSideStreets(self, curves):\n\n base.direct.grid.fXyzSnap = 0\n base.direct.grid.fHprSnap = 0\n self.panel.fPlaneSnap.set(0)\n bldgGroup = self.consolidateStreetBuildings()\n bldgs = bldgGroup.getChildren()\n\n # streetWidth puts buildings on the edge of the street, not the middle\n currPoint = Point3(0)\n bldgIndex = 0\n\n # Populate buildings on both sides of the street\n #sides = ['inner', 'outer','innersidest','outersidest']\n sides = ['innersidest', 'outersidest']\n maxGroupWidth = 50000\n for side in sides:\n print \"Building street for %s side\" % side\n # Subdivide the curve into different groups.\n bldgGroupIndex = 0\n curGroupWidth = 0\n\n\n for curve, curveType in curves[side]:\n print \"----------------- curve(%s, %s): %s --------------- \" % (side, curve.getName(), curve)\n #import pdb; pdb.set_trace()\n currT = 0\n endT = curve.getMaxT()\n\n #RAU side streets still too long, lets try arbitrarily dividing it in half\n #endT = endT / 2\n\n print (\"endT = %f\" % endT)\n #if (maxGroupWidth < endT):\n # self.notify.debug(\"changing endT from %f to %f\" % (endT, maxGroupWidth))\n # endT = maxGroupWidth\n\n\n currGroupWidth = 0\n self.makeNewBuildingGroup(bldgGroupIndex, side, curve.getName())\n\n while currT < endT:\n if curveType == 'urban':\n bldg, bldgIndex = self.getBldg(bldgIndex, bldgs, forceDuplicate = True)\n curve.getPoint(currT, currPoint)\n\n if side == \"inner\" or side == \"innersidest\":\n heading = 90\n else:\n heading = -90\n bldg.setPos(currPoint)\n bldgWidth = self.getBuildingWidth(bldg)\n\n curGroupWidth += bldgWidth\n # Adjust grid orientation based upon next point along curve\n currT, currPoint = self.findBldgEndPoint(bldgWidth, curve, currT, currPoint, rd = 0)\n bldg.lookAt(Point3(currPoint))\n bldg.setH(bldg, heading)\n\n # Shift building forward if it is on the out track, since we just turned it away from\n # the direction of the track\n if side == \"outer\" or side == \"outersidest\":\n bldg.setPos(currPoint)\n\n self.updateSelectedPose([bldg])\n self.adjustPropChildren(bldg)\n base.direct.reparent(bldg, fWrt = 1)\n print bldgIndex\n elif curveType == 'trees':\n curve.getPoint(currT, currPoint)\n # trees are spaced anywhere from 40-80 ft apart\n treeWidth = random.randint(40, 80)\n curGroupWidth += treeWidth\n # Adjust grid orientation based upon next point along curve\n currT, currPoint = self.findBldgEndPoint(treeWidth, curve, currT, currPoint, rd = 0)\n\n # Add some trees\n tree = random.choice([\"prop_tree_small_ul\",\n \"prop_tree_small_ur\",\n \"prop_tree_large_ur\",\n \"prop_tree_large_ul\"])\n\n #use snow tree if necessary\n if (useSnowTree):\n tree = random.choice([\"prop_snow_tree_small_ul\",\n \"prop_snow_tree_small_ur\",\n \"prop_snow_tree_large_ur\",\n \"prop_snow_tree_large_ul\"])\n\n\n self.addProp(tree)\n for selectedNode in base.direct.selected:\n # Move it\n selectedNode.setPos(currPoint)\n # Snap objects to grid and update DNA if necessary\n self.updateSelectedPose(base.direct.selected.getSelectedAsList())\n elif curveType == 'bridge':\n # Don't add any dna for the bridge sections, but add the length\n # of the bridge so we can increment our building groups correctly\n print \"adding bridge (%s), curT = %s\" % (side, currT)\n bridgeWidth = 1050\n curGroupWidth += bridgeWidth\n #currT, currPoint = self.findBldgEndPoint(bridgeWidth, curve, currT, currPoint, rd = 0)\n print \"currT after adding bridge = %s\" % currT\n # force move to next curve\n currT = endT + 1\n elif curveType == 'tunnel':\n # Don't add any dna for the tunnel 
sections, but add the length\n # of the bridge so we can increment our building groups correctly\n print \"adding tunnel (%s), curT = %s\" % (side, currT)\n tunnelWidth = 775\n curGroupWidth += tunnelWidth\n #currT, currPoint = self.findBldgEndPoint(tunnelWidth, curve, currT, currPoint, rd = 0)\n print \"currT after adding tunnel = %s\" % currT\n # force move to next curve\n currT = endT + 1\n elif curveType == 'barricade':\n print \"adding barricade (%s) %s, curT = %d\" % (side, curve.getName(), currT)\n barricadeWidth = curve.calcLength()\n print \"barricade width = %f\" % barricadeWidth\n\n simple =1\n if (simple):\n curGroupWidth += barricadeWidth\n # force move to next curve\n currT = endT + 1\n else:\n #add a prop_tree to force it to be shown\n curve.getPoint(currT, currPoint)\n #trees are spaced anywhere from 40-80 ft apart\n #treeWidth = random.randint(40, 80)\n treeWidth = barricadeWidth\n curGroupWidth += treeWidth\n # Adjust grid orientation based upon next point along curve\n currT, currPoint = self.findBldgEndPoint(treeWidth, curve, currT, currPoint, rd = 0)\n\n # Add some trees\n tree = random.choice([\"prop_snow_tree_small_ul\",\n \"prop_snow_tree_small_ur\",\n \"prop_snow_tree_large_ur\",\n \"prop_snow_tree_large_ul\"])\n self.addProp(tree)\n for selectedNode in base.direct.selected:\n # Move it\n selectedNode.setPos(currPoint)\n # Snap objects to grid and update DNA if necessary\n self.updateSelectedPose(base.direct.selected.getSelectedAsList())\n\n #done with for loop, increment bldgGroupIndex\n bldgGroupIndex += 1",
"def stretch(points, stretches=[1, 1]):\n x = stretches[0] * points[0]\n y = stretches[1] * points[1]\n return [x, y]",
"def vizualize_stretch_and_squash(self, chain, where=None):\n\n # if where is not None:\n # parent = where\n # else:\n # parent = chain[0].parent()\n #\n # viz_subnet = parent.createNode(\"subnet\")\n # viz_subnet.setName(\"subnet_viz_chain\")\n # viz_subnet.parm(\"tdisplay\").set(1)\n # viz_subnet.parm(\"display\").set(0)\n # vizer_list = []\n # for i in chain:\n # fetcher = viz_subnet.createNode(\"fetch\", \"fetch_\"+i.name())\n # fetcher.setColor(hou.Color((1.0, 0.4, 0.0)))\n # fetcher.parm(\"useinputoffetched\").set(1)\n # fetcher.parm(\"fetchsubnet\").set(1)\n # fetcher.parm(\"fetchobjpath\").set(fetcher.relativePathTo(i))\n # fetcher.setDisplayFlag(0)\n #\n # vizer = viz_subnet.createNode(\"geo\", \"visualize_\"+i.name())\n # vizer.node(\"file1\").destroy()\n # vizer.createNode(\"sphere\")\n #\n # vizer.parm(\"scale\").set(i.parm(\"crscalex\") )\n # vizer.parm(\"sx\").set(0.025)\n # vizer.parm(\"sy\").set(0.025)\n # vizer.parm(\"sz\").set(0.025)\n #\n # vizer.setInput(0, fetcher)\n # vizer_list.append(vizer)\n # viz_subnet.layoutChildren()\n #\n # return vizer_list",
"def standard_splitting(collision):\n constraints = []\n constraint1 = {\n 'agent': collision['a1'],\n 'loc': collision['loc'],\n 'timestep': collision['timestep'],\n }\n constraint2 = {\n 'agent': collision['a2'],\n 'loc': collision['loc'],\n 'timestep': collision['timestep'],\n }\n if len(collision['loc']) == 2: # Edge collision\n constraint2['loc'] = [collision['loc'][1], collision['loc'][0]]\n constraints.append(constraint1)\n constraints.append(constraint2)\n return constraints",
"def gripStretchQgsLinearObjectList(linearObjectList, ptListToStretch, offSetX, offSetY, tolerance2ApproxCurve):\n linearObjectListToStretch = qad_utils.QadLinearObjectList(linearObjectList)\n \n atPart = 0\n while atPart < linearObjectListToStretch.qty():\n linearObject = linearObjectListToStretch.getLinearObjectAt(atPart) \n if linearObject.isSegment():\n pt = linearObject.getStartPt()\n if isPtContainedForStretch(pt, ptListToStretch): # se il punto è contenuto in ptListToStretch\n # cambio punto iniziale \n pt.setX(pt.x() + offSetX)\n pt.setY(pt.y() + offSetY)\n linearObject.setStartPt(pt)\n \n pt = linearObject.getEndPt()\n if isPtContainedForStretch(pt, ptListToStretch): # se il punto è contenuto in ptListToStretch\n # cambio punto finale\n pt.setX(pt.x() + offSetX)\n pt.setY(pt.y() + offSetY)\n linearObject.setEndPt(pt)\n else: # se è arco\n newArc, newInverseFlag = gripStretchArc(linearObject.getArc(), ptListToStretch, offSetX, offSetY, tolerance2ApproxCurve, linearObject.isInverseArc())\n if newArc is None:\n return None\n linearObject.setArc(newArc, newInverseFlag)\n\n atPart = atPart + 1\n \n pt = linearObjectListToStretch.getCentroid(tolerance2ApproxCurve) # verifico se polilinea ha un centroide\n if pt is not None:\n if isPtContainedForStretch(pt, ptListToStretch): # se il punto è contenuto in ptListToStretch\n linearObjectListToStretch.move(offSetX, offSetY)\n\n return linearObjectListToStretch",
"def constraints(self):",
"def constraints(self, point, norm=False):\n g_inv = self.diff(point, inverse=True, diff=0)\n curv = self.get_curv() # pylint: disable=assignment-from-none\n K = curv(point)\n R = self.ricci_scalar(point)\n K_up = g_inv.dot(g_inv.dot(K).T).T\n KK = K_up.dot(K.T).trace()\n trK = g_inv.dot(K).trace()\n scal_constr = KK - trK**2 - R\n dg = self.diff(point, diff=1)\n dK = np.asarray(curv(point, diff=1))\n G = self.christoffel(point)\n vec_constr = (\n np.einsum('ac,cab', g_inv, dK)\n - np.einsum('ac,ica,ib', g_inv, G, K)\n - np.einsum('ac,icb,ai', g_inv, G, K)\n - np.einsum('ac,bac', g_inv, dK)\n + np.einsum('bac,ac', dg, K_up)\n )\n if norm:\n vec_constr = np.sqrt(g_inv.dot(vec_constr).dot(vec_constr))\n return scal_constr, vec_constr",
"def make_shaped_repertoire(RNs):\n # get objective distribution\n bin_edges, obj_dist, volume = objective_distribution()\n # get an antigenic epitope sequence, and in case of nkey=1,2 check whether\n # it can populate all required bins, thus avoiding infinite loop below\n AgEpitope = get_AgEpitope(RNs)\n if cf.nkey == 1 or cf.nkey == 2:\n while 1:\n # get list of all possible binding partners and their energies\n all_partners = get_all_partners()\n all_energies = [E_best(partner, AgEpitope)\n for partner in all_partners]\n # check whether all bins are occupiable with these energies,\n # if not, get new epitope sequence\n indices = np.digitize(all_energies, bin_edges, right=True)\n ind_set = set(indices)\n ind_set.discard(0)\n # if all bins can be occupied, move on\n if ind_set == set(range(1, len(bin_edges))):\n break\n # else get a new epitope and check its validity\n else:\n AgEpitope = get_AgEpitope(RNs)\n # initialise empty list for counting how many seqs have been found per bin\n ist_dist = np.zeros(len(obj_dist))\n # seq_list for collecting identified sequences\n seq_list = []\n E_list = []\n # while ist_dist and obj_dist are not equal, get new sequences and position\n # them if they are useful\n # introduce a tolerance of how far bins are allowed to deviate from the\n # goal, as otherwise runtime explodes due to very long waiting times for\n # high binding energy codes in large nkey cases - allow an absolute\n # deviation of volume*tolerance % for each bin.\n abs_tol = volume * 0.005\n while np.sum(np.abs((ist_dist-obj_dist)) > abs_tol) > 0:\n ab = Ab_seq(RNs)\n Emax = E_best(ab, AgEpitope)\n # find index bin of this energy\n indx = np.digitize(Emax, bin_edges, right=True)\n # if the index is in the useful range and the bin is not yet full,\n # count the sequence and store it\n if indx in range(1, len(bin_edges)):\n if obj_dist[indx-1] - ist_dist[indx-1] > 0:\n ist_dist[indx-1] += 1\n seq_list.append(ab)\n E_list.append(Emax)\n\n return seq_list, E_list, AgEpitope",
"def stretchQgsGeometry(geom, containerGeom, offSetX, offSetY, tolerance2ApproxCurve):\n wkbType = geom.wkbType()\n if wkbType == QGis.WKBPoint or wkbType == QGis.WKBPoint25D:\n pt = stretchPoint(geom.asPoint(), containerGeom, offSetX, offSetY)\n if pt is not None:\n return QgsGeometry.fromPoint(pt)\n \n if wkbType == QGis.WKBMultiPoint:\n stretchedGeom = QgsGeometry(geom)\n points = stretchedGeom.asMultiPoint() # vettore di punti\n atSubGeom = 0\n for pt in points:\n subGeom = QgsGeometry.fromPoint(pt)\n stretchedSubGeom = stretchQgsGeometry(subGeom, containerGeom, offSetX, offSetY, tolerance2ApproxCurve)\n stretchedGeom = qad_utils.setSubGeom(stretchedGeom, stretchedSubGeom, [atSubGeom])\n atSubGeom = atSubGeom + 1\n return stretchedGeom\n\n if wkbType == QGis.WKBLineString:\n return stretchQgsLineStringGeometry(geom, containerGeom, offSetX, offSetY, tolerance2ApproxCurve)\n \n if wkbType == QGis.WKBMultiLineString:\n stretchedGeom = QgsGeometry(geom)\n lines = stretchedGeom.asMultiPolyline() # lista di linee\n atSubGeom = 0\n for line in lines: \n subGeom = QgsGeometry.fromPolyline(line)\n stretchedSubGeom = stretchQgsGeometry(subGeom, containerGeom, offSetX, offSetY, tolerance2ApproxCurve)\n stretchedGeom = qad_utils.setSubGeom(stretchedGeom, stretchedSubGeom, [atSubGeom]) \n atSubGeom = atSubGeom + 1\n return stretchedGeom\n \n if wkbType == QGis.WKBPolygon:\n stretchedGeom = QgsGeometry(geom)\n lines = stretchedGeom.asPolygon() # lista di linee\n atSubGeom = 0\n for line in lines: \n subGeom = QgsGeometry.fromPolyline(line)\n stretchedSubGeom = stretchQgsGeometry(subGeom, containerGeom, offSetX, offSetY, tolerance2ApproxCurve)\n stretchedGeom = qad_utils.setSubGeom(stretchedGeom, stretchedSubGeom, [atSubGeom]) \n atSubGeom = atSubGeom + 1\n return stretchedGeom\n \n if wkbType == QGis.WKBMultiPolygon:\n stretchedGeom = QgsGeometry(geom)\n polygons = geom.asMultiPolygon() # vettore di poligoni\n atSubGeom = 0\n for polygon in polygons:\n subGeom = QgsGeometry.fromPolygon(polygon)\n stretchedSubGeom = stretchQgsGeometry(subGeom, containerGeom, offSetX, offSetY, tolerance2ApproxCurve)\n stretchedGeom = qad_utils.setSubGeom(stretchedGeom, stretchedSubGeom, [atSubGeom]) \n atSubGeom = atSubGeom + 1\n return stretchedGeom\n \n return None"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Duplicate a joint chain. | def duplicate_chain(chain, search='', replace='', suffix=''):
if suffix:
suffix = '_'+suffix
new_jnts = []
for joint in chain:
new_name = joint.replace(search, replace, 1)+suffix
new_jnt = mc.duplicate(joint, po=1, n=new_name)[0]
if new_jnts:
mc.parent(new_jnt, new_jnts[-1])
new_jnts.append(new_jnt)
return new_jnts | [
"def _duplicate_joints(self):\n\n # Create new joints\n self.fk_joints = joint.duplicate_joints(self.joints, \"fk\")\n\n # Add to setups\n self.setups.extend(self.fk_joints)",
"def _duplicate_joints(self):\n\n # Create new joints\n self.ik_joints = joint.duplicate_joints(self.joints, \"ik\")\n\n # Add to setups\n self.setups.extend(self.ik_joints)\n\n # Connect fk jnts to source joints\n for ik_jnt, src_jnt in zip(self.ik_joints, self.joints):\n cmds.parentConstraint(ik_jnt, src_jnt, mo=True)\n\n # Add to setups\n self.setups.extend(self.ik_joints)",
"def create_chain(self):\n\n\t\tfor link in range(self.links):\n\t\t\tself.create_link()\n\n\t\tfor link in range(1,len(self.linkObjs)):\n\t\t\tprint (self.linkObjs[link]).get_name()\n\t\t\tcmds.parent((self.linkObjs[link]).get_name(),self.linkObjs[0].get_name())\n\t\t\tself.linkObjs[link].add_parent(self.linkObjs[0].get_transform())",
"def addChain(self, chain):\n\n\t\tself.chain.append(chain)\n\t\tchain.parentMolecule = self",
"def clone(self):\n screen = self.screen\n self._newLine(self._drawing)\n\n Myturtle = self.Myturtle\n self.screen = None\n self.Myturtle = None # too make self deepcopy-able\n\n q = deepcopy(self)\n\n self.screen = screen\n self.Myturtle = Myturtle\n\n q.screen = screen\n q.Myturtle = _TurtleImage(screen, self.Myturtle.shapeIndex)\n\n screen._turtles.append(q)\n ttype = screen._shapes[self.Myturtle.shapeIndex]._type\n if ttype == \"polygon\":\n q.Myturtle._item = screen._createpoly()\n elif ttype == \"image\":\n q.Myturtle._item = screen._createimage(screen._shapes[\"blank\"]._data)\n elif ttype == \"compound\":\n q.Myturtle._item = [screen._createpoly() for item in\n screen._shapes[self.Myturtle.shapeIndex]._data]\n q.currentLineItem = screen._createline()\n q._update()\n return q",
"def dup_slide(self,prs,slide):\n blank_slide_layout = self._get_blank_slide_layout(prs)\n clone = prs.slides.add_slide(blank_slide_layout)\n for shp in slide.shapes:\n el = shp.element\n newel = copy.deepcopy(el)\n clone.shapes._spTree.insert_element_before(newel, 'p:extLst')\n\n for key, value in slide.rels.iteritems():\n # Make sure we don't copy a notesSlide relation as that won't exist\n if not \"notesSlide\" in value.reltype:\n clone.rels.add_relationship( value.reltype, value._target, value.rId)\n\n return clone",
"def clone(self, *args, **kwargs):\r\n kwargs.setdefault(\"cascade\", [\"elements\"])\r\n return super(Environment, self).clone(*args, **kwargs)",
"def objectDuplicate(ob, context, linked = False):\n\tif linked:\n\t\tnew_ob = bpy.data.objects.new(ob.name, ob.data) # this will create a linked copy of ob.data\n\t\tcontext.scene.objects.link(new_ob) # adds the object to the active scene\n\t\treturn new_ob\n\telse:\n\t\tnew_ob = bpy.data.objects.new(ob.name, ob.data.copy()) # this will create a regular copy of ob.data\n\t\tcontext.scene.objects.link(new_ob)\n\t\treturn new_ob",
"def T_joint_chain(self, joint_name):\n if self.joint_syms[joint_name].get(\"T_joint\") is None:\n # go up the parent chain of transformations\n parent_joint_name = self.global_syms[\"Jname2parentJname\"].get(\n joint_name)\n if parent_joint_name is None:\n self.joint_syms[joint_name][\"T_joint\"] = \\\n self.joint_syms[joint_name][\"Tlocal_joint\"]\n else:\n self.joint_syms[joint_name][\"T_joint\"] = (\n self.T_joint_chain(parent_joint_name)\n * self.joint_syms[joint_name][\"Tlocal_joint\"]\n )\n return self.joint_syms[joint_name][\"T_joint\"]",
"def clone(self):\n return _libsbml.Association_clone(self)",
"def duplicate(*args, **kwargs):\n\n pass",
"def __deepcopy__(self, memo) -> \"IC_Chain\":\n existing = memo.get(id(self), False)\n if existing:\n return existing\n dup = type(self).__new__(self.__class__)\n memo[id(self)] = dup\n dup.chain = memo[id(self.chain)]\n dup.chain.child_dict = copy.deepcopy(self.chain.child_dict, memo)\n # now have all res and ic_res but ic_res not complete\n dup.chain.child_list = copy.deepcopy(self.chain.child_list, memo)\n dup.akset = copy.deepcopy(self.akset, memo)\n dup.aktuple = copy.deepcopy(self.aktuple, memo)\n # now have all ak w/.ric\n dup.ordered_aa_ic_list = copy.deepcopy(self.ordered_aa_ic_list, memo)\n\n dup.atomArrayIndex = self.atomArrayIndex.copy()\n dup.atomArrayValid = self.atomArrayValid.copy()\n dup.atomArray = self.atomArray.copy()\n\n dup.hedra = copy.deepcopy(self.hedra, memo)\n dup.dihedra = copy.deepcopy(self.dihedra, memo)\n\n dup.id3_dh_index = copy.deepcopy(self.id3_dh_index, memo)\n dup.id32_dh_index = copy.deepcopy(self.id32_dh_index, memo)\n\n # update missing items in ic_residues and\n # set all bp residue atom coords to be views on dup.atomArray\n # [similar in build_AtomArray() but does not copy from bpAtoms\n # or modify atomArrayValid, and accesses dup]\n dup.AAsiz = self.AAsiz\n\n dup.bpAtomArray = [None] * dup.AAsiz # rtm\n\n def setAtomVw(res, atm):\n ak = AtomKey(res.internal_coord, atm)\n ndx = dup.atomArrayIndex[ak]\n atm.coord = dup.atomArray[ndx, 0:3] # make view on atomArray\n\n dup.bpAtomArray[ndx] = atm # rtm\n\n def setResAtmVws(res):\n for atm in res.get_atoms():\n # copy not filter so ignore no_altloc\n if atm.is_disordered():\n for altAtom in atm.child_dict.values():\n setAtomVw(res, altAtom)\n else:\n setAtomVw(res, atm)\n\n for ric in dup.ordered_aa_ic_list:\n setResAtmVws(ric.residue)\n ric.rprev = copy.deepcopy(ric.rprev, memo)\n ric.rnext = copy.deepcopy(ric.rnext, memo)\n ric.ak_set = copy.deepcopy(ric.ak_set, memo)\n ric.akc = copy.deepcopy(ric.akc, memo)\n ric.dihedra = copy.deepcopy(ric.dihedra, memo)\n ric.hedra = copy.deepcopy(ric.hedra, memo)\n\n dup.sqMaxPeptideBond = self.sqMaxPeptideBond\n dup.initNCaCs = copy.deepcopy(self.initNCaCs, memo)\n\n dup.hedraLen = self.hedraLen\n dup.hedraL12 = self.hedraL12.copy()\n dup.hedraAngle = self.hedraAngle.copy()\n dup.hedraL23 = self.hedraL23.copy()\n dup.hedraNdx = copy.deepcopy(self.hedraNdx, memo)\n\n dup.dihedraLen = self.dihedraLen\n dup.dihedraAngle = self.dihedraAngle.copy()\n dup.dihedraAngleRads = self.dihedraAngleRads.copy()\n dup.dihedraNdx = copy.deepcopy(self.dihedraNdx, memo)\n\n dup.a2da_map = self.a2da_map.copy()\n dup.a2d_map = self.a2d_map.copy()\n dup.d2a_map = self.d2a_map.copy()\n\n dup.dH1ndx = self.dH1ndx.copy()\n dup.dH2ndx = self.dH2ndx.copy()\n\n dup.hAtoms = self.hAtoms.copy()\n dup.hAtomsR = self.hAtomsR.copy()\n dup.hAtoms_needs_update = self.hAtoms_needs_update.copy()\n\n dup.dRev = self.dRev.copy()\n dup.dFwd = self.dFwd.copy()\n dup.dAtoms_needs_update = self.dAtoms_needs_update.copy()\n\n dup.dAtoms = self.dAtoms.copy()\n dup.a4_pre_rotation = self.a4_pre_rotation.copy()\n\n dup.dCoordSpace = self.dCoordSpace.copy()\n dup.dcsValid = self.dcsValid.copy()\n\n for d in dup.dihedra.values():\n d.cst = dup.dCoordSpace[0][d.ndx]\n d.rcst = dup.dCoordSpace[1][d.ndx]\n\n return dup",
"def test_deepcopy_2(self):\n p = Phrase(head=self.phrase, complements=['that', 'stands', 'tall'])\n p2 = deepcopy(p)\n self.assertEqual(self.phrase, p.head)\n self.assertEqual(self.phrase, p2.head)\n self.assertNotEqual(id(self.phrase), id(p2.head))\n self.assertEqual(p2.head.parent, p2)\n self.assertEqual(p2.head.head.parent, p2.head)",
"def copy(self, newname=None):\n\n if not newname: newname = self.name + \"_copy\"\n newmol=Protein(name=newname, parent=self.parent,\n elementType=self.elementType, childrenName=self.childrenName,\n setClass=self.setClass, childrenSetClass=self.childrenSetClass,\n top=self.top)\n newmol.curChain=Chain()\n newmol.curRes=Residue()\n newmol.allAtoms= AtomSet()\n newmol.parser = self.parser\n for at in self.allAtoms:\n self._fit_atom_into_tree(newmol, at)\n newmol.buildBondsByDistance()\n return newmol",
"def crossover(self, dad, mom):\n pass",
"def dup(stack):\n stack += [ stack[-1].copy() ]",
"def op_dup(self, args):\n self.require_stack(1)\n self.stack.append(self.stack[-1])",
"def duplicate(self):\n\t\treturn Graph(self.vertices[:], self.edges[:])",
"def clone(self):\n return _libsbml.XMLTriple_clone(self)"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
This function loops through a directory and updates the dates of the files in that directory. | def update_date(dest=dest):
for root, _, files in os.walk(dest):
ignore = ["README.md","SUMMARY.md"]
_ = [edit_files(root + "/" + file) for file in files if (file not in ignore and file.endswith(".md"))] | [
"def _update_files():\n configuration_settings = get_configuration()\n\n # Need to find all of the files that are stored in the input_files directories in order to start building the\n # reports that will be used to generate the static log files.\n for input_path in configuration_settings.processing.inputs:\n search_path = pathlib.Path(input_path)\n\n # Currently going to make the assumption that everyone is using the path naming convention that I'm dictating\n # which is YYYY/MM/DD/file.ext\n for file_component in search_path.glob('*/*/*/*'):\n # Store all of the files into a dictionary containing the keys and a list of the files that are associated\n # with that day\n updaters.update_files(search_path, file_component)",
"def update_from_dir(self, dirname):\r\n import os\r\n filenames = os.listdir(dirname)\r\n filenames.sort()\r\n for filename in filenames:\r\n full_name = os.path.join(dirname, filename)\r\n self.logger.info(\"Reading from file %s\" % full_name)\r\n page = open(full_name).read()\r\n self.update_from_string(page)",
"def upload_all_workout_from_directory(directory_path):\n day = datetime.date.today() - datetime.timedelta(days=datetime.date.today().weekday(), weeks=1)\n for root, dirs, files in os.walk(directory_path):\n for f in files:\n print(f)\n upload_workout_from_directory(os.path.relpath(os.path.join(root, f), \".\"), get_next_monday(day))\n day = get_next_monday(day)",
"def run(self):\n super().run()\n date_subdirs = sorted(self.list_directory(self.input_location,\n self.input_location_type))\n for date_subdir in date_subdirs:\n if not re.search(\"^([\\d]{4}-[\\d]{2}-[\\d]{2})\", date_subdir):\n print(\"{}: Directory name {} not in YYYY-MM-DD format\"\\\n .format(self.name, date_subdir))\n continue\n date_path = os.path.join(self.input_location, date_subdir, \"RAW\")\n if len(self.list_directory(date_path, self.input_location_type)) == 0:\n continue\n processed_ok = self.process_single_date(date_path)\n if not processed_ok:\n continue",
"def getFilesToProcess(directoryPath, nextDateToProcess) :\n fileNames = []\n for (dirpath, dirnames, filenames) in os.walk(directoryPath) :\n fileNames.extend(filenames)\n break\n\n nextDate = datetime.datetime.strptime(nextDateToProcess, \"%Y-%m-%d\")\n filesToProcess = []\n for fileName in fileNames :\n tokens = fileName.split('_')\n lastToken = tokens[len(tokens) - 1]\n tokens = lastToken.split('.')\n dateTimeString = tokens[0]\n dateTimeObj = datetime.datetime.strptime(dateTimeString, \"%Y-%m-%d-%H-%M\")\n if dateTimeObj.date() == nextDate.date() :\n filesToProcess.append(fileName)\n\n return filesToProcess",
"def __update_files_and_directories_in_cwd(self):\n file_infos = self.__get_file_infos('')\n files, directories = self.__split_files_and_directories(file_infos)\n\n self.__files = files\n self.__directories = directories",
"def update_list(self, directory):\n self.logger.debug(\"Updating file list...\")\n files = [\n path.basename(f)\n for f in glob.glob(path.join(directory, f\"*.{self.extension}\"))\n ]\n self.file_list.set(sorted(files))",
"def updates(cls, directory):\n\n contents = [i for i in os.listdir(directory)\n if i.startswith(cls.prefix) and i.endswith(cls.suffix)]\n contents = set(contents)\n return contents",
"def update(self, forceRecalc=False):\n # Traverse actual files\n self.logger.debug(\"Traversing the filesystem.\")\n currentFiles = []\n for sourcePath in self.fsPathsComplete:\n for root, _, fns in os.walk(sourcePath):\n for fn in fns:\n fnComp = os.path.join(root, fn)\n ## currentFiles.append(os.path.relpath(fnComp.self.mountPoint)) # Relative paths do not work as expected for start like '\\\\ZEYCUS'.\n # See https://stackoverflow.com/questions/47364579/unexpected-behaviour-of-pythons-os-path-relpath/47364931#47364931\n # Instead, I do it manually:\n fnAux = fnComp[len(self.mountPoint):]\n while fnAux[0] == '\\\\':\n fnAux = fnAux[1:]\n currentFiles.append(fnAux)\n currentFiles = set(currentFiles)\n\n # Obtain files stored in the DDBB\n storedFiles = dict(self) # We need to access so many times that it is convenient to build a dict with all the info\n storedFilesSet = set(self.container)\n\n # Delete DDBB entries for files that longer exist.\n self.logger.debug(\"Removing outdated entries.\")\n for fn in sorted(storedFilesSet - currentFiles):\n self.removeEntry(fn)\n\n # Update information for files with a newer timestamp, or a modified size.\n if forceRecalc:\n self.logger.debug(\"Updating entries with --force. All %s of them will be recalculated.\" % len(storedFilesSet & currentFiles))\n else:\n self.logger.debug(\"Updating entries. Files to check: %s.\" % len(storedFilesSet & currentFiles))\n for fn in sorted(storedFilesSet & currentFiles):\n fnStat = os.stat(self.compFn(fn))\n timestamp = fnStat.st_mtime\n size = fnStat.st_size\n if forceRecalc or (timestamp > storedFiles[fn]['timestamp']) or (size != storedFiles[fn]['size']):\n self.logger.debug('Modifying %s' % fn)\n self.container[fn] = dict(\n timestamp=timestamp,\n size=size,\n hash=sha256(self.compFn(fn)),\n )\n\n # Include information for new files\n self.logger.debug(\"Inserting new entries.\")\n for fn in sorted(currentFiles - storedFilesSet):\n self.logger.debug('Adding %s' % fn)\n fnStat = os.stat(self.compFn(fn))\n self.container[fn] = dict(\n timestamp=fnStat.st_mtime,\n size=fnStat.st_size,\n hash=sha256(self.compFn(fn)),\n )",
"def _date_filter_dir(self, feed):\n for cur_dir, subdirs, files in os.walk(os.path.join(self.raw_data_dir, feed)):\n if GTFS_ZIPFILE_BASENAME in files:\n filepath = os.path.join(cur_dir, GTFS_ZIPFILE_BASENAME)\n sub_feed = os.path.split(os.path.normpath(cur_dir))[-1]\n download_date = os.path.basename(os.path.split(os.path.normpath(cur_dir))[-2])\n yield download_date, sub_feed, filepath",
"def add_dates(data_dir):\n # Get all the GeoTiff files in data dir\n fnames = os.path.join(data_dir, '*.tif')\n fnames = glob(fnames)\n fnames.sort()\n\n for fname in fnames:\n # In this particular case, date is YYYYDDD\n str_date = os.path.basename(fname)\n str_date = str_date.split('.')[0]\n str_date = str_date.split('_')[1]\n\n # Convert YYYYDDD to YYYY-MM-DD\n fmt = '%Y%j'\n _date = datetime.strptime(str_date, fmt)\n str_date = _date.strftime('%Y-%m-%d')\n\n add_date_to_metadata(fname, str_date)\n LOG.info(f\"Metadata added for {fname}\")",
"def cvs_update(filenames=wingapi.kArgFilename):\r\n\r\n for filename in filenames:\r\n\r\n if os.path.isdir(filename):\r\n dirname = filename\r\n cmd = 'cvs update'\r\n else:\r\n dirname = os.path.dirname(filename)\r\n cmd = 'cvs update %s' % os.path.basename(filename)\r\n \r\n #result = __run_in_dir(cmd, dirname)\r\n print cmd",
"def get_dates(folder=os.getcwd()):\n \n \n res = []\n files = os.listdir(folder)\n for i in files:\n i = re.sub(\".xlsx\", \"\", i)\n i = datetime.strptime(i, \"%y%m%d\")\n res.append(i)\n return res",
"def update_source_files(source_directory_list, source_extension_list):\n # get source files in the directory list\n source_total = 0\n for unused, source_directory in enumerate(source_directory_list):\n source_files_list = []\n get_requested_files(source_directory, source_extension_list, source_files_list)\n # update the files with shared object references\n for unused, source_file in enumerate(source_files_list):\n updated_file = []\n file_changed = modify_input_file(source_file, updated_file)\n if file_changed:\n filepath = get_printble_filepath(source_file)\n print(filepath)\n source_total += 1\n if __file_update:\n write_output_file(updated_file, source_file)\n print(\"Total Files\", source_total)\n print()",
"def add_timestamps(dir_video):\n print(\"Adding creation dates to file names\")\n os.chdir(dir_video)\n # get only top level dir info\n dir_data_video_files = next(os.walk(dir_video))\n list_video_files = dir_data_video_files[2] # get file list\n for f_name in list_video_files:\n if GOPRO_PATTERN.search(f_name):\n f_time = time.strftime(r\"%Y-%m-%d_%H-%M\", time.localtime(os.path.getctime(f_name)))\n os.rename(f_name, f\"{f_time}_{f_name}\")",
"def scan_files(self):\n for filename in list_files(self.path):\n a = Audio(filename)\n self.files.append(a)\n\n self.cache_data[filename] = int(os.path.getmtime(\n os.path.join(self.path, filename)))",
"def find_files(base_dir, consumer_q):\n for dirpath, dirs, files in os.walk(base_dir, topdown=False):\n for f in files:\n fname = os.path.join(dirpath, f)\n stat = os.stat(fname).st_mtime\n consumer_q.put(fname, stat)",
"def updateIndex(self):\n for root, dirs, files in os.walk(self.serverdir):\n for d in dirs:\n if not d.startswith('.'):\n relpath = os.path.relpath(os.path.join(root, d), self.serverdir)\n self.serverindex[relpath] = (self.getNametype(os.path.join(root,d)), os.path.getmtime(os.path.join(root, d)))\n for f in files:\n if not f.startswith('.'):\n relpath = os.path.relpath(os.path.join(root, f), self.serverdir)\n self.serverindex[relpath] = (self.getNametype(os.path.join(root,f)), os.path.getmtime(os.path.join(root, f)))",
"def change_alldir(dir_list):\n for dir_name in dir_list: # For each class dir\n change_match_dir(dir_name)"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Test combining each center's file errors | def test__combine_center_file_errors(syn):
expected_error = (
f"\t{ENT1.name} ({ENT1.id}):\n\nmy errors\nn\n\n"
f"\t{ENT1.name} ({ENT1.id}):\n\nerrors here\nf\n\n"
)
calls = [
mock.call("syn1234", downloadFile=False),
mock.call("syn2345", downloadFile=False),
]
with patch.object(syn, "get", return_value=ENT1) as patch_synget:
center_errors = write_invalid_reasons._combine_center_file_errors(
syn, CENTER_ERRORSDF
)
assert center_errors == expected_error
patch_synget.assert_has_calls(calls) | [
"def test_single_error_merge(self):\n test_folder = base_path +'/test_data/merging_tests/error_test/'\n output_file = os.path.join(test_folder, \"output1.jpg\")\n\n self.assertRaises(mi.ImageError, lambda: mi.add_background(test_folder+\"dummy.txt\", test_folder+\"background.jpg\", output_file))\n self.assertRaises(mi.ImageError, lambda: mi.add_background(test_folder+\"render_small.png\", test_folder+\"background.jpg\", output_file))\n self.assertRaises(mi.ImageError, lambda: mi.add_background(test_folder+\"render1.png\", test_folder+\"dummy.txt\", output_file))\n self.assertRaises(mi.ImageError, lambda: mi.add_background(test_folder+\"render1.png\", test_folder+\"background_small.jpg\", output_file))\n self.assertRaises(mi.ImageError, lambda: mi.add_background(test_folder+\"render1.png\", test_folder+\"background_large.jpg\", output_file))",
"def test_file_specific_errors(self):\n rc = ResultsCollator(factory.file_specific_error())\n\n self.assertEqual(\n Error(None, type='s', body={'a.txt': factory.anon_obj}),\n rc.errors[0])\n\n self.assertEqual(\n Error(None, type='s', body={'a.txt': [factory.anon_obj]}),\n rc.errors[1])\n\n self.assertEqual(\n Error(None, type='s', body={'a.txt': [1, None]}),\n rc.errors[2])\n\n self.assertEqual(\n Error(None, type='s', body={'a.txt': [[1, 2, 3, 4, 5]]}),\n rc.errors[3])",
"def test_get_center_invalid_errors(syn):\n with patch.object(\n syn, \"tableQuery\", return_value=QueryResponse\n ) as patch_query, patch.object(\n write_invalid_reasons, \"_combine_center_file_errors\", return_value=\"errors\"\n ) as patch_combine:\n center_invalid = write_invalid_reasons.get_center_invalid_errors(syn, \"syn3333\")\n assert center_invalid == {\"SAGE\": \"errors\", \"TEST\": \"errors\"}\n patch_query.assert_called_once_with(\"SELECT * FROM syn3333\")\n assert patch_combine.call_count == 2",
"def _check_output_consistency(self): \n\t\tfor job in range(len(self.output_files)):\n\t\t\tassert \"_1.fastq.gz\" in self.output_files[job][0], \"Output missing first strand\"\n\t\t\tassert \"_2.fastq.gz\" in self.output_files[job][1], \"Output missing second strand\"",
"def generate_second_list_corrupted_files(directory):\n \n paths = [\"test\", \"dev\", \"train\"]\n corrupted_files = []\n\n for path in paths:\n files = [\n f\n for f in listdir(join(directory, path))\n if isfile(join(directory, path, f))\n ]\n\n total_files=len(files)\n processed_files = 0\n \n for file in files:\n processed_files+=1\n if \".wav\" in file: \n print(\"Checking files from \" + path + \" set \" + str(processed_files) + \"/\" + str(total_files), end=\"\\r\")\n if os.path.getsize(join(directory, path, file)) <= 0:\n corrupted_files.append(file)\n continue\n data, _ = soundfile.read(join(directory, path, file))\n if len(data) <= 0:\n corrupted_files.append(file)\n\n print()\n print(\"Done checking \" + path + \" set\")\n print(\"=====================\")\n\n with open('tuda_corrupted2.txt', 'w') as f:\n for file in corrupted_files:\n f.write(\"%s\\n\" % file)\n \n print(\"Done writing tuda_corrupted2.txt\" +\n \"Together with tuda_corrupted.txt they contain all corrupted files in Tuda-De\")\n print(\"=====================\")",
"def verify_images(root_dir, root_listdir):\n counter = 0\n\n for index, image_dir in enumerate(root_listdir):\n images_listdir = os.listdir(root_dir + \"/\" + image_dir)\n list_of_images_indices = [\n image_index\n for image_index in range(3, len(images_listdir) - 1)\n if image_index % 2 == 0\n ]\n for image_ind in list_of_images_indices:\n filename = root_dir + \"/\" + image_dir + \"/\" + images_listdir[image_ind]\n try:\n im = Image.open(filename)\n im.verify()\n im.close()\n except (OSError, ValueError):\n counter += 1\n\n print(\"%d files caused error due to OSError and ValueError.\" % counter)",
"def test_merge_exceptions():\n # pylint: disable=R0914\n obj = putil.pcsv.merge\n common_exceptions(obj)\n with putil.misc.TmpFile(write_file) as fname1:\n with putil.misc.TmpFile(write_file) as fname2:\n exmsg = (\n 'Combined columns in data files and output '\n 'columns are different'\n )\n AE(\n obj, RE, exmsg,\n fname1, fname2, ['Ctrl'], ['Ref'], ocols=['a', 'b', 'c']\n )",
"def check_training_result_files(folder, ruleset, quiet, werror):\n\n too_many_errors = False\n result_folder = os.path.join(folder, 'results')\n for system_folder in _get_sub_folders(result_folder):\n for benchmark_folder in _get_sub_folders(system_folder):\n folder_parts = benchmark_folder.split('/')\n benchmark = folder_parts[-1]\n system = folder_parts[-2]\n\n # If it is not a recognized benchmark, skip further checks.\n if benchmark not in _ALLOWED_BENCHMARKS:\n print('Skipping benchmark: {}'.format(benchmark))\n continue\n\n # Find all result files for this benchmark.\n pattern = '{folder}/result_*.txt'.format(folder=benchmark_folder)\n result_files = glob.glob(pattern, recursive=True)\n\n # No result files were found. That is okay, because the organization\n # may not have submitted any results for this benchmark.\n if not result_files:\n print('No Result Files!')\n continue\n\n _print_divider_bar()\n print('System {}'.format(system))\n print('Benchmark {}'.format(benchmark))\n\n # If the organization did submit results for this benchmark, the number\n # of result files must be an exact number.\n if len(result_files) != _EXPECTED_RESULT_FILE_COUNTS[benchmark]:\n print('Expected {} runs, but detected {} runs.'.format(\n _EXPECTED_RESULT_FILE_COUNTS[benchmark],\n len(result_files)))\n\n errors_found = 0\n result_files.sort()\n for result_file in result_files:\n result_basename = os.path.basename(result_file)\n result_name, _ = os.path.splitext(result_basename)\n run = result_name.split('_')[-1]\n\n # For each result file, run the benchmark's compliance checks.\n _print_divider_bar()\n print('Run {}'.format(run))\n config_file = '{ruleset}/common.yaml'.format(\n ruleset=ruleset,\n benchmark=benchmark)\n checker = mlp_compliance.make_checker(\n ruleset=ruleset,\n quiet=quiet,\n werror=werror)\n valid, _, _, _ = mlp_compliance.main(result_file, config_file, checker)\n if not valid:\n errors_found += 1\n if errors_found == 1:\n print('WARNING: One file does not comply.')\n print('WARNING: Allowing this failure under olympic scoring rules.')\n if errors_found > 1:\n too_many_errors = True\n\n _print_divider_bar()\n if too_many_errors:\n raise Exception('Found too many errors in logging, see log above for details.')",
"def test_create_file_errors(self):\n\n # Ensure a file can't be written outside the build context.\n files_to_fail = ['../file', '../../file', 'wild/../../file']\n for file in files_to_fail:\n file_arg = {file: []}\n config = self._quick_test_cfg()\n config['build']['source_path'] = 'file_tests.tgz'\n config['build']['create_files'] = file_arg\n with self.assertRaises(RuntimeError) as context:\n self._quick_test(config)\n self.assertTrue('outside build context' in str(context.exception))\n\n # Ensure a file can't overwrite existing directories.\n files_to_fail = ['wild', 'rec']\n for file in files_to_fail:\n file_arg = {file: []}\n config = self._quick_test_cfg()\n config['build']['source_path'] = 'file_tests.tgz'\n config['build']['create_files'] = file_arg\n test = self._quick_test(config, build=False, finalize=False)\n self.assertFalse(test.build())",
"def validate_dirs(root: str) -> None:\n def is_unique(mylist: List) -> bool:\n \"\"\"returns true if the (flat) list contains no duplicates.\"\"\"\n return len(mylist) == len(set(mylist))\n\n def is_bijective(tifflist: List, metalist: List) -> bool:\n \"\"\"returns true if the list of tiffs and their metadata are bijective.\"\"\"\n tiff_stripped = sorted([path.splitext(x)[0] for x in tifflist])\n meta_stripped = sorted([path.splitext(x)[0].split('.')[0] for x in metalist])\n return tiff_stripped == meta_stripped\n\n def get_differences(metadata,tiffs):\n \"\"\"Returns a list of files that don't match\"\"\"\n metadata = set(['_'.join(m.split('_')[:-1]) for m in metadata])\n tiffs = set(tiffs)\n return list(set.difference(metadata,tiffs))\n\n error_text = ''\n for dirpath, dirnames, fnames in os.walk(root):\n tiffs = [f for f in fnames if path.splitext(f)[1].lower() == \".tiff\"]\n metadata = [f for f in fnames if path.splitext(f)[1].lower() == \".xml\"]\n if not (is_unique(tiffs) and is_unique(metadata)):\n error_text += f'\\n\\tduplicate files found in {path.abspath(dirpath)}'\n if not is_bijective(tiffs, metadata):\n error_text += f'\\n\\tdifferent number of metadata (n = {len(metadata)}) and tiff (n = {len(tiffs)})' \\\n f' files found in {path.abspath(dirpath)}'\n error_text += get_differences(metadata,tiffs)\n if \".tiff_files\" not in dirpath:\n metadata_files = [path.join(dirpath,f) for f in fnames if path.splitext(f)[1].lower() == \".xml\" and \"measurement_slide\" not in f]\n for f in metadata_files:\n _, success, msg = metadata.tryparse(f)\n if not success:\n error_text += f'\\n\\t\\tMetadata error in {f}:{msg}'\n if error_text:\n error(f'Found the following problems with the target root directory {root}:{error_text}')\n exit(-1)",
"def main():\n failed = []\n for root_dir, _, file_names in os.walk(directory_to_check):\n for file_name in file_names:\n file = open(\n os.path.join(root_dir, file_name), encoding=\"utf8\", errors=\"ignore\"\n )\n text = file.read()\n text = remove_comments(text)\n if \"lorem ipsum\" in text.lower():\n failed.append(file.name)\n\n if len(failed) != 0:\n error_message = '\"Lorem ipsum\"s found in the following files:\\n' + \"\\n\".join(\n failed\n )\n raise Exception(error_message)",
"def analyse_errors(folder, set, fails=[], wipe_fails=False):\r\n\r\n # Naming convention\r\n if wipe_fails:\r\n set = set + '_nofails'\r\n\r\n # check error SN and flag:\r\n error_file = folder + 'error_sn.txt'\r\n error_set = []\r\n f = open(error_file, \"r\")\r\n lines = f.readlines()\r\n\r\n for x in lines:\r\n y = x.translate(None, \"SN :\")\r\n error_set.append(int(y))\r\n f.close()\r\n\r\n\r\n # Import and strip true and fitted params\r\n fp = folder + 'fitted_errors.txt'\r\n\r\n fitted_c = []\r\n fitted_t0 = []\r\n fitted_x0 = []\r\n fitted_x1 = []\r\n\r\n with open(fp, 'rb') as f:\r\n\r\n reader = csv.reader(f, delimiter=' ')\r\n\r\n for row in reader:\r\n # these are actually errors, I should be less lazy\r\n fitted_c.append(float(row[1].replace('c:','')))\r\n fitted_t0.append(float(row[2].replace('t0:','')))\r\n fitted_x0.append(float(row[3].replace('x0:','')))\r\n fitted_x1.append(float(row[4].replace('x1:','')))\r\n\r\n for i in range(len(fitted_c)):\r\n if fitted_c[i] == 0 and fitted_t0[i] == 0 and fitted_x0[i] == 0 and fitted_x1[i] ==0:\r\n if i+1 not in error_set:\r\n error_set.append(i+1)\r\n error_set.sort()\r\n\r\n # Create sn_num array\r\n sn_num = range(1, len(fitted_c)+1)\r\n for i in error_set:\r\n sn_num[i-1] = 'fit_error'+ str(i)\r\n\r\n # Flag kepler errors (only applicable for combined seeing where kst t0 couldn't be passed)\r\n for i in fails:\r\n sn_num[i-1] = 'kst_error' + str(i)\r\n\r\n total_fails = filter(lambda x:x in error_set, fails)\r\n # Handles failures in both kst and current fit\r\n for i in total_fails:\r\n sn_num[i-1] = 'fit_and_kst_error' + str(i)\r\n\r\n # remove fails from data\r\n if wipe_fails:\r\n for i in sorted(error_set+fails, reverse=True):\r\n del sn_num[i-1]\r\n del fitted_c[i-1]\r\n del fitted_t0[i-1]\r\n del fitted_x0[i-1]\r\n del fitted_x1[i-1]\r\n\r\n t = PrettyTable()\r\n t.title = set\r\n t.add_column('SN', sn_num)\r\n t.add_column('c-error', fitted_c)\r\n t.add_column('t0-error', fitted_t0)\r\n t.add_column('x0-error', fitted_x0)\r\n t.add_column('x1-error', fitted_x1)\r\n\r\n table_txt = t.get_string()\r\n\r\n writefolder = folder + \"stats/\"\r\n if not os.path.isdir(writefolder):\r\n os.makedirs(writefolder)\r\n\r\n if wipe_fails:\r\n with open(writefolder + 'error_output_nofails.txt', 'w') as file:\r\n file.write(table_txt)\r\n else:\r\n with open(writefolder + 'error_output.txt', 'w') as file:\r\n file.write(table_txt)\r\n\r\n diffs = [fitted_c, fitted_t0, fitted_x0, fitted_x1]\r\n\r\n return error_set, diffs",
"def assert_filenames(self):\n print(\"Asserting filenames: \", end=\"\")\n error_files = []\n\n for data_dir in data_settings.BLOCK_DATA_DIRS:\n\n filenames = os.listdir(data_dir)\n\n for filename in filenames:\n\n if 'aux.xml' in filename or 'yield':\n\n continue\n\n try:\n\n filename_split = filename.split(\"_\")\n date = filename_split[0]\n _, suffix = filename_split[-1].split(\".\")\n\n assert suffix == 'tif', \"Wrong file suffix\"\n assert len(date) == 8, \"Wrong amount of numbers in date\"\n assert date[0:4] == '2017', \"Year is wrong\"\n assert date[4] == '0', \"No double digit months in dataset\"\n assert date[5] in ['4', '5', '6', '7', '8',\n '9'], \"Month outside dataset range\"\n assert date[6] in ['0', '1', '2',\n '3'], \"Ten-indicator for day is wrong\"\n assert date[7] in ['0', '1', '2', '3', '4', '5',\n '6', '7', '8', '9'], \"Date is not a digit\"\n assert 'ndvi' in filename or 'drone_rgb' in filename or 'drone_ndvi' in filename, \"Proper type is missing\"\n\n if 'sentinel_ndvi' in filename:\n\n assert len(filename) == 26, \"Filename wrong for {} in {}\".format(\n filename, data_dir)\n\n if 'drone_ndvi' in filename:\n\n assert len(filename) == 23, \"Filename wrong for {} in {}\".format(\n filename, data_dir)\n\n if 'drone_rgb' in filename:\n\n assert len(filename) == 22, \"Filename wrong for {} in {}\".format(\n filename, data_dir)\n\n except (AssertionError, ValueError) as ex:\n\n error_files.append(\"{}: {}\".format(\n ex, os.path.join(data_dir, filename)))\n\n if not error_files:\n\n print(\"All generated block datasets named correctly!\")\n\n else:\n\n print(\"There were some problems with the following files\")\n\n for error_file in error_files:\n print(\"\\t{}\".format(error_file))",
"def check_training_result_files(folder, ruleset, quiet, werror, rcp_bypass, rcp_bert_train_samples):\n\n if ruleset == '0.6.0':\n allowed_benchmarks = _ALLOWED_BENCHMARKS_V06\n elif ruleset == '0.7.0':\n allowed_benchmarks = _ALLOWED_BENCHMARKS_V07\n elif ruleset == '1.0.0':\n allowed_benchmarks = _ALLOWED_BENCHMARKS_V10\n seed_checker = SeedChecker(ruleset)\n too_many_errors = False\n result_folder = os.path.join(folder, 'results')\n for system_folder in _get_sub_folders(result_folder):\n for benchmark_folder in _get_sub_folders(system_folder):\n folder_parts = benchmark_folder.split('/')\n benchmark = folder_parts[-1]\n system = folder_parts[-2]\n\n # Find whether submission is closed and only then run seed and RCP checkers\n system_desc_file = os.path.join(folder, 'systems/') + system + '.json'\n division = ''\n with open(system_desc_file, 'r') as f:\n contents = json.load(f)\n if contents['division'] == 'closed':\n division = 'closed'\n\n # If it is not a recognized benchmark, skip further checks.\n if benchmark not in allowed_benchmarks:\n print('Skipping benchmark: {}'.format(benchmark))\n continue\n\n # Find all result files for this benchmark.\n pattern = '{folder}/result_*.txt'.format(folder=benchmark_folder)\n result_files = glob.glob(pattern, recursive=True)\n any_pattern = '{folder}/*'.format(folder=benchmark_folder)\n all_files = glob.glob(any_pattern, recursive=True)\n\n print(\"LOOK:\", benchmark, result_files)\n\n # Find all source codes for this benchmark.\n source_files = find_source_files_under(\n os.path.join(folder, 'benchmarks', benchmark))\n\n _print_divider_bar()\n print('System {}'.format(system))\n print('Benchmark {}'.format(benchmark))\n\n # The number of result files must be an exact number.\n # Print a comprehensive message if some files in results\n # directory do not match naming convention (results_*.txt)\n if len(result_files) != _EXPECTED_RESULT_FILE_COUNTS[benchmark]:\n print('Expected {} runs, but detected {} runs.'.format(\n _EXPECTED_RESULT_FILE_COUNTS[benchmark],\n len(result_files),\n ))\n too_many_errors = True\n if len(all_files) > 0:\n print(all_files)\n print('Detected {} total files in directory {}, but some do not conform '\n 'to naming convention, should you rename them to result_*.txt ?'.format(\n len(all_files), benchmark_folder,\n ))\n if len(result_files) < len(all_files):\n print('WARNING: Unknown files in results directory {}'.format(benchmark_folder))\n\n errors_found = 0\n result_files.sort()\n for result_file in result_files:\n result_basename = os.path.basename(result_file)\n result_name, _ = os.path.splitext(result_basename)\n run = result_name.split('_')[-1]\n\n # For each result file, run the benchmark's compliance checks.\n _print_divider_bar()\n print('Run {}'.format(run))\n config_file = '{ruleset}/common.yaml'.format(\n ruleset=ruleset,\n benchmark=benchmark,\n )\n checker = mlp_compliance.make_checker(\n ruleset=ruleset,\n quiet=quiet,\n werror=werror,\n )\n valid, _, _, _ = mlp_compliance.main(\n result_file,\n config_file,\n checker,\n )\n if not valid:\n errors_found += 1\n if errors_found == 1 and benchmark != 'unet3d':\n print('WARNING: One file does not comply.')\n print('WARNING: Allowing this failure under olympic scoring '\n 'rules.')\n elif errors_found > 0 and errors_found <= 4 and benchmark == 'unet3d':\n print('WARNING: {errors} file does not comply.'.format(errors=errors_found))\n print('WARNING: Allowing this failure for unet3d under olympic scoring '\n 'rules.')\n elif errors_found > 0:\n too_many_errors = 
True\n\n # Check if each run use unique seeds.\n if ruleset == '1.0.0' and division == 'closed':\n if not seed_checker.check_seeds(result_files, source_files):\n too_many_errors = True\n\n # Run RCP checker for 1.0.0\n if ruleset == '1.0.0' and division == 'closed' and benchmark != 'minigo':\n rcp_chk = rcp_checker.make_checker(ruleset, verbose=False, bert_train_samples=rcp_bert_train_samples)\n rcp_chk._compute_rcp_stats()\n\n # Now go again through result files to do RCP checks\n rcp_pass, rcp_msg = rcp_chk._check_directory(benchmark_folder, rcp_bypass)\n if not rcp_pass:\n print('ERROR: RCP Test Failed: {}.'.format(rcp_msg))\n too_many_errors = True\n\n _print_divider_bar()\n if too_many_errors:\n raise Exception(\n 'Found too many errors in logging, see log above for details.')",
"def test_verify_corrupt_archive(self):\n self.backup(u\"full\", u\"testfiles/various_file_types\", options=[])\n output_files = os.listdir(\"testfiles/output\")\n archives = [elem for elem in output_files if \"vol\" in elem]\n for archive in archives:\n # Edit source file\n with open(\"testfiles/output/\" + archive, 'r+') as f:\n f.write('This writes text into each archive file to corrupt it.')\n # Test verify for the file\n try:\n self.verify(u'testfiles/various_file_types/executable', file_to_verify=u'executable', options=[])\n except CmdError as e:\n # Should return a 21 error code for \"hash mismatch\"\n self.assertEqual(e.exit_status, 21, str(e))\n else:\n self.fail('Expected Hash Mismatch Error not thrown')",
"def combine(files, output):\n # read all files\n bxrs = [h5py.File(f,'r') for f in files]\n # some paths we might care about & will copy\n metadata_paths = [\n '3BRecInfo/3BRecVars/MaxVolt',\n '3BRecInfo/3BRecVars/MinVolt',\n '3BRecInfo/3BRecVars/BitDepth',\n '3BRecInfo/3BRecVars/SignalInversion',\n '3BRecInfo/3BRecVars/SamplingRate',\n '3BRecInfo/3BRecVars/ExperimentType',\n '3BRecInfo/3BMeaChip/NRows',\n '3BRecInfo/3BMeaChip/NCols',\n '3BRecInfo/3BMeaChip/Layout',\n '3BRecInfo/3BMeaChip/MeaType',\n '3BRecInfo/3BMeaSystem/FwVersion',\n '3BRecInfo/3BMeaSystem/HwVersion',\n '3BRecInfo/3BMeaSystem/System'\n ]\n\n # count n_frames, n_samples from each file\n # also verify that key metadata matches\n n_frames = bxrs[0]['3BRecInfo/3BRecVars/NRecFrames'][0]\n n_samples = [bxrs[0]['3BData/Raw'].shape[0]]\n sampling_rate = bxrs[0]['3BRecInfo/3BRecVars/SamplingRate'][0]\n print(\"checking that all brw files have matching metadata\")\n for b in bxrs[1:]:\n for m in metadata_paths:\n try:\n if len(bxrs[0][m])==1:\n assert bxrs[0][m][:] == b[m][:]\n else:\n assert np.all(bxrs[0][m][:] == b[m][:])\n except Exception as E:\n logger.warn(f\"\"\"metadata does not match for {m}:\n found {bxrs[0][m]} and {b[m]}\n \"\"\")\n n_frames += b['3BRecInfo/3BRecVars/NRecFrames'][0]\n n_samples.append(b[\"3BData/Raw\"].shape[0])\n print(f\"combined duration: {n_frames/sampling_rate/60:.2f} minutes\")\n\n out_bxr = h5py.File(output, \"w\")\n # copy metadata\n bxrs[0].visititems(partial(glia.copy_metadata, copy_to=out_bxr))\n\n # copy data\n out_bxr['3BRecInfo/3BRecVars/NRecFrames'] = [n_frames]\n out_bxr['nSamplesPerRecording'] = n_samples\n tot_samples = sum(n_samples)\n assert np.isclose(tot_samples/n_frames, 4096) #4096 channels\n \n # copy raw data\n raw_dtype = bxrs[0][\"3BData/Raw\"].dtype\n dset = out_bxr.create_dataset(\"3BData/Raw\", (tot_samples,),\n dtype=raw_dtype)\n start_sample = 0\n max_chunk = int(1e8) # <1GiB \n for i, b in enumerate(bxrs):\n print(f\"Copying {files[i]}\")\n end_sample = start_sample+n_samples[i]\n for s in tqdm(range(0,n_samples[i],max_chunk)):\n e = min(s+max_chunk, end_sample)\n dset[start_sample+s:start_sample+e] = b[\"3BData/Raw\"][s:e]\n start_sample = end_sample\n\n # cleanup\n out_bxr.close()\n [b.close() for b in bxrs]",
"def test_emission_files(self):\n assert len(context.data_extract.get_absorption_files(testing_dir)) == \\\n 3, 'get_emission_files gets improper number of files'",
"def test_collect_files(self):\n test_files = (os.path.join(self.data_dir, 'sdR-12345678.fits'),\n os.path.join(self.data_dir, 'sdR-01234567.fits'),\n os.path.join(self.data_dir, 'spPlate-1234-54321.fits'),\n os.path.join(self.data_dir, 'extraneous.fits'))\n for f in test_files:\n open(f, 'a').close()\n root = os.path.join(os.environ[DM], 'doc', 'examples')\n files = scan_model(root)\n files_to_regexp(self.data_dir, files)\n\n self.assertInLog(log, (\"{0}/doc/examples/badModel.rst has no file \" +\n \"regexp!\").format(os.environ[DM]))\n collect_files(self.data_dir, files)\n self.assertInLog(log, 'Extraneous file detected: {0}'.format(test_files[3]))\n for f in files:\n if os.path.basename(f.filename) == 'badModel.rst':\n self.assertIsNone(f.regexp)\n self.assertIsNone(f._prototypes)\n else:\n self.assertIsNotNone(f.regexp)\n self.assertIsNotNone(f._prototypes)\n for f in test_files:\n os.remove(f)",
"def testFailFiles(self):\n # Cleaning possible files already occupying the available set\n self.dummySubscription.failFiles([])\n\n # First test - Test if initial file (on available set) is inserted in the\n # failed set - no arguments\n\n dummyFile2 = File('/tmp/dummyfile2,8888', 1, 1, 1)\n # Insert dummyFile2 into the available files Set at dummySubscription\n self.dummySubscription.available.addFile(dummyFile2)\n\n S = self.dummySubscription.availableFiles()\n # Fail all files\n self.dummySubscription.failFiles(S)\n\n assert len(self.dummySubscription.availableFiles()) == 0, \\\n \"failed subscription still has %s files, what's up with that?\" % \\\n len(self.dummySubscription.availableFiles())\n\n # Second test - Test if target files are inserted at the failed set\n\n dummyFileList = []\n # Populating the dummy List with a random number of files\n for i in range(1, random.randint(100, 1000)):\n lfn = '/store/data/%s/%s/file.root' % (random.randint(1000, 9999),\n random.randint(1000, 9999))\n size = random.randint(1000, 2000)\n events = 1000\n run = random.randint(0, 2000)\n lumi = random.randint(0, 8)\n\n file = File(lfn=lfn, size=size, events=events,\n checksums={\"cksum\": \"1\"})\n file.addRun(Run(run, *[lumi]))\n dummyFileList.append(file)\n # Add the new files\n self.dummySubscription.available.addFile(dummyFileList)\n # and fail them\n self.dummySubscription.failFiles(files=dummyFileList)\n # Check there are no files available - everything should be failed\n assert len(self.dummySubscription.availableFiles()) == 0, \\\n \"failed subscription still has %s files, what's up with that?\" % \\\n len(self.dummySubscription.availableFiles())\n\n # Check if all files were inserted at subscription's failed files Set\n for x in dummyFileList:\n assert x in self.dummySubscription.failed.getFiles(type='set'), \\\n 'Couldn\\'t make file failed %s' % x.dict['lfn']\n\n # Third test - Test if a replicate file is erased from the other Sets,\n # when a file is considered failed\n\n dummyFile3 = File('/tmp/dummyfile3,5555', 1, 1, 1)\n dummyFileList = []\n dummyFileList.append(dummyFile3)\n\n # Inserting dummyFile3 to be used as an argument, into each of the other\n # file sets\n self.dummySubscription.acquired.addFile(dummyFile3)\n self.dummySubscription.available.addFile(dummyFile3)\n self.dummySubscription.completed.addFile(dummyFile3)\n\n # Run the method failFiles\n self.dummySubscription.failFiles(files=dummyFileList)\n\n # Check if dummyFile3 was inserted at the failed Set\n assert dummyFile3 in self.dummySubscription.failed.getFiles(type='set'), \\\n 'Replicated file could\\'nt be inserted at failed Set'\n\n # Check if dummyFile3 was erased from all the other Sets\n assert dummyFile3 not in self.dummySubscription.acquired.getFiles(type='set'), \\\n 'Failed file still present at acquired Set'\n assert dummyFile3 not in self.dummySubscription.completed.getFiles(type='set'), \\\n 'Failed file still present at completed Set'\n assert dummyFile3 not in self.dummySubscription.available.getFiles(type='set'), \\\n 'Failed file still present at available Set'"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Test getting all center invalid errors | def test_get_center_invalid_errors(syn):
with patch.object(
syn, "tableQuery", return_value=QueryResponse
) as patch_query, patch.object(
write_invalid_reasons, "_combine_center_file_errors", return_value="errors"
) as patch_combine:
center_invalid = write_invalid_reasons.get_center_invalid_errors(syn, "syn3333")
assert center_invalid == {"SAGE": "errors", "TEST": "errors"}
patch_query.assert_called_once_with("SELECT * FROM syn3333")
assert patch_combine.call_count == 2 | [
"def test_get_data_errors(self):\n with self.assertRaises(Exception):\n self.all_struct.get_cross_validation_chunks(-1)\n with self.assertRaises(Exception):\n self.all_struct.get_cross_validation_chunks(0)\n with self.assertRaises(Exception):\n self.all_struct.get_cross_validation_chunks(1)\n self.assertIsNotNone(self.all_struct.get_cross_validation_chunks(2))\n\n with self.assertRaises(Exception):\n self.all_struct.get_trainin_testing(-1)\n with self.assertRaises(Exception):\n self.all_struct.get_cross_validation_chunks(0)\n with self.assertRaises(Exception):\n self.all_struct.get_cross_validation_chunks(1)\n self.assertIsNotNone(self.all_struct.get_training_testing(.5))",
"def testErrors(self):\n factor = 100\n before = fitLine(self.spectrum, int(self.center), int(self.rmsSize), 0, self.fittingRadius)\n\n spectrum = makeSpectrum(self.length, self.center, factor*self.amplitude, self.rmsSize,\n self.bgConst, self.bgSlope)\n after = fitLine(spectrum, int(self.center), int(self.rmsSize), 0, self.fittingRadius)\n\n # The amplitude and background errors doesn't change, because they are related to the variance,\n # which we aren't using.\n self.assertFloatsAlmostEqual(after.amplitudeErr, before.amplitudeErr, atol=1.0e-5)\n self.assertFloatsAlmostEqual(after.bg0Err, before.bg0Err, atol=1.0e-2)\n self.assertFloatsAlmostEqual(after.bg1Err, before.bg1Err, atol=1.0e-5)\n # The center and rmsSize errors decrease proportionally with the line flux\n # (they scale inversely with the S/N, and the noise is constant)\n self.assertFloatsAlmostEqual(after.centerErr, before.centerErr/factor, atol=1.0e-5)\n self.assertFloatsAlmostEqual(after.rmsSizeErr, before.rmsSizeErr/factor, atol=1.0e-5)",
"def test_bbox_centers_exception():\n with pytest.raises(TypeError):\n _ = _calculate_bbox_centers({'bad_data_type': True})",
"def test_get_error_data_all_col_errors(self):\n field_setup = None\n error_names = None\n prepared_info = self.setup_error_data(field_setup, error_names)\n for row in prepared_info:\n self.assertEqual(row['expected'], row['actual'])",
"def test_does_not_exceed_tolerance(self):\n\n results = collections.defaultdict(int)\n for error in self.single_errors:\n results[round(error, 1)] += 1\n self.assertLessEqual(error, MAX_ERR)\n self.results.append(('single errors', results))",
"def error(self) -> Sequence[float]:\n errors = []\n for line, sign in zip(self.marker_lines, (-1, 1)):\n if self._orientation == Orientation.UP_DOWN:\n picket_pos = self._fit(line.center.y)\n mlc_pos = line.center.x\n else:\n picket_pos = self._fit(line.center.x)\n mlc_pos = line.center.y\n if (\n self._separate_leaves\n ): # offset the picket position by the DLG and nominal gap\n mag_factor = self._image.sid / 1000\n picket_pos += (\n sign * self._nominal_gap_mm * mag_factor / 2 * self._image.dpmm\n )\n errors.append((mlc_pos - picket_pos) / self._image.dpmm)\n return errors",
"def test_normalize_with_multiple_errors(self) -> None:\n errors_address = address_with_errors()\n try:\n normalize_an_address(errors_address)\n except ShipEngineError as err:\n assert err.request_id is not None\n assert err.request_id.startswith(\"req_\") is True\n assert err.source is ErrorSource.SHIPENGINE.value\n assert err.error_type is ErrorType.ERROR.value\n assert err.error_code is ErrorCode.INVALID_ADDRESS.value\n assert (\n err.message\n == \"Invalid address.\\nInvalid City, State, or Zip\\nInsufficient or Incorrect Address Data\"\n )",
"def reliable(errors):\n return (np.array(errors) < 200).sum(axis=0) / len(errors)",
"def local_error_test(self):\n data = [[0.0, 0.0], [1, 0.1], [2, 0.2], [3, 0.3], [4, 0.4]]\n tsOrg = TimeSeries.from_twodim_list(data)\n tsCalc = TimeSeries.from_twodim_list(data)\n\n bem = BaseErrorMeasure()\n\n for idx in xrange(len(tsOrg)):\n try:\n bem.local_error([tsOrg[idx][1]], [tsCalc[idx][1]])\n except NotImplementedError:\n pass\n else:\n assert False # pragma: no cover",
"def has_errors(self) -> bool:",
"def test_kyc_get_validation_legal(self):\n pass",
"def error_test():\n checkresult(lib.ErrorTest())",
"def test_get_xy_invalid_space():\n pass",
"def check_errors(self, is_global=False):\n errors = self.global_errors if is_global else self.errors\n if errors:\n print('dfTimewolf encountered one or more errors:')\n for error, critical in errors:\n print('{0:s} {1:s}'.format('CRITICAL: ' if critical else '', error))\n if critical:\n print('Critical error found. Aborting.')\n sys.exit(-1)",
"def test__combine_center_file_errors(syn):\n expected_error = (\n f\"\\t{ENT1.name} ({ENT1.id}):\\n\\nmy errors\\nn\\n\\n\"\n f\"\\t{ENT1.name} ({ENT1.id}):\\n\\nerrors here\\nf\\n\\n\"\n )\n calls = [\n mock.call(\"syn1234\", downloadFile=False),\n mock.call(\"syn2345\", downloadFile=False),\n ]\n with patch.object(syn, \"get\", return_value=ENT1) as patch_synget:\n center_errors = write_invalid_reasons._combine_center_file_errors(\n syn, CENTER_ERRORSDF\n )\n assert center_errors == expected_error\n patch_synget.assert_has_calls(calls)",
"def lat_errors(self):\r\n try:\r\n _lat_errors = self._validate_latlon(self.sourceLatCol)\r\n return _lat_errors\r\n except:\r\n return None",
"def check_errors(self, data):\n for entry in data:\n if entry.find('ERROR') != -1:\n return entry\n return False",
"def get_error_initialization_test(self):\n bem = BaseErrorMeasure()\n\n try:\n bem.get_error()\n except StandardError:\n pass\n else:\n assert False # pragma: no cover",
"def get_simple_reg_error_and_points(self, center_position):\n assert center_position.shape == (1,3)\n self.iteration += 1\n traj_matlab, difference = self.get_traj_matlabarray(center_position, self.traj, self.traj_center)\n errors = np.zeros((center_position.shape[0],1))\n idx = 0\n for trajectory in traj_matlab: #list comprehension wouldnt work on mlarray\n registered_points, _, regError = self.perform_reg(self.aorta, trajectory, nargout=3)\n errors[idx] = regError / trajectory.size[0]\n idx += 1\n return errors, registered_points"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns the highest magnification for the slide | def highest_mag(slide):
return int(slide.properties['aperio.AppMag']) | [
"def get_thumbnail_magnification(slide):\n ratio = np.asarray(slide.dimensions) / np.asarray(slide.associated_images[\"thumbnail\"].size)\n # np.sqrt(np.prod(ratio))\n return ratio",
"def get_size_for_mag(slide, mag):\n max_size = slide.dimensions\n max_mag = highest_mag(slide)\n downsample = max_mag/mag\n return [np.int(np.round(dim/downsample)) for dim in max_size]",
"def get_mile_per_galon_max(self):\n return self.car_dict['mile_per_galon_max']",
"def getNativeMagnification(self):\n pixelInfo = self._tiffDirectories[-1].pixelInfo\n mm_x = pixelInfo.get('mm_x')\n mm_y = pixelInfo.get('mm_y')\n # Estimate the magnification if we don't have a direct value\n mag = pixelInfo.get('magnification') or 0.01 / mm_x if mm_x else None\n return {\n 'magnification': mag,\n 'mm_x': mm_x,\n 'mm_y': mm_y,\n }",
"def get_level_for_mag(slide, mag):\n level_mags_rounded = list(np.round(level_mags(slide), decimals = 2))\n if mag in level_mags_rounded:\n return level_mags_rounded.index(mag)\n else: \n return None",
"def get_level_mag(slide, level):\n return level_mags(slide)[level]",
"def get_mag(self):\n return self.sem_api.Get('AP_MAG', 0)[1]",
"def level_mags(slide):\n return [highest_mag(slide)/downsample for downsample in slide.level_downsamples]",
"def largestResolution(resolutions):\n return resolutions[0]",
"def _select_largest_photo(self, sizes):\n\n max_size = 0\n photo = ''\n for size in sizes:\n w = size['width']\n h = size['height']\n if w * h >= max_size:\n max_size = w * h\n photo = size['url']\n return photo",
"def _get_max_scale(self) -> int:",
"def satellite_maximum(self):\n raise NotImplementedError(\"satellite_maximum\")\n index = np.argmax(self._sat_magnification)\n return (self._sat_time[index], self._sat_magnification[index], self._sat_magnification[index])",
"def max(self) -> int:\n return self.sky[\"max\"]",
"def spindlemax(self):\n if(self.gearbox.curratio != 0.0):\n return self.io.param.motor_max / self.gearbox.curratio\n else:\n return 0.0",
"def maxResolution(self,wave = None):\n\n d = 2000.0*self.height*math.tan(self.angle/2) # Max pathlength in microns.\n dn = self.n.getDerivative(wave) # dn/dy of materail\n return d*dn #",
"def max_scale_image(self):\n maximum = np.argmax(self.transform, 0)\n return self.scale_array[maximum] * (self.support.sum(0) > 0)",
"def sort_maxside(sprite):\n return max(sprite.width, sprite.height)",
"def get_max(self):",
"def getImageMax(self):\n fname = '%s::%s'%(self.__class__.__name__, self.getImageMax.__name__)\n if (not self.lhaveImage):\n print(\"%s: DSM image not yet computed\"%fname)\n return None, None\n maxIndex = c_int(1)\n maxValue = c_float(1)\n ierr = c_int(1)\n self.lib.xcloc_getImageMax(maxIndex, maxValue, ierr)\n if (ierr.value != 0):\n print(\"%s: Failed to get max value and index of DSM image\"%fname)\n return None, None\n imax = maxIndex.value - 1 # Fortran to C\n vmax = maxValue.value\n return imax, vmax"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns the magnification for each level in a slide | def level_mags(slide):
return [highest_mag(slide)/downsample for downsample in slide.level_downsamples] | [
"def get_level_mag(slide, level):\n return level_mags(slide)[level]",
"def get_thumbnail_magnification(slide):\n ratio = np.asarray(slide.dimensions) / np.asarray(slide.associated_images[\"thumbnail\"].size)\n # np.sqrt(np.prod(ratio))\n return ratio",
"def get_level_for_mag(slide, mag):\n level_mags_rounded = list(np.round(level_mags(slide), decimals = 2))\n if mag in level_mags_rounded:\n return level_mags_rounded.index(mag)\n else: \n return None",
"def _calculate_magnification(self, times):\n if self._model.n_lenses == 2:\n factor = 10.\n params = self._model.parameters\n t_1 = params.t_0 - factor * params.t_E\n t_2 = params.t_0 + factor * params.t_E\n self._model.set_magnification_methods([t_1, 'VBBL', t_2])\n self._model.set_default_magnification_method(\n 'point_source_point_lens')\n\n magnification = self._model.magnification(times)\n return magnification",
"def get_size_for_mag(slide, mag):\n max_size = slide.dimensions\n max_mag = highest_mag(slide)\n downsample = max_mag/mag\n return [np.int(np.round(dim/downsample)) for dim in max_size]",
"def highest_mag(slide):\n return int(slide.properties['aperio.AppMag'])",
"def get_level_size(slide, level):\n return slide.level_dimensions[level]",
"def getNativeMagnification(self):\n pixelInfo = self._tiffDirectories[-1].pixelInfo\n mm_x = pixelInfo.get('mm_x')\n mm_y = pixelInfo.get('mm_y')\n # Estimate the magnification if we don't have a direct value\n mag = pixelInfo.get('magnification') or 0.01 / mm_x if mm_x else None\n return {\n 'magnification': mag,\n 'mm_x': mm_x,\n 'mm_y': mm_y,\n }",
"def level_to_mag(level: Optional[int], min_mag: float,\n max_mag: float) -> float:\n if level is None:\n return round(np.random.rand() * (max_mag - min_mag) + min_mag, 1)\n else:\n return round(level / _MAX_LEVEL * (max_mag - min_mag) + min_mag, 1)",
"def zoomlevels(self):\n return self._bboxes[0][1] #TODO: merge all coverages",
"def _magsamples(self):\n if self._derived_properties[\"magsamples\"] is None:\n if self.lbda is None:\n raise AttributeError(\"lbda not set.\")\n self.derive_magsamples()\n \n return self._derived_properties[\"magsamples\"]",
"def get_image_from_slide( file , mag=1.25 ): \n\n # ----- Check if slide is already the slide or the input name -----\n if type(file) is str:\n # Read the slide \n slide = large_image.getTileSource( file )\n else:\n slide = file \n \n # ----- Get slide at given magnification -----\n if mag == 'base':\n mag = slide.getNativeMagnification()[ 'magnification' ] \n\n image , _ = slide.getRegion( scale = dict( magnification=mag ),\n format = large_image.tilesource.TILE_FORMAT_NUMPY )\n\n return image[:,:,:3]",
"def extract_level_from_name(self):\n images = glob.glob(os.path.join(self.frame_dir, '*'))\n level = []\n for i, im in enumerate(images):\n base, tail = os.path.split(im)\n name = tail.split('.')[-2]\n number = name.split('_')[-1]\n level.append(float(number))\n return np.array(level)",
"def zoomlevels(self):\n return self._bboxes[0][1] #TODO: merge all coverages",
"def getMagnitudes(self):\n return self._bmag, self._vmag, self._jmag, self._hmag, self._kmag",
"def zoomlevels(self):\n pages = []\n lastwidth = None\n result = []\n for page in self.pages:\n if page.tags[\"SamplesPerPixel\"].value != 1: continue\n if page.imagewidth != lastwidth:\n lastwidth = page.imagewidth\n if pages: result.append(QPTiffZoomLevel(pages))\n pages = []\n pages.append(page)\n if pages: result.append(QPTiffZoomLevel(pages))\n return result",
"def lum2mag(self, luminosity):\n return 4.76 - 2.5*np.log10(luminosity)",
"def reducedimensions(image, lane_locs, character_locs, mob_locs):",
"def mole_ratio(self):\n return self._model.mole_ratio[self._shot_start:]"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns the dimensions of a level | def get_level_size(slide, level):
return slide.level_dimensions[level] | [
"def level_dimensions(self, level: int = 0) -> Tuple[int, int]:\n try:\n return self._wsi.level_dimensions[level]\n except IndexError:\n raise LevelError(\n f\"Level {level} not available. Number of available levels: \"\n f\"{len(self._wsi.level_dimensions)}\"\n )",
"def dimensions():",
"def depth(self):\n return _libsbml.Dimensions_depth(self)",
"def getDimensions(self):\n return self.get('cube.dimensions')",
"def GetDimensions(self):\n ...",
"def get_map_size(level):\n if level < 5:\n return 5, 5\n if level < 70:\n return 10, 10\n if level < 150:\n return 25, 25\n return 50, 50",
"def get_dimension_length(self):\n pass",
"def size(self, level=None):\n if level is None:\n return len(self._nodes)\n else:\n try:\n level = int(level)\n return len(\n [\n node\n for node in self.all_nodes_itr()\n if self.level(node.identifier) == level\n ]\n )\n except Exception:\n raise TypeError(\n \"level should be an integer instead of '%s'\" % type(level)\n )",
"def levels(self) -> List[int]:\n return list(range(len(self._wsi.level_dimensions)))",
"def dimensions(self):\n return self._dimensions",
"def getDimensions( self ) :\n\n return( 1 )",
"def getDepth(self):\n return _libsbml.Dimensions_getDepth(self)",
"def levshape(self) -> Shape:\n return tuple(len(x) for x in self.levels)",
"def dims(self):\n return self._dims",
"def get_dimensions(self):\n\t\treturn SCREEN_DATA_MAP[self.screen_key][DIMENSIONS]",
"def len_on_level(d, level):\n counter = 0\n for node in DictTree.v_level(d, level-1):\n counter += DictTree.length(node)\n return counter",
"def dimension(self) -> float:\n return self._dimensions",
"def n_levels(self):\n return len(self.scales)",
"def dimensions(self):\n if self._dimensions is None:\n self._dimensions = [\n Dimension(x) for x in self.metadata[\"variables\"]\n ]\n return self._dimensions"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns the magnification at a particular level | def get_level_mag(slide, level):
return level_mags(slide)[level] | [
"def level_to_mag(level: Optional[int], min_mag: float,\n max_mag: float) -> float:\n if level is None:\n return round(np.random.rand() * (max_mag - min_mag) + min_mag, 1)\n else:\n return round(level / _MAX_LEVEL * (max_mag - min_mag) + min_mag, 1)",
"def _calculate_magnification(self, times):\n if self._model.n_lenses == 2:\n factor = 10.\n params = self._model.parameters\n t_1 = params.t_0 - factor * params.t_E\n t_2 = params.t_0 + factor * params.t_E\n self._model.set_magnification_methods([t_1, 'VBBL', t_2])\n self._model.set_default_magnification_method(\n 'point_source_point_lens')\n\n magnification = self._model.magnification(times)\n return magnification",
"def getNativeMagnification(self):\n pixelInfo = self._tiffDirectories[-1].pixelInfo\n mm_x = pixelInfo.get('mm_x')\n mm_y = pixelInfo.get('mm_y')\n # Estimate the magnification if we don't have a direct value\n mag = pixelInfo.get('magnification') or 0.01 / mm_x if mm_x else None\n return {\n 'magnification': mag,\n 'mm_x': mm_x,\n 'mm_y': mm_y,\n }",
"def get_level_for_mag(slide, mag):\n level_mags_rounded = list(np.round(level_mags(slide), decimals = 2))\n if mag in level_mags_rounded:\n return level_mags_rounded.index(mag)\n else: \n return None",
"def level_mags(slide):\n return [highest_mag(slide)/downsample for downsample in slide.level_downsamples]",
"def get_thumbnail_magnification(slide):\n ratio = np.asarray(slide.dimensions) / np.asarray(slide.associated_images[\"thumbnail\"].size)\n # np.sqrt(np.prod(ratio))\n return ratio",
"def lum2mag(self, luminosity):\n return 4.76 - 2.5*np.log10(luminosity)",
"def highest_mag(slide):\n return int(slide.properties['aperio.AppMag'])",
"def get_mag(self):\n return self.sem_api.Get('AP_MAG', 0)[1]",
"def get_mile_per_galon_max(self):\n return self.car_dict['mile_per_galon_max']",
"def get_level_size(slide, level):\n return slide.level_dimensions[level]",
"def mag2lum(self, magnitude):\n return 10**((4.76 - magnitude)/2.5)",
"def mole_ratio(self):\n return self._model.mole_ratio[self._shot_start:]",
"def resolution(self, level):\n return 2 ** (level - 1)",
"def magnitude(x):\n return x.magnitude if hasattr(x, 'magnitude') else x",
"def mario_number(level):\n if level == 11:\n return 1\n elif :\n \n else:",
"def glare_matte_mapping(level, src_img):\n width, height = src_img.size\n cov = (level*50) ** 2\n return glare_matte(src_img, [([np.random.uniform(0, width),\n np.random.uniform(0, height)],\n [[cov, 0], [0, cov]], level*100)], level)",
"def get_mile_per_galon_min(self):\n return self.car_dict['mile_per_galon_min']",
"def difficulty(mag):\n mag = float(mag)\n if mag <= -4:\n return \"Visible in daytime.\"\n elif mag <= 6:\n return \"Visible at night.\"\n else:\n flux = mag_def(\"%s x\" % mag)\n needed_flux = mag_def(\"6 x\")\n eye_area = math.pi * (0.005**2)\n needed_power = needed_flux * eye_area\n diameter = 2 * math.sqrt(needed_power / (flux*math.pi))\n return \"%s m telescope needed.\" % diameter"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Get the level corresponding to a certain magnification, if available | def get_level_for_mag(slide, mag):
level_mags_rounded = list(np.round(level_mags(slide), decimals = 2))
if mag in level_mags_rounded:
return level_mags_rounded.index(mag)
else:
return None | [
"def get_level_mag(slide, level):\n return level_mags(slide)[level]",
"def level_to_mag(level: Optional[int], min_mag: float,\n max_mag: float) -> float:\n if level is None:\n return round(np.random.rand() * (max_mag - min_mag) + min_mag, 1)\n else:\n return round(level / _MAX_LEVEL * (max_mag - min_mag) + min_mag, 1)",
"def getLevel(unique_name):",
"def level_mags(slide):\n return [highest_mag(slide)/downsample for downsample in slide.level_downsamples]",
"def get_luminosity(name):\n all_data = mc.get('sensor_values')\n name = _lookup(name)\n try:\n return all_data[name][3]\n except KeyError:\n raise KeyError(\"No sensor with that name\")",
"def getNativeMagnification(self):\n pixelInfo = self._tiffDirectories[-1].pixelInfo\n mm_x = pixelInfo.get('mm_x')\n mm_y = pixelInfo.get('mm_y')\n # Estimate the magnification if we don't have a direct value\n mag = pixelInfo.get('magnification') or 0.01 / mm_x if mm_x else None\n return {\n 'magnification': mag,\n 'mm_x': mm_x,\n 'mm_y': mm_y,\n }",
"def zoomlevels(self):\n return self._bboxes[0][1] #TODO: merge all coverages",
"def get_level(rol):\n\treturn rol.level",
"def getLevel(self, level):\n mingroup = None\n groups = self.console.storage.getGroups()\n\n for x in groups:\n\n if x.level < level:\n continue\n\n if mingroup is None:\n mingroup = x\n continue\n\n if x.level < mingroup.level:\n mingroup = x\n\n return mingroup.name",
"def lum2mag(self, luminosity):\n return 4.76 - 2.5*np.log10(luminosity)",
"def highest_mag(slide):\n return int(slide.properties['aperio.AppMag'])",
"def _get_ms_level(self):\n try:\n return self.scan[\"ms level\"]\n except KeyError:\n return 0",
"def resolution(self, level):\n return 2 ** (level - 1)",
"def zoomlevels(self):\n return self._bboxes[0][1] #TODO: merge all coverages",
"def mag2lum(self, magnitude):\n return 10**((4.76 - magnitude)/2.5)",
"def _level_info(entity):\n if entity.is_max_level():\n return 'Maxed'\n if entity.max_level is not None:\n return '{entity.level}/{entity.max_level}'.format(entity=entity)\n return entity.level",
"def get_level(self):\r\n \r\n return self.level",
"def extract_level_from_name(self):\n images = glob.glob(os.path.join(self.frame_dir, '*'))\n level = []\n for i, im in enumerate(images):\n base, tail = os.path.split(im)\n name = tail.split('.')[-2]\n number = name.split('_')[-1]\n level.append(float(number))\n return np.array(level)",
"def get_level(self):\r\n \r\n return self.level"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |