Columns:
    query       string    (lengths 9 to 9.05k)
    document    string    (lengths 10 to 222k)
    negatives   sequence  (lengths 19 to 20)
    metadata    dict
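The four columns above follow a common layout for retrieval training data: a natural-language query, one matching code document, a list of hard-negative code snippets, and a metadata dict naming the training objective. As a minimal sketch of reading rows with this layout, assuming the data is loaded as a Hugging Face dataset; the dataset path below is a placeholder, not this dataset's actual identifier:

from datasets import load_dataset

# Placeholder path; substitute the real dataset identifier.
ds = load_dataset("user/code-retrieval-triplets", split="train")

row = ds[0]
query = row["query"]          # docstring-style description
document = row["document"]    # the matching code snippet
negatives = row["negatives"]  # 19-20 non-matching code snippets
objective = row["metadata"]["objective"]

print(len(negatives), "negatives for query:", query[:60])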
Merges metric data from a snapshot. This is used both when merging data from a single transaction into the main stats engine and when performing a rollback merge. In either case, the merge is done in exactly the same way.
def merge_metric_stats(self, snapshot):
    if not self.__settings:
        return

    for key, other in six.iteritems(snapshot.__stats_table):
        stats = self.__stats_table.get(key)
        if not stats:
            self.__stats_table[key] = other
        else:
            stats.merge_stats(other)
[ "def merge(self, snapshot):\n\n if not self.__settings:\n return\n\n self.merge_metric_stats(snapshot)\n self._merge_transaction_events(snapshot)\n self._merge_synthetics_events(snapshot)\n self._merge_error_events(snapshot)\n self._merge_error_traces(snapshot)\n self._merge_custom_events(snapshot)\n self._merge_span_events(snapshot)\n self._merge_sql(snapshot)\n self._merge_traces(snapshot)", "def rollback(self, snapshot):\n\n if not self.__settings:\n return\n\n _logger.debug('Performing rollback of data into '\n 'subsequent harvest period. Metric data and transaction events'\n 'will be preserved and rolled into next harvest')\n\n self.merge_metric_stats(snapshot)\n self._merge_transaction_events(snapshot, rollback=True)\n self._merge_synthetics_events(snapshot, rollback=True)\n self._merge_error_events(snapshot)\n self._merge_custom_events(snapshot, rollback=True)\n self._merge_span_events(snapshot, rollback=True)", "def merge_custom_metrics(self, metrics):\n\n if not self.__settings:\n return\n\n for name, other in metrics:\n key = (name, '')\n stats = self.__stats_table.get(key)\n if not stats:\n self.__stats_table[key] = other\n else:\n stats.merge_stats(other)", "def _mergeBHSnapshotFiles(run, logger):\n\n logger.info(\"BHSnapshotData._mergeBHSnapshotFiles()\")\n\n # Randomize order of snapshots\n snapList = np.arange(NUM_SNAPS-1)\n np.random.shuffle(snapList)\n\n # Initialize variables\n count = 0\n newFiles = 0\n oldFiles = 0\n num_pos = 0\n num_neg = 0\n num_val = 0\n num_tar = 0\n\n # Load BH Mergers\n # ---------------\n logger.info(\"Loading BH Mergers\")\n mrgs = mergers.load_fixed_mergers(run, loadsave=True)\n numMergers = mrgs[MERGERS.NUM]\n logger.debug(\"Loaded %d mrgs\" % (numMergers))\n\n # Initialize storage\n allData = _initStorage(numMergers)\n\n # Load each snapshot file\n # -----------------------\n logger.warning(\"Iterating over snapshots\")\n beg = datetime.now()\n pbar = zio.getProgressBar(NUM_SNAPS-1)\n for snap in snapList:\n\n # Get mrgs for just after this snapshot (hence '+1')\n mrgs = mrgs[MERGERS.MAP_STOM][snap+1]\n nums = len(mrgs)\n targetIDs = mrgs[MERGERS.IDS][mrgs]\n logger.debug(\"- Snap %d, Count %d. 
%d Mergers\" % (snap, count, nums))\n\n # Load Snapshot Data\n logger.debug(\"- Loading single snapshot\")\n data, pos, neg, new = _loadSingleSnapshotBHs(run, snap, numMergers, mrgs, targetIDs, logger,\n loadsave=True)\n logger.debug(\"- - pos %d, neg %d, new %d\" % (pos, neg, new))\n\n # Store to dictionary\n valids = data[BH_SNAP.VALID]\n numValid = np.count_nonzero(valids)\n numTargets = np.count_nonzero(data[BH_SNAP.TARGET] > 0)\n\n # Copy valid elements\n allData[BH_SNAP.TARGET][valids] = data[BH_SNAP.TARGET][valids]\n allData[BH_SNAP.VALID][valids] = data[BH_SNAP.VALID][valids]\n for key in SNAPSHOT_FIELDS:\n allData[key][valids] = data[key][valids]\n\n # Collect and log data\n if (new == 1):\n newFiles += 1\n logger.debug(\"- - New\")\n logger.debug(\"- - - pos %d, neg %d, expected %d\" % (pos, neg, pos+neg, nums))\n logger.debug(\"- - - Targets %d, Valid %d\" % (numTargets, numValid))\n else:\n oldFiles += 1\n pos = numValid\n neg = 2*nums - pos\n logger.debug(\"- - Old\")\n logger.debug(\"- - - pos %d, expected %d, neg %d\" % (pos, nums, neg))\n logger.debug(\"- - - Targets %d, Valid %d\" % (numTargets, numValid))\n\n # Increment tracking data\n num_pos += pos\n num_neg += neg\n num_val += numValid\n num_tar += numTargets\n count += 1\n pbar.update(count)\n\n pbar.finish()\n end = datetime.now()\n\n logger.info(\"Done after %s\" % (str(end-beg)))\n logger.info(\"%d new, %d old. Pos %d, Neg %d\" % (newFiles, oldFiles, num_pos, num_neg))\n logger.info(\"Targets %d, Valid %d\" % (num_tar, num_val))\n numValid = np.count_nonzero(allData[BH_SNAP.VALID])\n logger.info(\"%d/%d = %.4f valid\" % (numValid, 2*numMergers, 0.5*numValid/numMergers))\n\n return allData", "def add_snapshot(self, snapshot):\n self.last_snapshot = snapshot", "def merge_df(summary_df, metric_df, expr_name=None):\n df = pd.merge(metric_df, summary_df, how=\"outer\", on=\"name\")\n # replace mem operation's utilization (NaN because they are\n # in the summary but not the metric report) with 0\n df = df.fillna(0)\n df = df.sort_values(by=[\"time_%\"], ascending=False)\n convert_type = {\n 'time_%': 'float64',\n 'time_ms': 'float64',\n 'calls': 'int32',\n 'avg_ms': 'float64',\n }\n df = df.astype(convert_type)\n if expr_name is not None:\n df.to_csv(f\"{LOG_DIR}{expr_name}/merged.csv\")\n return df", "def merge(data1, data2, ContainerType=pg.DataContainer, snap=0.001):\n data = ContainerType(data1)\n data.add(data2, snap)\n return data", "def _merge_report(self, target, new):\n time = None\n if 'ts' in new['parsed']:\n time = new['parsed']['ts']\n\n if (target.get('lastSeenDate', None) and\n time and\n target['lastSeenDate'] < time):\n target['lastSeenDate'] = time\n\n query_millis = int(new['parsed']['stats']['millis'])\n target['stats']['totalTimeMillis'] += query_millis\n target['stats']['count'] += 1\n target['stats']['avgTimeMillis'] = target['stats']['totalTimeMillis'] / target['stats']['count']", "def merge(self, dataset):\n def merge_data(source, dest):\n for key, value in source.items():\n if isinstance(value, dict):\n merge_data(value, dest.setdefault(key, {}))\n else:\n dest[key] = value\n return dest\n\n merge_data(dataset.data, self._data)\n\n for h in dataset.task_history:\n if h not in self._task_history:\n self._task_history.append(h)", "def merge_snapshot(self):\n disks = self.get_disks()\n disk_files_tree = []\n for disk in disks:\n disk_files_tree += (DiskImageHelper.get_backing_files_tree(disk.file))\n merge_snapshot_cmd = \"virsh blockpull --domain {domain_name} {disk_path} --wait\".format(\n 
domain_name=self.name, disk_path=disk.file)\n\n logging.debug(\"Executing: '%s'\" % merge_snapshot_cmd)\n logging.info(\"Merging base to new snapshot for '%s' device\" % disk.device)\n\n # launch command\n merge_snapshot_cmds = shlex.split(merge_snapshot_cmd)\n merge_snapshot = subprocess.Popen(merge_snapshot_cmds, stdout=subprocess.PIPE, stderr=subprocess.PIPE,\n shell=False)\n\n # wait to terminate\n status = merge_snapshot.wait()\n\n if status != 0:\n logging.error(\"Error for '%s': %s\" % (merge_snapshot_cmd, merge_snapshot.stderr.read()))\n logging.critical(\"{exe} returned {status} state\".format(exe=merge_snapshot_cmds[0], status=status))\n raise Exception(\"blockpull didn't work properly\")\n\n current_disk_files = [disk.file for disk in self.get_disks()]\n\n # remove old disk device files without current ones\n for file in [disk_file_tree for disk_file_tree in disk_files_tree if disk_file_tree not in current_disk_files]:\n logging.info(\"Removing old disk file: '%s'\" % file)\n os.remove(file)", "def get_snapshot_metadata(self, snapshot):\n snapshot = self._get_resource(_snapshot.Snapshot, snapshot)\n return snapshot.fetch_metadata(self)", "def mergeAggregatedCsvData(self, contexts, obj, aggData1, aggData2):\n return aggData1 + aggData2", "def from_snapshot_to_aggregate_root(\n snapshot: dict, current_hash: int, decoder: Callable = jsonpickle.decode\n) -> Union[AggregateRoot, None]:\n\n if not snapshot:\n return None\n\n data, hash = snapshot['data'], snapshot['hash']\n if data and current_hash == hash:\n return decoder(data)\n else:\n return None", "def revert_to_snapshot(self, context, share, snapshot):\n\n reservations = self._handle_revert_to_snapshot_quotas(\n context, share, snapshot)\n\n try:\n if share.get('has_replicas'):\n self._revert_to_replicated_snapshot(\n context, share, snapshot, reservations)\n else:\n self._revert_to_snapshot(\n context, share, snapshot, reservations)\n except Exception:\n with excutils.save_and_reraise_exception():\n if reservations:\n QUOTAS.rollback(\n context, reservations,\n share_type_id=share['instance']['share_type_id'])", "def snapshot(snapshot_type, result_q, time_delta):", "def _read_job_metrics(self, data):\n\n stats = data['stats']\n\n # In case of multiple revisions, we pick the first revision with revision set to a major revision\n has_multiple_revisions = True if len(data['revisions']) > 1 else False\n if has_multiple_revisions:\n major_revisions = [s for s in data['revisions'] if s['revision'] in MAJOR_REVISIONS]\n if not major_revisions:\n raise Exception('Cannot find a major revision for job id: {}'.format(self.job_id))\n\n logger.debug(\"Job '{}' has multiple revisions, only '{}' stats will be used.\".format(\n self.job_id, major_revisions[0]['revision']))\n stats = [s for s in data['stats'] if s['revision'] == major_revisions[0]['revision']]\n\n for operation in stats:\n if operation['type'] != 'stress':\n continue\n operation_name = operation.get('test', operation['id'])\n self.metrics[operation_name] = {}\n s = self.metrics[operation_name]\n for m in self.metrics_list:\n try:\n s[m] = float(operation['op rate'].split(' ')[0].replace(',',''))\n except KeyError:\n pass", "def load_from_snapshot(self, snapshot):\n\n data = self.load_json(snapshot)\n for key in data:\n setattr(self, key.lower(), data[key])\n\n # This is a Gurobi specific step.\n # If reading a model from another service, please replace this line.\n self.model = gp.read(self.model_name)\n \n self.add_modules(self.module_names)\n\n # Increment the solve count 
and update the filename\n # so we don't overwrite the current snapshot.\n self.solve_count += 1\n self.update_filename()\n \n for function, args, kwargs in self.function_list:\n self.meta_function(function, args, kwargs, no_record=True)", "def _parse_snapshot_hits(self, file_obj):\n for _ in range(self.n_snapshot_hits):\n dom_id, pmt_id = unpack('<ib', file_obj.read(5))\n tdc_time = unpack('>I', file_obj.read(4))[0]\n tot = unpack('<b', file_obj.read(1))[0]\n self.snapshot_hits.append((dom_id, pmt_id, tdc_time, tot))", "def _aggregate_log_values(self, source, dest):\n remove = []\n for key, item in source.items():\n if \"data\" not in item:\n # Assume it's a sub-group\n dest[key] = {}\n self._aggregate_log_values(item, dest[key])\n else:\n aggregator = self._get_aggregator_for_key(key, item['agg'])\n value = aggregator(item['data'])\n if item['precision'] is not None:\n value = round(value, item['precision'])\n dest[key] = value\n if item['scope'] == 'get':\n remove.append(key)\n for key in remove:\n del source[key]" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Merges in a set of custom metrics. The metrics should be provided as an iterable where each item is a tuple of the metric name and the accumulated stats for the metric.
def merge_custom_metrics(self, metrics):
    if not self.__settings:
        return

    for name, other in metrics:
        key = (name, '')
        stats = self.__stats_table.get(key)
        if not stats:
            self.__stats_table[key] = other
        else:
            stats.merge_stats(other)
[ "def add_metrics(self, metrics: List[Tuple]):\n for metric in metrics:\n metric_name, metric_function = metric\n self[metric_name] = metric_function\n return self", "def add_metrics(self, metrics):\n for i,metric in enumerate(self.config.metrics):\n tf.summary.scalar(metric, metrics[i])", "def add_metrics(self, metrics):\n for i, metric in enumerate(self.config.metrics):\n tf.summary.scalar(metric, metrics[i])", "def aggregate(all_metrics, reducer, suffix):\n # Collect metric separately\n separated_metrics = {} # type: dict[frozenset, list[dict]]\n for el in all_metrics:\n key = frozenset(el[\"metric\"][\"dimensions\"].items())\n if key not in separated_metrics:\n separated_metrics[key] = [el]\n else:\n separated_metrics[key].append(el)\n\n # Collect all dimensions\n dims = {}\n for metric_dims in separated_metrics.keys():\n for prop, val in dict(metric_dims).iteritems():\n if prop in dims:\n dims[prop].add(val)\n else:\n dims[prop] = set(val)\n\n # Sort each metric\n for _, metric in separated_metrics.iteritems():\n metric.sort(key=lambda v: v[\"metric\"][\"timestamp\"])\n\n separated_metrics = sorted(separated_metrics.values(), key=len)\n separated_metrics.reverse()\n\n # Compute the new values\n new_values = []\n all_timestamps = map(\n lambda l: map(\n lambda x: x[\"metric\"][\"timestamp\"], l),\n separated_metrics)\n metric_count = len(separated_metrics)\n for index in range(0, len(separated_metrics[0])):\n new_value = reducer[0](\n separated_metrics[0][index][\"metric\"][\"value\"],\n metric_count)\n new_timestamp = separated_metrics[0][index][\"metric\"][\"timestamp\"]\n for metric_index in range(1, metric_count):\n new_value = reducer[1](new_value, helpers.interpolate(\n new_timestamp,\n separated_metrics[metric_index],\n all_timestamps[metric_index]\n ), metric_count)\n new_values.append((new_timestamp, new_value))\n\n # Aggregate the other details:\n metric_name = separated_metrics[0][0][\"metric\"][\"name\"] + suffix\n meta = separated_metrics[0][0][\"meta\"]\n new_metrics = [\n helpers.create_agg_metric(\n metric_name,\n meta,\n dims,\n val[0],\n val[1]\n ) for val in new_values\n ]\n return new_metrics", "def _build_metric_list_to_collect(self, additional_metrics):\n metrics_to_collect = {}\n\n # Defaut metrics\n for default_metrics in self.DEFAULT_METRICS.itervalues():\n metrics_to_collect.update(default_metrics)\n\n # Additional metrics metrics\n for option in additional_metrics:\n additional_metrics = self.AVAILABLE_METRICS.get(option)\n if not additional_metrics:\n if option in self.DEFAULT_METRICS:\n self.log.warning(\n u\"`%s` option is deprecated.\"\n u\" The corresponding metrics are collected by default.\", option\n )\n else:\n self.log.warning(\n u\"Failed to extend the list of metrics to collect:\"\n u\" unrecognized `%s` option\", option\n )\n continue\n\n self.log.debug(\n u\"Adding `%s` corresponding metrics to the list\"\n u\" of metrics to collect.\", option\n )\n metrics_to_collect.update(additional_metrics)\n\n return metrics_to_collect", "def add_stats(self):\n units = self.get_unit_map()\n for metric in self.raw_metrics:\n unit, metric_type = units.get(metric, (DEFAULT_UNIT, DEFAULT_TYPE))\n if metric_type == \"counter\":\n # Unit/Second\n unit = \"/\".join((unit, \"Second\"))\n self.add_derive_value(metric, unit, self.raw_metrics[metric], rate=True)\n else:\n self.add_gauge_value(metric, unit, self.raw_metrics[metric])", "def update_metrics(self, round_num: int, metrics_to_append: Dict[str, Any]):\n raise NotImplementedError", "def 
merge_measurements(measurements_list: List[Measurements]) -> \\\n Tuple[Measurements, List[MetricName]]:\n summed_metrics: Measurements = {}\n\n all_metrics_names = set() # Sum of set of names.\n for measurements in measurements_list:\n all_metrics_names.update(measurements.keys())\n\n for metric_name in all_metrics_names:\n if metric_name in METRICS_METADATA:\n\n if METRICS_METADATA[metric_name].type == MetricType.GAUGE:\n operation = lambda values: sum(values) / len(values) # noqa\n else:\n assert METRICS_METADATA[metric_name].type == MetricType.COUNTER\n operation = sum\n\n else:\n log.debug('By default, unknown metric %r uses \"sum\" as merge operation.', metric_name)\n operation = sum\n\n summed_metrics[metric_name] = operation(\n [measurements[metric_name] for measurements in measurements_list\n if metric_name in measurements])\n\n return summed_metrics", "def _build_metric_list_to_collect(self):\n metrics_to_collect = {}\n\n # Default metrics\n for default_metrics in itervalues(metrics.DEFAULT_METRICS):\n metrics_to_collect.update(default_metrics)\n\n # Additional metrics metrics\n for option in self._config.additional_metrics:\n if option not in metrics.AVAILABLE_METRICS:\n if option in metrics.DEFAULT_METRICS:\n self.log.warning(\n u\"`%s` option is deprecated. The corresponding metrics are collected by default.\", option\n )\n else:\n self.log.warning(\n u\"Failed to extend the list of metrics to collect: unrecognized `%s` option\", option\n )\n continue\n additional_metrics = metrics.AVAILABLE_METRICS[option]\n self.log.debug(u\"Adding `%s` corresponding metrics to the list of metrics to collect.\", option)\n metrics_to_collect.update(additional_metrics)\n\n return metrics_to_collect", "def accumulate_metrics__(metrics, cum_metrics, batch_metrics, validation_dataset=False):\n if metrics is not None:\n for metric in metrics:\n if validation_dataset:\n cum_metrics['val_%s' % metric] += batch_metrics['val_%s' % metric]\n else:\n cum_metrics[metric] += batch_metrics[metric]\n\n # check for loss separately\n if 'loss' not in metrics:\n if validation_dataset:\n cum_metrics['val_loss'] += batch_metrics['val_loss']\n else:\n cum_metrics['loss'] += batch_metrics['loss']\n return cum_metrics", "def _sum_metric(mol_h5_list, metric_name, metric_type):\n assert metric_type is LIBRARIES_METRIC or \\\n metric_type is GEM_GROUPS_METRIC\n combined = defaultdict(int)\n for mol_h5 in mol_h5_list:\n with MoleculeCounter.open(mol_h5, mode='r') as counter:\n for key, metrics in counter.get_metric(metric_type).iteritems():\n combined[key] += metrics[metric_name]\n return combined", "def __add__(self, other_metric_set):\n new_metric_set = self\n for name, metric in other_metric_set.items():\n new_metric_set[name] = metric\n return new_metric_set", "def optimize_metrics(self,\n metrics: list = None,\n verbose: bool = True):\n\n if metrics is None:\n metrics = self._supported_metrics\n else:\n metrics = [metric.lower() for metric in metrics]\n assert all(metric in self._supported_metrics for metric in metrics)\n for i in metrics:\n super(ThresholdOptimizer, self).__getattribute__(f'get_best_{i}_metrics')(verbose=verbose)", "def _add_metrics_to_metrics_provider(cls, mp, metrics):\n providers_info = cls._METRICS_PROVIDER_INFO[mp.type][mp.namespace][\"providers\"]\n provided_metrics = next(\n provider_info[\"provided_metrics\"]\n for provider_info in providers_info\n if provider_info[\"name\"] == mp.name\n )\n\n # Check if the provided metrics are equal to the metrics\n num_metrics = len(metrics)\n if 
len(provided_metrics) != num_metrics:\n raise ValueError(\n f\"Found {len(provided_metrics)} metrics for metrics provider \"\n f\"{mp.name}. Expected {num_metrics}.\"\n )\n\n # Check what type of provider is used at the moment\n if mp.type == MetricsProviderType.STATIC:\n valued_metric_class = StaticMetric\n elif mp.type == MetricsProviderType.PROMETHEUS:\n valued_metric_class = PrometheusMetric\n else:\n raise NotImplementedError()\n # Iterate through the provided metrics\n valued_metrics = []\n for i, (metric_name, metric_value) in enumerate(provided_metrics):\n metric = metrics[i]\n if metric.mp_metric_name != metric_name:\n msg = (\n f\"Unexpected name {metric.mp_metric_name}. Expected: {metric_name}.\"\n )\n raise ValueError(msg)\n valued_metric = valued_metric_class(metric, metric_value)\n valued_metrics.append(valued_metric)\n mp.set_valued_metrics(valued_metrics)", "def register_additional_metric_ops(\n self, metric_ops: Dict[str, Tuple[tf.Tensor, tf.Tensor]]) -> None:\n for metric_name, (value_op, update_op) in metric_ops.items():\n if metric_name in self._metric_names:\n raise ValueError('tried to register new metric with name %s, but a '\n 'metric with that name already exists.' % metric_name)\n self._metric_names.append(metric_name)\n self._metric_value_ops.append(value_op)\n self._metric_update_ops.append(update_op)\n\n # Update metric variables incrementally with only the new elements in the\n # metric_variables collection.\n collection = self._graph.get_collection(\n tf.compat.v1.GraphKeys.METRIC_VARIABLES)\n collection = collection[len(self._metric_variable_nodes):]\n\n # Note that this is a node_list - it's not something that TFMA\n # configures, but something that TF.Learn configures.\n #\n # As such, we also use graph.get_tensor_by_name directly, instead of\n # TFMA's version which expects names encoded by TFMA.\n for node in collection:\n self._metric_variable_nodes.append(node)\n with self._graph.as_default():\n placeholder = tf.compat.v1.placeholder(\n dtype=node.dtype, shape=node.get_shape())\n self._metric_variable_placeholders.append(placeholder)\n self._metric_variable_assign_ops.append(\n tf.compat.v1.assign(node, placeholder))\n\n with self._graph.as_default():\n self._all_metric_variable_assign_ops = tf.group(\n *self._metric_variable_assign_ops)\n self._all_metric_update_ops = tf.group(*self._metric_update_ops)\n self._reset_variables_op = tf.compat.v1.local_variables_initializer()\n self._session.run(self._reset_variables_op)\n\n self._perform_metrics_update_fn = self._session.make_callable(\n fetches=self._all_metric_update_ops,\n feed_list=self._perform_metrics_update_fn_feed_list)", "def generateDerivedMetrics(kernelMetrics, statistics, throughputMetrics = {}, countMetrics = {}, combinedMetrics = {}):\n\n # combine single metrics \n for combinedMetric in combinedMetrics:\n for kernel in kernelMetrics:\n logging.debug(\"Combining metrics for kernel {}\".format(kernel))\n # iterate over each run, take the number of runs to be\n # the length of the first source metric\n if combinedMetrics[combinedMetric][0] in kernelMetrics[kernel]:\n combinedMetricCounts = []\n sourceMetricMissing = False\n # go through each run\n for run in range(0, len(kernelMetrics[kernel][ combinedMetrics[combinedMetric][0] ])):\n\n combinedMetricRunCount = 0\n # take all the source metrics and add them into the\n # combined metric\n for sourceMetric in combinedMetrics[combinedMetric]:\n if sourceMetric in kernelMetrics[kernel]:\n # TODO delete once debugged print(\"runs of {} 
{}\".format(sourceMetric, kernelMetrics[kernel][sourceMetric]))\n combinedMetricRunCount = combinedMetricRunCount + kernelMetrics[kernel][sourceMetric][run]\n else:\n sourceMetricMissing = True\n logging.info(\"Source metric {} missing for combined metric {}, combined metric will not be\"\n \"added\".format(sourceMetric, combinedMetric))\n # append this run ot the end of the list\n combinedMetricCounts.append(combinedMetricRunCount)\n if not sourceMetricMissing:\n kernelMetrics[kernel][combinedMetric] = combinedMetricCounts\n\n # take throughputs and convert them to counts\n # doesn't use averages since that can skew results\n for throughputMetricName, countMetricName in zip(throughputMetrics, countMetrics):\n for kernel in kernelMetrics:\n logging.debug(\"Generating count metrics for {} in kernel {}\".format(throughputMetricName, kernel))\n if throughputMetricName in kernelMetrics[kernel]:\n counts = []\n for run in range(0, len(kernelMetrics[kernel][throughputMetricName])):\n count = kernelMetrics[kernel][throughputMetricName][run] * kernelMetrics[kernel][\"Duration\"][run]\n counts.append(count)\n kernelMetrics[kernel][countMetricName] = counts", "def compute_metrics(self, x, extra=None):\n if self.__metrics is None and extra is None:\n return None\n\n ret = {}\n if self.__metrics is not None:\n for m in self.__metrics:\n ret[m.name] = self._mdmetric(x, m)\n\n if extra is not None and extra.name not in ret:\n ret[extra.name] = self._mdmetric(x, extra)\n\n return ret", "def merge_accumulators(self, accumulators):\n raise NotImplementedError", "def _update_metric(\n metrics: List[mlflow.entities.Metric], dataset: MetricsDict = {}\n ) -> MetricsDict:\n for metric in metrics:\n metric_dict = {\"step\": metric.step, \"value\": metric.value}\n if metric.key in dataset:\n if isinstance(dataset[metric.key], list):\n dataset[metric.key].append(metric_dict)\n else:\n dataset[metric.key] = [dataset[metric.key], metric_dict]\n else:\n dataset[metric.key] = metric_dict\n return dataset" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Checks if the player is ready to be rendered on the character sheet.
def is_player_ready(self):
    player = self.base.game_instance['player_ref']
    if (player
            and base.player_states["is_alive"]
            and base.player_states["is_idle"]
            and not base.player_states["is_moving"]
            and not base.player_states["is_running"]
            and not base.player_states["is_crouch_moving"]
            and not base.player_states["is_crouching"]
            and not base.player_states["is_standing"]
            and not base.player_states["is_jumping"]
            and not base.player_states["is_h_kicking"]
            and not base.player_states["is_f_kicking"]
            and not base.player_states["is_using"]
            and not base.player_states["is_attacked"]
            and not base.player_states["is_busy"]
            and not base.player_states["is_turning"]
            and not base.player_states["is_mounted"]
            and not base.player_states["horse_riding"]
            and not self.base.game_instance["is_player_sitting"]
            and not player.get_python_tag("is_on_horse")):
        return True
    else:
        return False
[ "def is_ready(self):\n if self.game.has_started():\n return True\n return self.status == self.PLAYER_READY", "def ready(self):\n return self.shader is not None and self.texturesReady()", "def ready(self):\n return self.shader is not None and self.textureReady()", "def check_game_draw(self):\n if self._board.get_board_is_full():\n self._game_state = True", "def set_ready(self):\n if self.game.has_started() or self.status == self.PLAYER_READY:\n return\n self.status = self.PLAYER_READY\n self.game.player_is_ready()", "def start_game_check(self):\n if len(self.pending_players) > 0:\n return False\n else:\n return True", "def checkPlayer(self):\n player = self.findCharObj(\"MacReady\")\n return player.isAlive()", "def enough_players():\n return True", "def check_game_finished(self):\r\n for player in self.players:\r\n if player.own_cards():\r\n return False\r\n return True", "def is_win(self, player):\n\t\tpass", "def check_status():\n\n global is_treasure_near_player\n global is_trap_near_player\n\n is_treasure_near_player = is_item_near_player(map_generator.TREASURE_SYMBOL)\n is_trap_near_player = is_item_near_player(map_generator.TRAP_SYMBOL)", "def areComponentsReady(self):", "def check_player_status(self):\n status = 'Playing'\n if self.bag >= 3 :\n status = 'You won!'\n elif self.hp <= 0 :\n status = 'You lost'\n\n return status", "def wait_to_play(self):\n\n\t\tself.player_model.current_player = self.player_model.rival_player\n\t\tself.player_frame.prepare_to_wait_turn(self.player_model.rival_player.name, self.player_model.available_cells)", "def does_biome_need_to_be_drawn(self, player: Player):\n return True\n\n if -SCREEN_MAX_X < self.centre[0] - player.x < SCREEN_MAX_X:\n if -SCREEN_MAX_Y < self.centre[1] - player.y < SCREEN_MAX_Y:\n return True\n else:\n return False", "def set_not_ready(self):\n if self.game.has_started() or self.status == self.PLAYER_NOT_READY:\n return\n self.status = self.PLAYER_NOT_READY", "def won_game(self):\n for player in self.players:\n if len(player.cards) == 0:\n\n return True\n return False", "def texturesReady(self):\n return (self.modulateTexture is not None and\n self.clipTexture is not None and\n self.colourTexture is not None and\n self.modulateTexture.ready() and\n self.clipTexture .ready() and\n self.colourTexture .ready())", "def can_encounter(self, player):" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Run the script at the given path, catching exceptions. This function should only be used internally by Pyto.
def runScriptAtPath(path):
    sys.argv = [path]
    for arg in PytoClasses.Python.shared.args:
        sys.argv.append(str(arg))

    def run() -> None:
        os.system = PytoClasses.Python.shared.system
        directory = os.path.expanduser(os.path.dirname(path))
        sys.path.insert(0, directory)
        try:
            global __script__
            spec = importlib.util.spec_from_file_location("__main__", path)
            __script__ = importlib.util.module_from_spec(spec)
            spec.loader.exec_module(__script__)
            PytoClasses.Python.shared.values = [item for item in dir(__script__) if not item.startswith("__")]
        except SystemExit:
            print("SystemExit")
        except Exception as e:
            exc_type, exc_obj, exc_tb = sys.exc_info()
            extracts = traceback.extract_tb(sys.exc_info()[2])
            count = len(extracts)
            lineNumber = -1
            fileName = path
            for i, extract in enumerate(extracts):
                if extract[0] == fileName:
                    lineNumber = extract[1]
                    break
                count -= 1
            if (type(e) == SyntaxError):
                # The last word in a `SyntaxError` exception is the line number
                lineNumber = [int(s) for s in (str(e)[:-1]).split() if s.isdigit()][-1]
            PytoClasses.Python.shared.errorType = exc_type.__name__
            PytoClasses.Python.shared.errorReason = str(e)
            PytoClasses.EditorViewController.visible.showErrorAtLine(lineNumber)
            print(traceback.format_exc(limit=-count))
        sys.path.remove(directory)
        PytoClasses.ReviewHelper.shared.launches = PytoClasses.ReviewHelper.shared.launches+1
        PytoClasses.ReviewHelper.shared.requestReview()
        PytoClasses.Python.shared.isScriptRunning = False

    thread = threading.Thread(target=run, args=())

    def loop():
        while PytoClasses.Python.shared.isScriptRunning:
            time.sleep(1)
        ignoredThreads.append(thread)
        raise Exception("Stopped script!")

    def runLoop():
        try:
            loop()
        except:
            pass

    thread.start()
    runLoop()
    return __script__
[ "def run_file(file_path, globals_, script_dir=SCRIPT_DIR):\n fix_sys_path()\n script_name = os.path.basename(file_path)\n script_name = SCRIPT_EXCEPTIONS.get(script_name, script_name)\n script_path = os.path.join(script_dir, script_name)\n execfile(script_path, globals_)", "def script(self, path):\n exec(compile(open(path).read(), path, 'exec'), locals(), globals())", "def run_script():\n print(\"run script\")", "def runscript(script):\r\n addscriptpath(script)\r\n watchdog.reset()\r\n argv = sys.argv\r\n sys.argv = [ script ]\r\n execfile(script, globals())\r\n sys.argv = argv", "def run_script (script, *l) :\n if not os.path.exists (script) :\n raise PQHException (\"file %s not found\" % script)\n py = get_interpreter_path ()\n cmd = \"%s %s\" % (py, script)\n if len (l) > 0 :\n cmd += \" \" + \" \".join ( [str (x) for x in l])\n out,err = run_cmd (cmd)\n return out,err", "def run_script(self):\n script = askopenfilename(initialdir=normalize_path(),filetypes=SCRIPT_FILETYPES)\n if script in ('',(),None): # (representing the various ways no script was selected in the dialog)\n self.messageBar.response('Run canceled')\n else:\n execfile(script,__main__.__dict__)\n self.messageBar.response('Ran ' + script)\n sim_name_from_filename(script)\n self.title(topo.sim.name)", "def run_script(self):\n pass", "def run(self, script, *args, **kwargs):\n return self._run('run', script, *args, **kwargs)", "def run_python(self, params, script_path=None):\r\n if not script_path: script_path = self.bin_path\r\n full_path = join_path(self.get_dir_path(), script_path)\r\n return self._run([\"python\", full_path] + params)", "def run_script(script_name, namespace):", "def exec_file(self, path):\n assert os.path.isabs(path)\n\n source = None\n\n try:\n with open(path, 'rt') as fd:\n source = fd.read()\n except Exception as e:\n raise SandboxLoadError(self._context.source_stack,\n sys.exc_info()[2], read_error=path)\n\n self.exec_source(source, path)", "def _run_python_script(self, script):\n output = StringIO()\n command = f'\"{self._python_executable}\" -c \"{script}\"'\n self.output.info(f\"running {command}\")\n try:\n self.run(command, output, scope=\"run\")\n except ConanException:\n self.output.info(\"(failed)\")\n return None\n output = output.getvalue()\n # Conan is broken when run_to_output = True\n if \"\\n-----------------\\n\" in output:\n output = output.split(\"\\n-----------------\\n\", 1)[1]\n output = output.strip()\n return output if output != \"None\" else None", "def execute(self, code, environment = dict()):\r\n if not self.config.get('scripting', 'enable') and type(code) == str:\r\n self.send(code, log = False)\r\n else:\r\n if type(code) == str:\r\n c = compile(code, 'errors.log', 'exec')\r\n else:\r\n c = code\r\n eval(c, self.getEnvironment(environment))", "def exec_file(path: str, global_vars: Dict[str, Any]) -> None:\n with open(path) as file:\n exec(compile(file.read(), path, \"exec\"), global_vars) # pylint: disable=exec-used", "def main():\n ScriptDirectory(op.split(__file__)[0])()", "def run_file(path):\n if PY26:\n dirpath, name = splitname(path)\n found = imp.find_module(name, [dirpath])\n module = imp.load_module(\"__main__\", *found)\n return vars(module)\n else:\n return runpy.run_path(path, run_name=\"__main__\")", "def run_program(path, args=[], raises=DummyException):\n old_args = sys.argv\n assert all(isinstance(a, str) for a in args)\n warnings.simplefilter(\"ignore\", ResourceWarning)\n try:\n sys.argv = [path] + args\n with redirect_stdout(StringIO()) as output:\n 
with redirect_stderr(output):\n try:\n if '__main__' in sys.modules:\n del sys.modules['__main__']\n spec = spec_from_file_location('__main__.py', path)\n module = module_from_spec(spec)\n sys.modules['__main__'] = module\n spec.loader.exec_module(module)\n except raises:\n pass\n except SystemExit as e:\n if e.args != (0,):\n raise SystemExit(fix_ends(output.getvalue())) from e\n if raises is not DummyException:\n raise AssertionError(\"{} not raised\".format(raises))\n return fix_ends(output.getvalue())\n finally:\n sys.argv = old_args\n if '__main__' in sys.modules:\n sys.modules['__main__'].__dict__.clear()\n sys.modules.pop('__main__', None)\n gc.collect()", "def runScript(script):\n if script != \"\":\n exec(script, localDict, localDict)", "def main():\n if os.path.isdir(path):\n for filename in os.listdir(path):\n if filename.endswith('.asm'):\n execute_asm_file(path + '/' + filename, filename)\n else:\n execute_asm_file(path, path[path.rfind(\"/\") + 1:])" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Expected defaults when no project exists
def test_no_project_defaults(self):
    ep = exposed.ExposedProject()
    self.assertIsNone(ep.display)
    self.assertIsNone(ep.shared)
    self.assertIsNone(ep.settings)
    self.assertIsNone(ep.title)
    self.assertIsNone(ep.id)
    self.assertIsNone(ep.path())

    with self.assertRaises(RuntimeError):
        ep.title = 'Some Title'
[ "def test_get_project(self):\n pass", "def _determine_default_project(project=None):\n if project is None:\n project = _get_gcd_project()\n\n if project is None:\n project = _helpers._determine_default_project(project=project)\n\n return project", "def test_project(self):\n pass", "def test_create_project(self):\n pass", "def project():", "def project():\n pass", "def test_read_project(self):\n pass", "def test_replace_project(self):\n pass", "def test_validate_project(self):\n pass", "def test_get_projects(self):\n pass", "def test_create_project_request(self):\n pass", "def test_add_project(self):\n pass", "def test_no_such_project(self):\n project = cd.project.get_internal_project()\n cd.project.load(None)\n\n with self.assertRaises(Exception):\n self.run_step('FAKE')\n\n cd.project.load(project)", "def test_iam_project_get(self):\n pass", "def test_project_file(self):\n pass", "def test_provider_project_development_create(self):\n pass", "def get_user_default_project(user):\n name = f\"{user.username}'s Project\"\n user_projects = user.project_owner.filter(name=name, organization=user)\n\n if user_projects:\n project = user_projects[0]\n else:\n metadata = {\"description\": \"Default Project\"}\n project = Project.objects.create(\n name=name, organization=user, created_by=user, metadata=metadata\n )\n\n return project", "def test_provider_project_development_get(self):\n pass", "def test_all_projects(self):\n pass" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Should abort stopping and not raise an error when no internal step is available to stop.
def test_step_stop_aborted(self, _step: PropertyMock):
    _step.return_value = None
    es = exposed.ExposedStep()
    es.stop()
[ "def abort(self):\n self.__stop()", "def _force_stop(self):\n if self.force_stop_func(instance=self.instance):\n self.force_stop_func(instance=self.instance, _set=True)\n raise StoppedException", "def _gracefully_stop(self):\n pass", "def test_v1_stop(self):\n pass", "def abort(self):\n self._result = self.inst.abort(\"Abort forced by suite runner.\")\n return self._result", "def test_v1alpha3_stop(self):\n pass", "def abort(self):\n raise NotImplementedError", "def _check_stop_conditions(self):\n self.normal_stop_condition(self)\n try:\n self.abnormal_stop_condition(self)\n except StopIteration as e:\n raise AbnormalStopIteration(e)", "def stopSolution(self):\n raise NotImplementedError", "def abort(self):", "def stop(self, message):\r\n raise StopTestException(message)", "def STOP(event):\n raise Boom()", "def test_error_on_graceful_stop(self):\n # if not self.is_input:\n inst = self.get_fresh_error_instance()\n inst.comm.error_replace('n_msg')\n nt.assert_raises(MagicTestError, inst.stop)\n inst.comm.restore_all()\n inst.close_comm()\n assert(inst.is_comm_closed)", "def is_stop_when_fail(self):\n return self._is_stop_when_fail", "def stop(self):\n\t\tself._run_flag = False\n\t\tself.wait()", "def test_stop(self):\n pass", "def stop(self):\n self._stop_flag = True", "def stop(self):\n self._run_flag = False\n self.wait()", "def stopvm(self):\n raise" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Should abort stopping and not raise an error when no internal project is available to stop.
def test_project_stop_aborted(self, get_internal_project: MagicMock):
    get_internal_project.return_value = None
    ep = exposed.ExposedProject()
    ep.stop()
[ "def stopBuild(reason=\"<no reason given>\"):", "def _gracefully_stop(self):\n pass", "def abort(self):\n self.__stop()", "def test_provider_project_development_stop(self):\n pass", "def stopSolution(self):\n raise NotImplementedError", "def test_v1_stop(self):\n pass", "def test_v1alpha3_stop(self):\n pass", "def _force_stop(self):\n if self.force_stop_func(instance=self.instance):\n self.force_stop_func(instance=self.instance, _set=True)\n raise StoppedException", "def stopclean(self):\n raise Exception(\"Not implemented\")", "def stop(self):\n\t\tself._run_flag = False\n\t\tself.wait()", "def exit_engine(self):\n self.stop_flag = True", "def terminate(self):\n self._running = False", "def stop(self):\n self._run_flag = False\n self.wait()", "def stopvm(self):\n raise", "def abort(self):\n self._result = self.inst.abort(\"Abort forced by suite runner.\")\n return self._result", "def test_stop(self):\n pass", "def stop():\r\n\t\tglobal running\r\n\t\trunning = False", "def abort(self):\n raise NotImplementedError", "def _stopped(self):" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Should fail to get internal project and return None after eventually timing out.
def test_get_internal_project_fail(
        self,
        sleep: MagicMock,
        time_time: MagicMock,
        internal_project: PropertyMock
):
    project = exposed.ExposedProject()
    time_time.side_effect = range(20)
    internal_project.return_value = None
    result = project.get_internal_project()

    self.assertIsNone(result)
    self.assertEqual(10, sleep.call_count)
[ "def test_get_project(self):\n pass", "def get_project(con):\n try:\n return con.project_read(fq_name=conf.get('default_project', 'UNEXPECTED_VALUE'))\n except:\n log.debug('Unable to find project default-domain, admin:', exc_info=True)\n return None", "def Project(self):\n\n if not self.connected:\n return None\n\n try:\n return _ReadNoProxy(GOOGLE_GCE_METADATA_PROJECT_URI)\n except urllib2.HTTPError as e:\n raise MetadataServerException(e)\n except urllib2.URLError as e:\n raise CannotConnectToMetadataServerException(e)", "def test_iam_project_get(self):\n pass", "def test_get_project(self):\n self.assertEqual(self.remote_project.get_project(), self.project)", "def active_project() -> Optional[Project]:\n _check_active_client()\n _check_active_project()\n return _active_project", "def get_current_project():\n return get_from_session(KEY_PROJECT)", "def _test_get_my_project(api, project_id):\n # https://xyz.here.com/studio/project/5c54716d-f900-4b89-80ac-b21518e94b30\n project = api.get_project(project_id=\"5c54716d-f900-4b89-80ac-b21518e94b30\")\n print(project)", "def get_project(id):\n for p in get_projects():\n if p.id == id:\n return p\n return None", "def test_project_stop_aborted(self, get_internal_project: MagicMock):\n get_internal_project.return_value = None\n ep = exposed.ExposedProject()\n ep.stop()", "def get_user_project(self, owner_email, project_name):\n try:\n project_path = GitLabServer.build_server_project_path(project_name, owner_email)\n self._logger.info(f'Getting project info from server <= {project_path}')\n project = self._server.projects.get(project_path)\n return project\n except:\n return None", "def ex_get_project(self):\n response = self.connection.request('', method='GET').object\n return self._to_project(response)", "def ping_project() -> None:\n requests.get(MAIN_DOMAIN)", "def getMain(self):\n\n if self.__projects:\n return self.__projects[0]\n else:\n return None", "def get_project(issue):\n proj = None\n try:\n # This should handle the current API\n proj = issue['fields']['project']['key']\n except KeyError:\n # This shouldn't happen, but...\n proj = None\n\n return proj", "def test_no_such_project(self):\n project = cd.project.get_internal_project()\n cd.project.load(None)\n\n with self.assertRaises(Exception):\n self.run_step('FAKE')\n\n cd.project.load(project)", "def test_get_pending(self):\n pd_project = self._make_project(\n title='TestProject2',\n type=PROJECT_TYPE_PROJECT,\n parent=None,\n submit_status=SUBMIT_STATUS_PENDING_TASKFLOW,\n )\n\n request = self.req_factory.post(\n reverse('projectroles:taskflow_project_get'),\n data={\n 'project_uuid': str(pd_project.sodar_uuid),\n 'sodar_secret': settings.TASKFLOW_SODAR_SECRET,\n },\n )\n response = views.TaskflowProjectGetAPIView.as_view()(request)\n self.assertEqual(response.status_code, 404)", "def test_get_projects(self):\n pass", "def test_api_projects_get(self):\n pass" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Should write to the console using a write_source function call on the internal step report's stdout_interceptor.
def test_write_to_console(self, _step: PropertyMock):
    trials = [2, True, None, 'This is a test', b'hello']

    for message in trials:
        _step_mock = MagicMock()
        write_source = MagicMock()
        _step_mock.report.stdout_interceptor.write_source = write_source
        _step.return_value = _step_mock
        step = exposed.ExposedStep()
        step.write_to_console(message)

        args, kwargs = write_source.call_args
        self.assertEqual('{}'.format(message), args[0])
[ "def test_render_to_console(self, _step: PropertyMock):\n message = ' {{ a }} is not {{ b }}.'\n\n _step_mock = MagicMock()\n write_source = MagicMock()\n _step_mock.report.stdout_interceptor.write_source = write_source\n _step.return_value = _step_mock\n step = exposed.ExposedStep()\n step.render_to_console(message, a=7, b='happy')\n\n args, kwargs = write_source.call_args\n self.assertEqual('7 is not happy.', args[0])", "def test_writes_stdout(self):\n log.info(\"executing ExampleTestCase.test_writes_stdout\")\n print \"here's some text from tests.test_example:ExampleTestCase.test_writes_stdout\"", "def test_print_cmd_lines(self) -> None:\n\n save_stdout = sys.stdout\n\n try:\n def execfunc(target, source, env) -> None:\n pass\n\n a = SCons.Action.Action(execfunc)\n\n sio = io.StringIO()\n sys.stdout = sio\n a.print_cmd_line(\"foo bar\", None, None, None)\n s = sio.getvalue()\n assert s == \"foo bar\\n\", s\n\n finally:\n sys.stdout = save_stdout", "def _print_step(self, step_stream, step, env):\n step_stream.write_line(' '.join(map(_shell_quote, step['cmd'])))\n step_stream.write_line('in dir %s:' % (step.get('cwd') or os.getcwd()))\n for key, value in sorted(step.items()):\n if value is not None:\n step_stream.write_line(\n ' %s: %s' % (key, self._render_step_value(value)))\n step_stream.write_line('full environment:')\n for key, value in sorted(env.items()):\n step_stream.write_line(' %s: %s' % (key, value))\n step_stream.write_line('')", "def enable(self):\n self.out = StringIO()\n self._stdout = sys.stdout\n sys.stdout = self.out", "def javaScriptConsoleMessage(self, message, line_number, source_id):\n print 'Console:', message, line_number, source_id", "def print_cmd_line(s, target, src, env):\n sys.stdout.write(\" Making %s...\\n\"% (' and '.join([str(x) for x in target])))", "def test_capture_stdout():\n\n sys.stdout.write('Print to stdout')\n\n assert False", "def log_stdout(self, function):\n return function()", "def stdout(self) -> str:\n pass", "def stdout(self):\n if not hasattr(self, \"my_stdout_proxy\"):\n self.my_stdout_proxy = self.outfile_proxy()\n self.my_stdout_proxy_created = 1\n return self.my_stdout_proxy", "def test_export_custom(self): # pylint: disable=no-self-use\n mock_record_str = Mock(str)\n\n def formatter(record): # pylint: disable=unused-argument\n return mock_record_str\n\n mock_stdout = Mock()\n exporter = ConsoleLogExporter(out=mock_stdout, formatter=formatter)\n log_data = LogData(\n log_record=LogRecord(),\n instrumentation_scope=InstrumentationScope(\n \"first_name\", \"first_version\"\n ),\n )\n exporter.export([log_data])\n mock_stdout.write.assert_called_once_with(mock_record_str)", "def run_to_stdout(self) -> None:\n\n for line in self.run(rstrip=False):\n sys.stdout.write(line)", "def redirect_stdout():\n save_stdout = sys.stdout\n sys.stdout = _TQDMFile(sys.stdout)\n yield\n sys.stdout = save_stdout", "def test_unittests_changing_stdout(self):\n class TestTest(unittest.TestCase):\n\n def test_foo(self):\n sys.stdout = XMLTestRunnerTest.NullStream()\n\n runner = XMLTestRunner(self._stream)\n runner.run(unittest.makeSuite(TestTest))", "def redirect_stdout(self):\n self.stdout_old = sys.stdout\n self.stdout_redirect = StringIO()\n sys.stdout = self.stdout_redirect", "def add_std_out(self, message=None):\n if self.activated:\n self.test_cases[-1].stdout = self.format_error(message)", "def test_PrintLoudlySmoke(self):\n stage = self.ConstructStage()\n with self.OutputCapturer():\n stage._PrintLoudly('hi there')\n 
self.AssertOutputContainsLine(r'\\*{10}', check_stderr=True)\n self.AssertOutputContainsLine('hi there', check_stderr=True)", "def write_output(self):" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Should render to the console using a write_source function call on the internal step report's stdout_interceptor.
def test_render_to_console(self, _step: PropertyMock):
    message = ' {{ a }} is not {{ b }}.'

    _step_mock = MagicMock()
    write_source = MagicMock()
    _step_mock.report.stdout_interceptor.write_source = write_source
    _step.return_value = _step_mock
    step = exposed.ExposedStep()
    step.render_to_console(message, a=7, b='happy')

    args, kwargs = write_source.call_args
    self.assertEqual('7 is not happy.', args[0])
[ "def test_write_to_console(self, _step: PropertyMock):\n trials = [2, True, None, 'This is a test', b'hello']\n\n for message in trials:\n _step_mock = MagicMock()\n write_source = MagicMock()\n _step_mock.report.stdout_interceptor.write_source = write_source\n _step.return_value = _step_mock\n step = exposed.ExposedStep()\n step.write_to_console(message)\n\n args, kwargs = write_source.call_args\n self.assertEqual('{}'.format(message), args[0])", "def test_writes_stdout(self):\n log.info(\"executing ExampleTestCase.test_writes_stdout\")\n print \"here's some text from tests.test_example:ExampleTestCase.test_writes_stdout\"", "def test_print_cmd_lines(self) -> None:\n\n save_stdout = sys.stdout\n\n try:\n def execfunc(target, source, env) -> None:\n pass\n\n a = SCons.Action.Action(execfunc)\n\n sio = io.StringIO()\n sys.stdout = sio\n a.print_cmd_line(\"foo bar\", None, None, None)\n s = sio.getvalue()\n assert s == \"foo bar\\n\", s\n\n finally:\n sys.stdout = save_stdout", "def enable(self):\n self.out = StringIO()\n self._stdout = sys.stdout\n sys.stdout = self.out", "def _print_step(self, step_stream, step, env):\n step_stream.write_line(' '.join(map(_shell_quote, step['cmd'])))\n step_stream.write_line('in dir %s:' % (step.get('cwd') or os.getcwd()))\n for key, value in sorted(step.items()):\n if value is not None:\n step_stream.write_line(\n ' %s: %s' % (key, self._render_step_value(value)))\n step_stream.write_line('full environment:')\n for key, value in sorted(env.items()):\n step_stream.write_line(' %s: %s' % (key, value))\n step_stream.write_line('')", "def log_stdout(self, function):\n return function()", "def stdout(self):\n if not hasattr(self, \"my_stdout_proxy\"):\n self.my_stdout_proxy = self.outfile_proxy()\n self.my_stdout_proxy_created = 1\n return self.my_stdout_proxy", "def javaScriptConsoleMessage(self, message, line_number, source_id):\n print 'Console:', message, line_number, source_id", "def show_log(self):\n if self.output:\n te = TextEditor(\n self.output,\n title=_(\"Unit testing output\"),\n readonly=True,\n parent=self)\n te.show()\n te.exec_()", "def __execute_reporter(self):\n if not self.__args.report:\n return\n reporter.HTMLReporter().generate_report_from_file(\n self.__lst_json_files)", "def test_PrintLoudlySmoke(self):\n stage = self.ConstructStage()\n with self.OutputCapturer():\n stage._PrintLoudly('hi there')\n self.AssertOutputContainsLine(r'\\*{10}', check_stderr=True)\n self.AssertOutputContainsLine('hi there', check_stderr=True)", "def print_cmd_line(s, target, src, env):\n sys.stdout.write(\" Making %s...\\n\"% (' and '.join([str(x) for x in target])))", "def on_but_srcs(self) :\n self.exportLocalPars()\n txt = '\\n' + 50*'-' + '\\nSources from DB:\\n' \\\n + cp.blsp.txt_of_sources_in_run()\n logger.info(txt, __name__)", "def setup(self) -> \"None\":\n # Patch the renderer to extend the output height\n renderer._output_screen_diff = _patched_output_screen_diff\n\n if config.page and sys.stdout.isatty():\n # Use a temporary file as display output if we are going to page the output\n from tempfile import TemporaryFile\n\n self.out_file = TemporaryFile(\"w+\")\n\n else:\n if config.page:\n log.warning(\"Cannot page output because standard output is not a TTY\")\n # If we are not paging output, determine when to print it\n if config.dump_file is None or str(config.dump_file) in (\n \"-\",\n \"/dev/stdout\",\n ):\n self.out_file = sys.stdout\n elif str(config.dump_file) == \"/dev/stderr\":\n self.out_file = sys.stderr\n else:\n try:\n 
self.out_file = open(config.dump_file, \"w+\")\n except (\n FileNotFoundError,\n PermissionError,\n io.UnsupportedOperation,\n ) as error:\n log.error(error)\n log.error(\n f\"Output file `{config.dump_file}` cannot be opened. \"\n \"Standard output will be used.\"\n )\n self.out_file = sys.stdout\n\n # Ensure we do not recieve the \"Output is not a terminal\" message\n Vt100_Output._fds_not_a_terminal.add(self.out_file.fileno())\n # Do not use stderr instead of stdout if stdout is not a tty\n self.out_file = cast(\"TextIO\", self.out_file)\n self.output = create_output(self.out_file, always_prefer_tty=False)\n\n # Use the width and height of stderr (this gives us the terminal size even if\n # output is being piped to a non-tty)\n # if hasattr(self.output, '_get_size'):\n setattr(self.output, \"get_size\", create_output(stdout=sys.stderr).get_size)\n\n # Disable character position requests when dumping output to stop extra output\n # This also speeds things up as we do not need to wait for the response\n # Ignore typing here as mypy does not understand __class__\n class DumpingOutput(self.output.__class__): # type: ignore\n # Disable character position requests when dumping output\n responds_to_cpr = False\n\n # Patch the output to prevent CPR detection\n self.output.__class__ = DumpingOutput\n\n # Set pre-run commands\n self.pre_run.append(self.post_dump)", "def test_capture_stdout():\n\n sys.stdout.write('Print to stdout')\n\n assert False", "def show_output(self):\r\n if self.output_status == 'less':\r\n self.show_message(self.short_output)\r\n else:\r\n self.show_message(self.output)", "def test_export_custom(self): # pylint: disable=no-self-use\n mock_record_str = Mock(str)\n\n def formatter(record): # pylint: disable=unused-argument\n return mock_record_str\n\n mock_stdout = Mock()\n exporter = ConsoleLogExporter(out=mock_stdout, formatter=formatter)\n log_data = LogData(\n log_record=LogRecord(),\n instrumentation_scope=InstrumentationScope(\n \"first_name\", \"first_version\"\n ),\n )\n exporter.export([log_data])\n mock_stdout.write.assert_called_once_with(mock_record_str)", "def test_writes_stdout_from_cache(self, flags):\n with open(self._executable_file, \"w\") as executable_file:\n executable_file.write(_PYTHON_SHEBANG +\n \"import sys\\n\"\n \"sys.stdout.write(\\\"stdout\\\\n\\\")\\n\")\n\n with capture():\n run_executable(*flags)\n\n with capture() as captured:\n run_executable(*flags)\n self.assertEqual(captured.stdout.replace(\"\\r\\n\", \"\\n\"), \"stdout\\n\")", "def run_to_stdout(self) -> None:\n\n for line in self.run(rstrip=False):\n sys.stdout.write(line)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Should raise a ValueError when there is no current step for the write function call to operate on.
def test_write_to_console_fail(self, _step: PropertyMock):
    _step.return_value = None
    step = exposed.ExposedStep()

    with self.assertRaises(ValueError):
        step.write_to_console('hello')
[ "def save_step(self, step: 'BaseTransformer', context: 'CX') -> 'BaseTransformer':\n raise NotImplementedError()", "def bad_step(self):\n assert False, \"This step is meant to fail.\"", "def save_to_file(self, iter_num, iter_step):\n if iter_num%iter_step==0:\n current_date = datetime.datetime.now()\n year = current_date.year\n month = calendar.month_name[current_date.month]\n day = current_date.day\n\n attrs = {'cov': self.move.cov,\n 'move': self.move.__str__(),\n 'date': \"{} {} {}\".format(day, month, year),\n 'folder_and_run': self.config.my_analysis_dir + \"-Run_\" + str(self.config.RUN_NUM),\n 'data_array_dict': str(self.config.data_array_dict),\n 'upload_to_S3': str(self.config.upload_to_S3),\n 'comments': self.config.comments,\n 'step_save': self.config.step_save,\n }\n if hasattr(self.move, 'cov_joint'):\n attrs.update({'cov_joint': self.move.cov_joint})\n else:\n pass\n self.backend.to_file(solver=self.solver, config=self.config, \n move_probs=self.move.move_probs, attrs=attrs)\n else:\n pass", "def _report_step(self, learning_rate, step, train_stats=None,\n valid_stats=None):\n if self.report_manager is not None:\n return self.report_manager.report_step(\n learning_rate, step, train_stats=train_stats,\n valid_stats=valid_stats)", "def test_error_on_default_output_product(tmpdir):\n\n class X(Step):\n keep = True\n\n with pytest.raises(ex.StepUndefinedOutput):\n X(tmpdir, {})._execute(tmpdir)", "def step(self):\n raise TaskError(\"Task %s: subclass should override step() method!\" %\n self)", "def test_save_npy_with_invalid_step(temp_dir):\n data = np.array([[1, 2, 3], [4, 5, 6]])\n\n with pytest.raises(ValueError):\n save_npy(temp_dir, data, step={\"invalid\": \"dict\"})", "def writeNoException(self):\n pass", "def store_job_output_error(self, payload):\n self._job_step_failed(\"Storing job output failed\", payload)", "def test_failed_glue(self):\n sink = self.tool.glue(self.line, self.head, (90, 50))\n self.assertTrue(sink is None)", "def _step(self, whence):\n pass", "def save_step(self, step: 'TruncableSteps', context: CX):\n\n # First, save all of the sub steps with the right execution context.\n sub_steps_savers = []\n for i, (name, sub_step) in enumerate(step.items()):\n if sub_step.should_save():\n sub_steps_savers.append((name, sub_step.get_savers()))\n sub_step.save(context)\n else:\n sub_steps_savers.append((name, None))\n\n step.sub_steps_savers = sub_steps_savers\n\n # Third, strip the sub steps from truncable steps before saving\n if hasattr(step, 'steps'):\n del step.steps\n del step.steps_as_tuple\n\n return step", "def test_write_to_console(self, _step: PropertyMock):\n trials = [2, True, None, 'This is a test', b'hello']\n\n for message in trials:\n _step_mock = MagicMock()\n write_source = MagicMock()\n _step_mock.report.stdout_interceptor.write_source = write_source\n _step.return_value = _step_mock\n step = exposed.ExposedStep()\n step.write_to_console(message)\n\n args, kwargs = write_source.call_args\n self.assertEqual('{}'.format(message), args[0])", "def report_step_progress(self, step):\n pass", "def test_step_out_of_bounds_indices(self):\n _, backend = _collect_episode_data(num_episodes=6)\n data_reader = in_memory_backend.InMemoryBackendReader(backend)\n self.assertRaises(IndexError, operator.getitem, data_reader.steps,\n len(data_reader.steps))\n self.assertRaises(IndexError, operator.getitem, data_reader.steps,\n -len(data_reader.steps) - 1)", "def end_step(self):\n self.fh.end_step()", "def create_step(self, step):\n raise NotImplementedError", 
"def test_save_json_with_invalid_step(temp_dir):\n data = json.dumps({\"k\": \"v\", \"list\": [1, 2, 3]})\n\n with pytest.raises(ValueError):\n save_json(temp_dir, data, step={\"invalid\": \"dict\"})", "def finishWriting(self, x=None):\n\t\tself.finishedWriting = True" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Should render an empty stack frame when the stack data is invalid.
def test_render_stop_display_error(
        self,
        get_formatted_stack_frame: MagicMock,
        render_template: MagicMock
):
    get_formatted_stack_frame.return_value = None
    step = MagicMock()
    exposed.render_stop_display(step, 'FAKE')
    self.assertEqual({}, render_template.call_args[1]['frame'])
[ "def stack_bad(self):\n self.dyn_stack_current_state = STACK_STATE_BAD", "def is_stack_empty(self):\n if self.stack.__len__() == 0:\n return True\n else:\n return False", "def test_empty_stack() -> None:\n with raises(GrammarParseError):\n grammar_parser.parse(\"ab}\", lexer_mode=\"VALUE_MODE\")", "def show_stack(self) -> None:\n print(\"Show stack: \")\n ok = 1\n for i in reversed(self.items):\n print(i)\n ok = 0\n if ok:\n print(\"The stack is empty!\")\n print(\"\\n\")", "def is_empty(self):\n return len(self.the_stack) == 0", "def test_stack_empty_len_zero():\n assert len(Stack()) == 0", "def is_empty(self):\n return len(self.__stack) == 0", "def test_empty_stack_has_no_value(empty_stack):\n assert empty_stack.top is None", "def is_empty(self):\n return len(self.stack) == 0", "def is_empty(self):\n return len(self.__stack) == 0", "def check_stacks(self):\n\n if len(self.dynamic_stack) != len(self.return_stack) + 2:\n\n e = Exception(\"Potential return stack corruption: dynamic={} return={}\".format(len(self.dynamic_stack), len(self.return_stack)))\n\n while len(self.dynamic_stack) < len(self.return_stack) + 2:\n self.dynamic_stack.append({})\n\n while len(self.dynamic_stack) > len(self.return_stack) + 2:\n self.pop_dynamic()\n\n raise e", "def undo_stack_not_empty(self):\n return self.undo_stack.stack_not_empty()", "def check_empty_dataframe(dataframe: pd.DataFrame) -> None:\n if dataframe.empty:\n no_data_message = \"Empty dataframes cannot be published.\"\n raise ValueError(no_data_message)", "def test_peek_empty():\n test_stack = stack.Stack()\n\n with pytest.raises(stack.StackEmptyError):\n test_stack.peek()", "def empty_stack(stack):\n if stack.top is None:\n return True\n else:\n return False", "def _is_empty(_stack):\n if _check_type(_stack):\n \n _dt, _struct = _stack # `unpck\n return True if len(_struct[_LIST]) == 0 else False\n else:\n assert False, 'TypeError: Expected _pyStack but got {0}'.format(type(_stack))", "def test_empty_stack_constructor():\n from stack import Stack\n s = Stack()\n assert s.top is None\n assert s._values.length == 0", "def showError(self):\n self.controller.showFrame(\"frame_showError\")", "def unknown_frame(self):\n s = \"Frame #{}, Level #{}, State: Unknown (#{})\"\n s = s.format(self.movenum, self.game_view.level_num,\n self.unknown_frame_error)\n logging.info(s)\n \n if self.unknown_frame_error == 0:\n logging.error(\"Pausing Bot for 5secs due to unknown frame error.\")\n self.unknown_frame_error += 1\n time.sleep(4.0)\n else:\n logging.error(\"Exiting Bot due to unknown frame error.\")\n raise GameBotUnknownFrameError" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Calculates % of alphanumeric characters in string.
def _alnum_percent(line):
    total = len(line)
    test_set = set()
    for letter in string.ascii_letters:
        test_set.add(letter)
    test_set.add(' ')
    # Return a failure (no good characters) if there are no characters
    if total < 1:
        return 0
    alnum_count = 0
    star_count = 0
    bar_count = 0
    for letter in line:
        # if letter.isalnum():
        if letter in test_set:
            alnum_count += 1
        if letter == '*':
            star_count += 1
        if letter == 'I' or letter == 'i' or letter == 'l' or letter == '|':
            bar_count += 1
    # TODO(searow): properly implement this, but sticking this here for now.
    if star_count / total > 0.1:
        return 0
    if bar_count / total > 0.5:
        return 0
    return alnum_count / total
[ "def letter_percent(s):\r\n\r\n alpha = 'abcdefghijklmnopqrstuvwxyz'\r\n s_lower = s.lower()\r\n s_length = 0\r\n letter_count = {} # empty dictionary\r\n keys = letter_count.keys()\r\n\r\n for char in s_lower:\r\n if char in alpha:\r\n s_length = s_length + 1\r\n if char in letter_count:\r\n letter_count[char] = letter_count[char] + 1\r\n else:\r\n letter_count[char] = 1\r\n\r\n for char in sorted(keys):\r\n letter_count[char] = (letter_count[char] / s_length) * 100\r\n print(char, \"{:.1f}%\".format(letter_count[char]))", "def percent_without_letter(l):\n\treturn len(words_without_letter(l)) / len(word_set)", "def _percentage(value):\n return _numeral(value.replace(' ', '').replace('%', ''))", "def escPercent(text):\n pat = re.compile(r'%(?!\\()')\n return pat.sub('%%', text)", "def percent_uppercase(text):\r\n text = text.replace(\" \",\"\")\r\n if len(text) == 0:\r\n return 0\r\n return sum([1 for i in text if i.isupper()])/len(text)", "def token_percentage(word, text):\n word_count = text.count(word)\n text_len = len(text)\n return percentage(word_count, text_len)", "def count_string_indiv(num, num_patients):\n output = \"%.0f/\" % num\n output += str(num_patients)\n if num_patients is not 0:\n percentage = (num / num_patients) * 100\n else:\n percentage = 0.0\n output += ' (%.1f%%)' % percentage\n return output", "def convert_percent(string):\n string = string.rstrip(\"%\")\n percent = float(string) / 100\n\n return percent", "def alphanum_score(words):\n\n\t# Add your code here\n\treturn", "def mask_percentage_text(mask_percentage):\n return \"%3.2f%%\" % mask_percentage", "def percent_encode(char: str) -> str:\n return \"\".join([f\"%{byte:02x}\" for byte in char.encode(\"utf-8\")]).upper()", "def urlify(word):\n\n # strip whitespace from both sides of the text\n # replace the whitespace within the text with %20\n # then return the percentage encoded text.\n return (word.strip()).replace(\" \", \"%20\")", "def percent_of(part, whole):\n return part * 100 / whole", "def alpha_num_ratio_ft(string):\n alphas = 0.\n nums = 0.01\n for char in string:\n if char in '1234567890':\n nums += 1.\n if char in 'abcdefghijklmnopqrstuvxyz':\n alphas += 1.\n return alphas/nums #{'alpha_num_ratio': alphas / nums}", "def convert_percent_str(x):\n if x:\n return float(str(x).strip(\"% \"))\n return 0", "def protectNonSubstPercents(s):\n\n return re.sub(r\"%(?!\\()\", \"%%\", s)", "def percentage_text(value):\n return \"{}%\".format(value)", "def myHash(string, base=91, mod=1000000321):\n value = 0\n for pos, elem in enumerate(string[::-1]): # считаем значение полинома\n value += ord(elem) * base**pos # в последней задаче сделано с помощью массива (динамика)\n return value % mod", "def getGCpercentage(DNA):\n dnaLength = len(DNA) #counts the length of the DNA string\n findG = DNA.count(\"G\") #finds the letter G in DNA string\n findC = DNA.count(\"C\") #finds the letter C in DNA string\n print(findG)\n print(findC)\n print(dnaLength)\n GCpercent = ((findC + findG)/dnaLength) * 100 #calculates percentage of Gs and Cs\n print(\"Percentage of G and C:\",\" %6.2f\" % GCpercent)\n \n return getGCpercentage" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Analyzes text lines, in order read from OCR processing. Populates the MailFields object with information gathered from OCR. Uses information from each of the lines to best figure out who is the main addressee and which box it is trying to reach.
def parse_text_lines(self, text_lines):
    self.__fields = mail_fields.MailFields()
    alphanum_threshold = 0.5
    # Only evaluate lines that are predominantly alphanumeric
    for line in text_lines:
        if _alnum_percent(line) > alphanum_threshold:
            try:
                parsed = usaddress.tag(line)[0]
            except usaddress.RepeatedLabelError as e:
                # If usaddress gets confused, just throw away the answer as if
                # we got nothing for now.
                # TODO(searow): fix this to handle multiple tags and labels.
                parsed = {}
            for tag in parsed:
                self._add_to_fields(tag, parsed[tag])
    return self.__fields
[ "def readFile(self):\n \n doc = self.__openFile() #Get opened file from __openFile()\n readDoc = doc.read() #Read document contents\n message = email.message_from_string(readDoc) #Get message object from string\n\n #Get the body of the message\n mBody = message.get_payload()\n mBody = mBody.split()\n #Split the message into a list of each word\n sentences = []\n curSent = \"\"\n fwd = 0\n #This loop checks each word in the list that was\n #created and pieces together a single sentence object\n for word in mBody:\n if \".\" in word or \"?\" in word or \"!\" in word:\n curSent = curSent + word\n sentences.append(Sentence(curSent)) #Make sentence object\n curSent = \"\"\n elif \"Forwarded\" in word or \"Original\" in word:\n fwd = 1\n break\n else:\n curSent = curSent + word + \" \"\n\n #Get from email info\n #fromInfo holds info needed for __fromInfo\n fromInfo = os.path.basename(os.path.dirname(self.__fname))\n\n #Get toInfo holds info needed for __toInfo\n toInfo = ''\n emails = message['To']\n #If for some reason message['To'] is none, resort to parsing email\n #by way of indices\n if message['To'] == None:\n for i in range(len(readDoc)):\n if readDoc[i:i+6] == 'X-To: ':\n index = i+6\n toInfo = ''\n for j in range(len(readDoc) - index):\n if readDoc[index + j] == ',' or readDoc[index + j] == '\\n'\\\n or readDoc[index + j] == '/':\n break\n else:\n toInfo += readDoc[index + j]\n #Else get the to info normally\n else:\n for char in emails:\n if char == \",\":\n break\n toInfo += char\n\n #Get date\n fullDate = message['Date']\n #Because this is the full date, it must be changed into only day, month, year\n fullDate = fullDate.split()\n #Since we only want three values, and the format\n #Is always the same with MIME, the values\n #can be obtained by taking values 1-4\n #and joining them\n fullDate = fullDate[1:4]\n #Reverse the list because it is not in the format that is needed for the document\n fullDate = fullDate[::-1]\n fullDate = \"\".join(fullDate)\n \n #From fullDate, get integers for day, month, year in order to use date object\n #Get year\n year = int(fullDate[0:4])\n month = fullDate[4:7]\n #Convert month to an integer\n monthWord = str.lower(month)\n posMonths = ['jan','feb','mar', 'apr','may','jun','jul','aug','sep','oct','nov','dec']\n month = 0\n for i in range(len(posMonths)):\n if posMonths[i] == monthWord:\n month = i+1\n #Get day\n day = int(fullDate[7:])\n\n #Get the subject and check if Re: in subject then a reply occured\n subject = message['Subject']\n if \"Re:\" in subject:\n reply = 1\n else:\n reply = 0\n\n numSentences = len(sentences)\n #Create document, setting info found above\n docRead = Document(toInfo, fromInfo)\n docRead.setDate(year, month, day)\n docRead.setFwd(fwd)\n docRead.setReply(reply)\n \n for i in range(len(sentences)):\n docRead[i] = sentences[i]\n\n #return created document\n return docRead", "def parse(self):\n\t\tfor part in self.mail.walk():\n\t\t\tself.process_part(part)", "def process(Email):\n # convert to lower case\n email = Email.read().lower()\n # strip any HTML\n temp = regx.sub(\"<.*?>\", \" \", email)\n # replace numbers for 0-9 with \"number\"\n temp = regx.sub(\"[0-9]+\", \"number\", temp)\n # replace Http adress to \"httpaddr\"\n temp = regx.sub(\"(http|https)://[^\\s]*\", \"httpaddr\", temp)\n # replace email adress with \"emailaddr\"\n temp = regx.sub(\"[^\\s]+@.*?\\s+\", \"emailaddr\", temp)\n # replace currency sign\n temp = regx.sub(\"[$]+\", \"dollar\", temp)\n temp = regx.sub(\"[']\", \" \", temp)\n # 
========================== Tokenize Email ===========================\n # temp = regx.sub(\">+|:+|#+|[$]+|[.]+|@+|/+|-+|&+|[*]+|[+]+|=+|[]]+|[?]+|[()]+|[{}]+|,+|[']+|<+|_+|;+|%+\", \"\", temp)\n\n # remove punctuation\n temp = temp.translate(str.maketrans('', '', string.punctuation))\n\n # split the string in list of words\n tokenized_list = temp.split()\n stemmer = PorterStemmer()\n a = []\n vocab = VocabArray.getVocab()\n extracted_features = mat.zeros((1, len(vocab)))\n\n i = 0\n print(\"========================== Processed Email =========================\")\n for w in range(len(tokenized_list)):\n if len(tokenized_list[w]) < 1:\n continue\n\n # stem the word\n word = stemmer.stem(tokenized_list[w])\n print(word, end=\" \")\n if i > 20:\n i = 0\n print(\"\\n\")\n # get index of the word from vocab list\n indices = mat.where(vocab == word)[0]\n i += 1\n if len(indices) == 0:\n continue\n\n a.append(indices)\n extracted_features[:, indices] = 1\n\n word_indices = mat.c_[mat.array(a)]\n print(\"\\n\")\n return word_indices, extracted_features", "def parse_emails(email_texts):\n emails = []\n header_re = re.compile(r\"[a-zA-Z-]*: \")\n email = {'email_body_text': '', 'label': 1}\n\n for em in email_texts:\n for line in em.splitlines():\n if header_re.match(line):\n email[line.split()[0]] = ' '.join(line.split()[1:])\n else:\n email['email_body_text'] += line + '\\n'\n emails.append(email)\n email = {'email_body_text': '', 'label': 1}\n\n return emails", "def parseOutText(f):\n\n\n f.seek(0) ### go back to beginning of file (annoying)\n all_text = f.read()\n ### split off metadata\n \n content = re.split(\"X-FileName:.*$\", all_text, flags=re.MULTILINE, maxsplit=1)\n words = \"\"\n if len(content) > 1:\n text_string = content[1]\n\n ## remove mails that are forwarded or to which are responded\n # e.g. 
---------------------- Forwarded\"\n text_string = re.split(\"-*\\sForwarded\", text_string, maxsplit=1)[0]\n\n # -----Original Message-----\n text_string = re.split(\"-*\\Original\\sMessage\", text_string, maxsplit=1)[0]\n\n # Vince J Kaminski@ECT\n # 04/30/2001 02:28 PM\n # To:\tStanley Horton/Corp/Enron@Enron, Danny McCarty/ET&S/Enron@Enron\n # cc:\tVince J Kaminski/HOU/ECT@ECT \n # or\n # Vince J Kaminski@ECT\n # 04/30/2001 02:28 PM\n # to:\tStanley Horton/Corp/Enron@Enron, Danny McCarty/ET&S/Enron@Enron\n # cc:\tVince J Kaminski/HOU/ECT@ECT \n \n text_string = re.split(\"((.*\\n){2})[Tt]o:\\s\", text_string, maxsplit=1)[0]\n\n ### remove punctuation\n # should be autopmatically by scikit learn\n #text_string = text_string.translate(string.maketrans(\"\", \"\"), string.punctuation)\n\n ### project part 2: comment out the line below\n #words = text_string\n\n ### split the text string into individual words, stem each word,\n ### and append the stemmed word to words (make sure there's a single\n ### space between each stemmed word)\n from nltk.stem.snowball import SnowballStemmer\n\n stemmer = SnowballStemmer(\"english\")\n words = [stemmer.stem(word) for word in text_string.split()]\n\n\n\n return \" \".join(words)", "def _parse_contact_information(self):\n left_column = self.content.find(\"div\", class_=\"linkeSpalte40\")\n graubox = left_column.find(\n lambda tag: tag.name == \"div\" and tag[\"class\"] == [\"grauBox\"]\n )\n\n emails_raw = graubox.find_all(\"a\", class_=\"mail\")\n websites_raw = graubox.find_all(\"a\", class_=\"noDecoration\")\n telephone_raw = graubox.find_all(\"span\", class_=\"telefonnummer\")\n address_raw = [\n e.nextSibling for e in graubox.find_all(\"em\") if e.text == \"Anschrift:\"\n ]\n\n address = address_raw[0].li.get_text(\"\\n\") if address_raw else None\n emails = [re.sub(r\"^mailto:\", \"\", e.attrs[\"href\"]) for e in emails_raw]\n phone_numbers = [t.text for t in telephone_raw]\n websites = [w.attrs[\"href\"] for w in websites_raw]\n\n return {\n \"address\": address,\n \"emails\": emails,\n \"phone_numbers\": phone_numbers,\n \"websites\": websites,\n }", "def detect_email(resume_txt):\n # by default, assumes email address not found\n email_found = False\n student_email = \"\"\n\n # for each line of resume after first line *student name)\n for line in resume_txt[1:]:\n\n # first, removes each line's leading and trailing whitespace and saves\n line_content = line.strip()\n\n # debugging\n # print(\"line_content:\", line_content)\n # print(\"stripped line type:\", type(line_content))\n # print(\"student email function iterating through this line:\", line)\n\n # still, for each line...\n # checks to see if requirements for correct email are met\n if \"@\" in line_content: # condition 1: looks for an atsign to detect the email address\n # print(True)\n index_of_atsign = line_content.index(\"@\") # if first condition met, finds the line index of line that has atsign\n\n # indicates email address found\n email_found = True\n\n # and then checks for second condition\n if (line_content[-4:] == \".com\") or (line_content[-4:] == \".edu\"): # condition 2: looks to make sure email's domain suffix is either \".com\" or \".edu\"\n # print(True)\n # if second condition is met, checks for third condition\n if line_content[index_of_atsign + 1].isalpha() and (line_content[index_of_atsign + 1].islower()): # condition 3: looks to make sure the letter after atsign is both alphabetical and lowercase\n # print(True)\n # if third condition is true, checks for fourth 
condition\n\n # condition 4: checks to make sure no chatacter in the email address is a digit.\n\n digit_found = False\n\n # for loop looks to see if there is any digits in the email address\n for char in line_content:\n if char.isdigit() == True:\n digit_found = True\n\n # if there is a digit in the email address, condition 4 fails.\n if digit_found == True:\n print(\"Email Error 1: Please make sure not to include digits anywhere in the email address.\")\n # print(student_email)\n # if condition 4 is met (i.e. all conditions are met), stores the stripped version of line containing the atsign in str student_email\n elif digit_found == False:\n print(\"Valid email found.\")\n student_email = line_content\n # breaks the for loop to stop searching for the next atsign\n break\n # if any of the conditions are not met, writes on screen to inform user (of the first condition that failed) and assigns blank to student_email\n else:\n print(\"Email Error 2: Please make sure the email provider's website starts with a lowercase letter.\")\n else:\n print(\"Email Error 3: Please provide a '.com' or '.edu' email address.\") \n else:\n if email_found == True:\n print(\"Email Error 4: Please provide an email address with an @ sign\")\n else:\n continue\n\n # informs user if no email address with an @ sign was\n if email_found == False:\n print(\"No email address with @ sign found\")\n \n # debugging\n # print(\"student email:\", student_email)\n\n # returns extracted student email\n return student_email", "def get_fields(file_lines):\n fields = {} \n \n fields[\"content_type_tag\"] = file_lines[1]\n law_name = file_lines[2]\n fields[\"law_name\"] = law_name\n\n # Get law number & exact date from the name\n law_num = law_name.split()[1]\n law_exact_date = law_name.split()[3]\n law_num_date = law_num + '_' + law_exact_date\n fields[\"law_num_date\"] = law_num_date \n \n # Get Official Gazette from 4th line (4rd index)\n pub_year_raw = file_lines[3].split()[-1]\n day, month, year = [int(i) for i in pub_year_raw.split('/')]\n pub_year = datetime.datetime(year, month, day)\n fields[\"pub_year\"] = pub_year.year\n \n fields[\"agency_tag\"] = file_lines[4]\n\n # Get first few lines of article-one\n \"\"\" The lines below save the title of article-one\n Then get the first 50 words of article one or all\n words (whichever has the lower count\n \"\"\"\n article_one_title = ' '.join(file_lines[6].split()[2:])\n article_one_words = file_lines[7].split()\n article_one_len = len(article_one_words)\n word_count = article_one_len if article_one_len < 50 else 50\n article_one_str = ' '.join(article_one_words[:word_count])\n \n fields[\"article_one_title\"] = article_one_title\n fields[\"article_one_str\"] = article_one_str\n\n # Get the body of the law\n \"\"\" From article-one and everything below.\n Intentionally skipping 'intro' - may add later\n \"\"\"\n law_body = ' '.join(file_lines[6:])\n fields[\"law_body\"] = law_body\n\n return fields", "def process_lines():\n for (idx, line) in enumerate(lines):\n if pattern.match(line) and len(lines[idx - 1]) > 0:\n title_line = lines[idx - 1]\n title = title_line.split(\" \")[0]\n\n state_line = lines[idx + 1]\n for txt in state_line.split(\" \"):\n if len(txt) == 2:\n state = txt\n format_request(title, state)\n # only try the first organization\n break\n break", "def processEmail(email_contents):\n\n# Load Vocabulary\n vocabList = getVocabList()\n\n# Init return value\n word_indices = []\n\n# ========================== Preprocess Email ===========================\n\n# Find 
the Headers ( \\n\\n and remove )\n# Uncomment the following lines if you are working with raw emails with the\n# full headers\n\n# hdrstart = strfind(email_contents, ([chr(10) chr(10)]))\n# email_contents = email_contents(hdrstart(1):end)\n\n# Lower case\n email_contents = email_contents.lower()\n\n # Strip all HTML\n# Looks for any expression that starts with < and ends with > and replace\n# and does not have any < or > in the tag it with a space\n rx = re.compile('<[^<>]+>|\\n')\n email_contents = rx.sub(' ', email_contents)\n# Handle Numbers\n# Look for one or more characters between 0-9\n rx = re.compile('[0-9]+')\n email_contents = rx.sub('number ', email_contents)\n\n# Handle URLS\n# Look for strings starting with http:// or https://\n rx = re.compile('(http|https)://[^\\s]*')\n email_contents = rx.sub('httpaddr ', email_contents)\n\n# Handle Email Addresses\n# Look for strings with @ in the middle\n rx = re.compile('[^\\s]+@[^\\s]+')\n email_contents = rx.sub('emailaddr ', email_contents)\n\n# Handle $ sign\n rx = re.compile('[$]+')\n email_contents = rx.sub('dollar ', email_contents)\n\n# ========================== Tokenize Email ===========================\n\n# Output the email to screen as well\n print('==== Processed Email ====')\n\n# Process file\n l = 0\n\n # Remove any non alphanumeric characters\n rx = re.compile('[^a-zA-Z0-9 ]')\n email_contents = rx.sub('', email_contents).split()\n\n for content in email_contents:\n\n # Tokenize and also get rid of any punctuation\n # str = re.split('[' + re.escape(' @$/#.-:&*+=[]?!(){},''\">_<#')\n # + chr(10) + chr(13) + ']', str)\n\n # content = word_tokenize(content)\n\n # Stem the word\n # (the porterStemmer sometimes has issues, so we use a try catch block)\n try:\n content = porterStemmer(content)\n except:\n content = ''\n continue\n\n # Skip the word if it is too short\n if len(content) < 1:\n continue\n\n # Look up the word in the dictionary and add to word_indices if\n # found\n # ====================== YOUR CODE HERE ======================\n # Instructions: Fill in this function to add the index of str to\n # word_indices if it is in the vocabulary. At this point\n # of the code, you have a stemmed word from the email in\n # the variable str. You should look up str in the\n # vocabulary list (vocabList). If a match exists, you\n # should add the index of the word to the word_indices\n # vector. Concretely, if str = 'action', then you should\n # look up the vocabulary list to find where in vocabList\n # 'action' appears. For example, if vocabList{18} =\n # 'action', then, you should add 18 to the word_indices\n # vector (e.g., word_indices = [word_indices 18] ).\n #\n # Note: vocabList{idx} returns a the word with index idx in the\n # vocabulary list.\n #\n # Note: You can use strcmp(str1, str2) to compare two strings (str1 and\n # str2). 
It will return 1 only if the two strings are equivalent.\n #\n\n # print([i for i, val in enumerate(content) if val in vocabList])\n try:\n word_indices.append(vocabList.index(content))\n except:\n continue\n\n # =============================================================\n\n # Print to screen, ensuring that the output lines are not too long\n if (l + len(content) + 1) > 78:\n print(content)\n l = 0\n else:\n print(content),\n l = l + len(content) + 1\n\n# Print footer\n print('=========================')\n return word_indices", "def parse_report_line(self,line):\n\n report = self.new_police_report()\n report['original_text'] = line\n \n #\n # extract month and day\n match_date = REPORT_DATE_REGEXP.search(line)\n assert(match_date)\n start_index=match_date.start('month')\n stop_index=match_date.end('month')\n report['date_month'] = int(line[start_index:stop_index])\n\n start_index=match_date.start('day')\n stop_index=match_date.end('day')\n report['date_day'] = int(line[start_index:stop_index])\n\n my_logger.debug('extracted date (%d/%d)' % (report['date_month'],report['date_day']))\n\n #############################################\n # extract location & scale\n line = line[0:match_date.start('month')-1] # truncate after start of date\n \n #\n # trim off preceding html and trailing comma\n start_index=line.rfind('>')+1\n assert(start_index>0)\n\n stop_index=line.rfind(',',start_index)\n \n if stop_index >= 2:\n #\n # found a comma, \n line = line[start_index:stop_index]\n else:\n #\n # no comma found\n line = line[start_index:]\n my_logger.debug('truncated string: (%s)' % line)\n report['address']=line\n #\n # try to determine which case:\n # a block\n # an exact address\n # an establishment\n # an intersection\n # special cases, like: \"downtown mountain view\"\n # \n\n if (BLOCK_REGEXP.match(line)!=None):\n my_logger.debug('BLOCK detected')\n report['map_scale']=mapscale.BLOCK\n elif (INTERSECTION_REGEXP.match(line)!=None):\n my_logger.debug('INTERSECTION detected')\n report['map_scale']=mapscale.INTERSECTION\n elif (EXACT_REGEXP.match(line)!=None):\n my_logger.debug('EXACT detected')\n report['map_scale']=mapscale.EXACT\n else:\n #\n # must be manually assigned\n report['map_scale']=mapscale.OTHER\n\n\n return report", "def processEmail(email_contents):\n\n # init\n result_indices = []\n result_words = [] # for debugging\n vocabDict = getVocabList()\n\n # prepocessing\n # all lowercase\n email_contents = email_contents.lower()\n\n # strip HTML tags\n # starts with <, ends with >, has 0 or 1 /, no >, 1 or more chars\n p = re.compile(r'\\<[^<>]+\\>')\n email_contents = p.sub(' ', email_contents)\n\n # replace numbers with 'number'\n p = re.compile(r'[0-9]+')\n email_contents = p.sub('number', email_contents)\n\n # replace urls with 'httpaddr'\n p = re.compile(r'http(s?)://(\\S*)')\n email_contents = p.sub('httpaddr', email_contents)\n\n # replace email address with 'emailaddr'\n p = re.compile(r'\\S+@\\S+')\n email_contents = p.sub('emailaddr', email_contents)\n\n # replace dollar sign $ with 'dollar'\n p = re.compile(r'\\$+')\n email_contents = p.sub('dollar', email_contents)\n\n # tokenize and get rid of any punctuation and non alphanumerics\n content_list = re.split(r'[\\W_]', email_contents)\n\n # debugging\n print(' '.join(content_list))\n\n # word stemming\n stemmer = PorterStemmer(mode='ORIGINAL_ALGORITHM')\n for word in content_list:\n # only stem those words with len > 2\n # leave as is for len = 1 or 2\n if len(word) > 2:\n word = stemmer.stem(word)\n if word in vocabDict:\n 
result_indices.append(vocabDict[word])\n result_words.append(word)\n elif len(word) >= 1:\n if word in vocabDict:\n result_indices.append(vocabDict[word])\n result_words.append(word)\n\n ### debugging ###\n #print(' '.join(content_list))\n\n return result_indices, result_words", "def parse_lt_objs (lt_objs, page_number, images_folder, text=[]):\n text_content = [] \n\n page_text = {} # k=(x0, x1) of the bbox, v=list of text strings within that bbox width (physical column)\n for lt_obj in lt_objs:\n if isinstance(lt_obj, LTImage):\n # an image, so save it to the designated folder, and note its place in the text \n saved_file = save_image(lt_obj, page_number, images_folder)\n if saved_file:\n # use html style <img /> tag to mark the position of the image within the text\n idd=map_coordinates(lt_obj.bbox,page_number,\"\",0,lt_obj.width, lt_obj.height,0)\n figs.append((idd,os.path.join(images_folder, saved_file),lt_obj)) \n else:\n print >> sys.stderr, \"error image not jpeg on page\", page_number\n elif isinstance(lt_obj, LTFigure):\n # LTFigure objects are containers for other LT* objects, so recurse through the children\n parse_lt_objs(lt_obj, page_number, images_folder, text_content)\n elif isinstance(lt_obj,LTTextBox):\n fontname=\"\"\n size=0\n line_spacing = 0\n j = 0\n for x in lt_obj:\n # print('___',x)\n if isinstance(x,LTTextLine):\n # print('******',x)\n for y in x:\n if isinstance(y,LTChar):\n fontname = y.fontname.split('+')\n fontname = fontname[len(fontname)-1].strip()\n size = y.size\n if j==0 and len(lt_obj)<2:\n line_spacing=0\n break\n elif j==1:\n line_spacing -= x.bbox[1]\n break\n \n line_spacing = x.bbox[1]\n j+=1\n idd=map_coordinates(lt_obj.bbox,page_number,fontname,size,lt_obj.width,lt_obj.height,line_spacing)\n figs.append((idd,to_bytestring(lt_obj.get_text()),lt_obj))\n elif isinstance(lt_obj, LTRect) or isinstance(lt_obj, LTLine):\n figs.append((idd,lt_obj.x0,lt_obj.y1,lt_obj.width,lt_obj.height,page_number,lt_obj))", "def preprocess_mail_body(body_dict):\n mail_body = body_dict['Mail_1']\n\n if 'Mail_2' in body_dict.keys():\n mail_body = mail_body + ' ' + body_dict['Mail_2']\n\n pattern_1 = re.compile(r'[\\w\\.-_]+@[\\w\\.-_]+')\n\n text = pattern_1.sub('', mail_body)\n\n pattern_2 = re.compile(r'(?:(?:https?|ftp):\\/\\/)?[\\w/\\-?=%.]+\\.[\\w/\\-?=%.]+')\n\n text = pattern_2.sub('', text)\n\n text = ' '.join(word_tokenize(text))\n\n # pattern_3 = re.compile(r'[^A-Za-z\\s]*')\n #\n # text = pattern_3.sub('', text)\n\n text = ' '.join(x for x in text.split() if not any(c.isdigit() for c in x))\n\n text = text.lower()\n\n return text", "def process_data(self):\n for drive_line in self.drive_text_file.get_drive_line_processors():\n if drive_line.is_valid():\n self._process_valid_drive_line(drive_line=drive_line)\n pass\n else:\n serial = drive_line.get_serial()\n health = drive_line.get_health()\n power_on = drive_line.get_power_on()\n self.text_to_write.add(\n string_to_add=f'SN: {serial} skipped. 
Health(\"{health}\") or Power on(\"{power_on}\") on are not digits.',\n should_write=True\n )", "def _processLines(self):\n self.nlines = len(self.lines)\n self.params = {}\n self._pline = {}\n for i,line in enumerate(self.lines):\n if (line[0] is not '#') & (line.strip() is not ''):\n spl = line.split()\n self.params[spl[0]] = ' '.join(spl[1:])\n self._pline[spl[0]] = i\n self.nkeys = self.params.keys().__len__()", "def process_email(email_contents):\n\n # Load Vocabulary\n vocab_list = get_vocab_list();\n\n # Init return value\n word_indices = [];\n\n # ========================== Preprocess Email ===========================\n\n # Find the Headers ( \\n\\n and remove )\n # Uncomment the following lines if you are working with raw emails with the\n # full headers\n\n # hdrstart = strfind(email_contents, ([char(10) char(10)]));\n # email_contents = email_contents(hdrstart(1):end);\n\n # Lower case\n email_contents = email_contents.lower()\n\n # Strip all HTML\n # Looks for any expression that starts with < and ends with > and replace\n # and does not have any < or > in the tag it with a space\n email_contents = re.sub(\"<[^<>]+>\", \" \", email_contents)\n\n # Handle Numbers\n # Look for one or more characters between 0-9\n email_contents = re.sub(\"[0-9]+\", \"number\", email_contents)\n\n # Handle URLS\n # Look for strings starting with http:// or https://\n email_contents = re.sub(\"(http|https)://[^\\s]*\", \"httpaddr\", email_contents)\n\n # Handle Email Addresses\n # Look for strings with @ in the middle\n email_contents = re.sub(\"[^\\s]+@[^\\s]+\", \"emailaddr\", email_contents)\n\n # Handle $ sign\n email_contents = re.sub(\"[$]+\", \"dollar\", email_contents)\n\n # ========================== Tokenize Email ===========================\n\n # Output the email to screen as well\n print(\"\\n==== Processed Email ====\\n\");\n\n # Process file\n l = 0;\n\n # Tokenize and also get rid of any punctuation\n stemmer = PorterStemmer()\n email_contents = re.split(r'[@$/#.-:&\\*\\+=\\[\\]?!(){},\\'\\'\\\">_<;%\\s\\n\\r\\t]+', email_contents)\n for s in email_contents:\n\n # Remove any non alphanumeric characters\n s = re.sub(\"[^a-zA-Z0-9]\", \"\", s)\n\n # Stem the word \n # (the porter_stemmer sometimes has issues, so we use a try catch block)\n #try:\n s = stemmer.stem(s.strip())\n #except:\n # s = \"\"\n # continue\n\n # Skip the word if it is too short\n if len(s) < 1:\n continue\n\n # Look up the word in the dictionary and add to word_indices if\n # found\n # ====================== YOUR CODE HERE ======================\n # Instructions: Fill in this function to add the index of s to\n # word_indices if it is in the vocabulary. At this point\n # of the code, you have a stemmed word from the email in\n # the variable s. You should look up s in the\n # vocabulary list (vocabList). If a match exists, you\n # should add the index of the word to the word_indices\n # vector. Concretely, if s = 'action', then you should\n # look up the vocabulary list to find where in vocabList\n # 'action' appears. For example, if vocabList{18} =\n # 'action', then, you should add 18 to the word_indices \n # vector (e.g., word_indices = [word_indices ; 18]; ).\n # \n # Note: vocabList[idx] returns a the word with index idx in the\n # vocabulary list.\n # \n # Note: You can use s1 == s2 to compare two strings (s1 and\n # s2). 
It will return True only if the two strings are equivalent.\n #\n\n\n\n # =============================================================\n\n # Print to screen, ensuring that the output lines are not too long\n if (l + len(s)) > 78:\n print()\n l = 0\n print(f\"{s} \", end=\"\")\n l = l + len(s) + 1\n\n # Print footer\n print('\\n\\n=========================')\n return word_indices", "def _process_text_file(self, data_folder, data_type, label):\n\n for data_file in data_folder:\n # Skips header of email\n found_first_new_line = True\n cleaned_text = []\n with open(data_file, encoding=\"utf8\", errors=\"replace\") as f:\n for line in f:\n if line == '\\n':\n # found_first_new_line = True\n continue\n elif found_first_new_line:\n # Separates text data into tokens and parses out stop words and symbols\n tokens = nlp(line)\n words = [token.lemma_.lower() for token in tokens]\n words = [w for w in words if self._predicate_filter(w)]\n for w in words:\n cleaned_text.append(w)\n \n # Append data into csv\n with open(f'preprocessing/{data_type}/{data_type}_data.csv', 'a+', newline='') as write_file:\n writer = csv.writer(write_file)\n cleaned_text = ' '.join(cleaned_text)\n writer.writerow([label, cleaned_text])", "def parse_passenger_details(self):\n validator = object_validator.ObjectValidator()\n with open(self.base_path + input_file, \"r\") as input_file_handler:\n header = input_file_handler.readline()\n print(\"Header line - {}\".format(header))\n lines = input_file_handler.readlines()\n for index, line in enumerate(lines):\n line = line.split(\",\")\n print(\"line {} - {}\".format(index, line))\n req_validation = {pnr: line[pnr_index],\n travel_date: [line[travel_date_index], line[booking_date_index]],\n email: line[email_index], mobile: line[mobile_index],\n cabin_type: line[cabin_type_index]}\n validation_result = validator.validate_objects(req_validation)\n found_error = list()\n for key, value in validation_result.items():\n if not value:\n found_error.append(error_strings.get_error_strings().get(key))\n\n if found_error:\n self.write_error_data(line, found_error)\n else:\n self.write_parsed_data(line)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Test the clear method works for posix-based systems
def test_clear_posix(self):
    with mock.patch("hangman.cli.screen.os.system") as mock_system:
        hangman.cli.screen.Screen.clear()
        mock_system.assert_called_with("clear")
[ "def clearscreen():\n if os.name == 'nt':\n os.system('cls')\n elif os.name == 'posix':\n os.system('clear')\n else:\n print \"Untested OS. Please tell the developer you're on: %s\" % os.name \n sys.exit(0)", "def clear_screen():\n if sys.platform == \"linux\" or sys.platform == \"linux2\" or sys.platform == \"darwin\":\n _ = system('clear')\n else:\n _ = system('cls')", "def clear_console():\n\n if sys.platform.startswith('win'):\n os.system(\"cls\")\n elif sys.platform.startswith('linux'):\n os.system(\"clear\")\n elif sys.platform.startswith('darwin'):\n os.system(\"clear\")\n else:\n print(\"Unable to clear terminal. Your operating system is not supported.\\n\\r\")", "def clearTerminal():\r\n os.system('cls' if os.name == 'nt' else 'clear')", "def clear_screen():\n\n # Windows\n if name == 'nt':\n _ = system('cls')\n\n # Mac/Linux\n else:\n _ = system('clear')", "def clear_terminal(self):\n os.system('clear')", "def clearConsole(wait) : #function to clear console on Linux or Windows\n\n import time\n time.sleep(wait) \n # produces a delay based on the argument given to clearConsole()\n \n import os\n\n try :\n os.system('cls') #clears console on Windows\n\n except :\n os.system('clear') #clears console on Linux", "def clear_console():\n import os\n clear = lambda: os.system('cls')\n clear()\n return None", "def clear_shell_screen():\n cmd = \"cls\" if system_name().lower() == \"windows\" else \"clear\"\n system_call(cmd)", "def clear_console():\n os.system('cls' if os.name == 'nt' else 'clear')", "def clearScreen():\r\n\t#import OS library functions\r\n\timport os\r\n\t\r\n\t#now call OS function to clear the screen\r\n\tos.system('cls')", "def clear_cache() -> None:\n if os.path.isfile('/usr/local/sbin/clearcache.sh'):\n os.system('sudo /usr/local/sbin/clearcache.sh')\n else:\n os.system('sudo sh -c \"sync; echo 1 > /proc/sys/vm/drop_caches\"')", "def clear():\n\n if not CLEAR_PRINT[0]:\n try:\n if os.name == \"nt\":\n # For windows.\n os.system(\"cls\")\n\n elif os.name == \"posix\":\n # For mac/linux.\n os.system(\"clear\")\n\n else:\n # Unknown operating system, just print a newline a bunch of times.\n print(\"\\n\" * CLEAR_PRINT[1])\n\n except:\n # Can't figure out the operating system, safest bet is to just print a newline a bunch of times.\n print(\"\\n\" * CLEAR_PRINT[1])\n\n else:\n # The clearing of screen is overriden, so we just print a newline CLEAR_PRINT[1] times.\n print(\"\\n\" * CLEAR_PRINT[1])", "def test_verify_clear(self):\n self._verify([self.applied_commands['clear']])", "def clear_console():\n os.system('cls' if os.name == 'nt' else 'clear')", "def test_clear(self):\n\n self.assertEqual(len(self.cme), self.count)\n\n self.cme.clear()\n\n self.assertFalse(self.cme)", "def clear(cmd):\n registers[0] = 0\n print \" \" * 17,\n print \"RO {0:#04x} {1}\".format(registers[0], BitScope.registers[0])\n s.write(cmd)", "def clean(self, type):\r\n if type == \"hard\":\r\n os.system(\"rm -rf \"+self.virtualarea.path)\r\n elif type == \"soft\":\r\n os.system(\"rm -rf \"+self.virtualarea.path)", "def clear(self):\n return _pythia8.HardProcess_clear(self)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Test the goodbye method
def test_goodbye(self):
    with mock.patch("builtins.print") as mock_print:
        hangman.cli.screen.Screen.goodbye()
        output = ",".join([str(x) for x in mock_print.call_args_list])
        self.assertTrue("Goodbye" in output)
[ "def test_program_quit(self):\n d = CommandLineInterface(donors_test)\n with self.assertRaises(SystemExit):\n d.quit_the_program()\n del d", "def test_stop(self):\n pass", "def test_quit_game(run):\n out, _ = run(dork.cli.quit_game)\n assert \"Thank you\" in out", "def say_goodbye():\n\n view.print_goodbye()", "def test_teardown(self):\n with pytest.raises(NotImplementedError):\n self.behaviour.teardown()", "def _bye(self):\n self.get(\"BYE\",'')\n self.send()", "def test_teardown(self):\n with pytest.raises(NotImplementedError):\n self.handler.teardown()", "def test_terminate_run(self):\n pass", "def test_teardown(self):\n assert self.oef_search_handler.teardown() is None\n self.assert_quantity_in_outbox(0)", "def test_teardown(self):\n assert self.tac_handler.teardown() is None\n self.assert_quantity_in_outbox(0)", "def goodbyeCmd(self, flags, args):\n if flags or len(args) != 1:\n raise InvalidArguments\n target = args[0]\n if target in self.target_list:\n print >> self.OUT, 'Goodbye %s!' % target\n self.target_list.remove(target)\n else:\n print >> self.OUT, \"I haven't said hello to %s.\" % target", "def test_teardown(self):\n assert self.prometheus_handler.teardown() is None\n self.assert_quantity_in_outbox(0)", "def test_teardown(self):\n assert self.http_handler.teardown() is None\n self.assert_quantity_in_outbox(0)", "def test_teardown(self):\n assert self.search_behaviour.teardown() is None\n self.assert_quantity_in_outbox(0)", "def test_uninstall(self):\n pass", "def test_v1alpha3_stop(self):\n pass", "def tearDown(self):\n print('Tear down for [' + self.shortDescription() + ']\\n')", "def test_do_quit(self):\n for string in self.random_strings:\n self.assertTrue(self.CommandParser.do_quit(string))", "def test_eat_unhealthy(self):\n \tself.assertEqual(\n\t\t\teat(\"pizza\", isHealthy=False),\n\t\t\t\"I'm eating pizza, because YOLO!\"\n \t)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Returns the total number of hives in this apiary.
def hives_count(self) -> int:
    return self.hives.count()
[ "def total_number_of_animals(self):\n animals = self.animal()\n print 'Total number of animals on island: {:4}'.format(\n animals[\"Herbivores\"] + animals[\"Carnivores\"])", "def count_hp(self):\n print(self.character_class.hp, count_att_bonus(self.constitution))\n return self.character_class.hp + count_att_bonus(self.constitution)", "def totalCount(self):\n return sum(self.values())", "def _get_total_count(response):\n return int(response.headers.get(\"X-Total-Count\", 0))", "def total_count(self):\n return sum([count for count in self.values()])", "def total(self):\n return self._evaluate()['hits']['total']", "def get_total_count(self):\n return structures_module.categories.get_total_count(self.khoros_object)", "def tally(self):\n return self.count", "def total_count(self):\n return self._total_count", "def get_total_count(self):\n return self.total_count", "def get_amount_of_items(self):\n amount = 0\n for item in self.get_items():\n amount += item.amount\n return amount", "def response_count(self) -> int:\n return pulumi.get(self, \"response_count\")", "def totalCount(self):\n return sum(self.values())", "def total_cards(self):\n amount = 0\n for palo in self._cards:\n amount = amount + len(self._cards[palo])\n\n return amount", "def _get_total_records(self):\n return json.loads(requests.get(self.url).content)['meta']['results']['total']", "def pings_count(self) -> int:\n return pulumi.get(self, \"pings_count\")", "def get_total_count(self):\n return structures_module.grouphubs.get_total_count(self.khoros_object)", "def num_animals(self):\n return self._num_herbs + self._num_carns", "def get_total_instruments(self):\n\n total = 0\n for exchange in self.exchanges:\n total += len(exchange.symbols)\n return total" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Convenience method which overrides the call method to call the getExpansion function
def __call__(self, data):
    return self.getExpansion(data)
[ "def applyExpansion(self, coem):\n pass", "def expand(self) -> Callable[\n [gs_echo.ExpandRequest],\n gs_echo.EchoResponse]:\n # Generate a \"stub function\" on-the-fly which will actually make\n # the request.\n # gRPC handles serialization and deserialization, so we just need\n # to pass in the functions for each.\n if 'expand' not in self._stubs:\n self._stubs['expand'] = self.grpc_channel.unary_stream(\n '/google.showcase.v1beta1.Echo/Expand',\n request_serializer=gs_echo.ExpandRequest.serialize,\n response_deserializer=gs_echo.EchoResponse.deserialize,\n )\n return self._stubs['expand']", "def expansion_method(self, expansion_method):\n\n self._expansion_method = expansion_method", "def append_expansion(params, expansion):\n if expansion is not None:\n params.extend([\"EXPANSION\", expansion])", "def __init__(self):\n super(BasisExpansion, self).__init__()", "def get_expansion(block, expansion=None):\n if isinstance(expansion, int):\n assert expansion > 0\n elif expansion is None:\n if hasattr(block, 'expansion'):\n expansion = block.expansion\n elif issubclass(block, ViPNAS_Bottleneck):\n expansion = 1\n else:\n raise TypeError(f'expansion is not specified for {block.__name__}')\n else:\n raise TypeError('expansion must be an integer or None')\n return expansion", "def onExpandItem(self, func):", "def call(self):\n self.call() # Call a function", "def test_expand_fn_no_expansion(self):\n ops = [qml.Hadamard(0), qml.CNOT([0, 1]), qml.RZ(0.123, wires=1)]\n measurements = [qml.expval(qml.PauliZ(0)), qml.probs()]\n tape = QuantumScript(ops=ops, measurements=measurements)\n expanded_tape = expand_fn(tape)\n\n for op, exp in zip(expanded_tape.circuit, ops + measurements):\n assert qml.equal(op, exp)", "def _evaluate_expansion_all(self, input_array, output_array,\n x=None, kind=None):\n assert kind in ('vandermonde', 'recursive')\n if kind == 'vandermonde':\n P = self.evaluate_basis_all(x=x, argument=1)\n if output_array.ndim == 1:\n output_array = np.dot(P, input_array, out=output_array)\n else:\n fc = np.moveaxis(input_array, self.axis, -2)\n shape = [slice(None)]*input_array.ndim\n N = self.shape(False)\n shape[-2] = slice(0, N)\n array = np.dot(P, fc[tuple(shape)])\n output_array[:] = np.moveaxis(array, 0, self.axis)\n elif kind == 'recursive':\n mod = config['optimization']['mode']\n lib = importlib.import_module('.'.join(('shenfun.optimization', mod, 'transforms')))\n if x is None:\n x = self.mesh(False, False)\n N = self.N\n a = self.get_recursion_matrix(int(N*self.padding_factor)+3, int(N*self.padding_factor)+3).diags('dia').data\n lib.evaluate_expansion_all(input_array, output_array, x, self.axis, a)", "def get_expansion(block, expansion=None):\n if isinstance(expansion, int):\n assert expansion > 0\n elif expansion is None:\n if hasattr(block, 'expansion'):\n expansion = block.expansion\n elif issubclass(block, BasicBlock):\n expansion = 1\n elif issubclass(block, Bottleneck):\n expansion = 4\n else:\n raise TypeError(f'expansion is not specified for {block.__name__}')\n else:\n raise TypeError('expansion must be an integer or None')\n return expansion", "def test_get_systems_expanded(self):\n pass", "def paged_expand(self) -> Callable[\n [gs_echo.PagedExpandRequest],\n gs_echo.PagedExpandResponse]:\n # Generate a \"stub function\" on-the-fly which will actually make\n # the request.\n # gRPC handles serialization and deserialization, so we just need\n # to pass in the functions for each.\n if 'paged_expand' not in self._stubs:\n self._stubs['paged_expand'] = 
self.grpc_channel.unary_unary(\n '/google.showcase.v1beta1.Echo/PagedExpand',\n request_serializer=gs_echo.PagedExpandRequest.serialize,\n response_deserializer=gs_echo.PagedExpandResponse.deserialize,\n )\n return self._stubs['paged_expand']", "def get_expansion(self, prec, padic_num):\n padic_expansion = list(padic_num.expansion())\n if isinstance(padic_expansion[0], list):\n return padic_expansion\n else:\n # Eistenstein extension case.\n padic_list = []\n for i in range(0, len(padic_expansion), 2):\n term = [padic_expansion[i]]\n padic_list.append(term)\n\n # Fill the rest of the list to the sufficient precision.\n for i in range(prec - len(padic_list)):\n padic_list.append([]) \n return padic_list", "def call(self, **kwargs):\n return getattr(self.resource, self.function)(**kwargs)", "def expand_call(kargs):\n func = kargs['func']\n del kargs['func']\n out = func(**kargs)\n return out", "def _dispatch(cmd, **kwargs):\n return cmd(**kwargs)", "def __call__(self):\n from ..registered_instructions import get_instruction\n return get_instruction(self.name)", "def test_expand_func(self):\n self.assertEqual([\"test\", [\"a1\", \"\\\"a b\\\"\", \"f(w,x)\"]],\n grammar._EXPAND_FUNC.parseString(\"$test(a1, \\\"a b\\\", f(w,x))\").asList())" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Imports a database from the tmp directory. Use very carefully! (or just to remind yourself how to import mysql data) Modify this code directly if needed, as it hardwires the username, db name and filename.
def mysql_import():
    # first make another copy of the db
    run("mysqldump -u database_user database_name -p > ~/tmp/exported_db_temp.sql")
    # then import from the backup
    run("mysql -u database_user -p -D database_name < ~/tmp/exported_db.sql")
[ "def import_remote_db():\n run_export_db()\n run_download_db()\n drop_db()\n create_db()\n import_db()\n reset_passwords()", "def import_database(filename):\n local('pg_restore -O -c -U pyconsg -d pyconsg {0}'.format(filename))", "def import_db(self, mysql_dump):\n print('Importing mysql database...')\n\n # find/replace in sql database if necessary\n raw_input('find and replace not yet implemented... Press enter to continue.')\n\n # TODO: error handling\n system(\n 'mysql ' +\n '-u ' + self.mysql['user'] +\n ' -p' + self.mysql['password'] + ' ' + self.domain +\n ' < ' + mysql_dump\n )\n print('Import complete.')", "def loadStatTrackerDB():\n \n command = \"mysql -u%s -p%s --socket=%s %s < %s\" % (dbConfig[\"user\"], \\\n dbConfig[\"passwd\"], \\\n dbConfig[\"socketFileLocation\"], \\\n dbConfig[\"dbName\"], \\\n input)\n \n result = os.system(command)\n if (result):\n raise RuntimeError, \"Error loading StatTracker data from %s\" % str(file)\n return", "def importToSQLITE(self, db_file, sqlite_db_name):\n\n command = \"{} {} {} {}\".format('cat', db_file, '| sqlite3', sqlite_db_name)\n call(command, shell = True)", "def createTempDb():\n tf = tempfile.NamedTemporaryFile(prefix='csvview',suffix='.db',dir='/tmp')\n dbName = tf.name\n atexit.register( lambda f: f.close(), tf )\n return dbName", "def init_db():\n shutil.rmtree(app.config['DATA_FOLDER'])\n os.makedirs(app.config['DATA_FOLDER'])\n\n shutil.rmtree(app.config['CHECKPOINT_FOLDER'])\n os.makedirs(app.config['CHECKPOINT_FOLDER'])\n\n db = get_db()\n\n with current_app.open_resource('schema.sql') as f:\n db.executescript(f.read().decode('utf8'))", "def load_testdb(c, dbname=\"test_template\", fpath=\"tests/test_db.sql\"):\n default_env = {\n \"PATH\": os.environ[\"PATH\"],\n \"PYTHONPATH\": os.path.abspath(os.path.dirname(__file__)),\n \"LANG\": \"en_US.UTF-8\",\n \"POSTGRES_DB\": dbname,\n \"POSTGRES_HOST\": \"localhost\",\n \"POSTGRES_USER\": \"postgres\",\n \"POSTGRES_PORT\": \"5432\",\n }\n\n env = os.environ\n env.update(default_env)\n\n psql_command = (\n f'psql -h {default_env[\"POSTGRES_HOST\"]} '\n f'-p {default_env[\"POSTGRES_PORT\"]} '\n f'-U {default_env[\"POSTGRES_USER\"]}'\n )\n\n c.run(f'{psql_command} postgres -c \"drop database if exists {dbname}\";', env=env)\n c.run(f'{psql_command} postgres -c \"create database {dbname}\";', env=env)\n c.run(f\"{psql_command} {dbname} < {fpath}\", env=env)\n # update test db to the latest migrations\n c.run(f\"alembic -c ./alembic.ini upgrade head\", env=env)", "def do_import(f, db, table, drop=False, create=True, progress=None):\n cur = db.cursor()\n\n if drop:\n create = True # this makes no sense otherwise\n try:\n cur.execute(\"DROP TABLE {0}\".format(table))\n except sqlite3.OperationalError, e:\n pass # no such table, ignore\n pass\n\n csvr = csv.DictReader(f)\n # DictReader will read the list of field names from the first line\n columns = [ \"{0} VARCHAR(1024)\".format(c) for c in csvr.fieldnames ]\n # TODO: Different database drivers use different syntax\n qmarks = [ \"?\" for c in csvr.fieldnames ]\n insert = \"INSERT INTO {0} VALUES ({1})\".format(table, ', '.join(qmarks))\n\n if create:\n query = \"CREATE TABLE {0} ({1})\".format(table, ', '.join(columns))\n cur.execute(query)\n pass\n\n count = 0\n for row in csvr:\n count += 1\n if progress is not None and (count % 10000) == 0:\n progress(count)\n pass\n values = []\n for f in csvr.fieldnames:\n values.append(row[f])\n pass\n cur.execute(insert, values)\n pass\n db.commit()\n if progress is not None:\n 
progress(count)\n pass", "def connect_db_and_load_data(cls):\n db.connect()\n db.create_tables([Product], safe=True)\n load_data(transform_data('./inventory.csv'))", "def db():\n\n db_obj = dump_db.DumpDB()\n db_obj.load_from_csv(CONF.BACKUP_DB_PATH)\n return db_obj", "def test_init_db(self, tmpdir):\n if ENV_DATABASE in os.environ:\n del os.environ[ENV_DATABASE]\n filename = '{}/my.db'.format(str(tmpdir))\n connect_string = 'sqlite:{}'.format(filename)\n os.environ[ENV_DATABASE] = connect_string\n # Call the init_db method to create all database tables\n DatabaseDriver.init_db()\n # Connect to the database and ensure we can run a simple query without\n # and SQL error\n con = DatabaseDriver.connect()\n assert con.execute('SELECT * from team').fetchone() is None\n con.close()", "def load_database(db_session, fixture):\n # TODO: the fixture file path controls\n\n # load the fixture\n datas = pickle.loads(fixture)\n db_session.add_all(datas)\n db_session.commit()\n print \"load database ok\"", "def restore_db(path_to_script):\n pass", "def load_db():\n return", "def test_load_database_after_pickling(tmp_path):\n path = tmp_path / \"test.db\"\n database = load_database(path_or_database=path, fast_logging=False)\n database = pickle.loads(pickle.dumps(database))\n assert hasattr(database.engine, \"connect\")", "def init_db():\n db = MySQLdb.connect(host='localhost',\n port=3306,\n user=sys.argv[1],\n passwd=sys.argv[2],\n db=sys.argv[3])\n return db", "def run_upload_db(filename=None):\n if not filename:\n filename = settings.DB_DUMP_FILENAME\n if env.key_filename:\n ssh = settings.PROJECT_NAME\n else:\n ssh = '{0}@{1}'.format(env.user, env.host_string)\n local('scp {0} {1}:{3}'.format(\n filename, ssh, settings.FAB_SETTING('SERVER_DB_BACKUP_DIR')))", "def _load_data(\n data_dir: Path,\n script_dir: Path,\n user: str = PG_USER,\n password: str = PG_PASS,\n host: str = PG_HOST,\n port: int = PG_PORT,\n database: str = IBIS_TEST_POSTGRES_DB,\n **_: Any,\n ) -> None:\n with open(script_dir / 'schema' / 'postgresql.sql') as schema:\n engine = init_database(\n url=sa.engine.make_url(\n f\"postgresql://{user}:{password}@{host}:{port:d}/{database}\"\n ),\n database=database,\n schema=schema,\n isolation_level='AUTOCOMMIT',\n recreate=False,\n )\n\n tables = list(TEST_TABLES) + ['geo']\n with engine.begin() as con, con.connection.cursor() as cur:\n for table in tables:\n # Here we insert rows using COPY table FROM STDIN, using\n # psycopg2's `copy_expert` API.\n #\n # We could use DataFrame.to_sql(method=callable), but that\n # incurs an unnecessary round trip and requires more code: the\n # `data_iter` argument would have to be turned back into a CSV\n # before being passed to `copy_expert`.\n sql = f\"COPY {table} FROM STDIN WITH (FORMAT CSV, HEADER TRUE, DELIMITER ',')\"\n with data_dir.joinpath(f'{table}.csv').open('r') as file:\n cur.copy_expert(sql=sql, file=file)\n\n con.exec_driver_sql(\"VACUUM FULL ANALYZE\")" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Set up an ssh shortcut. Called by setup_ssh_keys. You can call it separately if desired.
def update_ssh_shortcut(output_keyfile, quickname=None):
    if quickname:
        with settings(warn_only=True):
            local("touch $HOME/.ssh/config")
        local(r"echo '' >> $HOME/.ssh/config")
        local(r"echo 'Host %s' >> $HOME/.ssh/config" % quickname)
        local(r"echo '' >> $HOME/.ssh/config")
        local(r"echo 'Hostname %s' >> $HOME/.ssh/config" % host_name)
        local(r"echo 'User %s' >> $HOME/.ssh/config" % user)
        local(r"echo 'IdentityFile ~/.ssh/%s' >> $HOME/.ssh/config" % output_keyfile)
        local(r"echo 'ServerAliveCountMax 3' >> $HOME/.ssh/config")
        local(r"echo 'ServerAliveInterval 10' >> $HOME/.ssh/config")
[ "def setup_ssh(public_key: Union[str, List[str]], mattermost_webhook_address: Optional[str] = None):\n public_key = parse_public_key(public_key)\n\n if not check_gpu_available():\n return # pragma: no cover\n\n # Config password for root user\n msg = \"\"\n msg = config_root_password(msg)\n\n # Config ssh server\n msg = config_ssh_server(public_key, msg)\n\n # Config other common tool and library\n install_common_tool()\n\n # Config Argo Tunnel\n msg, ssh_command, ssh_config, hostname = config_argo_tunnel(msg)\n\n # Send notification to Mattermost\n if mattermost_webhook_address is not None:\n spec = get_instance_info()\n spec['ssh_command'] = ssh_command\n spec['ssh_config'] = ssh_config\n spec['hostname'] = hostname\n send_notification_to_mattermost(mattermost_webhook_address, spec)\n\n print(msg)", "def createPuttyShortcuts(folder = \"Putty Connections\"):\n desktop = winshell.desktop()\n cpath = os.path.join(desktop, folder)\n\n if not os.path.exists(cpath):\n os.mkdir(cpath)\n \n for c in getPuttyConnections():\n if c.strip() != \"\":\n path = os.path.join(cpath, c + \".lnk\")\n target = \"C:\\\\Program Files (x86)\\\\PuTTY\\\\putty.exe\"\n args = \"-load \" + c\n wdir = \"C:\\\\Program Files (x86)\\PuTTY\\\\\"\n try:\n createShortcut(path, target, wdir = wdir, args = args)\n except Exception, e:\n print \"could not create shortcut for \" + c", "def setup_ssh_keys(output_keyfile=\"id_rsa\", ssh_type=\"rsa\", quickname=None):\n with settings(warn_only=True):\n local(\"mkdir -p $HOME/.ssh\")\n with cd(\"$HOME/.ssh\"):\n local(\"ssh-keygen -t %s -f %s\" % (ssh_type, output_keyfile))\n for host in env.hosts:\n local(\"scp %s.pub %s:temp_id_key.pub\" % (output_keyfile, host))\n with settings(warn_only=True):\n run(\"mkdir -p $HOME/.ssh\")\n run(\"cat $HOME/temp_id_key.pub >> ~/.ssh/authorized_keys\")\n run(\"rm $HOME/temp_id_key.pub\")\n run(\"chmod 600 $HOME/.ssh/authorized_keys\")\n run(\"chmod 700 $HOME/.ssh\")\n run(\"chmod go-w $HOME\")\n if quickname:\n update_ssh_shortcut(output_keyfile, quickname)", "def cmd_setup_ssh(public_key_file):\n\n def add_helper(key_file):\n if exists(key_file):\n try:\n fingerprint = str(check_output('ssh-keygen -lf ' + key_file, shell=True)).split(' ', 4)[1]\n key = open(key_file, 'r').read().strip()\n echo(\"Adding key '{}'.\".format(fingerprint), fg='white')\n setup_authorized_keys(fingerprint, PIKU_SCRIPT, key)\n except Exception:\n echo(\"Error: invalid public key file '{}': {}\".format(key_file, format_exc()), fg='red')\n elif public_key_file == '-':\n buffer = \"\".join(stdin.readlines())\n with NamedTemporaryFile(mode=\"w\") as f:\n f.write(buffer)\n f.flush()\n add_helper(f.name)\n else:\n echo(\"Error: public key file '{}' not found.\".format(key_file), fg='red')\n\n add_helper(public_key_file)", "def configure_quick_launch_shortcut(self, add_shortcut = True):\n self._configure_shortcut(2, add_shortcut)", "def __add_ssh_proxy(self):\n self.sendline(\"mkdir -p ~/.ssh\")\n self.sendline(\"cat > ~/.ssh/config << EOF\")\n self.sendline(f\"Host {self.lan_gateway}\")\n self.sendline(\"StrictHostKeyChecking no\")\n self.sendline(\"UserKnownHostsFile=/dev/null\")\n self.sendline(\"\")\n self.sendline(\"Host krouter\")\n self.sendline(f\"Hostname {self.lan_gateway}\")\n self.sendline(\"StrictHostKeyChecking no\")\n self.sendline(\"UserKnownHostsFile=/dev/null\")\n self.sendline(\"EOF\")\n self.expect(self.prompt)", "def __setup_passwordless_ssh():\n import params\n utils.exec_hawq_operation(\"ssh-exkeys\", format('-f {hawq_hosts_file} -p 
{hawq_password!p}', hawq_hosts_file=hawq_constants.hawq_hosts_file, hawq_password=params.hawq_password))\n\n File(hawq_constants.hawq_hosts_file, action='delete')", "def ssh_executable(self):", "def configure_desktop_shortcut(self, add_shortcut = True):\n self._configure_shortcut(0, add_shortcut)", "def setupSSH(key_rsa_path, key_append_path, key_gen_cmd, HostList):\n # Generate SSH key on localhost\n LocalKey = getLocalKey(key_gen_cmd, key_rsa_path)\n\n # Setup passwordless SSH with each of the specified machines\n for i in HostList:\n if i[0] != 'localhost':\n\n box_ip = i[1]\n user = i[2]\n pwd = i[3]\n\n out = subprocess.Popen(\"echo $\" + user, shell=True,\n stdout=subprocess.PIPE)\n box_user = out.stdout.read().rstrip('\\n')\n out = subprocess.Popen(\"echo $\" + pwd, shell=True,\n stdout=subprocess.PIPE)\n box_pwd = out.stdout.read().rstrip('\\n')\n try:\n\n RemoteKey = getRemoteKey(key_gen_cmd, key_rsa_path, box_ip,\n box_user, box_pwd)\n appendLocalKeyInRemote(LocalKey, key_append_path, box_ip,\n box_user, box_pwd)\n appendRemoteKeyInLocal(RemoteKey, key_append_path, box_ip)\n logging.info(\"Passwordless SSH has been setup b/w \\\n localhost & %s\", box_ip)\n\n except (paramiko.SSHException, paramiko.BadHostKeyException,\n paramiko.AuthenticationException, socket.error) as e:\n logging.info(\"Passwordless SSH setup failed b/w localhost & %s \\\n with %s, please verify host connectivity\", box_ip, e)", "def installShortcutKeys(self):\r\n #TODO: Deal with commented out shortcuts\r\n Key_Escape = 0x01000000 # not in PythonQt\r\n Key_Space = 0x20 # not in PythonQt\r\n self.shortcuts = []\r\n keysAndCallbacks = (\r\n # ('z', self.toolsBox.undoRedo.undo),\r\n # ('y', self.toolsBox.undoRedo.redo),\r\n ('h', self.toggleCrosshair),\r\n (Key_Escape, lambda : self.editor.setActiveEffect(None)),\r\n ('e', lambda : self.editor.setActiveEffect(self.editor.effectByName('Erase'))),\r\n ('p', lambda : self.editor.setActiveEffect(self.editor.effectByName('Paint'))),\r\n ('d', lambda : self.editor.setActiveEffect(self.editor.effectByName('Draw'))),\r\n ('w', lambda : self.editor.setActiveEffect(self.editor.effectByName('Wand'))),\r\n ('r', lambda : self.editor.setActiveEffect(self.editor.effectByName('Rectangle'))),\r\n # (Key_Space, self.toolsBox.toggleFloatingMode),\r\n )\r\n for key,callback in keysAndCallbacks:\r\n shortcut = qt.QShortcut(slicer.util.mainWindow())\r\n shortcut.setKey( qt.QKeySequence(key) )\r\n shortcut.connect( 'activated()', callback )\r\n self.shortcuts.append(shortcut)", "def configure_ssh_key(ssh):\n if not ssh: # Nothing to do\n yield\n return\n\n try:\n os.makedirs(os.path.join(os.environ[HOME_ENV], '.ssh'))\n except OSError as exc:\n logging.info('cannot create $HOME/.ssh, continue : %s', exc)\n except KeyError as exc:\n logging.info('$%s does not exist, continue : %s', HOME_ENV, exc)\n\n # Create a script for use with GIT_SSH, which defines the program git uses\n # during git fetch. 
In the future change this to GIT_SSH_COMMAND\n # https://superuser.com/questions/232373/how-to-tell-git-which-private-key-to-use\n with tempfile.NamedTemporaryFile(prefix='ssh', delete=False) as fp:\n fp.write(\n '#!/bin/sh\\nssh -o StrictHostKeyChecking=no -i \\'%s\\' -F /dev/null \"${@}\"\\n' % ssh)\n try:\n os.chmod(fp.name, 0o500)\n had = 'GIT_SSH' in os.environ\n old = os.getenv('GIT_SSH')\n os.environ['GIT_SSH'] = fp.name\n\n yield\n\n del os.environ['GIT_SSH']\n if had:\n os.environ['GIT_SSH'] = old\n finally:\n os.unlink(fp.name)", "def setupSSH():\r\n\r\n\r\n subprocess.call([\"ssh-keygen -f ~/.ssh/id_rsa -t rsa -P '' \"],shell=True)\r\n subprocess.call([\"cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys\"], shell=True)\r\n\r\n for node in workerNodes:\r\n\r\n print (\"Working on Node: {}\".format(node))\r\n #########\r\n subprocess.call([\"cat ~/.ssh/id_rsa.pub | ssh {} 'cat >> ~/.ssh/authorized_keys'\".format(node)],shell=True)\r\n #########\r\n subprocess.call([\"scp ~/.ssh/config {}:~/.ssh/\".format(node)],shell=True)\r\n\r\n subprocess.call([\"scp ~/.ssh/{} {}:~/.ssh/\".format(nameofPemKEy, node)], shell=True)\r\n subprocess.call([\"sudo cp /etc/hosts ~\"], shell=True)\r\n subprocess.call([\"sudo chown ubuntu:ubuntu ~/hosts\"],shell=True)\r\n subprocess.call([\"scp ~/hosts {}:~\".format(node)], shell=True)\r\n subprocess.call([\"ssh {} 'sudo mv ~/hosts /etc/hosts' \".format(node)], shell=True)", "def _access_config(self):\n LOG.info('Setup ssh/local user access')\n self.runchroot([\n 'pacman',\n '-Syy',\n '--noconfirm',\n 'openssh'\n ])\n self.runchroot([\n 'systemctl',\n 'enable',\n 'sshd.service'\n ])\n self.runchroot([\n 'systemctl',\n 'enable',\n 'getty@ttyS0.service'\n ])\n if self.domain.password:\n self.runchroot([\n 'usermod',\n '-p',\n self.domain.password,\n 'root'\n ])\n if self.domain.sshkeys:\n authorized_keys = []\n for key, value in self.domain.sshkeys.items():\n authorized_keys.append(\n \"%s %s %s\" % (value['type'], value['key'], key)\n )\n os.mkdir('%s/root/.ssh' % self.target)\n self.writetargetfile(\n '/root/.ssh/authorized_keys',\n authorized_keys\n )", "def create_ssh_key():\n SSHkey().create()", "def addShortcut(widget, name, key, func):\n settings = QtCore.QSettings()\n action = QtWidgets.QAction(name)\n action.triggered.connect(func)\n if not settings.value(\"shortcuts/\" + name):\n settings.setValue(\"shortcuts/\" + name, key)\n action.setShortcut(QtGui.QKeySequence(settings.value(\"shortcuts/\"+ name)))\n widget.addAction(action)\n if not hasattr(widget, \"_qactions\"):\n widget._qactions = []\n widget._qactions.append(action)", "def main():\n defaultPrint('creating hotkeys...')\n\n for hotKey in HOTKEYS_ITEMS:\n setHotkey(hotKey)\n # save hotkeys pref files\n cmds.savePrefs(hotkeys=True)", "def create_shortcut(target_dir, shortcut_name, shortcut_description=None):\n\n #shortcut = pythoncom.CoCreateInstance(\n # shell.CLSID_ShellLink,\n # None,\n # pythoncom.CLSCTX_INPROC_SERVER,\n # shell.IID_IShellLink\n #)\n #shortcut.SetPath(target_dir)\n #shortcut.SetDescription(shortcut_description)\n #shortcut.SetIconLocation(sys.executable, 0)\n\n desktop_path = shell.SHGetFolderPath(0, shellcon.CSIDL_DESKTOP, 0, 0)\n print desktop_path\n\n #persist_file = shortcut.QueryInterface(pythoncom.IID_IPersistFile)\n #print persist_file\n \n target = open (os.path.join(desktop_path, \"IAG ToolKIT.bat\"), 'w') ## a will append, w will over-write \n target.write(\"START %s runserver\" % target_dir)\n target.write(\"\\n\")\n target.write(\"ping 192.0.2.2 -n 1 -w 2000 > 
nul\")\n target.write(\"\\n\")\n target.write(\"START http://127.0.0.1:8000\")\n target.close()\n # persist_file.Save(os.path.join(desktop_path, \"%s.lnk\" % shortcut_name), 0)", "def task_install_ssh_key():\n return sequence([\n sudo_from_args(['cp', '.ssh/authorized_keys',\n '/root/.ssh/authorized_keys']),\n ])" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Generate a new SSH key and deliver it to the server. If quickname is provided, also set up an ssh shortcut. Use this to enable passwordless access to webfaction.
def setup_ssh_keys(output_keyfile="id_rsa", ssh_type="rsa", quickname=None):
    with settings(warn_only=True):
        local("mkdir -p $HOME/.ssh")
        with cd("$HOME/.ssh"):
            local("ssh-keygen -t %s -f %s" % (ssh_type, output_keyfile))
    for host in env.hosts:
        local("scp %s.pub %s:temp_id_key.pub" % (output_keyfile, host))
        with settings(warn_only=True):
            run("mkdir -p $HOME/.ssh")
            run("cat $HOME/temp_id_key.pub >> ~/.ssh/authorized_keys")
            run("rm $HOME/temp_id_key.pub")
            run("chmod 600 $HOME/.ssh/authorized_keys")
            run("chmod 700 $HOME/.ssh")
            run("chmod go-w $HOME")
    if quickname:
        update_ssh_shortcut(output_keyfile, quickname)
[ "def update_ssh_shortcut(output_keyfile, quickname=None):\n if quickname:\n with settings(warn_only=True):\n local(\"touch $HOME/.ssh/config\")\n local(r\"echo '' >> $HOME/.ssh/config\")\n local(r\"echo 'Host %s' >> $HOME/.ssh/config\" % quickname)\n local(r\"echo '' >> $HOME/.ssh/config\")\n local(r\"echo 'Hostname %s' >> $HOME/.ssh/config\" % host_name)\n local(r\"echo 'User %s' >> $HOME/.ssh/config\" % user)\n local(r\"echo 'IdentityFile ~/.ssh/%s' >> $HOME/.ssh/config\" % output_keyfile)\n local(r\"echo 'ServerAliveCountMax 3' >> $HOME/.ssh/config\")\n local(r\"echo 'ServerAliveInterval 10' >> $HOME/.ssh/config\")", "def install_key():\n run('rm -rf ~/.ssh ; mkdir -p ~/.ssh')\n put('fabric/concat.pub', '~/.ssh/authorized_keys')\n put('fabric/id_rsa.pub', '~/.ssh')\n put('fabric/id_rsa', '~/.ssh')\n run('chmod 600 ~/.ssh/id_rsa')", "def create_ssh_key():\n SSHkey().create()", "def GenSshKey(user):\n key_name = 'daisy-test-key-' + str(uuid.uuid4())\n Execute(\n ['ssh-keygen', '-t', 'rsa', '-N', '', '-f', key_name, '-C', key_name])\n with open(key_name + '.pub', 'r') as original:\n data = original.read().strip()\n return \"%s:%s\" % (user, data), key_name", "def install_key(key=None):\n with settings(warn_only=True):\n run('mkdir -p .ssh')\n if not key:\n put('~/.ssh/id_rsa.pub', '.ssh/authorized_keys')\n else:\n put('~/.ssh/%s' % key, '.ssh/authorized_keys')", "def new_ssh_key(self, secret_name):\n with self.__client as client:\n return self.__exec_cmd(client, Command.NEW_SSH_KEY, secret_name)", "def install_key():\n addKey()", "def create_deploy_keys():\n # TODO if the deployment key is not present, offer to generate one\n # # new way (generate if needed):\n # local(\"ssh-keygen -b 8192 -f deploy_rsa -t rsa -N ''\")", "def create_key(name):\n\tinput_data = GPG.gen_key_input(\n\t\tkey_type='RSA',\n\t\tkey_length='1024',\n\t\tname_real='PGP File System',\n\t\tname_comment=create_comment(name),\n\t\tname_email='placeholder@email.address'\n\t)\n\treturn GPG.gen_key(input_data)", "def create_key_pair(self, name: str = \"ec2-keypair\") -> None:\n ec2 = self.session.resource(\"ec2\")\n with open(f\"{os.path.join('../tmp', name)}.pem\", \"w\") as outfile:\n key_pair = ec2.create_key_pair(KeyName=name)\n key_pair_out = str(key_pair.key_material)\n outfile.write(key_pair_out)", "def generate_ssh_keypair():\n key = RSA.generate(2048)\n pubkey = key.publickey()\n return SSHKeyPair(pubkey.exportKey('OpenSSH').decode(), key.exportKey('PEM').decode())", "def ssh_remote_keygen():\n dsa_filename = os.path.join(_get_remote_home_dir(), '.ssh', 'id_dsa')\n run(\"ssh-keygen -t dsa -P '' -f %s\" % dsa_filename)\n ssh_remote_pubkey()", "def generate_ssh_keys(self):\n self.keypair = MySSHKeyPair.create(\n self.api_client,\n name=random_gen() + \".pem\",\n account=self.account.user[0].account,\n domainid=self.account.domainid)\n\n self.cleanup.append(self.keypair)\n self.debug(\"Created keypair with name: %s\" % self.keypair.name)\n self.debug(\"Writing the private key to local file\")\n pkfile = tempfile.gettempdir() + os.sep + self.keypair.name\n self.keypair.private_key_file = pkfile\n self.tmp_files.append(pkfile)\n self.debug(\"File path: %s\" % pkfile)\n with open(pkfile, \"w+\") as f:\n f.write(self.keypair.privatekey)\n os.chmod(pkfile, 0o400)\n\n return self.keypair", "def create_key ():", "def create_ssh_key_file(username: str, ssh_key: bytes, ip_address: str):\n\n if not os.path.exists(\"./ansible/keys\"):\n os.mkdir(\"./ansible/keys\")\n\n with open(f\"./ansible/keys/admin_{ip_address}.pem\", 
\"w\") as ssh_key_file:\n ssh_key_file.write(ssh_key.decode())\n\n os.system(f\"chmod 400 ./ansible/keys/admin_{ip_address}.pem\")", "def cmd_setup_ssh(public_key_file):\n\n def add_helper(key_file):\n if exists(key_file):\n try:\n fingerprint = str(check_output('ssh-keygen -lf ' + key_file, shell=True)).split(' ', 4)[1]\n key = open(key_file, 'r').read().strip()\n echo(\"Adding key '{}'.\".format(fingerprint), fg='white')\n setup_authorized_keys(fingerprint, PIKU_SCRIPT, key)\n except Exception:\n echo(\"Error: invalid public key file '{}': {}\".format(key_file, format_exc()), fg='red')\n elif public_key_file == '-':\n buffer = \"\".join(stdin.readlines())\n with NamedTemporaryFile(mode=\"w\") as f:\n f.write(buffer)\n f.flush()\n add_helper(f.name)\n else:\n echo(\"Error: public key file '{}' not found.\".format(key_file), fg='red')\n\n add_helper(public_key_file)", "def dist_keys():\n SSH_KEYFILE=\"/home/%s/.ssh/id_dsa.pub\" % env.user\n AUTH_KEYS=\"/home/%s/.ssh/authorized_keys\" % env.user\n GROUP=\"sysadmins\"\n print \"distributing your ssh key to %s\" % env.host\n if os.path.exists(SSH_KEYFILE):\n print \"%s exists, reading it\" % SSH_KEYFILE\n f = open(SSH_KEYFILE, \"r\")\n key = f.read()\n f.close()\n \n if files.exists(AUTH_KEYS):\n # possible to have 2 keys that are the same for the first 75 chars?\n if files.contains(key[:75], AUTH_KEYS):\n print \"your key is already in the remote authorized_keys\"\n else:\n print \"%s exists, appending your key\" % AUTH_KEYS\n files.append(AUTH_KEYS,key)\n else:\n run(\"mkdir -p /home/%s/.ssh\" % env.user)\n run(\"chown %s:%s /home/%s/.ssh\" % (env.user, GROUP, env.user))\n put(SSH_KEYFILE, AUTH_KEYS, mode=0600)\n \n run(\"chmod 700 /home/%s/.ssh\" % env.user)\n run(\"chmod 600 %s\" % AUTH_KEYS)", "def install_ssh_key_from_string(foreman_ssh_key):\n print_generic(\"Installing Remote Execution SSH key for user %s\" % options.remote_exec_user)\n foreman_ssh_key = foreman_ssh_key.strip()\n userpw = pwd.getpwnam(options.remote_exec_user)\n if not options.remote_exec_authpath:\n options.remote_exec_authpath = os.path.join(userpw.pw_dir, '.ssh', 'authorized_keys')\n foreman_ssh_dir = os.path.join(userpw.pw_dir, '.ssh')\n if not os.path.isdir(foreman_ssh_dir):\n os.mkdir(foreman_ssh_dir, OWNER_ONLY_DIR)\n os.chown(foreman_ssh_dir, userpw.pw_uid, userpw.pw_gid)\n elif os.path.exists(options.remote_exec_authpath) and not os.path.isfile(options.remote_exec_authpath):\n print_error(\"Foreman's SSH key not installed. You need to provide a full path to an authorized_keys file, you provided: '%s'\" % options.remote_exec_authpath)\n return\n if os.path.isfile(options.remote_exec_authpath):\n if foreman_ssh_key in open(options.remote_exec_authpath, 'r').read():\n print_generic(\"Foreman's SSH key already present in %s\" % options.remote_exec_authpath)\n return\n output = os.fdopen(os.open(options.remote_exec_authpath, os.O_WRONLY | os.O_CREAT, OWNER_ONLY_FILE), 'a')\n output.write(\"\\n\")\n output.write(foreman_ssh_key)\n os.chown(options.remote_exec_authpath, userpw.pw_uid, userpw.pw_gid)\n print_generic(\"Foreman's SSH key added to %s\" % options.remote_exec_authpath)\n output.close()", "def makeKey(self, name, pin):\r\n return name + \"/\" + pin" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Installs pip itself if needed.
def install_pip():
    with settings(warn_only=True):
        run('mkdir $HOME/lib/python2.7')
        run('easy_install-2.7 pip')
[ "def install_pip():\n if sys.version_info[0] < 3: # We are running python 2.x\n cmd = ['python -m easy_install pip']\n process = subprocess.Popen(cmd, shell=True)\n process.wait()\n process.poll()\n if process.returncode is 0:\n print('Successfully installed pip')\n return 0\n else:\n print('Error installing pip!')\n return -1", "def install_pip():\n print \"Install PIP\"\n paths = sys.path\n for path in paths:\n p = path.split(\"\\\\\")\n if len(p) == 3 and \"Python\" in p[1] and \"ArcGIS\" in [2]:\n\n process = subprocess.Popen([os.path.join(path, \"python.exe\"), os.path.join(dir_name, \"get-pip.py\")],\n stdout=subprocess.PIPE)\n out, err = process.communicate()\n print out", "def __install(self):\n command = self.pipComboBox.currentText()\n if command == self.__default:\n command = \"\"\n \n packages = []\n for itm in self.resultList.selectedItems():\n packages.append(itm.text(0).strip())\n if packages:\n self.__pip.installPackages(packages, cmd=command)", "def install(packages):\n global pip_install_argument\n for package in packages:\n pip([pip_install_argument, package])", "def pip_install(package: str):\n return subprocess.check_call([\"pip\", \"install\", package])", "def install_pkg(pip, package):\n if not os.path.isdir(INSTALL_DIR):\n os.makedirs(INSTALL_DIR)\n pip_cmds = ['mayapy', pip, 'install', package, '--target', INSTALL_DIR, '--log', DEPENDENCY_INSTALL_LOG]\n print(pip_cmds)\n installer = subprocess.Popen(pip_cmds)\n installer.wait()\n print(\"Successfully installed package {}\".format(package))\n if installer.returncode != 0:\n raise RuntimeError(\"Failed to install package: {}, please check logs in: {}\".format(package, DEPENDENCY_INSTALL_LOG))", "def ensure_pip(framework_path, version):\n python_path = os.path.join(\n framework_path, \"Versions\", version, \"bin/python\" + version\n )\n if not os.path.exists(python_path):\n print(\"No python at %s\" % python_path, file=sys.stderr)\n return\n cmd = [python_path, \"-s\", \"-m\", \"ensurepip\"]\n print(\"Ensuring pip is installed...\")\n subprocess.check_call(cmd)", "def install(self):\n other_args = list(requirement_args(self._argv, want_other=True))\n archive_path = join(self._temp_path, self._downloaded_filename())\n # -U so it installs whether pip deems the requirement \"satisfied\" or\n # not. 
This is necessary for GitHub-sourced zips, which change without\n # their version numbers changing.\n run_pip(['install'] + other_args + ['--no-deps', '-U', archive_path])", "def update_dependencies():\n pip = env.virtualenv.child('bin', 'pip')\n reqs = env.code_dir.child('deploy-requirements.txt')\n sudo('%s -q install -U pip' % pip)\n sudo('%s -q install -r %s' % (pip, reqs))", "def _install(self, requirement):\n\n import pip\n\n self.log.debug('Installing %s' % requirement)\n\n if pip.main(['install', requirement]) == 0:\n self.log.debug('Successfully installed %s' % requirement)\n\n else:\n self.log.debug('Failed to \"pip install %s\"' % requirement)", "def install_pip(py_bin):\n get_pip = path_from_url('get-pip.py', GET_PIP_URL, always_download=True)\n check_call([py_bin, get_pip])\n pip_dir = dirname(py_bin)\n ver = get_version(py_bin)\n pip_bin = pjoin(pip_dir, 'pip' + short_version(ver))\n assert exists(pip_bin)\n return pip_bin", "def pipupdate():\n\n packages = [d for d in pkg_resources.working_set]\n subprocess.call('pip install --upgrade ' + ' '.join(packages))", "def upgrade_pip():\n out_info(\"Upgrading pip...\")\n pipexe = [sys.executable, \"-m\", \"pip\"]\n pipexe.extend([\"install\", \"--no-cache-dir\", \"-qq\", \"--upgrade\"])\n if not IS_ADMIN and not IS_VIRTUALENV:\n pipexe.append(\"--user\")\n pipexe.append(\"pip\")\n run(pipexe)", "def pip_requirements():\n\n with cd(env.config.ApiServer.document_root):\n run(\"./env/bin/pip install --requirement ./scripts/requirements.pip\")", "def _install_pip_dependency(dep: str, verbose: bool = True):\n cmd = [\n sys.executable,\n \"-m\",\n \"pip\",\n \"--disable-pip-version-check\",\n \"install\",\n dep,\n ]\n if verbose:\n logger.info(f\"running: {' '.join(cmd)}\")\n results = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n\n read_pipe(cmd, results.stdout, verbose)\n read_pipe(\n cmd, results.stderr, verbose=True, log=logger.error, collect=True,\n )", "def check_pip():\n try:\n import pip\n except ImportError:\n out_error(\"Import pip failed. Please Install python3-pip \"\n \"and try again\")\n exit(1)\n upgrade_pip()\n importlib.reload(pip)\n pip_version = pip.__version__\n del pip\n\n get_installed_packages()\n out_info(\"Installed pip: {}\".format(pip_version))", "def reinstall() -> None:\n pipenv.remove()\n install()", "def check_pip(env: Environment) -> None:\n dependencies = env.dependencies.get(\"conda\", {})\n if \"pip\" not in dependencies:\n raise PipInstallError(\"Must have pip installed to install pip packages\")", "def install():\n import subprocess\n import sys\n subprocess.check_call([sys.executable, \"-m\", \"ensurepip\"])\n subprocess.check_call([sys.executable, \"-m\", \"pip\", \"install\", \"--upgrade\", \"pip\"])\n subprocess.check_call([sys.executable, \"-m\", \"pip\", \"install\", \"mediapipe\"])" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Creates a new git repo on the server (do not include the .git ending in git_repo_name)
def create_prod_git_repo(git_repo_name):
    with cd(git_dir):
        run("git init --bare %s.git && cd %s.git && git config http.receivepack true" % (git_repo_name,git_repo_name))
[ "def cmd_create(self):\n self.repo.create()\n\n # Add .gitignore.\n self.repo.add_files({'.gitignore': '.swp\\n'}, FIRST_COMMIT_MSG)\n\n # Create the etc and timestamps branches.\n self.repo.checkout('etc', create=True)\n self.repo.checkout('timestamps', create=True)\n\n self.repo.checkout('master')\n self.repo.init()\n self.update_repository()\n print('Git repository created at %s' % self.repodir)", "def create_repo():\n db.session.add(\n Repo(\n name='repo_name',\n url='https://github.com/user/repo',\n github_repo_id=default_repo_id\n )\n )", "def create_repo(self, name):\n self.repo = self.org.create_repo(name, auto_init=True, private=True)", "def create_repo(repo_path):\n\n if repo_path is None:\n repo_path = os.getcwd()\n\n return Repository(repo_path)", "def command_new_repo(self):\n repoinit.new_repo(*self.args())", "def _create_github_repo(self):\n\n repo_dir = join(self.temp_dir, 'repo')\n subprocess.check_output(['git', 'init', repo_dir])\n\n subprocess.check_output(\n ['git', 'config', 'user.email', os.environ['GIT_EMAIL']],\n cwd=repo_dir\n )\n subprocess.check_output(\n ['git', 'config', 'user.name', os.environ['GIT_NAME']],\n cwd=repo_dir\n )\n\n content = statiki.get_travis_files_content(TEST_REPO, 'BOGUS', {})\n\n for info in content:\n path = join(repo_dir, info['name'])\n with open(path, 'w') as f:\n f.write(info['content'])\n\n subprocess.check_output(['git', 'add', path], cwd=repo_dir)\n subprocess.check_output(\n ['git', 'commit', '-m', '%s' % info['message']], cwd=repo_dir\n )\n\n subprocess.check_output(\n shlex.split('git remote add origin ..'), cwd=repo_dir\n )\n\n return repo_dir", "def create_repository(request):\n logged_in_user = get_logged_in_user(request)\n\n # get params\n name = request.params.get('name')\n windows_path = request.params.get('windows_path')\n linux_path = request.params.get('linux_path')\n osx_path = request.params.get('osx_path')\n\n if name and windows_path and linux_path and osx_path:\n # create a new Repository and save it to the database\n new_repository = Repository(\n name=name,\n windows_path=windows_path,\n linux_path=linux_path,\n osx_path=osx_path,\n created_by=logged_in_user\n )\n DBSession.add(new_repository)\n\n return HTTPOk()", "def create_repository(cfg):\n if os.path.isdir(cfg[\"repo_dir\"]):\n shutil.rmtree(cfg[\"repo_dir\"], ignore_errors=True)\n return Repo.init(cfg[\"repo_dir\"])", "def create_update_gitdir():\n if not os.path.exists(gitdname):\n retcode = subprocess.call('git clone '+repo, shell=True)\n if retcode != 0:\n msg = \"\"\"There was a problem cloning the repo\"\"\"\n raise Exception(msg)\n else: # directory exists, can't pull if you're not on a branch\n # just delete it and clone again. 
Lazy but clean solution.\n shutil.rmtree(gitdname)\n create_update_gitdir()", "def create_code_repository(CodeRepositoryName=None, GitConfig=None):\n pass", "def clone_git_repo(resource, git_repos_dir):\n # clone to a temporary directory to examine their files\n remote_uri = resource['source']['uri']\n if 'username' in resource['source'] and 'password' in resource['source']:\n remote_uri = re.sub('https://', 'https://'+resource['source']['username']+\":\"+resource['source']['password']+'@', remote_uri)\n\n repo = git.Repo.init(join(git_repos_dir, resource['name']))\n\n try:\n origin = repo.create_remote('origin', remote_uri)\n except:\n origin = repo.remotes.origin\n\n assert origin.exists()\n assert origin == repo.remotes.origin == repo.remotes['origin']\n origin.fetch()\n # Setup a local tracking branch of a remote branch\n branch = 'master'\n if 'branch' in resource['source']:\n branch = resource['source']['branch']\n repo.create_head(branch, origin.refs[branch]) # create local branch branch from remote branch\n repo.heads[branch].set_tracking_branch(origin.refs[branch]) # set local branch to track remote branch\n repo.heads[branch].checkout() # checkout local branch to working tree\n origin.pull()", "def create_github_repo(organisation, repo_name):\n gh = _get_github_instance()\n\n gh_org = gh.get_organization(organisation)\n\n create_args = {'name': repo_name}\n new_repo = gh_org.create_repo(**create_args)\n return new_repo.clone_url", "def create_repo(self, repo):\n return self.user_con.create_repo(repo=repo)", "def create_repo(repo_name, private = 'false', description = ''):\n reqq='orgs/%s/repos' % (org_name)\n url = host + reqq\n data = '{\"name\":\"%s\",\"private\":\"%s\",\"description\":\"%s\"}'\\\n % (repo_name,private,description)\n r = type_connect.post(url, data)\n if (errors_requests(r))&(r.status_code == requests.codes.CREATED):\n create_team(repo_name + ' guests','pull',repo_name)\n create_team(repo_name,'push',repo_name)\n create_team(repo_name + ' owners','admin',repo_name)\n return 0\n\n else:\n if debug:\n print_debug(r) \n return -1", "def create_repository(organization_name, repository_name, \n template_repository=None, travis_ci=True):\n\n # Create a GitHub repository.\n github_client = GitHub(os.environ.get(\"GITHUB_TOKEN\"))\n\n organization = github_client.get_organization(organization_name)\n new_repository = organization.create_repo(repository_name)\n new_repository_uri = \"/\".join([organization_name, repository_name])\n\n # Enable continuous integration.\n if travis_ci:\n enable_continuous_integration(new_repository_uri)\n\n # Copy from a template.\n if template_repository: \n template = github_client.get_repo(template_repository)\n\n temp_folder = mkdtemp()\n subprocess.Popen(\n [\"git\", \"clone\", template.clone_url], cwd=temp_folder).wait()\n\n # Remove .git directory, create new one, add files, commit and push\n commands = [\n \"rm -Rf .git/\",\n \"git init\",\n \"git add -f -A\",\n \"git remote add origin git@github.com:{uri}.git\"\\\n .format(uri=new_repository_uri),\n (\"git\", \"commit\", \"-m\", \"Initial commit using {} template\"\\\n .format(template_repository)),\n \"git push -u origin master\"\n ]\n\n cwd = glob(os.path.join(temp_folder, \"*\"))[0]\n for command in commands:\n args = command.split() if isinstance(command, str) else command\n subprocess.Popen(args, cwd=cwd).wait()\n\n return new_repository", "def create_bare_repo(self, domain):\n\n domain_dir = self.get_domaindir(domain)\n www_dir = domain_dir + \"/www\"\n www_git = domain_dir + 
\"/www.git\"\n hook_post_receive_file = www_git + \"/hooks/post-receive\"\n\n if not os.path.exists(www_git):\n os.makedirs(www_git)\n git_init_command = \"cd \" + www_git\n git_init_command += \" && git init --bare\"\n subprocess.call(git_init_command, shell=True)\n\n if not os.path.isfile(hook_post_receive_file):\n with open(hook_post_receive_file, \"w\") as file:\n post_receive_content = \"#!/bin/sh\"\n post_receive_content += \"\\nGIT_WORK_TREE=\" + www_dir\n post_receive_content += \" git checkout -f\"\n file.write(post_receive_content)\n subprocess.call(\"chmod +x \" + hook_post_receive_file, shell=True)", "def test_create_repo_git(self):\n protocols = ['git', 'GIT', 'Git', ]\n for protocol in protocols:\n self._repo[ExternalsDescription.PROTOCOL] = protocol\n repo = create_repository(self._name, self._repo)\n self.assertIsInstance(repo, GitRepository)", "def api_repo_create():\n form = NewRepoForm()\n if form.validate_on_submit():\n # On the miniscule chance we generate a non-unique access key, loop and try again.\n success = False\n while not success:\n new_repo = Repo.create(\n pass_phrase = form.pass_phrase.data,\n title = form.title.data,\n description = form.description.data,\n is_private = form.is_private.data\n )\n db.session.add(new_repo)\n try:\n db.session.commit()\n success = True\n except:\n db.session.rollback()\n success = False\n session['working_repo'] = new_repo.access_key\n return jsonify(message='success', created=new_repo.access_key)\n else:\n return jsonify(message=\"failed\", errors=form.errors_to_json()), 400", "def create_git_repository(repo_path, files, idx):\n repository = Repo.init(repo_path, initial_branch=\"main\")\n\n commits = []\n for file, content in files:\n file_path = os.path.join(repo_path, file)\n with open(file_path, \"w\") as f:\n f.write(content)\n repository.index.add(file_path)\n commit = repository.index.commit(f\"Add {file}\")\n commits.append(commit.hexsha)\n\n # Checkout given commit\n repository.git.checkout(commits[idx])\n # Create branch\n repository.create_head(\"new-branch\")\n # Create tag\n repository.create_tag(\"new-tag\")\n # Go back to main branch\n repository.git.checkout(\"main\")\n\n return commits" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Adds the git repo on the server as the local .git repo's origin, and pushes master to it. (do not include the .git ending in git_repo_name)
def add_prod_repo_as_origin_and_push(git_repo_name):
    local("""echo '[remote "origin"]' >> .git/config""")
    local(r"echo ' fetch = +refs/heads/*:refs/remotes/origin/*' >> .git/config")
    local(r"echo ' url = %s:webapps/git/repos/%s.git' >> .git/config" % (env.hosts[0], git_repo_name))
    local(r"git push origin master")
[ "def push(self):\n origin = self.git_repo.remotes.origin\n origin.push()", "def push(self):\n pass # push will happen at workspace level\n # actions.call_subprocess([GIT_EXE_PATH, 'push', 'origin', 'master'],\n # cwd=self.local_path, verbose=self.verbose)", "def push():\n branch = git.current_branch().name\n shell.run('git push -u origin {}'.format(branch))", "def PushToGit(self):\n # TODO: use GitPython instead of calling Git directly.\n call([\"git\", \"add\", \".\"], cwd=self.root)\n call([\"git\", \"commit\", \"-am\", \"Update repo\"], cwd=self.root)\n call([\"git\", \"push\"], cwd=self.root)", "def __gitAddRemote(self):\n self.vcs.gitAddRemote(self.project.getProjectPath())", "def git_commit_and_push(self):\r\n self.repo.git.add('--all')\r\n self.repo.index.commit(\"committed changes\")\r\n origin = self.repo.remote('origin')\r\n origin.push('master')\r\n self.repo.git.add(update=True)\r\n print(\"Commit and push changes - completed....\")", "def git_push():\n\n # get current version\n new_version = get_version()\n values = list(map(lambda x: int(x), new_version.split('.')))\n\n # Push to origin new version and corresponding tag:\n # * commit new version\n # * create tag\n # * push version,tag to origin\n local('git add {}/version.py version.py'.format(project_name))\n\n local('git commit -m \"updated version\"')\n local('git tag {}.{}.{}'.format(values[0], values[1], values[2]))\n local('git push origin --tags')\n local('git push')", "def git_repo_push(cls, git_path, commit_message, remote=\"origin\", branch=\"main\"):\n check_git_status()\n\n repo = Repo(git_path)\n repo.git.add(all=True)\n repo.index.commit(commit_message)\n repo.git.push(remote, branch)", "def push(\n self,\n repo: Repo,\n ) -> None:\n print('pushing branch to remote')\n try:\n # Use local branch name as upstream branch name\n info_list = repo.remotes.origin.push(refspec=repo.active_branch)\n except GitError as e:\n raise utils.EmailToPrError('failed to push branch to remote', e)", "def push(self, base_repo, branch=\"master\"):\n base_repo.push_to(self, branch)", "def repo_push(self):\n\n if self.clowder_repo is None:\n exit_clowder_not_found()\n\n if is_offline():\n print(fmt.offline_error())\n sys.exit(1)\n\n self.clowder_repo.print_status(fetch=True)\n self.clowder_repo.push()", "def _sync_git_origin(cache_dir, site):\n\n git_dir = '--git-dir=' + cache_dir\n\n # silently try to add origin first, to lazily handle a missing case\n GIT.execute([git_dir, 'remote', 'add', 'origin', site],\n cwd=cache_dir, quiet=True)\n\n if not GIT.execute([git_dir, 'remote', 'set-url', 'origin', site],\n cwd=cache_dir):\n err('unable to ensure origin is set on repository cache')\n return False\n\n return True", "def push(self):\n\n repo = ProjectRepo(self.clowder_path, self.remote, self.default_ref)\n repo.push()", "def gitAdd(filename, repo_dir):\n file_path = \"%s/%s\" % (repo_dir, filename)\n git(\"add\", file_path)", "def scm_push(ctx):\n\n for branch in ('develop', 'master'):\n ctx.run('git push origin {}'.format(branch))\n\n ctx.run('git push --tags')", "def init(ctx, url):\n global dotdir\n if dotdir.path.exists():\n logger.critical(\n \"Can not init local repo when {} already exists\".format(\n dotdir.path.as_posix()\n )\n )\n sys.exit(1)\n logger.info(\"Creating local repo: {}\".format(dotdir.path.as_posix()))\n repo = git.Repo.init(dotdir.path.as_posix())\n logger.info(\"Adding remote origin: {}\".format(url))\n origin = repo.create_remote(\"origin\", url)\n logger.info(\"Fetching origin\")\n origin.fetch()\n 
logger.info(\"Remote pull\")\n origin.pull(origin.refs[0].remote_head)", "def push(self, base_repo, branch: str = \"master\") -> None:\n raise NotImplementedError", "def post(self):\n my_data = json.loads(self.request.body.decode('utf-8'))\n origin = my_data[\"origin\"]\n master = my_data[\"master\"]\n curr_fb_path = my_data[\"curr_fb_path\"]\n my_output = self.git.push(origin, master, curr_fb_path)\n self.finish(my_output)\n print(\"You Pushed\")", "def push(self, from_dir, branch=\"master\"):\n try:\n self.checkout(from_dir, branch)\n origin = self.commit(from_dir, branch)\n origin.push()\n print(\"Push successful\")\n except GitCommandError as e:\n print(\"wrong command! \\n\", str(e))" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Updates the apache httpd.conf file to point to the new project instead of the default 'myproject'. This is called as part of clone_into_project, or you can call
def update_conf_file():
    filepath = remote_dir + "/apache2/conf/httpd.conf"
    fabric.contrib.files.sed(filepath, 'myproject', project_name)
[ "def set_apache_config():\n # Delete the existing default config file.\n if exists(\"/etc/apache2/sites-enabled/000-default.conf\"):\n run(\"rm /etc/apache2/sites-enabled/000-default.conf\")\n\n with cd(\"/etc/apache2/sites-enabled/\"):\n run(\"ln -sf /app/workshop-php-bootstrap/support/apache/000-default.conf .\")\n run(\"apachectl restart\")", "def install_apache_conf():\r\n sudo('cp %(repo_path)s/%(project_name)s/configs/%(settings)s/apache %(apache_config_path)s' % env)", "def handle_project_change(self):\n self.update_default_wdir()\n self.load_config()", "def update_webserver_config():\n require('ws_config_path', provided_by=[prod])\n apache_sa = '/etc/apache2/sites-available/'\n apache_se = '/etc/apache2/sites-enabled/'\n nginx_sa = '/etc/nginx/sites-available/'\n nginx_se = '/etc/nginx/sites-enabled/'\n\n sudo('rm %s%s' % (apache_sa, env.project_name))\n sudo('rm %s%s' % (apache_se, env.project_name))\n\n sudo('rm %s%s' % (nginx_sa, env.project_name))\n sudo('rm %s%s' % (nginx_se, env.project_name))\n\n put('%sapache2/sites-available/*' % (env.ws_config_path), apache_sa, use_sudo=True)\n put('%snginx/sites-available/*' % (env.ws_config_path), nginx_sa, use_sudo=True)\n\n sudo('ln -s %s%s %s' % (apache_sa, env.project_name, apache_se))\n sudo('ln -s %s%s %s' % (nginx_sa, env.project_name, nginx_se))\n restart_webservers()", "def config_apache():\n with lcd(env.projectroot):\n with cd(\"/etc/apache2\"):\n put(\"manage/sysconf/%(target)s/etc/apache2/sites-available/lagrummet\" % env, \"sites-available\",\n use_sudo=True)\n try:\n sudo(\"ln -s ../sites-available/lagrummet sites-enabled/lagrummet\")\n except:\n print \"Ignored failed to create symbolic link!\"", "def configure(full=1, site=ALL, delete_old=0):\n from burlap import service\n \n print 'Configuring Apache...'\n apache_specifics = set_apache_specifics()\n \n if int(delete_old):\n # Delete all existing enabled and available sites.\n sudo('rm -f %(apache_sites_available)s/*' % env)\n sudo('rm -f %(apache_sites_enabled)s/*' % env)\n \n for site, site_data in common.iter_sites(site=site, setter=set_apache_site_specifics):\n #print '-'*80\n print site\n #continue\n \n print 'env.apache_ssl_domain:',env.apache_ssl_domain\n print 'env.apache_ssl_domain_template:',env.apache_ssl_domain_template\n \n fn = common.render_to_file('django.template.wsgi')\n put(local_path=fn, remote_path=env.apache_django_wsgi, use_sudo=True)\n \n if env.apache_ssl:\n env.apache_ssl_certificates = list(iter_certificates())\n \n fn = common.render_to_file('apache_site.template.conf')\n env.apache_site_conf = site+'.conf'\n env.apache_site_conf_fqfn = os.path.join(env.apache_sites_available, env.apache_site_conf)\n put(local_path=fn, remote_path=env.apache_site_conf_fqfn, use_sudo=True)\n \n sudo('a2ensite %(apache_site_conf)s' % env)\n #return\n if service.is_selected(APACHE2_MODEVASIVE):\n configure_modevasive()\n \n if service.is_selected(APACHE2_MODSECURITY):\n configure_modsecurity()\n \n for mod_enabled in env.apache_mods_enabled:\n env.apache_mod_enabled = mod_enabled\n sudo('a2enmod %(apache_mod_enabled)s' % env)\n \n if int(full):\n # Write master Apache configuration file.\n fn = common.render_to_file('apache_httpd.template.conf')\n put(local_path=fn, remote_path=env.apache_conf, use_sudo=True)\n \n # Write Apache listening ports configuration.\n fn = common.render_to_file('apache_ports.template.conf')\n put(local_path=fn, remote_path=env.apache_ports, use_sudo=True)\n \n #sudo('mkdir -p %(apache_app_log_dir)s' % env)\n #sudo('chown -R 
%(apache_user)s:%(apache_group)s %(apache_app_log_dir)s' % env)\n# sudo('mkdir -p %(apache_log_dir)s' % env)\n# sudo('chown -R %(apache_user)s:%(apache_group)s %(apache_log_dir)s' % env)\n sudo('chown -R %(apache_user)s:%(apache_group)s %(apache_root)s' % env)", "def install_apache_conf():\n sudo('cp -T %(repo_path)s/apache/%(settings)s/apache %(apache_config_path)s' % env)", "def clone_into_project(git_repo_name):\n repo_dir = git_dir + \"/%s.git\" % git_repo_name\n with cd(remote_dir):\n run('rm -rf myproject')\n run(\"git clone %s %s\" % (repo_dir, project_name))\n run(\"echo 'MY_ENV=\\\"prod\\\"' > %s/%s/site_settings.py\" % (project_name,project_name))\n update_conf_file()", "def setup_apache():\n put(\n \"{0}/apache2/ports.conf\".format(env.CONFIG.NAGIOS_CFG_DIR),\n \"/etc/apache2/\",\n use_sudo=True\n )\n put(\n \"{0}/apache2/000-default.conf\".format(env.CONFIG.NAGIOS_CFG_DIR),\n \"/etc/apache2/sites-available/\",\n use_sudo=True\n )\n sudo(\n \"\"\"\n a2enmod rewrite\n a2enmod cgi\n htpasswd -b -c /usr/local/nagios/etc/htpasswd.users \\\n nagiosadmin {0}\n \"\"\".format(env.CONFIG.NAGIOS_PWD)\n )\n sudo(\n \"ln -sf /etc/apache2/sites-available/nagios.conf\" +\n \" /etc/apache2/sites-enabled/\")\n sudo(\"service nagios restart\")\n sudo(\"service apache2 restart\")\n # enable nagios to start on server boot\n sudo(\"ln -s /etc/init.d/nagios /etc/rcS.d/S99nagios\")", "def config_apache_command(server_name):\n if not server_name:\n server_name = socket.getfqdn()\n print(\"\"\"# Virtual Host config for BetterWeather WSGI Server\n# Required modules: mod_wsgi\n<VirtualHost *:80>\n ServerName \"\"\", end='')\n print(server_name, end='')\n print(\"\"\"\n WSGIDaemonProcess betterweather threads=15\n WSGIScriptAlias / \"\"\", end='')\n print(app.root_path + '/wsgi.py', end='')\n print(\"\"\"\n <Directory \"\"\", end='')\n print(os.path.dirname(os.path.dirname(os.path.abspath(__file__))).__str__() + '>', end='')\n print(\"\"\"\n WSGIProcessGroup betterweather\n WSGIApplicationGroup %{GLOBAL}\n \n <IfVersion < 2.4>\n Allow from all\n Order allow,deny\n </IfVersion>\n \n <IfVersion >= 2.4>\n Require all granted\n </IfVersion>\n \n <IfModule mod_headers.c>\n Header set Cache-Control \"no-cache, no-store, must-revalidate\"\n Header set Pragma \"no-cache\"\n Header set Expires 0\n </IfModule>\n </Directory>\n</VirtualHost>\"\"\")", "def configure_project(self, project_name, top_dir):\n\n readme_text = \"# %s\\n\\nYour first Mantra project - huzzah!\" % project_name\n\n self.write_new_content(file_path=top_dir + '/README.md', text=readme_text)\n self.write_new_content(file_path=top_dir + '/requirements.txt', text='mantraml==%s' % __version__)\n self.replace_content(file_path=top_dir + '/mantra.yml', old_text='default_name', new_text=project_name)\n self.replace_content(file_path=top_dir + '/settings.py', old_text='default-s3-bucket-name', new_text='%s-%s' % (project_name.lower().replace('_', '-'), str(uuid.uuid4())))", "def configure(self):\n projects = self.get_dep_projects()\n configure_args = {\n \"version\": self.version,\n \"hosted\": self.hosted,\n \"build_type\": self.build_type,\n \"warnings\": self.warnings\n }\n for project in projects:\n if project.translated:\n project.html_dir = os.path.join(project.html_dir, self.language)\n project.configure(**configure_args)", "def apache_reload():\n require('root', provided_by=('staging', 'production'))\n run('sudo /etc/init.d/apache2 reload')", "def addconf_apache(*names):\n for conf in names:\n oldname = os.path.join(A_AVAIL, conf)\n newname = 
os.path.join(A_ENABL, conf)\n local('sudo ln -s {} {}'.format(oldname, newname))", "def apache():\n\n get_details()\n\n context = {\n \"site_name\": env.site_name,\n \"paths\": env.paths,\n \"project_name\": env.project_name,\n }\n\n apache_path = '/etc/httpd/sites-available/'\n\n if exists(apache_path):\n with cd(apache_path):\n if exists(env.site_name):\n print \"apache site configuration already exists!\"\n return\n else:\n upload_template(\"apache_conf.txt\", \n env.site_name,\n context,\n use_jinja=True,\n template_dir=JINJA_TEMPLATE_PATH,\n use_sudo=True)\n print \"Created apache site configuration file. Don't forget to enable it!\"\n return\n else:\n print \"It doesn't seem like you have apache installed.\"\n return", "def edit():\n config.edit_project()\n # projects = config.read_config_file()\n # return projects", "def project(c):\n print('==================================================')\n print('Setup/Install Project on Web+App Server')\n print('==================================================')\n # Get some variables from the environment\n print(\"Setting up environment ...\")\n _version = c.config.php.version\n _project_name = c.config.project.name.lower()\n _mount_root = c.config.mount_root\n #server_root = env.config.get('server_root', \"\")\n #www_root = env.config.get('www_root', \"\")\n _aws_buckets = c.config.aws.buckets\n #app_dirs = c.config.project.app_dirs\n #project_name = c.config.project.name\n # Checkout project from git\n if (c.config.provider != 'vagrant'):\n print(\"Checking out project from git\")\n git.register(c)\n git.checkout(c)\n git.rsync(c)\n # Also create the logs and cache folders\n _server_root_dir = c.config.project.server.root_dir\n c.run(f'mkdir -m 777 -p {_server_root_dir}/application/logs')\n c.run(f'mkdir -m 777 -p {_server_root_dir}/application/cache')\n else:\n print(\"Vagrant project directory already created during VM boot in Vagrantfile\")\n print(\" Guest: /var/www/<project>/web\")\n print(\" Host: ~/Projects/<project>/src/php/public\")\n # Add host to nginx\n nginx.add_host(c, c.config.environment)\n # Symlink to enable site\n print('Creating symlink to project virtual host')\n c.sudo(f'ln -s /etc/nginx/sites-available/{_project_name} /etc/nginx/sites-enabled/{_project_name}')\n # Restart\n print('Restart nginx & php-fpm')\n c.sudo('/etc/init.d/nginx restart')\n c.sudo(f'/etc/init.d/php{_version}-fpm restart')\n # Mount project folders (whatever is defined)\n print(\"Mounting project directories ...\")\n # Mount S3\n if (c.enabled('s3fs')):\n print('s3fs enabled')\n #s3fs.mount(c, mount_root, aws_buckets)\n else:\n print(\"NOT mounting S3FS\")\n # Create directories for the project/application\n print(\"Creating project directories\")\n if c.config.project.dirs:\n directories = []\n for _dir in c.config.project.dirs:\n directories.append(f'{_mount_root}/{_dir}')\n print(directories)\n #filesystem.mkdirs(c, directories)\n # Also make /tmp/{{domain}} directory\n c.run(f'mkdir /tmp/{_project_name}')\n c.run(f'chmod -R 777 /tmp/{_project_name}')\n # ==================================================\n # Install other software here\n # ==================================================\n print(\"Installing project-specific software ...\")\n if c.config.project.packages:\n for _package in c.config.project.packages:\n print(f' ... {_package}')\n c.sudo(f'apt-get install -yq {_package}')\n print('==================================================')\n print('... 
done Setup/Install Project on Web+App Server')\n print('==================================================')", "def update_project(arn=None, name=None, defaultJobTimeoutMinutes=None):\n pass", "def set_project_url(self, url):\n\n # Check project can be modified\n self.check_project()\n\n # Change project url\n old_info = self.setup_info\n self.publish(is_filtered='setup.py', url=format_url(url, 'https')) # Start by updating setup.py script\n self.change_url(old_info.url.value, url)\n\n # Commit modifications\n message_pattern = 'refactor(all): set project url to {url}\\n' \\\n '\\n' \\\n '{postfix}'\n self.commit('-am', self.make_message(message_pattern, url=format_url(url, 'https')))\n\n return format_url(url, 'https')" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Clones the git repo into the new webapp, deleting the default myproject project and updating the config file to point to the new project. Also adds a site_settings.py file to the project/project folder.
def clone_into_project(git_repo_name):
    repo_dir = git_dir + "/%s.git" % git_repo_name
    with cd(remote_dir):
        run('rm -rf myproject')
        run("git clone %s %s" % (repo_dir, project_name))
        run("echo 'MY_ENV=\"prod\"' > %s/%s/site_settings.py" % (project_name,project_name))
    update_conf_file()
[ "def newproject():\n log('Criando novo projeto', yellow)\n log('Cria a conta no bitbucket com o nome do projeto vázio que o script se encarregará do resto', red)\n\n conta = raw_input('Digite o nome do projeto: ')\n\n local('echo \"clonando projeto %s\"' % bitbucket_repository)\n local('git clone {0} {1}{2}'.format(bitbucket_repository, folder_project_local, conta))\n local('cd {0}{1}'.format(folder_project_local, conta))\n local('mkvirtualenv {0}'.format(conta))\n local('setvirtualenvproject')\n local('pip install -r requirements.txt')\n local('rm -rf {0}{1}/.git'.format(folder_project_local, conta))\n local('rm -rf README.md')\n local('git init')\n local('git remote add origin git@bitbucket.org:{0}/{1}.git'.format(bitbucket_user, conta))", "def clone_project(project):\n print(project.url + \".git\")\n dirname = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(N))\n\n project_dictionary[project.pk] = dirname\n cloned_repo = Repo.clone_from(project.url + \".git\", base_path + dirname)\n print(cloned_repo)\n detect_language(project)", "def project(c):\n print('==================================================')\n print('Setup/Install Project on Web+App Server')\n print('==================================================')\n # Get some variables from the environment\n print(\"Setting up environment ...\")\n _version = c.config.php.version\n _project_name = c.config.project.name.lower()\n _mount_root = c.config.mount_root\n #server_root = env.config.get('server_root', \"\")\n #www_root = env.config.get('www_root', \"\")\n _aws_buckets = c.config.aws.buckets\n #app_dirs = c.config.project.app_dirs\n #project_name = c.config.project.name\n # Checkout project from git\n if (c.config.provider != 'vagrant'):\n print(\"Checking out project from git\")\n git.register(c)\n git.checkout(c)\n git.rsync(c)\n # Also create the logs and cache folders\n _server_root_dir = c.config.project.server.root_dir\n c.run(f'mkdir -m 777 -p {_server_root_dir}/application/logs')\n c.run(f'mkdir -m 777 -p {_server_root_dir}/application/cache')\n else:\n print(\"Vagrant project directory already created during VM boot in Vagrantfile\")\n print(\" Guest: /var/www/<project>/web\")\n print(\" Host: ~/Projects/<project>/src/php/public\")\n # Add host to nginx\n nginx.add_host(c, c.config.environment)\n # Symlink to enable site\n print('Creating symlink to project virtual host')\n c.sudo(f'ln -s /etc/nginx/sites-available/{_project_name} /etc/nginx/sites-enabled/{_project_name}')\n # Restart\n print('Restart nginx & php-fpm')\n c.sudo('/etc/init.d/nginx restart')\n c.sudo(f'/etc/init.d/php{_version}-fpm restart')\n # Mount project folders (whatever is defined)\n print(\"Mounting project directories ...\")\n # Mount S3\n if (c.enabled('s3fs')):\n print('s3fs enabled')\n #s3fs.mount(c, mount_root, aws_buckets)\n else:\n print(\"NOT mounting S3FS\")\n # Create directories for the project/application\n print(\"Creating project directories\")\n if c.config.project.dirs:\n directories = []\n for _dir in c.config.project.dirs:\n directories.append(f'{_mount_root}/{_dir}')\n print(directories)\n #filesystem.mkdirs(c, directories)\n # Also make /tmp/{{domain}} directory\n c.run(f'mkdir /tmp/{_project_name}')\n c.run(f'chmod -R 777 /tmp/{_project_name}')\n # ==================================================\n # Install other software here\n # ==================================================\n print(\"Installing project-specific software ...\")\n if c.config.project.packages:\n for _package in 
c.config.project.packages:\n print(f' ... {_package}')\n c.sudo(f'apt-get install -yq {_package}')\n print('==================================================')\n print('... done Setup/Install Project on Web+App Server')\n print('==================================================')", "def flush_repo():\n server = get_server()\n run(\"rm -rf %(project_name)s\" % env)\n git.clone()\n server.setup()", "def dev_site(live_path, dev_parent, dev_name, dev_db_name='',\n base_url='', rewrite_base=''):\n with mute():\n remote = git.get_remote_url(live_path)\n dev_path = '%s/%s' % (dev_parent, dev_name)\n if exists(dev_path):\n warning = \"\"\"\nA folder already exists at your destination path.\n\nDo you wish to overwrite it?\n\"\"\"\n confirm_overwrite(warning)\n\n with mute():\n run('rm -rf %s' % dev_path)\n with cd(dev_parent):\n run('git clone %s %s' % (remote, dev_name))\n\n with cd(dev_path):\n run('git fetch')\n run('git branch')\n\n # Determinine a branching strategy\n strategy_prompt = \"\"\"\nHow would you like to create your dev site:\n1) Use an existing Git branch\n2) Create a new Git branch\n:\n\"\"\"\n strategy = prompt(strategy_prompt,\n validate=validate_branching_strategy)\n\n # Checkout an existing branch\n if strategy == '1':\n branch_prompt = \"\"\"\nWhich existing branch would you like to use for this dev site?\n\"\"\"\n # TODO - add validation\n dev_branch = prompt(branch_prompt)\n run('git checkout %s' % dev_branch)\n run('git pull origin %s' % dev_branch)\n\n # Create new branch\n if strategy == '2':\n start_branch_prompt = \"\"\"\nWhich branch should we use to start from?\n\"\"\"\n start_branch = prompt(start_branch_prompt)\n run('git checkout %s' % start_branch)\n dev_branch_prompt = \"\"\"\nWhat would like to name the new dev branch?\n\"\"\"\n dev_branch = prompt(dev_branch_prompt)\n run('git checkout -b %s' % dev_branch)\n # Look for an git origin in the live site\n\n # cd to the dev parent dir and clone the repo from origin\n\n # switch to the develop branch\n\n # git fetch\n\n # git pull origin develop\n\n # Duplicate the live mysql db as a dev db\n # Look into cross platform ways to just do the db duplication without\n # needing to write the db dump file and then do the insert\n\n # Configure the settings.php and .htaccess files for the dev site\n\n # Copy the files folder from the live site to the dev site\n # Eventually there should be a option here for doing read only sym-links\n # Or maybe some S3 thingy\n\n # drush cc all on dev\n\n # done", "def newproject(self):\n \n self.path = os.path.join(self.base, self.name)\n subpath = os.path.join(self.path, self.lowname)\n check_build_path(subpath)\n \n for filename, content in self.files.items():\n self.buildfile(filename, content, self.path)\n\n script = open(SCRIPT, 'r').read().format(self.lowname)\n self.buildfile('{0}.py'.format(self.lowname), script, subpath) \n self.buildfile('__init__.py', '', subpath)\n \n #optionals\n if self.git:\n self.buildfile('.gitignore', '*.pyc', self.path)\n if self.db:\n datapath = os.path.join(self.path, 'data')\n os.makedirs(datapath)\n copydb = os.path.join(datapath, '{0}.db'.format(self.lowname))\n copy = subprocess.call(['cp', DATA, \"%s\" % copydb])\n if self.test:\n testpath = os.path.join(self.path, 'tests')\n os.makedirs(testpath)\n self.buildfile('__init__.py', '', testpath)", "def project_clone(request, proj_id=None):\n\n if not proj_id or not request.user.is_authenticated():\n raise Http404\n\n project = get_object_or_404(Project, id=proj_id)\n\n if project.user != 
request.user and project.is_private:\n raise Http404\n\n project.pk = None\n project.user = request.user\n project.save()\n\n for scenario in Scenario.objects \\\n .filter(project_id=proj_id) \\\n .order_by('created_at'):\n scenario.pk = None\n scenario.project = project\n scenario.save()\n\n return redirect('/project/{0}'.format(project.id))", "def update_django_project():\n with cd('/var/www/tantejanniespostkamer/project'):\n sudo('git pull')\n\n set_correct_file_permissions()\n\n with cd('/var/www/tantejanniespostkamer/project'):\n with prefix('source /var/www/tantejanniespostkamer/snailmail/bin/activate'):\n sudo('pip install -r requirements.txt')\n run('python manage.py syncdb')\n run('python manage.py migrate')\n if schemamigrate:\n sudo('python manage.py schemamigration snailmail --auto')\n run('python manage.py migrate snailmail')\n sudo('python manage.py collectstatic --noinput')\n if new_translations:\n sudo('python manage.py makemessages -l en -e=html,py')\n sudo('python manage.py makemessages -l nl -e=html,py')\n sudo('python manage.py compilemessages')\n\n set_correct_file_permissions()", "def mkweb(project_name, mode):\n\n MAIN_FOLDER = data.get_base_path(data.WEB)\n\n if mode != 'MAIN':\n MAIN_FOLDER += f'{mode}/'\n \n webproject = folders.WebProject(project_name, MAIN_FOLDER)\n\n webproject.create_project()\n click.echo(f'Project created succesfull in {webproject.project_path}')\n cli_commands.start_git(webproject.project_path)\n cli_commands.show_dir_path(webproject.project_path)\n # cli_commands.start_vscode(webproject.project_path)\n\n click.echo('Project Path copied to clipboard...')", "def init_remote_site():\n run(\"git clone %s\" % _get_remote_repo_dir())", "def copy_makefile(args):\n vprint('Copying project makefile from submodule into project repo.')\n shutil.copyfile('website/project.mk', 'makefile')\n\n vprint('Committing makefile to repository.')\n run(['git', 'add', 'makefile'], 'Failed to stage makefile in repo.')\n run(['git', 'commit', '-m', 'Add project makefile template.'],\n 'Failed to commit makefile to repo.')", "def _create_main_project_and_root(self): \n if len(ComicSite.objects.filter(short_name=settings.MAIN_PROJECT_NAME)) == 0:\n main = ComicSite.objects.create(short_name=settings.MAIN_PROJECT_NAME,\n description=\"main project, autocreated by comicframeworkTestCase._create_inital_project()\",\n skin=\"fakeskin.css\"\n )\n \n main.save()\n \n try:\n self.root = User.objects.get(username='root')\n except ObjectDoesNotExist:\n # A user who has created a project\n root = User.objects.create_user('root',\n 'w.s.kerkstra@gmail.com',\n 'testpassword') \n root.is_staff = True\n root.is_superuser = True\n root.save()\n \n self.root = root\n\n call_command('check_permissions')", "def upload():\n run('mkdir -p /srv/images/'+env.project_name+'/')\n rsync_project(\n env.project_dir, './',\n exclude=(\n '.git', '.gitignore', '__pycache__', '*.pyc', '.DS_Store', 'environment.yml',\n 'fabfile.py', 'Makefile', '.idea', 'bower_components', 'node_modules',\n '.env.example', 'README.md', 'var'\n ), delete=True)", "def git_project(soup, github_user, github_pass, github_repo, github_name):\n giturl = 'https://{user}:{password}@github.com/{user}/{repo}.git'.format(\n user=github_user, password=github_pass, repo=github_repo\n )\n oldcwd = os.getcwd()\n tmpdir = tempfile.mkdtemp()\n gitdir = os.path.join(tmpdir, github_repo)\n cmd = 'git clone {} {}'.format(shlex.quote(giturl), shlex.quote(gitdir))\n subprocess.run(shlex.split(cmd), check=False)\n os.chdir(gitdir)\n 
rhinoscrape(soup, github_user, github_name)\n cmd = 'git add .'\n subprocess.run(shlex.split(cmd), check=False)\n msg = 'Project committed by Rhino Repo'\n cmd = 'git commit -m {}'.format(shlex.quote(msg))\n subprocess.run(shlex.split(cmd), check=False)\n cmd = 'git push {}'.format(shlex.quote(giturl))\n subprocess.run(shlex.split(cmd), check=False)\n os.chdir(oldcwd)\n shutil.rmtree(tmpdir, ignore_errors=True)", "def project_refresh(project_name):\n if not db_find_project(project_name):\n abort(404)\n analyser.add_repos(current_user.username, [project_name])\n return redirect(url_for('main.admin_manage'))", "def upload_django_settings(self):\n with self.host.open('~/git/django-project/local_settings.py') as f:\n f.write(django_settings)", "def edit_files(project_name, app_name):\n SETTINGS = f'{project_name}/backend/backend/settings.py'\n PACKAGE_JSON = f'{project_name}/frontend/package.json'\n\n\n c1 = f\"\\n \\t'corsheaders', \\n\\t'rest_framework', \\n\\t'{app_name}',\\n\"\n add_to_line(SETTINGS, 32, c1 )\n\n c2 = f\"\\n \\t'corsheaders.middleware.CorsMidleware',\\n\"\n add_to_line(SETTINGS, 44, c2 )\n \n with open(SETTINGS, 'a+') as f:\n f.write(\"\\nCORS_ORIGIN_WHITELIST = ['localhost:3000/']\")\n\n c3 = '\\n\\t\"proxy\": \"http://localhost:8000\",\\n'\n add_to_line(PACKAGE_JSON, 3, c3)", "def prepare():\n clean()\n subprocess.call([\"git\", \"clone\", LANGUAGES_REPO, REPO_DIR],\n stdout=DEVNULL, stderr=DEVNULL)\n os.chdir(REPO_DIR)", "def deploy():\n\n\t# My checkout has root permissions, motherfucker.\n\twith cd(SITE_CHECKOUT):\n\t\tsudo('git fetch origin')\n\t\tsudo('git checkout origin/master')\n\t\tsudo('git reset --hard HEAD')\n\t\tsudo('markdoc build')\n\n\tsudo('apache2ctl restart')" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Adds the "/static" and "/media" directories to the static webapp if needed, and deletes the default index.html. Also adds a project/project/static directory if there isn't one.
def add_dirs_to_static(static_webapp_name):
    static_dir = '$HOME/webapps/%s' % static_webapp_name
    with settings(warn_only=True):
        with cd(static_dir):
            run("mkdir static && mkdir media")
            run("rm index.html")
            run("touch index.html")
        with cd(code_dir):
            run("mkdir %s/static" % project_name)
[ "def ensure_static_exists():\n for entry in html_static_path:\n static_path = os.path.join(__repo_docs__, entry)\n if not os.path.isdir(static_path):\n os.makedirs(static_path)", "def setup_statics(project, frontend):\n # Create JS Static structure\n static_path = '{}/static/js'.format(project.get('name'))\n if not os.path.exists(static_path):\n os.makedirs(static_path)\n\n if frontend == 'angular':\n _write_rendered_file('{}/app.js'.format(static_path),\n app_static_template,\n project=project)\n _write_rendered_file('{}/services.js'.format(static_path),\n services_static_template,\n apps=apps)\n _write_rendered_file('{}/controllers.js'.format(static_path),\n controllers_static_template,\n apps=apps)\n\n # Create Partials Static structure\n angular_templates_path = '{}/static/partials'.format(project.get('name'))\n if not os.path.exists(angular_templates_path):\n os.makedirs(angular_templates_path)\n\n shutil.copy(os.path.dirname(os.path.realpath(__file__)) + '/../' +\n 'scaffolder/templates/static/partials/home.html',\n '{}/home.html'.format(angular_templates_path))\n\n # Create Less Static structure\n less_static_path = '{}/static/less'.format(project.get('name'))\n if not os.path.exists(less_static_path):\n os.makedirs(less_static_path)\n\n shutil.copy(os.path.dirname(os.path.realpath(__file__)) + '/../' +\n 'scaffolder/templates/static/less/styles.less',\n '{}/styles.less'.format(less_static_path))", "def copy_static_resources(self):\n if not hasattr(settings, 'STATIC_ROOT'):\n raise MissingStaticRoot()\n destination = os.path.join(STORAGE_PATH, 'static')\n if os.path.exists(destination):\n shutil.rmtree(destination)\n shutil.copytree(settings.STATIC_ROOT, destination)", "def copy_static(self):\n try:\n shutil.copytree('template/static', 'public/static')\n except:\n print(\"Error copying static files \")", "def add_static_files(app, config):\n static_paths = config.html_static_path\n for path in static_paths:\n path = Path(app.confdir).joinpath(path)\n for path_css in path.rglob(\"*.css\"):\n app.add_css_file((path_css.relative_to(path)).as_posix())\n for path_js in path.rglob(\"*.js\"):\n app.add_js_file((path_js.relative_to(path)).as_posix())", "def buildStatic(self):\n\t\tstaticBuildPath = os.path.join(self.paths['build'], 'static')\n\n \t\tif not hasattr(self, 'nosymlink') and callable(getattr(os, \"symlink\", None)):\n\t\t\t# If there is a folder, replace it with a symlink\n\t\t\tif os.path.lexists(staticBuildPath) and not os.path.exists(staticBuildPath):\n\t\t\t\tos.remove(staticBuildPath)\n\t\t\n\t\t\tif not os.path.lexists(staticBuildPath):\n\t\t\t\tos.symlink(self.paths['static'], staticBuildPath)\n\t\telse:\n\t\t\ttry:\n\t\t\t\tif os.path.exists(staticBuildPath):\n\t\t\t\t\tshutil.rmtree(staticBuildPath) # copytree fails if destination exists\n\n\t\t\t\tshutil.copytree(self.paths['static'], staticBuildPath)\n\t\t\texcept Exception:\n\t\t\t\tlogging.info('*** Error copying %s to %s' % (self.paths['static'], staticBuildPath))", "def update_static_publish_folder():\n if os.path.isdir(\"/var/new-www\"):\n shutil.rmtree(\"/var/new-www\")\n os.mkdir(\"/var/new-www\")\n shutil.copytree(os.path.join(CODE_DIR, \"static\"), \"/var/new-www/static\")\n shutil.copy2(\"/var/new-www/static/slides.html\", \"/var/new-www\" + SLIDES_PATH)\n if os.path.exists(\"/var/www\"):\n os.rename(\"/var/www\", \"/var/old-www\")\n os.rename(\"/var/new-www\", \"/var/www\")\n if os.path.exists(\"/var/old-www\"):\n shutil.rmtree(\"/var/old-www\")", "def copy_site_assets(self):\n if 
os.path.isdir(\"static\"):\n self.merge_dirs(\"static\", os.path.join(self.out_dir, \"static\"))", "def public():\n require('PROJECT_NAME')\n\n media_dir = utils.home('public', env.PROJECT_NAME, 'media')\n static_dir = utils.home('public', env.PROJECT_NAME, 'static')\n\n run('mkdir -p {}'.format(media_dir))\n run('mkdir -p {}'.format(static_dir))", "def static_dir(self):\n return os.path.join(self.app_dir, 'static')", "def add_static_dir (self, www_path, local_path=None, relative=False):\n if not www_path.startswith('/'): www_path = '/' + www_path\n\n if local_path is None:\n local_path = www_path[1:]\n if relative:\n local_path = os.path.basename(local_path)\n if relative:\n import inspect\n path = inspect.stack()[1][1]\n path = os.path.dirname(path)\n local_path = os.path.join(path, local_path)\n\n local_path = os.path.abspath(local_path)\n\n log.debug(\"Serving %s at %s\", local_path, www_path)\n\n self.set_handler(www_path, StaticContentHandler,\n {'root':local_path}, True);", "def collectstatic():\n public_dir = os.path.join(os.getcwd(), 'public')\n\n if os.path.isdir(public_dir):\n print('directory exists')\n else:\n os.mkdir(public_dir)\n\n local_static = os.path.join(os.getcwd(), 'folio', 'static')\n os.system(\"rsync -ruv --chmod=ug+w %s %s\" % (local_static, public_dir))", "def serve_static_files(request, path, insecure=False, **kwargs):\n\n if not settings.DEBUG and not insecure:\n raise Http404\n normalized_path = posixpath.normpath(unquote(path)).lstrip('/')\n absolute_path = finders.find(normalized_path)\n if not absolute_path:\n if path.endswith('/') or path == '':\n raise Http404(\"Directory indexes are not allowed here.\")\n raise Http404(\"'%s' could not be found\" % path)\n document_root, path = os.path.split(absolute_path)\n return static.serve(request, path, document_root=document_root, **kwargs)", "def copy_template_assets(self):\n if os.path.isdir(os.path.join(\"templates\", \"static\")):\n self.merge_dirs(os.path.join(\"templates\", \"static\"),\n os.path.join(self.out_dir, \"static\"))", "def path_static():\n return os.path.abspath(os.path.dirname(__file__))+'/_static'", "def make_static_application(basepath, staticdir, not_found):\n\n def app(environ, start_response):\n\n path = environ['PATH_INFO']\n if path.startswith(basepath):\n path = path[len(basepath):]\n path = os.path.join(staticdir, path)\n if os.path.exists(path):\n h = open(path, 'r')\n content = h.read()\n h.close()\n headers = [('Content-Type', content_type(path))]\n start_response(\"200 OK\", headers)\n return [content.encode('utf-8'), ]\n\n return not_found(environ, start_response)\n\n return app", "def send_static(self, dest):\n try:\n if path.exists(dest): rmtree(dest)\n copytree(self.staticpath, dest)\n except Exception as e:\n raise e", "def serve_static(request, path, document_root):\n # Clean up given path to only allow serving files below document_root.\n path = posixpath.normpath(urllib.unquote(path))\n path = path.lstrip('/')\n newpath = ''\n for part in path.split('/'):\n if not part:\n # Strip empty path components.\n continue\n drive, part = os.path.splitdrive(part)\n head, part = os.path.split(part)\n if part in (os.curdir, os.pardir):\n # Strip '.' and '..' 
in path.\n continue\n newpath = os.path.join(newpath, part).replace('\\\\', '/')\n if newpath and path != newpath:\n return HttpResponseRedirect(newpath)\n fullpath = os.path.join(document_root, newpath)\n if os.path.isdir(fullpath):\n #if show_indexes:\n # return directory_index(newpath, fullpath)\n raise Http404, \"Directory indexes are not allowed here.\"\n if not os.path.exists(fullpath):\n raise Http404, '\"%s\" does not exist' % fullpath\n # Respect the If-Modified-Since header.\n statobj = os.stat(fullpath)\n if not was_modified_since(request.META.get('HTTP_IF_MODIFIED_SINCE'),\n statobj[stat.ST_MTIME], statobj[stat.ST_SIZE]):\n return HttpResponseNotModified()\n mimetype = mimetypes.guess_type(fullpath)[0] or 'application/octet-stream'\n # Treat the file as a django template\n template = Template(open(fullpath, 'rb').read())\n context = RequestContext(request)\n # Render the template giving the current request\n contents = template.render(context)\n response = HttpResponse(contents, mimetype=mimetype)\n response[\"Last-Modified\"] = http_date(statobj[stat.ST_MTIME])\n response[\"Content-Length\"] = len(contents)\n return response", "def setup_output_path(self):\n self.logger.info('setting up output path')\n try:\n self.output_path.mkdir()\n except FileExistsError:\n pass\n try:\n (self.output_path / 'simple').mkdir()\n except FileExistsError:\n pass\n for filename in resource_listdir(__name__, 'static'):\n if filename == 'index.html':\n # Skip template\n continue\n with (self.output_path / filename).open('wb') as f:\n source = resource_stream(__name__, 'static/' + filename)\n f.write(source.read())\n source.close()" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Initialises the database to contain the tables required for DjangoCMS with South. Runs syncdb all and migrate fake.
def initialise_database():
    with cd(code_dir):
        run(python_add_str + "python manage.py syncdb --all")
        run(python_add_str + "python manage.py migrate --fake")
[ "def setup_database():\n from django.core.management import call_command\n from django import setup\n setup()\n call_command('migrate', verbosity=0, interactive=False)\n call_command('loaddata', data('initial_data.json'), verbosity=0, interactive=False)", "def initialize_test_db(self):\n # Create a test database and sync it with models.py\n # Handle a second test database for selenium use. Postgres uses\n # transactions which interfere with the Django server thread.\n settings.TEST_DATABASE_NAME = self.db_name\n connection.creation.create_test_db(verbosity=self.verbosity,\n autoclobber=True)\n # Hook for doing any extra initialization\n self.extra_init()\n # Load fixture data.\n call_command('loaddata', *self.fixtures, verbosity=self.verbosity)\n # Sync data and close connection\n connection.close()\n # If sqlite3 or Postgres is used, create a backup database to speed up\n # fixture reloading.\n if settings.DATABASE_ENGINE == 'postgresql_psycopg2':\n # connection.creation is used to overcome transaction management,\n # allowing to execute DROP and CREATE db commands.\n cursor = connection.cursor()\n connection.creation.set_autocommit()\n cursor.execute(\"DROP DATABASE IF EXISTS %s_backup\" % self.db_name)\n cursor.execute(\"CREATE DATABASE %s_backup WITH TEMPLATE %s\" % (\n self.db_name, self.db_name))\n if settings.DATABASE_ENGINE == 'sqlite3':\n self.db_path = os.path.join(PROJECT_PATH, settings.DATABASE_NAME)\n self.db_backup_path = '%s_backup' % self.db_path\n if self.db_path[-3:] == '.db':\n self.db_backup_path = '%s_backup.db' % self.db_path[:-3]\n shutil.copyfile(self.db_path, self.db_backup_path)\n # Restore the database names as create_test_db changed it.\n settings.TEST_DATABASE_NAME = self.test_database_name\n settings.DATABASE_NAME = self.database_name", "def init_db():\n db.drop_all()\n db.create_all()\n seed_companies()\n seed_emission_reports()\n seed_reduction_targets()\n seed_milestones()", "def initdb(self):\n DBASE.init(self.db)\n DBASE.connect()\n\n if not Users.table_exists():\n Users.create_table()\n Artists.create_table()\n Tags.create_table()\n Friends.create_table()\n WeeklyArtistChart.create_table()\n ArtistTags.create_table()\n Artists.create(name='')\n\n DBASE.set_autocommit(False)\n return", "def migrate_database(self):\n\n self.db.migrate_database()", "def db_initialise():\n generate_migration_file()\n if not MySQLScheme.fetch_one(IS_MIGRATION_TABLE,\n **{\"args\": {'schema': SCHEMA}}):\n with open(MIGRATION_FILE, 'r') as init_sql:\n data = init_sql.read()\n\n if f\"CREATE TABLE IF NOT EXISTS {MIGRATION_TABLE}\" not in data:\n when = str(int(time.time()))\n sql_file = os.path.join(MIGRATION_FOLDER, f\"{when}.sql\")\n\n with open(sql_file, 'w') as save_sql:\n up = MYSQL_MIGRATION_UP.format(f\"upgrade-{when}\", when,\n MIGRATION_TABLE)\n down = MYSQL_MIGRATION_DOWN.format(f\"downgrade-{when}\",\n MIGRATION_TABLE)\n\n save_sql.write(\"\\n\\n\".join([up, down]))\n LOGGER.info(f\"migration file: \"\n f\"{os.path.join('migrations', sql_file)}\")\n else:\n when = re.findall('[0-9]+', data)[0]\n\n generate_migration_file()\n dbi_query = anosql.from_path(MIGRATION_FILE, 'psycopg2')\n MySQLScheme.commit(getattr(dbi_query, f\"upgrade_{when}\").sql)\n LOGGER.info(f\"initial successful migration: {when}\")", "def init_db():\n shutil.rmtree(app.config['DATA_FOLDER'])\n os.makedirs(app.config['DATA_FOLDER'])\n\n shutil.rmtree(app.config['CHECKPOINT_FOLDER'])\n os.makedirs(app.config['CHECKPOINT_FOLDER'])\n\n db = get_db()\n\n with current_app.open_resource('schema.sql') as 
f:\n db.executescript(f.read().decode('utf8'))", "def deploy_db():\n _create_db()\n _migrate()\n _create_superuser()\n _load_fixtures()", "def smart_syncdb_migrate(self):\n local('python manage.py syncdb')\n local('python manage.py migrate')\n local('python manage.py syncdb --all')", "def init_db():\n # We are setting the module variables here for the first time, so disable the warning\n global DB_USER_TABLE # pylint: disable=global-variable-undefined\n global DB_CUSTOMER_TABLE # pylint: disable=global-variable-undefined\n global DB_USER_CUSTOMER_RELS_TABLE # pylint: disable=global-variable-undefined\n global DB_TICKET_TABLE # pylint: disable=global-variable-undefined\n global DB_COMMENT_TABLE # pylint: disable=global-variable-undefined\n\n db = TinyDB(app.config['DB_NAME'])\n\n DB_USER_TABLE = db.table('users')\n DB_CUSTOMER_TABLE = db.table('customers')\n DB_USER_CUSTOMER_RELS_TABLE = db.table('user_customer_rels')\n DB_TICKET_TABLE = db.table('tickets')\n DB_COMMENT_TABLE = db.table('comments')", "def setup_db(self):\n\n self.db_conn = sqlite3.connect(config.db_file)\n self.db_cursor = self.db_conn.cursor()", "def initialize_db(self) -> None:\n if not self.check_schema_initialized():\n self._create_genes_table()\n self._create_meta_data_table()", "def initialize():\n db.connect()\n db.create_tables([TimeSheets, Users], safe=True)", "def syncdb():\n command(\"syncdb\", \"migrate\")", "def syncdb():\n run('%s/mwana/manage.py syncdb' % env.root)", "def _setup_db(self):\n self._logger.info(\"Setting up Database...\")\n db_path = DB_PATH if not self._test else TEST_DB_PATH\n if self._other_db_path is not None:\n db_path= self._other_db_path\n self._connection = sqlite3.connect(DB_PATH if not self._test else TEST_DB_PATH, check_same_thread=False)\n self._connection.row_factory = sqlite3.Row\n self._cursor = self._connection.cursor()\n\n setup_file = open(\"client/setup-client.sql\")\n setup_file_str = setup_file.read()\n self._cursor.executescript(setup_file_str)\n self._connection.commit()", "def setup(self):\n #print \"Creating test database...\"\n files = glob.glob(os.path.join(self.home_dir, 'sqlFiles', '*.sql'))\n for fls in files:\n loc = fls.rfind('/')\n #print(\" \" + fls.replace('.sql', '')[loc + 1:])\n flh = open(fls, 'r')\n curs = self.cursor()\n curs.executescript(flh.read())\n self.commit()\n curs.close()\n flh.close()\n for fls in ['INSERTS', 'TRIGGERS']:\n #print(fls)\n flh = open(os.path.join(self.home_dir, 'sqlFiles', fls), 'r')\n curs = self.cursor()\n curs.executescript(flh.read())\n self.commit()\n curs.close()\n flh.close()", "def _init_db():\n\n with open(\"createdb.sql\", \"r\", encoding=\"utf-8\") as file:\n sql = file.read()\n cursor.executescript(sql)\n connector.commit()", "def init_db(self):\n self.create_db()\n col_rows = self.check_default_settings()\n if col_rows == 0:\n self.set_default_settings()" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Binary mask from cv2 styled contour (gets filled)
def make_mask(shape, contour):
    mask = np.zeros(shape, np.int32)
    cv2.drawContours(mask, [contour], 0, (255), -1)
    return mask
[ "def get_contour_features(mask,selectcell=\"centered\"):\r\n \r\n #binarize image (everything above 0 becomes 1)\r\n mask = np.clip(mask,a_min=0,a_max=1)\r\n\r\n #for contours, dont use RETR_TREE, but RETR_EXTERNAL as we are not interested in internal objects\r\n contours, _ = cv2.findContours(mask, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_NONE)\r\n contours = list(contours)\r\n \r\n #in case there is no contour found, add a dummy contour\r\n if len(contours)==0:\r\n contours = [np.array([[[0, 0]],[[0, 1]],[[1, 1]],[[1, 0]]])] #generate a dummy contour\r\n\r\n #Sort contours, longest first\r\n contours.sort(key=len,reverse=True)\r\n contours = [c for c in contours if len(c)>4] #proper contour should have at least 5 points\r\n hulls = [cv2.convexHull(contour,returnPoints=True) for contour in contours]\r\n\r\n mu_origs = [cv2.moments(contour) for contour in contours]\r\n mu_hulls = [cv2.moments(hull) for hull in hulls]\r\n\r\n area_origs = [mu_orig[\"m00\"] for mu_orig in mu_origs]\r\n area_hulls = [mu_hull[\"m00\"] for mu_hull in mu_hulls]\r\n\r\n #drop events where area is zero\r\n hulls = [hulls[i] for i in range(len(hulls)) if area_origs[i]>0] \r\n contours = [contours[i] for i in range(len(contours)) if area_origs[i]>0]\r\n mu_origs = [mu_origs[i] for i in range(len(mu_origs)) if area_origs[i]>0]\r\n mu_hulls = [mu_hulls[i] for i in range(len(mu_hulls)) if area_origs[i]>0]\r\n area_hulls = [area_hulls[i] for i in range(len(area_hulls)) if area_origs[i]>0]\r\n area_origs = [area_origs[i] for i in range(len(area_origs)) if area_origs[i]>0]\r\n \r\n \r\n pos_x = [int(mu_orig['m10']/mu_orig['m00']) for mu_orig in mu_origs]\r\n pos_y = [int(mu_orig['m01']/mu_orig['m00']) for mu_orig in mu_origs]\r\n\r\n \r\n if selectcell == \"smooth\":\r\n #compute the area ratio (roughness of contour)\r\n area_ratio = np.array(area_hulls)/np.array(area_origs)\r\n #get the contour with minimum roughness (smooth contour)\r\n sorter = np.argsort(area_ratio) #smallest first\r\n\r\n if selectcell == \"centered\":\r\n #select contour that is closest to the center of the image. 
\r\n #In iPAC, cells are usually in the center.\r\n mid_x,mid_y = mask.shape[0]/2,mask.shape[1]/2 #middle of the image\r\n BB = [cv2.boundingRect(c) for c in contours] #get a bounding box around the object\r\n distances = [np.sqrt((mid_x-bb[0])**2 + (mid_y-bb[1])**2) for bb in BB]\r\n sorter = np.argsort(distances) #smallest first\r\n \r\n #sort values with respect to chosen metric (area_ratio or distance)\r\n contours = [contours[s] for s in sorter]\r\n hulls = [hulls[s] for s in sorter]\r\n pos_x = [pos_x[s] for s in sorter]\r\n pos_y = [pos_y[s] for s in sorter]\r\n mu_origs = [mu_origs[s] for s in sorter]\r\n area_origs = [area_origs[s] for s in sorter]\r\n area_hulls = [area_hulls[s] for s in sorter]\r\n \r\n # draw mask of the chosen contour\r\n mask = np.zeros_like(mask)\r\n cv2.drawContours(mask,contours,0,1,cv2.FILLED)# produce a contour that is filled inside\r\n\r\n hull = hulls[0]#[0:n_contours]\r\n pos_x = pos_x[0]\r\n pos_y = pos_y[0] \r\n mu_orig = mu_origs[0]#[0:n_contours]\r\n area_orig = area_origs[0]#[0:n_contours]\r\n area_hull = area_hulls[0]#[0:n_contours]\r\n \r\n if area_orig>0:\r\n area_ratio = area_hull/area_orig\r\n else:\r\n area_ratio = np.nan\r\n\r\n arc = cv2.arcLength(hull, True) \r\n circularity = 2.0 * np.sqrt(np.pi * mu_orig[\"m00\"]) / arc\r\n\r\n\r\n dic = {\"mask\":mask,\"pos_x\":pos_x,\"pos_y\":pos_y,\"area_orig\":area_orig,\"area_hull\":area_hull,\\\r\n \"area_ratio\":area_ratio,\"circularity\":circularity}\r\n return dic", "def mask(self):\n mask = np.zeros((self.height, self.width))\n pts = [\n np.array(anno).reshape(-1, 2).round().astype(int)\n for anno in self.segmentation\n ]\n mask = cv2.fillPoly(mask, pts, 1)\n return mask", "def get_binary_mask(self,index):\n mask = self.load_mask_png(index)\n (rows,cols) = np.where(mask>0)[0:2] #pixels in mask disregarding the color\n new_mask = np.zeros(shape=mask.shape[0:2], dtype=np.uint8)\n new_mask[(rows,cols)] = 255\n return new_mask", "def get_contour(self, mask):\n\n assert mask.ndim == 2\n assert mask.min() == 0\n assert mask.max() == 1\n contours, hierarchy = cv2.findContours(mask, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_NONE)\n assert len(contours) == 1, \"Too many contours in this mask!\"\n contour = contours[0]\n # logging.debug(\"Returning {} fit contours over mask pixels\".format(len(contours)))\n return contour", "def find_contours(self, mask):\n mask = mask.astype(np.uint8)\n _, contours, _ = cv2.findContours(mask, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE) # CHAIN_APPROX_NONE\n return contours", "def find_contours(mask):\n _, contours, _ = cv2.findContours(mask, cv2.RETR_TREE, cv2.CHAIN_APPROX_TC89_KCOS)\n return contours", "def get_mask(box, shape):\n tmp_mask = np.zeros(shape, dtype=\"uint8\")\n tmp = np.array(box, dtype=np.int32).reshape(-1, 2)\n cv2.fillPoly(tmp_mask, [tmp], 255)\n# tmp_mask=cv2.bitwise_and(tmp_mask,mask)\n return tmp_mask, cv2.countNonZero(tmp_mask)", "def get_boundary_mask():\n mask = np.ones([NUM_PIX_X, NUM_PIX_Y])\n for x in range(NUM_PIX_X):\n for y in range(NUM_PIX_Y):\n if cfg.get_chip_coords(x,y)[0] == -1:\n mask[x,y] = 0\n\n return mask", "def compute_mask(self, pattern, im_shape):", "def apply_mask_to_image(img, mask):\n img_size = img.shape[0]\n mask = cv2.resize(mask, dsize=(img_size, img_size))\n\n # Find contour of the mask\n imgray = mask\n ret,thresh = cv2.threshold(imgray, 127, 255, 0)\n contours, hierarchy = cv2.findContours(thresh, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)\n\n # Draw contours on image\n segmented_img = cv2.drawContours(img, contours, -1, 
(0,255,0), 3)\n\n return segmented_img", "def create_mask(mask, max_contour_size=3, max_border_size=9):\n labels = mask\n\n final_mask = np.zeros((labels.shape[0], labels.shape[1], 3))\n building_num = np.max(labels)\n\n if building_num > 0:\n for i, rprops in enumerate(measure.regionprops(labels)):\n i = i + 1\n y1, x1, y2, x2 = rprops.bbox\n padding = max(16, min((y2 - y1), (x2 - x1)) // 2)\n y1 = max(y1 - padding, 0)\n x1 = max(x1 - padding, 0)\n y2 = min(y2 + padding, labels.shape[0])\n x2 = min(x2 + padding, labels.shape[1])\n # print(i, building_num)\n labels_rprop = labels[y1:y2, x1:x2]\n building_mask = np.zeros_like(labels_rprop, dtype='bool')\n building_mask[labels_rprop == i] = 1\n area = np.sum(building_mask)\n if area < 500:\n contour_size = max_contour_size - 2\n elif area < 1000:\n contour_size = max_contour_size - 1\n else:\n contour_size = max_contour_size\n eroded = binary_erosion(building_mask, iterations=contour_size)\n countour_mask = building_mask ^ eroded\n # plt.imshow(building_mask)\n # plt.show()\n final_mask[..., 0][y1:y2, x1:x2] += building_mask\n final_mask[..., 1][y1:y2, x1:x2] += countour_mask\n final_mask[..., 2] = create_separation(labels, max_border_size=max_border_size)\n return np.clip(final_mask * 255, 0, 255).astype(np.uint8)", "def find_contours(mask):\n\n cnts = cv2.findContours(mask.copy(), cv2.RETR_EXTERNAL,\n cv2.CHAIN_APPROX_SIMPLE)\n cnts = imutils.grab_contours(cnts)\n return cnts", "def mask_label_contour(image, seg):\n return sitk.Mask(image, sitk.LabelContour(seg+1)==0)", "def __mask_region(self, img, vertices):\n\n mask = np.zeros_like(img) \n if len(img.shape) > 2:\n channel_count = img.shape[2] # i.e. 3 or 4 depending on your image\n ignore_mask_color = (255,) * channel_count\n else:\n ignore_mask_color = 255\n cv2.fillConvexPoly(mask, vertices, ignore_mask_color)\n masked_image = cv2.bitwise_and(img, mask)\n return masked_image", "def headMask(comet):\n width = comet.shape[1]\n height = comet.shape[0]\n head = comet[0:height-1, 0:height-1]\n m = np.max(head)\n blur = cv2.GaussianBlur(head, (5, 5), 0)\n tval, thresh = cv2.threshold(blur, 0, 255,\n cv2.THRESH_BINARY | cv2.THRESH_OTSU)\n #print(f'tval={tval}')\n tval, thresh = cv2.threshold(blur,(tval+10),255,cv2.THRESH_BINARY)\n #plt.imshow(thresh)\n kernel = np.ones((5, 5), np.uint8)\n # The first parameter is the original image,\n # kernel is the matrix with which image is\n # convolved and third parameter is the number\n # of iterations, which will determine how much\n # you want to erode/dilate a given image.\n img_dilation = cv2.dilate(thresh, kernel, iterations=3)\n img_erosion = cv2.erode(img_dilation, kernel, iterations=4)\n img_expand = cv2.dilate(img_erosion, kernel, iterations=2)\n img_final = cv2.erode(img_expand,kernel,iterations=2)\n mx = findneck(img_final)\n # zero out everything to the right of mx in img_final\n img_final[:,mx:] = 0\n\n\n plt.imshow(img_final)\n\n cnts = cv2.findContours(img_final.copy(), cv2.RETR_EXTERNAL,\n cv2.CHAIN_APPROX_SIMPLE)\n cnts = imutils.grab_contours(cnts)\n sorted_ctrs = sorted(cnts, key=lambda ctr: cv2.boundingRect(ctr)[0])\n # find largest contour\n if len(sorted_ctrs) == 0:\n raise ValueError(\"no contours found in headMask\")\n headcontour = sorted_ctrs[0]\n\n\n cometarray = np.zeros(comet.shape, dtype=np.uint8)\n cv2.drawContours(cometarray, [headcontour], 0, (255,255,255), -1)\n mask_dilation = cv2.dilate(cometarray, kernel, iterations=4)\n plt.imshow(cometarray)\n #plt.imshow(dist_transform)\n return (cv2.boundingRect(headcontour), 
headcontour, mask_dilation)", "def mask_remove_cen_obj(mask):\n from scipy.ndimage import label\n mask_copy = copy.deepcopy(mask)\n seg = label(mask)[0]\n mask_copy[seg == seg[int(seg.shape[0] / 2.0), int(seg.shape[1] / 2.0)]] = 0\n\n return mask_copy", "def create_binary_image(img, s_thresh=(100, 255), sx_thresh=(10, 200), dir_thresh=(np.pi/6, np.pi/2), c_thresh=50):\n # We use a combination of gradient and direction threshold\n # convert to gray scale\n gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)\n\n # Compute the combined threshold\n sobel_x = sobel_mask(gray, sx_thresh)\n dir_gradient = dir_mask(gray, dir_thresh)\n combined = ((sobel_x == 1) & (dir_gradient == 1))\n\n # Color threshold in RGB color space\n # This helps to detect yellow lanes better, which is a significant issue in the video \n G = img[:,:,1]\n R = img[:,:,2]\n r_g = (R > c_thresh) & (G > c_thresh)\n \n # color channel thresholds\n hls = cv2.cvtColor(img, cv2.COLOR_BGR2HLS)\n S = hls[:,:,2]\n L = hls[:,:,1]\n \n # S channel performs well for detecting bright yellow and white lanes\n s = (S > s_thresh[0]) & (S <= s_thresh[1])\n l = (L > s_thresh[0]) & (L <= s_thresh[1])\n\n # combine all the thresholds\n # The pixel we want is either white or yellow\n color_combined = np.zeros_like(R)\n color_combined[(r_g & l) & (s | combined)] = 1\n \n # apply the region of interest mask\n # This helps to remove the shadow outside the lane\n mask = np.zeros_like(color_combined)\n h, w = img.shape[0], img.shape[1]\n polygon_vertice = np.array([[0,h-1], [w//2, h//2], [w-1, h-1]], dtype=np.int32)\n cv2.fillPoly(mask, [polygon_vertice], 1)\n binary = cv2.bitwise_and(color_combined, mask)\n \n return binary", "def mask_extract(self):\r\n\r\n # define the background color for mask extraction\r\n lower_green = (30,80,80)\r\n upper_green = (150,255,255)\r\n\r\n # find green background, green is 255\r\n mask = cv.inRange(self.green_image_hsv, lower_green, upper_green)\r\n # convert mask, green is 0\r\n mask = 255 - mask\r\n\r\n # apply morphology opening to mask\r\n kernel = np.ones((1, 1), np.uint8)\r\n # erode edge of the mask, background of the mask here is 0 (black)\r\n mask = cv.morphologyEx(mask, cv.MORPH_ERODE, kernel)\r\n # remove the back points in the foreground of mask\r\n mask = cv.morphologyEx(mask, cv.MORPH_CLOSE, kernel)\r\n # antialias mask\r\n self.mask = cv.GaussianBlur(mask, (1, 1), sigmaX=1, sigmaY=1, borderType=cv.BORDER_DEFAULT) # mask, green is 0\r", "def contour(image):\n return _apply_filter(image, \"contour\")" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Converts either bytes or unicode to `bytes`, using utf8 encoding for text.
def as_bytes(bytes_or_text, encoding='utf-8'):
    if isinstance(bytes_or_text, _six.text_type):
        return bytes_or_text.encode(encoding)
    elif isinstance(bytes_or_text, bytes):
        return bytes_or_text
    else:
        raise TypeError('Expected binary or unicode string, got %r' % (bytes_or_text,))
[ "def utf8_bytes(text):\n if not isinstance(text, bytes):\n return text.encode('utf-8')\n return text", "def _to_bytes(value: Union[str, bytes]) -> bytes:\n return value if isinstance(value, bytes) else value.encode(\"utf-8\")", "def ensure_utf8_bytes(v: Union[str, bytes]) -> bytes:\n if isinstance(v, str):\n v = v.encode(\"utf-8\")\n return v", "def bytes_(s, encoding=\"latin-1\", errors=\"strict\"):\n if isinstance(s, text_type):\n return s.encode(encoding, errors)\n return s", "def str_to_bytes(data):\n u_type = type(b''.decode('utf8'))\n if isinstance(data, u_type):\n return data.encode('utf8')\n return data", "def utf8_bytes(string):\n return bytes(string, 'UTF-8')", "def _as_bytes(value):\n\tif isinstance(value, str):\n\t\treturn value.encode(\"utf-8\", \"replace\")\n\telif isinstance(value, bytes):\n\t\treturn value\n\telse:\n\t\traise TypeError(\"expected str\")", "def str_to_bytes(self, data):\n if isinstance(data, bytes):\n return data\n return data.encode(\"utf-8\")", "def to_utf8(text, charset='iso-8859-15'):\n try:\n # Do nothing if it's already utf-8\n u = unicode(text, 'utf-8')\n return text\n except UnicodeError:\n try:\n # Use the user supplied charset if possible\n u = unicode(text, charset)\n except UnicodeError:\n # This should always work\n u = unicode(text, 'iso-8859-15')\n return u.encode('utf-8')\n except TypeError:\n return text", "def convert_string_to_bytes(value, encoding='ascii'):\n result = (value.encode(encoding)\n if isinstance(value, six.text_type) else value)\n if isinstance(result, six.binary_type):\n return result\n else:\n raise TypeError('%r could not be converted to bytes' % (value,))", "def as_utf8(value):\n assert value is None or isinstance(value,types.StringTypes)\n if isinstance(value,types.UnicodeType):\n return value.encode('utf-8')\n else:\n return value", "def to_bytestring(s):\n if not isinstance(s, six.string_types):\n return s\n if isinstance(s, six.text_type):\n return s.encode('utf-8')\n else:\n return s", "def utf8(s: bytes):\n\n return str(s, 'utf-8')", "def _utf8str(x):\r\n if six.PY3:\r\n return str(x)\r\n if isinstance(x, six.binary_type):\r\n return x\r\n elif isinstance(x, six.text_type):\r\n return x.encode('utf-8')\r\n else:\r\n return six.binary_type(x)", "def to_bytes(bytes_or_str):\n if isinstance(bytes_or_str, str):\n value = bytes_or_str.encode() # uses 'utf-8' for encoding\n else:\n value = bytes_or_str\n return value # Instance of bytes", "def to_bytes(string):\n assert isinstance(string, basestring)\n if sys.version_info[0] >= 3:\n if isinstance(string, str):\n return string.encode('utf-8')\n else:\n return string\n else:\n if isinstance(string, unicode):\n return string.encode('utf-8')\n else:\n return string", "def bytes_to_unicode(value, encoding=\"utf-8\"):\n if value is None or is_unicode(value):\n return value\n assert is_bytes(value)\n return value.decode(encoding)", "def encode_bytes(s):\n if isinstance(s, bytes):\n return s\n elif isinstance(s, unicode):\n return s.encode(\"raw_unicode_escape\")\n else:\n return s", "def toBytes(data):\n\tif isBytes(data):\n\t\treturn data\n\telse:\n\t\treturn data.encode(\"latin-1\")" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Returns the file system path representation of a `PathLike` object.
def path_to_str(path):
    if hasattr(path, '__fspath__'):
        path = as_str_any(path.__fspath__())
    return path
[ "def stringify_pathlike(pathlike):\n maybe_pathlike_str = (\n pathlike.__fspath__() if hasattr(pathlike, \"__fspath__\") else pathlike\n )\n\n return maybe_pathlike_str", "def as_path(path: PathLike) -> Path:\n msg = py_utils.dedent(\"\"\"\n `tfds.core.as_path` is deprecated. Pathlib API has been moved to a\n separate module. To migrate, use:\n\n ```\n from etils import epath\n path = epath.Path('gs://path/to/f.txt')\n ```\n\n Alternatively `tfds.core.Path` is an alias of `epath.Path`.\n\n Installation: `pip install etils[epath]`\n\n \"\"\"\n\n )\n logging.warning(msg)\n return epath.Path(path)", "def path_serializer(obj: PurePath, **_: Any) -> str:\n return obj.as_posix()", "def as_pathlib(self):\n return Path(self.absolute)", "def as_path(path: PathLike) -> tfds.core.ReadWritePath:\n return tfds.core.as_path(path)", "def path_str(cls, arg):\n if cls.supports_unicode_filenames:\n # native representation is unicode character\n if is_unicode(arg):\n return arg\n elif isinstance(arg, bytes):\n return arg.decode(cls.codec)\n elif isinstance(arg, cls):\n return arg.path\n else:\n raise TypeError\n else:\n # native representation is binary string\n if is_unicode(arg):\n return arg.encode(cls.codec)\n elif isinstance(arg, bytes):\n return arg\n elif isinstance(arg, cls):\n return arg.path\n else:\n raise TypeError", "def posix_path(self, **kw):\n with_drive_letter = kw.get(\"with_drive\", True)\n return self._construct_path(\"/\", with_drive_letter)", "def posix_path(path):\n\n return os.path.normpath(path).replace('\\\\', '/')", "def os_path(self, **kw):\n with_drive = kw.get(\"with_drive\", True)\n if os.name == \"nt\":\n return self.windows_path(with_drive=with_drive)\n return self.posix_path(with_drive=with_drive)", "def get_path(self, scope: Scope) -> str:\n return os.path.normpath(os.path.join(*scope[\"path\"].split(\"/\")))", "def ToPosixPath(path):\n return path.replace(os.path.sep, posixpath.sep)", "def cast_to_pathlib_path(value):\n if value is None:\n return value\n if isinstance(value, pathlib.Path):\n return value\n try:\n return pathlib.Path(value.strpath)\n except AttributeError:\n return pathlib.Path(str(value))", "def convert_to_path(arg: Any) -> Path:\n return Path(arg)", "def path(self):\n\n if os.path.isabs(self._value):\n return pathlib.Path(self._value)\n raise RuntimeError('RequestString.path not supported.')", "def _path_to_string(path):\n\n return \"/\".join(str(item) for item in path)", "def pathstr(self):\n s = ('furl.pathstr is deprecated. Use str(furl.path) instead. There '\n 'should be one, and preferably only one, obvious way to serialize '\n 'a Path object to a string.')\n warnings.warn(s, DeprecationWarning)\n return str(self._path)", "def _path_to_str(var):\n if not isinstance(var, (Path, str)):\n raise ValueError(\"All path parameters must be either strings or \"\n \"pathlib.Path objects. Found type %s.\" % type(var))\n else:\n return str(var)", "def _get_as_path(self):\n return self.__as_path", "def path(sc, file_path):\n path_class = sc._gateway.jvm.org.apache.hadoop.fs.Path\n path_obj = path_class(file_path)\n return path_obj" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Adds a node to the front of the list with value 'val'
def push_front(self, val):
    new_node = Node(val, self.head)
    if self.is_empty():
        self.tail = new_node
    self.head = new_node
    self.size += 1
[ "def insert_before(self, val, new_value):\n\n current = self.head\n\n if current.val is val:\n self.insert(new_value)\n else:\n while current.next.val is not val:\n current = current.next\n\n new_node = Node(new_value, current.next)\n # new_node._next = current._next\n current.next = new_node\n self._length += 1", "def addAtHead(self, val):\n node = ListNode(val)\n if self.head == None:\n self.head = node\n else:\n node.next = self.head\n self.head = node", "def insert_before(self, val, new_val):\n new_node = Node(new_val)\n current = self.head._next\n while current._next is not None:\n if current._next.val == val:\n new_node._next = current._next\n current._next = new_node\n self._size += 1\n break\n\n current = current._next\n\n if current._next is None:\n raise ValueError(\"Data not in list\")", "def add_to_front(self, val):\n pass", "def add(self, val):\n self.head = self.rec_add(self.head, val)", "def add_in_head(self, value):\n\n if self.head == None:\n self.head = Node(value);\n else:\n new = Node(value)\n new.set_next(self.head)\n self.head = new", "def push_front(self, value):\n new_node = self.Node(value)\n\n # Edge Case : List is empty\n if self._size == 0:\n self._tail = new_node\n self._head = new_node\n self._size += 1\n return\n\n new_node.next = self._head\n self._head.prev = new_node\n self._head = new_node\n self._size += 1", "def prepend(self, value):\n # Create new node with next_ referring to front\n new_node = LinkedListNode(value, self.front)\n # change front\n self.front = new_node\n # if the list was empty, change back\n if self.size == 0:\n self.back = new_node\n # update size\n self.size += 1", "def insertBefore(self,value,newVal):\n current = self.head\n \n while current.next is not None:\n if current.next.value == value:\n break\n current = current.next\n if current.next is None:\n raise Exception(\"the value not exisit \")\n else:\n new_node = Node(newVal)\n new_node.next = current.next\n current.next = new_node", "def push_back(self, val):\n new_node = Node(val)\n # Update current head and tail, if necessary\n if self.is_empty():\n self.head = new_node\n else:\n self.tail.next_node = new_node\n # new_node is now the tail\n self.tail = new_node\n self.size += 1", "def prepend(self, value: object) -> None:\n nn = LinkedListNode(value, None)\n if self.front == None:\n self.front = nn\n self.back = nn\n self.size = 1\n else:\n nn.next_ = self.front\n self.front = nn\n self.size += 1", "def prepend(self, value: Any) -> None:\n self.front = LinkedListNode(value, self.front)\n if self.back is None:\n self.back = self.front\n self.size += 1", "def prepend(self, value):\r\n if self.head is None:\r\n self.head = Node(value)\r\n return\r\n new_node = Node(value)\r\n new_node.next = self.head\r\n self.head = new_node", "def insert_after(self, val, new_val):\n new_node = Node(new_val)\n current = self.head._next\n while current._next is not None:\n if current.val == val:\n new_node._next = current._next._next\n current._next = new_node\n self._size += 1\n break\n\n current = current._next\n\n if current._next is None:\n raise ValueError(\"Data not in list\")", "def addAtTail(self, val):\n if self.head is None:\n self.addAtHead(val)\n else:\n new_node = Node(val)\n curr = self.head\n while (curr.next is not None):\n curr = curr.next\n\n curr.next = new_node\n new_node.prev = curr\n self.length += 1", "def push_back(self, val: Generic[T]) -> None:\n return insert(self,self.node.next,val)", "def push_front(self, val):\r\n self.deque.insert(0, val)", "def push_front(self, 
val : Any):\n\n if (self.size == self.capacity):\n raise IndexError(\"Capacity of LinkedList_Array has been exceeded\")\n\n for i in range (self.size,0,-1):\n self.array[i] = self.array[i-1]\n \n self.array[0] = val\n self.size += 1", "def enqueue(self, val):\n self.list.insert_last(val)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Adds a node to the back of the list with value 'val'
def push_back(self, val):
    new_node = Node(val)
    # Update current head and tail, if necessary
    if self.is_empty():
        self.head = new_node
    else:
        self.tail.next_node = new_node
    # new_node is now the tail
    self.tail = new_node
    self.size += 1
[ "def push_back(self, val: Generic[T]) -> None:\n return insert(self,self.node.next,val)", "def addAtTail(self, val):\n if self.head is None:\n self.addAtHead(val)\n else:\n new_node = Node(val)\n curr = self.head\n while (curr.next is not None):\n curr = curr.next\n\n curr.next = new_node\n new_node.prev = curr\n self.length += 1", "def insert_after(self, val, new_val):\n new_node = Node(new_val)\n current = self.head._next\n while current._next is not None:\n if current.val == val:\n new_node._next = current._next._next\n current._next = new_node\n self._size += 1\n break\n\n current = current._next\n\n if current._next is None:\n raise ValueError(\"Data not in list\")", "def add(self, val):\n self.head = self.rec_add(self.head, val)", "def enqueue(self, val):\n self.list.insert_last(val)", "def push_front(self, val):\n new_node = Node(val, self.head)\n if self.is_empty():\n self.tail = new_node\n self.head = new_node\n self.size += 1", "def append(self, value: object) -> None:\n # create the new node\n new_node = LinkedListNode(value)\n # if the list is empty, the new node is front and back\n if self.size == 0:\n assert self.back is None and self.front is None, \"ooops\"\n self.front = self.back = new_node\n # if the list isn't empty, front stays the same\n else:\n # change *old* self.back.next_ first!!!!\n self.back.next_ = new_node\n self.back = new_node\n # remember to increase the size\n self.size += 1", "def addBack(self, value):\n if not self.head:\n self.head = value\n else:\n last = self.head\n while last.next:\n last = last.next\n last.next = value\n return f'{value} added to array '", "def add_to_front(self, val):\n pass", "def insert_before(self, val, new_value):\n\n current = self.head\n\n if current.val is val:\n self.insert(new_value)\n else:\n while current.next.val is not val:\n current = current.next\n\n new_node = Node(new_value, current.next)\n # new_node._next = current._next\n current.next = new_node\n self._length += 1", "def bstAdd(root, val):\n which = 'right' if val > root.val else 'left'\n if getattr(root, which) is None:\n setattr(root, which, Node(val))\n else:\n bstAdd(getattr(root, which), val)", "def insert_before(self, val, new_val):\n new_node = Node(new_val)\n current = self.head._next\n while current._next is not None:\n if current._next.val == val:\n new_node._next = current._next\n current._next = new_node\n self._size += 1\n break\n\n current = current._next\n\n if current._next is None:\n raise ValueError(\"Data not in list\")", "def append(self, value):\n node = Node(value) # node.next is None\n if not self.head:\n self.head = node\n else:\n current = self.head\n while current.next != None:\n current = current.next\n current.next = node # previous-node.next is now our new node", "def addAtIndex(self, index: int, val: int) -> None:\n if index > self.length:\n return None\n\n if index == self.length:\n return self.addAtTail(val)\n\n new_node = ListedNode(val)\n prev_node = self._get(index-1)\n new_node.prev = prev_node\n new_node.next = prev_node.next\n prev_node.next.prev = new_node\n prev_node.next = new_node\n self.length += 1\n # self.debug()", "def insertAfter(self,value,newVal):\n current = self.head\n\n while current is not None:\n if current.value == value:\n break\n current = current.next\n if current is None:\n raise Exception(\" the value not exisit \")\n else:\n new_node = Node(newVal)\n new_node.next = current.next\n current.next = new_node", "def push(self, value):\n self.top = Node(value, self.top)\n self.size += 1", "def 
append_left(self, value: Any) -> None:\n node = Node(data=value)\n node.next = self.head\n self.head = node\n self._length += 1", "def addAtHead(self, val):\n node = ListNode(val)\n if self.head == None:\n self.head = node\n else:\n node.next = self.head\n self.head = node", "def addAtIndex(self, index, val):\n if index > 0 and not self.head:\n return\n \n tmp = Node(val)\n if index == 0 and not self.head:\n self.head = tmp\n self.tail = self.head\n return\n if index == 0 and self.head:\n tmp.nxt = self.head\n self.head = tmp \n return\n \n \n cur = self.head\n i = 1\n while i < index and cur:\n cur = cur.nxt\n i+=1\n if i == index:\n if not cur:\n if self.tail:\n self.tail.nxt = tmp\n self.tail = tmp\n else:\n self.head = tmp\n self.tail = tmp\n# print(\"KMG 1\")\n else:\n# print(\"inserting after the value %d\" %cur.val)\n tmp.nxt = cur.nxt\n cur.nxt = tmp\n if self.tail == cur:\n self.tail = tmp" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
get a post given its title
def get(self, title):
    post = get_a_post(title)
    if not post:
        api.abort(404)
    else:
        return post
[ "def getPost(title):\n try:\n path = os.path.join(\"data\",title)\n if not os.path.isfile(path):\n raise PostDoesNotExist(title)\n \n except PostDoesNotExist:\n print \"PostDoesNotExist\"\n pass\n \n else:\n datafile = open(path, mode=\"rb\")\n post = pickle.load(datafile)\n datafile.close()\n return post", "def get(self, slug):\n return self._posts.get(slug)", "def read(self, request, title=None):\n base = Blogpost.objects\n \n if title:\n return base.get(title=title)\n else:\n return base.all()", "def get_by_natural_key(self, title):\n try:\n return self.get(title=title)\n except ObjectDoesNotExist:\n logging.getLogger(self.__module__).error('%s \"%s\" does not exist',\n self.model.__name__, title)", "def get_article(title):\n article = None\n # search for the corresponding title from the memcache\n articles = memcache.get('top_ten')\n if articles and len(articles) > 0:\n for item in articles:\n # workaround to remove all non-alphanumeric characters before comparison\n item_title = re.sub(r'\\W', \"\", item.title)\n art_title = re.sub(r'\\W', \"\", title)\n if item_title == art_title:\n article = item\n break\n # in case the article we're looking for is not in memcache:\n if not article:\n # query the DB\n query = db.Query(Article)\n query.filter('title =', title)\n article = query.get()\n return article", "def find_movie_by_title(title):\n return Movie.objects.filter(title=title).first()", "def get_snippet(self, title=None):\n for snippet in self.snippets:\n if snippet[\"title\"] == title:\n return snippet\n return None", "def get_post_title(url):\n html = get_html(url)\n links = []\n soup = BeautifulSoup(html, 'html.parser')\n return soup.head.title.string.split('\\n')[0]", "def get_post(name):\n return post_dict.get(name)", "def extract_title(post):\n\tart = post.find(\"div\", {\"id\": \"article-container\"})\n\ttit = art.find(\"hgroup\", {\"class\": \"title-container\"})\n\n\ttitle = tit.find(\"h2\", class_=\"title-type-sup\").text\n\treturn title", "def get_post_by_id(self, id):\n return requests.get(self.url + 'posts/{}'.format(id))", "def gettitle(post):\n\ttag='<title>'\n\tx=post.find(tag)+len(tag)\n\ttag='</title>'\n\ty=post.find(tag, x)\n\tprint 'gettitle=%s' % notags(post[x:y])\n\treturn notags(post[x:y])", "def get_post(self, post=None, search_type=\"id\", params = dict()):\n\n response = _get(\n post,\n search_type,\n params,\n self.base_url,\n self.session,\n resource_type = \"posts\"\n )\n\n return response", "def get_movie_by_title(title):\n \n #get movie from db and build object\n movies_df = database_helper.select_query(\"movies\", { \"title\" : title })\n if (not movies_df.empty):\n return Movie(movies_df.iloc[0])\n \n #if no matching movie return none\n return None", "def get_book_by_title(title):\n\n return Book.query.filter(Book.title == title).first()", "def fromtitle(cls, title):\n return Collection.get_by_key_name(cls.getkeyname(title))", "def get_object(self, id):\n try:\n return Post.objects.get(id=id)\n except Post.DoesNotExist:\n raise Http404", "def get_post(id, check_author=True):\r\n cur = get_db().cursor()\r\n cur.execute(\r\n 'SELECT p.id, title, body, created, author_id, username'\r\n ' FROM novel.post p JOIN novel.user u ON p.author_id = u.id'\r\n ' WHERE p.id = %s',id )\r\n\r\n post = cur.fetchone()\r\n if post is None:\r\n abort(404, \"Post id {0} doesn't exist.\".format(id))\r\n\r\n if check_author and post['author_id'] != g.user['id']:\r\n abort(403)\r\n\r\n return post", "def title_by_id(id_: int) -> Any:\n post = 
Posts.query.filter_by(id=id_).first()\n if post is None:\n return \"404\"\n return post.title" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Reinvite an already invited user.
def reinvite_user(self, user, email):
    if self.is_moderator and self.has_perm('accounts.invite_user'):
        # Reset email, set a new token and update decision datetime
        user.email = email
        user.auth_token = generate_unique_id()
        user.decision_datetime = timezone.now()
        user.save()
        return user
    else:
        raise PermissionDenied
[ "def invited_user(self, invited_user):\n self._invited_user = invited_user", "def resend_invite(id):\n invite = s.query(Invites). \\\n filter(Invites.id==id). \\\n first()\n\n send_mail_unknown(invite.email, \"Register for CompetenceDB\",\n 'You are invited to register for CompetenceDB. <br><br> Go to this address: <a href=\"' + request.url_root + 'register?invite_id=' + invite.invite_id + '\">' + request.url_root + 'register?invite_id=' + invite.invite_id + '</a>')\n\n flash(\"Email sent to \"+invite.first_name +\" \"+ invite.last_name + \"(\"+invite.email+\")\",\"success\")\n\n return redirect(url_for('admin.invites'))", "def redeem_invite(self, invite, email):\n # Lets make sure we have a clean slate\n self.client.logout()\n assert not User.objects.filter(email=email), (\n \"User shouldn't be in database.\")\n\n # We need to store the invite code in the session\n self.client.get(invite.get_url(), follow=True)\n\n # BrowserID needs an assertion not to be whiney\n d = dict(assertion=self.fake_assertion)\n with mock_browserid(email):\n self.client.post(reverse('browserid_verify'), d, follow=True)\n\n # Now let's register\n d = dict(full_name='Desaaaaaaai',\n username='aakash',\n optin=True)\n with mock_browserid(email):\n self.client.post(reverse('register'), d, follow=True)\n\n # Return the New Users Profile\n invited_user_profile = User.objects.get(email=email).get_profile()\n return invited_user_profile", "def invite_user(context):", "def resend_invite(self, **kwargs):\n\n query_params = {\"_actions\": \"false\", \"_links\": \"true\", \"_embedded\": \"true\"}\n path_params = {}\n headers = {}\n body = None\n\n if \"instanceId\" in kwargs:\n path_params[\"instanceId\"] = kwargs[\"instanceId\"]\n if \"orgId\" in kwargs:\n path_params[\"orgId\"] = kwargs[\"orgId\"]\n if \"inviteId\" in kwargs:\n path_params[\"inviteId\"] = kwargs[\"inviteId\"]\n if \"roleInfo\" in kwargs:\n body = kwargs[\"roleInfo\"]\n if \"losantdomain\" in kwargs:\n headers[\"losantdomain\"] = kwargs[\"losantdomain\"]\n if \"_actions\" in kwargs:\n query_params[\"_actions\"] = kwargs[\"_actions\"]\n if \"_links\" in kwargs:\n query_params[\"_links\"] = kwargs[\"_links\"]\n if \"_embedded\" in kwargs:\n query_params[\"_embedded\"] = kwargs[\"_embedded\"]\n\n path = \"/instances/{instanceId}/orgs/{orgId}/invites/{inviteId}\".format(**path_params)\n\n return self.client.request(\"POST\", path, params=query_params, headers=headers, body=body)", "def resend_invitation(self, request, *args, **kwargs):\n self.get_object()\n serializer = self.get_serializer(data=request.data)\n serializer.is_valid(raise_exception=True)\n serializer.save()\n return Response({\"message\": _(\"Success\")})", "def invite(self,roomName,user):\n\n self.sendCommand(roomName +\" /invite\",user)", "def invite_user(email: str):\n if not app.config['REQUIRE_INVITE_CODE']:\n raise APIException(\n 'An invite code is not required to register, so invites have been disabled.'\n )\n if not flask.g.user.invites:\n raise APIException('You do not have an invite to send.')\n\n invite = Invite.new(\n inviter_id=flask.g.user.id, email=email, ip=flask.request.remote_addr\n )\n flask.g.user.invites -= 1\n db.session.commit()\n return flask.jsonify(invite)", "def send_invitation(self):\n invitation = InvitationKey.objects.create_invitation()\n invitation.send_to(self.email)\n self.invited = True\n self.save()", "def test_no_reinvite(self):\n vouched_email = 'mr.fusion@gmail.com'\n create_vouched_user(vouched_email)\n url = reverse('invite')\n d = 
dict(recipient=vouched_email)\n r = self.mozillian_client.post(url, d, follow=True)\n eq_(r.status_code, 200)\n assert ('You cannot invite someone who has already been vouched.' in\n pq(r.content)('ul.errorlist li').text())", "def resend_invitation(self, pending_member, application_url):\n email = pending_member.username\n invitation = forms.EmailInvitation(self.app_title, self.app_banner, self.theme, email, security.get_user().data, self.data, application_url)\n invitation.send_email(self.mail_sender)\n # re-calculate pending\n self.pending = [component.Component(BoardMember(PendingUser(token.token), self, \"pending\"))\n for token in set(self.data.pending)]", "def ResendFriendInvite(self, friend):\n return self.request.post(f'https://{self.BASE_URL}/hmsweb/users/friends', friend)", "def test_user_invite_cant_edit_users_existing_user(self):\n project = fake_clients.FakeProject(name=\"test_project\")\n\n user = fake_clients.FakeUser(name=\"test@example.com\")\n\n setup_identity_cache(projects=[project], users=[user])\n\n url = \"/v1/actions/InviteUser\"\n headers = {\n \"project_name\": \"test_project\",\n \"project_id\": project.id,\n \"roles\": \"project_admin,member,project_mod\",\n \"username\": \"user\",\n \"user_id\": \"test_user_id\",\n \"authenticated\": True,\n }\n data = {\n \"username\": \"new_user\",\n \"email\": \"test@example.com\",\n \"roles\": [\"member\"],\n \"project_id\": project.id,\n }\n response = self.client.post(url, data, format=\"json\", headers=headers)\n self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)\n self.assertEqual(response.json(), {\"notes\": [\"task created\"]})", "def invite(self, invite):\n\n self._invite = invite", "def revoke_invitation(self, request, *args, **kwargs):\n self.get_object()\n serializer = self.get_serializer(data=request.data)\n serializer.is_valid(raise_exception=True)\n serializer.save()\n return Response({\"message\": _(\"Success\")})", "def test_send_invite_flow(self):\n invite = self.invite_someone(self.fake_email, self.fake_invite_message)\n self.invite_without_message(self.fake_email)\n self.get_register(invite)\n invited_user_profile = self.redeem_invite(invite, self.fake_email)\n assert(invited_user_profile.is_vouched)\n assert(invite.inviter == invited_user_profile.vouched_by)\n\n # Don't reuse codes.\n self.redeem_invite(invite, email='mr2@gmail.com')\n eq_(User.objects.get(email='mr2@gmail.com').get_profile().is_vouched,\n False)", "def revoke_invite(code: str) -> flask.Response:\n invite = Invite.from_pk(\n code, _404=True, asrt=InvitePermissions.REVOKE_OTHERS\n )\n invite.expired = True\n invite.inviter.invites += 1\n db.session.commit()\n return flask.jsonify(invite)", "def test_revoke_invites(self) -> None:\n iago = self.example_user(\"iago\")\n desdemona = self.example_user(\"desdemona\")\n\n invite_expires_in_minutes = 2 * 24 * 60\n do_invite_users(\n iago,\n [\"new1@zulip.com\", \"new2@zulip.com\"],\n [],\n invite_expires_in_minutes=invite_expires_in_minutes,\n invite_as=PreregistrationUser.INVITE_AS[\"REALM_ADMIN\"],\n )\n do_invite_users(\n desdemona,\n [\"new3@zulip.com\", \"new4@zulip.com\"],\n [],\n invite_expires_in_minutes=invite_expires_in_minutes,\n invite_as=PreregistrationUser.INVITE_AS[\"REALM_ADMIN\"],\n )\n\n do_invite_users(\n iago,\n [\"new5@zulip.com\"],\n [],\n invite_expires_in_minutes=None,\n invite_as=PreregistrationUser.INVITE_AS[\"REALM_ADMIN\"],\n )\n do_invite_users(\n desdemona,\n [\"new6@zulip.com\"],\n [],\n invite_expires_in_minutes=None,\n 
invite_as=PreregistrationUser.INVITE_AS[\"REALM_ADMIN\"],\n )\n\n iago_multiuse_key = do_create_multiuse_invite_link(\n iago, PreregistrationUser.INVITE_AS[\"MEMBER\"], invite_expires_in_minutes\n ).split(\"/\")[-2]\n desdemona_multiuse_key = do_create_multiuse_invite_link(\n desdemona, PreregistrationUser.INVITE_AS[\"MEMBER\"], invite_expires_in_minutes\n ).split(\"/\")[-2]\n\n iago_never_expire_multiuse_key = do_create_multiuse_invite_link(\n iago, PreregistrationUser.INVITE_AS[\"MEMBER\"], None\n ).split(\"/\")[-2]\n desdemona_never_expire_multiuse_key = do_create_multiuse_invite_link(\n desdemona, PreregistrationUser.INVITE_AS[\"MEMBER\"], None\n ).split(\"/\")[-2]\n\n self.assertEqual(\n filter_to_valid_prereg_users(\n PreregistrationUser.objects.filter(referred_by=iago)\n ).count(),\n 3,\n )\n self.assertEqual(\n filter_to_valid_prereg_users(\n PreregistrationUser.objects.filter(referred_by=desdemona)\n ).count(),\n 3,\n )\n self.assertTrue(\n assert_is_not_none(\n Confirmation.objects.get(confirmation_key=iago_multiuse_key).expiry_date\n )\n > timezone_now()\n )\n self.assertTrue(\n assert_is_not_none(\n Confirmation.objects.get(confirmation_key=desdemona_multiuse_key).expiry_date\n )\n > timezone_now()\n )\n self.assertIsNone(\n Confirmation.objects.get(confirmation_key=iago_never_expire_multiuse_key).expiry_date\n )\n self.assertIsNone(\n Confirmation.objects.get(\n confirmation_key=desdemona_never_expire_multiuse_key\n ).expiry_date\n )\n\n do_deactivate_user(iago, acting_user=None)\n\n # Now we verify that invitations generated by iago were revoked, while desdemona's\n # remain valid.\n self.assertEqual(\n filter_to_valid_prereg_users(\n PreregistrationUser.objects.filter(referred_by=iago)\n ).count(),\n 0,\n )\n self.assertEqual(\n filter_to_valid_prereg_users(\n PreregistrationUser.objects.filter(referred_by=desdemona)\n ).count(),\n 3,\n )\n self.assertTrue(\n assert_is_not_none(\n Confirmation.objects.get(confirmation_key=iago_multiuse_key).expiry_date\n )\n <= timezone_now()\n )\n self.assertTrue(\n assert_is_not_none(\n Confirmation.objects.get(confirmation_key=desdemona_multiuse_key).expiry_date\n )\n > timezone_now()\n )\n self.assertTrue(\n assert_is_not_none(\n Confirmation.objects.get(\n confirmation_key=iago_never_expire_multiuse_key\n ).expiry_date\n )\n <= timezone_now()\n )\n self.assertIsNone(\n Confirmation.objects.get(\n confirmation_key=desdemona_never_expire_multiuse_key\n ).expiry_date\n )", "def inviteuser():\n if request.method == \"GET\":\n return redirect('/myparties')\n else:\n code = request.form.get('code')\n party_data = db.execute('SELECT * FROM parties WHERE code = ?', code)\n username = request.form.get('username')\n guest_data = db.execute(\n 'SELECT * FROM users WHERE username = ?', username)\n #User is not found\n if len(guest_data) < 1:\n return \"User not found\"\n # If the username corresponds to the organizer of the party\n if guest_data[0]['id'] == session['user_id']:\n return \"You cannot invite yourself\"\n\n guest_birth = datetime.strptime(\n guest_data[0]['birth'], \"%Y-%m-%d %H:%M:%S\")\n guest_age = relativedelta(datetime.now(), guest_birth).years\n\n # If the user tries to invite a -18 person to a adult party\n if party_data[0]['adult'] and guest_age < 18:\n return f\"{username} can't be invited because your party is for adult only.\"\n db.execute(\n 'INSERT INTO guests (party_code, guest_id, confirmed) VALUES (?, ?, ?)',\n code,\n guest_data[0]['id'],\n False)\n flash(f\"{username} invited!\")\n return redirect('/myparties')" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Approve a user's application
def approve_user_application(self, user):\n        if self.is_moderator and \\\n                self.has_perm('accounts.approve_user_application'):\n            user.moderator = self\n            user.moderator_decision = user.APPROVED\n            user.decision_datetime = timezone.now()\n            user.auth_token = generate_unique_id()\n            user.save()\n            return user\n        else:\n            raise PermissionDenied
[ "def approve_application(request, application_pk):\n application = get_object_or_404(\n teambuilder_models.Application, pk=application_pk)\n developer = application.applicant\n application.approve()\n email.send_email(\n 'Your application was approved!',\n '''Hello {}!\n Thank you for your application to {} as {}.\n We are happy to inform you that your application has been\n accepted. We will soon get in touch with you regarding\n the details of this job. With regards, {}'''\n .format(\n developer, application.position.project,\n application.position.skill,\n application.position.project.owner),\n [developer.email],\n )\n return HttpResponseRedirect(reverse_lazy(\n 'accounts:applications'))", "def approve(self):\n self._check_if_open()\n data = {\"approved\": True}\n return self.post(\"approve\", data)", "def can_approve(self, user, **data):\n raise Return(False)", "def approve(self, request):\n if request.user == self.user:\n messages.error(\n request,\n \"You cannot approve your own expenses, aka. the anti-stein-bagger defense\",\n )\n return\n\n # mark as approved and save\n self.approved = True\n self.save()\n\n # send email to economic for this expense\n send_accountingsystem_expense_email(expense=self)\n\n # send email to the user\n send_expense_approved_email(expense=self)\n\n # message to the browser\n messages.success(request, \"Expense %s approved\" % self.pk)", "def approve(self, user_id=None, service=\"\"):\r\n # If someone approves an outdated version of this, the first one wins\r\n if self.status == \"approved\":\r\n return\r\n\r\n self.error_msg = \"\" # reset, in case this attempt was denied before\r\n self.error_code = \"\" # reset, in case this attempt was denied before\r\n self.reviewing_user = user_id\r\n self.reviewing_service = service\r\n self.status = \"approved\"\r\n self.save()", "def approve(self, user):\n if not self.may_be_approved_by(user):\n raise NotPermittedToApprove\n if not self.in_approvable_status():\n raise NotInApprovableStatus\n if not self.is_approved_by(user):\n self.approvers.add(user)\n if self.status < Changeset.STATUS_APPROVED \\\n and self.number_of_approvals >= settings.MINIMUM_CHANGESET_APPROVALS:\n self.status = Changeset.STATUS_APPROVED\n self.save()", "def test_approve(self):\n\n username,userpass = self.testdata.find_account_for('toolsubmitter')\n\n self.utils.account.login_as(username,userpass)\n\n self.contribtool.approve(TOOLNAME,TOOLLICENSEDATA)", "def _confirm_approve_tool(catalog):\n\n # confirm the version\n po = catalog.load_pageobject('ToolsStatusApproveConfirmVersionPage')\n version = po.version_form.version.value\n try:\n version = float(version) + 0.01\n except:\n version = int(time.time())\n po.version_form.submit_form({'version':str(version)})\n\n\n # confirm the license\n po = catalog.load_pageobject('ToolsStatusApproveConfirmLicensePage')\n po.license_form.submit_form([('sourceaccess','open source'),\n ('templates','custom'),\n ('licensetext','.'),\n ('authorize',True)])\n\n # confirm the license\n po = catalog.load_pageobject('ToolsStatusApproveConfirmToolInfoPage')\n po.approve_tool()", "def approve():\n print(request, file=sys.stderr)\n logged_in = session.get('logged_in', 0)\n if (logged_in >= 2):\n f = request.form\n if (f):\n approvals = []\n disprovals = []\n pendings = []\n for key in f.keys():\n ka = key.split('.')\n if (len(ka) >= 2):\n rowid = int(ka[1])\n l = f.getlist(key)\n if (len(l) > 0):\n what = int(l[0])\n print(l[0]+\", \"+key+\", \"+str(rowid), file=sys.stderr)\n if (what < 0):\n 
disprovals.append(rowid)\n elif (what > 0):\n approvals.append(rowid)\n else:\n pendings.append(rowid)\n approve_vacation_db(approvals, 1)\n approve_vacation_db(disprovals, -1)\n approve_vacation_db(pendings, 0)\n return render_approve_command(0)\n else:\n flash(\"you must login as an admin first\", \"error\")\n return render_template('main.html')", "def review_applications(request):\n moderator = request.user\n site = get_current_site(request)\n\n pending = User.objects.filter(registration_method='REQ',\n decision_datetime=None,\n is_active=False)\n\n form = ModerateApplicationForm()\n\n if request.method == 'POST':\n\n form = ModerateApplicationForm(request.POST)\n user = get_object_or_404(User, id=request.POST['user_id'])\n\n if form.is_valid():\n decision = form.cleaned_data['decision']\n comments = form.cleaned_data['comments']\n\n if decision == 'APP':\n confirmation_message = _(\"{}'s account application \"\n \"has been approved.\".format(\n user.get_full_name().title()))\n\n moderator.approve_user_application(user)\n\n # Set log and email settings\n msg_type = ModerationLogMsg.APPROVAL\n url = request.build_absolute_uri(\n reverse('accounts:activate-account',\n args=[user.auth_token]))\n subject = _('Welcome to {}'.format(site.name))\n template = 'moderation/emails/approve_user.html'\n\n elif decision == 'REJ':\n confirmation_message = _(\"{}'s account application \"\n \"has been rejected.\".format(\n user.get_full_name().title()))\n\n moderator.reject_user_application(user)\n\n # Set log and email settings\n msg_type = ModerationLogMsg.REJECTION\n url = ''\n subject = _(('Unfortunately, your application to {} '\n 'was not successful').format(site.name))\n template = 'moderation/emails/reject_user.html'\n\n # Log moderation event\n log_comment = '{}'.format(comments)\n log_moderator_event(msg_type=msg_type,\n user=user,\n moderator=moderator,\n comment=log_comment)\n\n # Send moderation email\n send_connect_email(subject=subject,\n template=template,\n recipient=user,\n sender=moderator,\n site=site,\n url=url)\n\n messages.success(request, confirmation_message)\n\n return redirect('moderation:review-applications')\n\n context = {\n 'pending': pending,\n 'form': form,\n }\n\n return render(request, 'moderation/review_applications.html', context)", "def userApproveApp(self):\r\n return self.reddit.auth.url(['*'], '...', 'permanent')", "def approveAction(self):\r\n\r\n approvedAddedItems = self.tabs.currentWidget().children()[2].selectedItems()\r\n for item in approvedAddedItems:\r\n self.data.session.user.approveAddedRoot(item.text())\r\n self.tabs.widget(self.tabs.count() - 1).children()[2].addItem(item)\r\n\r\n approvedDeletedItems = self.tabs.currentWidget().children()[3].selectedItems()\r\n for item in approvedDeletedItems:\r\n self.data.session.user.approveDeletedLemma(item.text())\r\n self.tabs.widget(self.tabs.count() - 1).children()[3].addItem(item)\r\n\r\n # self.tabs.widget(self.tabs.count() - 1).update()\r\n # self.tabs.widget(self.tabs.count() - 1).update()\r", "def approve(self, user, operator):\n if user:\n if user.uid not in self.signups:\n # New signup, create new record\n self.signups[user.uid] = {'approvals': set([operator.uid])}\n else:\n # Known signup, add approval\n self.signups[user.uid]['approvals'].add(operator.uid)\n approval_count = len(self.signups[user.uid]['approvals'])\n if (approval_count >= self.approval_threshold\n and 'convo' not in self.signups[user.uid]):\n convo = interro.Interro(\n msg_callback=lambda msg:\n self.core.msg(user.nickname, msg),\n 
complete_callback=lambda results:\n self.convo_complete(user.uid, results))\n self.fill_convo(convo)\n self.signups[user.uid]['convo'] = convo\n convo.start()\n return self.approval_threshold - approval_count\n else:\n return -1", "def approve(self, request):\n if request.user == self.user:\n messages.error(\n request,\n \"You cannot approve your own revenues, aka. the anti-stein-bagger defense\",\n )\n return\n\n # mark as approved and save\n self.approved = True\n self.save()\n\n # send email to economic for this revenue\n send_accountingsystem_revenue_email(revenue=self)\n\n # send email to the user\n send_revenue_approved_email(revenue=self)\n\n # message to the browser\n messages.success(request, \"Revenue %s approved\" % self.pk)", "def test_approve_user(test_client, init_database, login_superadmin):\n response = test_client.post('/approveUser', json=\"4\", follow_redirects=True)\n assert response.status_code == 200\n assert b'Sign Up Requests' in response.data", "def approve_membership(self, user):\n try:\n membership = self.get_membership(user)\n if not membership.approved:\n membership.approved = True\n membership.save()\n except Membership.DoesNotExist:\n pass", "def approve(mrequest_id):\n return _processbyadmin(mrequest_id, True)", "def approve(owner,spender,amount):\n if len(spender) != 20 or len(owner) != 20:\n raise Exception(\"address length error\")\n if CheckWitness(owner) == False:\n return False\n if amount > balanceOf(owner):\n return False\n\n key = concat(concat(APPROVE_PREFIX,owner),spender)\n Put(ctx, key, amount)\n\n # Notify([\"approval\", AddressToBase58(owner), AddressToBase58(spender), amount])\n # ApprovalEvent(AddressToBase58(owner), AddressToBase58(spender), amount)\n ApprovalEvent(owner, spender, amount)\n\n return True", "def approve(self):\n self.approved = True\n self.quest_node['approved'] = True\n graph.push(self.quest_node)\n self.payout()" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Reject a user's application
def reject_user_application(self, user):\n        if self.is_moderator \\\n                and self.has_perm('accounts.reject_user_application'):\n            user.moderator = self\n            user.moderator_decision = user.REJECTED\n            user.decision_datetime = timezone.now()\n            user.save()\n            return user\n        else:\n            raise PermissionDenied
[ "def reject_application(request, application_pk):\n application = get_object_or_404(\n teambuilder_models.Application, pk=application_pk)\n developer = application.applicant\n application.reject()\n email.send_email(\n 'Your application was rejected!',\n '''Hello {}!\n Thank you for your application to {} as {}.\n Unfortunately we could not consider your application.\n The position has been filled. With regards, {}'''\n .format(developer,\n application.position.project,\n application.position.skill,\n application.position.project.owner),\n [developer.email],\n )\n return HttpResponseRedirect(reverse_lazy(\n 'accounts:applications'))", "def admin_reject(user):\n if user.comments in (None or \"\"):\n return\n\n subject = \"ECE/CIS Account - Account Application rejected for %s\" % user.username\n application = \"https://www.eecis.udel.edu/NewAccount/\"\n helprequest = \"https://www.eecis.udel.edu/service\"\n sponsor = \"%s@eecis.udel.edu\" % user.sponsor\n \n message = \"Your ECE/CIS Account has been rejected by ECE/CIS faculty adminstrators.\\n\" % user.sponsor\n message += \"The reason given for rejection was:\\n\\n%s\\n\\n\" % user.comments\n message += \"You may re-apply with corrected information at %s\\n\" % application\n message += \"Please don't reply to this email. If have any questions, please \\n\"\n message += \"please post a ticket as an outsider at %s\" % helprequest\n message += \"-- ECE\\CIS Labstaff\"\n\n\n send('account@eecis.udel.edu', 'ECE/CIS Account System', \\\n [user.email, sponsor], subject, message, MAILHOST)", "def reject(self):\n self.rejected = True", "def serverReject(self):\n self.handshake_deferred.errback(ConnectionDeny(code=403, reason=\"Access denied\"))\n self.cleanup()\n logger.debug(\"WebSocket %s rejected by application\", self.reply_channel)\n self.factory.log_action(\"websocket\", \"rejected\", {\n \"path\": self.request.path,\n \"client\": \"%s:%s\" % tuple(self.client_addr) if self.client_addr else None,\n })", "def _reject(self, reason):\n log.error('Rejected: %s' % reason)\n\n self._remove_changes()\n self._remove_files()\n\n if self.user is not None:\n email = Email('importer_reject_maintainer')\n package = self.changes.get('Source', '')\n\n self.send_email(email, [self.user.email], package=package, message=reason)\n sys.exit(1)", "async def deny(self, ctx: commands.Context, target: discord.Member):\n try:\n accepter = get(ctx.guild.roles, id=await self.config.guild(ctx.guild).accepter_id())\n except TypeError:\n accepter = None\n if not accepter:\n if not ctx.author.guild_permissions.administrator:\n return await ctx.send(\"Uh oh, you cannot use this command.\")\n else:\n if accepter not in ctx.author.roles:\n return await ctx.send(\"Uh oh, you cannot use this command.\")\n try:\n applicant = get(ctx.guild.roles, id=await self.config.guild(ctx.guild).applicant_id())\n except TypeError:\n applicant = None\n if not applicant:\n applicant = get(ctx.guild.roles, name=\"Staff Applicant\")\n if not applicant:\n return await ctx.send(\n \"Uh oh, the configuration is not correct. Ask the Admins to set it.\"\n )\n if applicant in target.roles:\n await ctx.send(\"Would you like to specify a reason? (yes/no)\")\n pred = MessagePredicate.yes_or_no(ctx)\n try:\n await self.bot.wait_for(\"message\", timeout=30, check=pred)\n except asyncio.TimeoutError:\n return await ctx.send(\"You took too long. 
Try again, please.\")\n if pred.result:\n await ctx.send(\"Please, specify your reason now.\")\n\n def check(m):\n return m.author == ctx.author\n\n try:\n reason = await self.bot.wait_for(\"message\", timeout=120, check=check)\n except asyncio.TimeoutError:\n return await ctx.send(\"You took too long. Try again, please.\")\n await target.send(\n f\"Your application in {ctx.guild.name} has been denied.\\n*Reason:* {reason.content}\"\n )\n else:\n await target.send(f\"Your application in {ctx.guild.name} has been denied.\")\n await target.remove_roles(applicant)\n await ctx.send(f\"Denied {target.mention}'s application.\")\n else:\n await ctx.send(f\"Uh oh. Looks like {target.mention} hasn't applied for anything.\")", "async def deny(self, ctx: commands.Context, target: discord.Member):\n try:\n accepter = get(ctx.guild.roles, id = await self.config.guild(ctx.guild).accepter_id())\n except TypeError:\n accepter = None\n if not accepter:\n if not ctx.author.guild_permissions.administrator:\n return await ctx.send(\"Uh oh, you cannot use this command.\")\n else:\n if accepter not in ctx.author.roles:\n return await ctx.send(\"Uh oh, you cannot use this command.\")\n try:\n applicant = get(ctx.guild.roles, id = await self.config.guild(ctx.guild).applicant_id())\n except TypeError:\n applicant = None\n if not applicant:\n applicant = get(ctx.guild.roles, name=\"Staff Applicant\")\n if not applicant:\n return await ctx.send(\"Uh oh, the configuration is not correct. Ask the Admins to set it.\")\n if applicant in target.roles:\n await ctx.send(\"Would you like to specify a reason? (yes/no)\")\n pred = MessagePredicate.yes_or_no(ctx)\n try:\n await self.bot.wait_for(\"message\", timeout=30, check=pred)\n except asyncio.TimeoutError:\n return await ctx.send(\"You took too long. Try again, please.\")\n if pred.result:\n await ctx.send(\"Please, specify your reason now.\")\n\n def check(m):\n return m.author == ctx.author\n\n try:\n reason = await self.bot.wait_for(\n \"message\", timeout=120, check=check\n )\n except asyncio.TimeoutError:\n return await ctx.send(\"You took too long. Try again, please.\")\n await target.send(\n f\"Your application in {ctx.guild.name} has been denied.\\n*Reason:* {reason.content}\"\n )\n else:\n await target.send(\n f\"Your application in {ctx.guild.name} has been denied.\"\n )\n await target.remove_roles(applicant)\n await ctx.send(f\"Denied {target.mention}'s application.\")\n else:\n await ctx.send(\n f\"Uh oh. 
Looks like {target.mention} hasn't applied for anything.\"\n )", "def reject_appl(data, ind):\n global rejected\n global pending_sheet\n rejected.append_row(data)\n ind += 1\n pending_sheet.delete_rows(ind)\n print(colored('\\nApplication rejected.\\n', 'cyan', attrs=['bold']))", "def reject(self):\n self.skype.conn(\"PUT\", \"{0}/users/{1}/invites/8:{2}/decline\"\n .format(SkypeConnection.API_CONTACTS, self.skype.userId, self.userId),\n auth=SkypeConnection.Auth.SkypeToken)", "async def denycmd(self, message):\n user = await utils.get_target(message)\n if not user:\n await utils.answer(message, self.strings[\"who_to_deny\"])\n return\n self._db.set(__name__, \"allow\", list(set(self._db.get(__name__, \"allow\", [])).difference({user})))\n await utils.answer(message, self.strings[\"pm_denied\"].format(user))", "def reject_permission(current_user):\n try:\n dataset_name = request.args.get('dataset')\n user_email = request.args.get('userEmail')\n record = ask_permissions.query.filter_by(Email=user_email, name_of_dataset=dataset_name).first()\n user_email_check = record.dataset.owner.Email\n print(\"input email:\" + user_email)\n print(\"db email:\" + user_email_check)\n if current_user.Email != user_email_check:\n db.session.close()\n return jsonify({'message': 'dont try to fool me, you dont own this dataset!'}), 400\n db.session.delete(record)\n db.session.commit()\n db.session.close()\n try:\n notify_by_email.send_an_email(\n message=f\"Subject: Your permission request for Dataset \" + dataset_name + \" was rejected\",\n receiver_email=user_email)\n except:\n return jsonify({'message': 'cannot send email!.'}), 409\n return jsonify({'message': 'permission accepted!'})\n except:\n return jsonify({'message': 'there has been an error!'}), 500", "def reject(self, request):\n # mark as not approved and save\n self.approved = False\n self.save()\n\n # send email to the user\n send_expense_rejected_email(expense=self)\n\n # message to the browser\n messages.success(request, \"Expense %s rejected\" % self.pk)", "def reject_application(request):\n if request.method == 'POST':\n\n profile_task = get_object_or_404(ProfileTask, pk=request.data[\"profiletask_id\"])\n task = profile_task.task\n\n # Permission check: logged in user owns the task\n if task.owner.id != request.user.id:\n return Response({\"error\":\"Current User does not own this task\"}, status=status.HTTP_400_BAD_REQUEST)\n\n # Integrity check: profileTask must be either applied or\n # application_shortlisted, and task must be open\n if not ((profile_task.status == ProfileTask.APPLIED or profile_task.status == ProfileTask.APPLICATION_SHORTLISTED)\n and task.status == 'O'):\n return Response({\"error\":\"profileTask status must be (Applied or Application_Shortlisted), and task status must be Open\"}, status=status.HTTP_400_BAD_REQUEST)\n\n #set compulsory fields for the serializer\n request.data[\"status\"] = ProfileTask.REJECTED\n request.data[\"task\"] = profile_task.task.id\n request.data[\"profile\"] = profile_task.profile.id\n\n # Create and save serializer\n serializer = ProfileTaskPostSerializer(profile_task,data=request.data)\n if serializer.is_valid():\n serializer.save()\n return Response(serializer.data, status=status.HTTP_200_OK)\n return Response(serializer.data, status=status.HTTP_400_BAD_REQUEST)", "async def reject_challenge(self, user_id, *, delay=0, lifespan=math.inf):\n await self.user_command(\n \"\", \"reject\", user_id, delay=delay, lifespan=lifespan\n )", "def review_applications(request):\n moderator = 
request.user\n site = get_current_site(request)\n\n pending = User.objects.filter(registration_method='REQ',\n decision_datetime=None,\n is_active=False)\n\n form = ModerateApplicationForm()\n\n if request.method == 'POST':\n\n form = ModerateApplicationForm(request.POST)\n user = get_object_or_404(User, id=request.POST['user_id'])\n\n if form.is_valid():\n decision = form.cleaned_data['decision']\n comments = form.cleaned_data['comments']\n\n if decision == 'APP':\n confirmation_message = _(\"{}'s account application \"\n \"has been approved.\".format(\n user.get_full_name().title()))\n\n moderator.approve_user_application(user)\n\n # Set log and email settings\n msg_type = ModerationLogMsg.APPROVAL\n url = request.build_absolute_uri(\n reverse('accounts:activate-account',\n args=[user.auth_token]))\n subject = _('Welcome to {}'.format(site.name))\n template = 'moderation/emails/approve_user.html'\n\n elif decision == 'REJ':\n confirmation_message = _(\"{}'s account application \"\n \"has been rejected.\".format(\n user.get_full_name().title()))\n\n moderator.reject_user_application(user)\n\n # Set log and email settings\n msg_type = ModerationLogMsg.REJECTION\n url = ''\n subject = _(('Unfortunately, your application to {} '\n 'was not successful').format(site.name))\n template = 'moderation/emails/reject_user.html'\n\n # Log moderation event\n log_comment = '{}'.format(comments)\n log_moderator_event(msg_type=msg_type,\n user=user,\n moderator=moderator,\n comment=log_comment)\n\n # Send moderation email\n send_connect_email(subject=subject,\n template=template,\n recipient=user,\n sender=moderator,\n site=site,\n url=url)\n\n messages.success(request, confirmation_message)\n\n return redirect('moderation:review-applications')\n\n context = {\n 'pending': pending,\n 'form': form,\n }\n\n return render(request, 'moderation/review_applications.html', context)", "def get_everyone_denied(self):", "def reject_fr():\n\n if request.args == []:\n redirect(URL(\"user\", \"friend_requests\"))\n\n fr_id = request.args[0]\n\n # Simply delete the friend request\n db(db.friend_requests.id == fr_id).delete()\n\n redirect(URL(\"user\", \"friend_requests\"))\n return dict()", "def reject(self):\n self.rejected = timezone.now()\n self.save()\n friendship_request_rejected.send(sender=self)", "def deauthorize_application(request):\n if request.facebook:\n user = User.objects.get(\n facebook_id = request.facebook.signed_request.user.id\n )\n\n user.authorized = False\n user.save()\n\n return HttpResponse()\n else:\n return HttpResponse(status=400)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Return a user's proficiency in a particular skill as a percentage, based on the position of the proficiency in PROFICIENCY_CHOICES.
def get_proficiency_percentage(self):\n        choice_values = [choice[0] for choice in self.PROFICIENCY_CHOICES]\n        if '' in choice_values:\n            choice_values.remove('')  # Remove the empty proficiency choice\n        choice_values.sort()  # Ensure values are in the correct order\n        value = choice_values.index(self.proficiency) + 1\n        factor = 100 / len(choice_values)\n        percentage = round(value * factor)\n        return percentage
[ "def get_opinion_percent(self):\n return (self.get_percent()+100)/2", "def female_pct(self) -> float:\n return sum([p.sex_female for p in self.pop]) / self.starting_population", "def setup_proficiencies(self):\n Proficiency.objects.create(name='starter', needed_percentage=0)\n Proficiency.objects.create(name='newb', needed_percentage=25)\n Proficiency.objects.create(name='med', needed_percentage=50)\n Proficiency.objects.create(name='advanced', needed_percentage=75)\n Proficiency.objects.create(name='master', needed_percentage=100)", "def peirce_skill_score(self):\n n = float(self.table.sum())\n nf = self.table.sum(axis=1)\n no = self.table.sum(axis=0)\n correct = float(self.table.trace())\n return (correct / n - (nf * no).sum() / n ** 2) / (1 - (no * no).sum() / n ** 2)", "def get_next_proficiency(self):\n return # osid.learning.Proficiency", "def passive_perception(self):\n skill = self.get_skill_by_name('perception')\n return 10 + skill.get_bonus(self.get_prof_bonus())", "def percent_score(self):\n return self.score * 100", "def get_percentage_practices(measure_table):\n with open(OUTPUT_DIR / \"practice_count.json\") as f:\n num_practices = json.load(f)[\"num_practices\"]\n\n num_practices_in_study = get_number_practices(measure_table)\n\n return np.round((num_practices_in_study / num_practices) * 100, 2)", "def get_proficiency(conn):\n\n # Query database for proficency levels\n cursor = conn.execute(\"\"\"SELECT *\n FROM proficiency\n ORDER BY id ASC\"\"\")\n rows = cursor.fetchall()\n if rows is None:\n return []\n\n # Create occupations list and store queried values\n profiency = []\n for row in rows:\n skill_level = {}\n skill_level[\"id\"] = row[0]\n skill_level[\"level\"] = row[1]\n\n profiency.append(skill_level)\n\n return profiency", "def confidence_rating_in_percentage(self) -> pulumi.Output[float]:\n return pulumi.get(self, \"confidence_rating_in_percentage\")", "def percentageSurveyComplete(surveyID):\n complete, number = isComplete(surveyID)\n if complete:\n return 100\n else:\n total = len(getClassesTaking(surveyID))\n return ((total - len(number)) / total) * 100", "def perc_complete(self) -> str:\n return f\"{int(self.prop_complete * 100):3d}%\"", "def proportion(self, value,istart,istop,ostart,ostop) :\n return float(ostart) + (float(ostop) - float(ostart)) * ((float(value) - float(istart)) / (float(istop) - float(istart)))", "def percentageComplete(surveyID, classID):\n totalQuestions = len(getClassQuestions(surveyID, classID))\n completed = len(getQuestionsCompleted(surveyID, classID))\n return round((completed/totalQuestions) *100)", "def GetProportion(self):\r\n\r\n return self.proportion", "def get_prof_bonus(self):\n logger.debug('Setting proficiency bonus')\n levels = [5, 9, 12, 16]\n for lvl in levels:\n if self.level < lvl:\n return levels.index(lvl)+2\n return 6", "def get_percent(self):\n if not (self.votes and self.score):\n return 0\n return 100 * (self.get_rating() / self.field.range)", "def calculate_fitness_percentage(selection):\n return get_percentage(fitness_function(selection)/fitness_function(init_loot()))", "def calculate_progress(self):\n stakeholders_count = self.meetingstakeholder_set.count()\n meeting_items_count = self.meetingitem_set.count()\n factors_count = self.factors.count()\n\n max_evaluations = stakeholders_count * meeting_items_count * factors_count\n total_evaluations = self.get_evaluations().count()\n\n if max_evaluations != 0:\n percentage = round((total_evaluations / float(max_evaluations)) * 100.0, 2)\n else:\n percentage = 
0.0\n\n self.progress = percentage\n self.save()\n return self.progress" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Generate the preparation files for the projects in a run
def format_preparation_files(run_dir, sample_sheet, output_dir, pipeline, verbose):\n    sample_sheet = KLSampleSheet(sample_sheet)\n    df_sheet = sample_sheet_to_dataframe(sample_sheet)\n    if pipeline == 'atropos-and-bowtie2':\n        click.echo('Stats collection is not supported for pipeline '\n                   'atropos-and-bowtie2')\n    else:\n        stats = run_counts(run_dir, sample_sheet)\n        stats['sample_name'] = \\\n            df_sheet.set_index('lane', append=True)['sample_name']\n    # returns a map of (run, project_name, lane) -> preparation frame\n    preps = preparations_for_run(run_dir, df_sheet, pipeline=pipeline)\n    os.makedirs(output_dir, exist_ok=True)\n    for (run, project, lane), df in preps.items():\n        fp = os.path.join(output_dir, f'{run}.{project}.{lane}.tsv')\n        if pipeline == 'fastp-and-minimap2':\n            # stats are indexed by sample name and lane, lane is the first\n            # level index. When merging, make sure to select the lane subset\n            # that we care about, otherwise we'll end up with repeated rows\n            df = df.merge(stats.xs(lane, level=1), how='left', on='sample_name')\n        # strip qiita_id from project names in sample_project column\n        df['sample_project'] = df['sample_project'].map(\n            lambda x: re.sub(r'_\\d+$', r'', x))\n        # center_project_name is a legacy column that should mirror\n        # the values for sample_project.\n        df['center_project_name'] = df['sample_project']\n        df.to_csv(fp, sep='\\t', index=False)\n        if verbose:\n            project_name = remove_qiita_id(project)\n            # assume qiita_id is extractable and is an integer, given that\n            # we have already passed error-checking.\n            qiita_id = project.replace(project_name + '_', '')\n            print(\"%s\\t%s\" % (qiita_id, abspath(fp)))
[ "def newproject(self):\n \n self.path = os.path.join(self.base, self.name)\n subpath = os.path.join(self.path, self.lowname)\n check_build_path(subpath)\n \n for filename, content in self.files.items():\n self.buildfile(filename, content, self.path)\n\n script = open(SCRIPT, 'r').read().format(self.lowname)\n self.buildfile('{0}.py'.format(self.lowname), script, subpath) \n self.buildfile('__init__.py', '', subpath)\n \n #optionals\n if self.git:\n self.buildfile('.gitignore', '*.pyc', self.path)\n if self.db:\n datapath = os.path.join(self.path, 'data')\n os.makedirs(datapath)\n copydb = os.path.join(datapath, '{0}.db'.format(self.lowname))\n copy = subprocess.call(['cp', DATA, \"%s\" % copydb])\n if self.test:\n testpath = os.path.join(self.path, 'tests')\n os.makedirs(testpath)\n self.buildfile('__init__.py', '', testpath)", "def task_generate_tasks():\n \n yield {\n 'basename': 'generate_tasks',\n 'name': None,\n # 'doc': 'docs for X',\n 'watch': ['trains/'],\n 'task_dep': ['create_folders'],\n }\n \n for root, dirs, files in os.walk('trains/',topdown=False):\n for f in files:\n #print(f)\n yield template_train_model(os.path.join(root,f))", "def project():", "def generate_project(self):\n self.save_config_file(self.filename)\n try:\n saplib.generate_project(self.filename)\n except IOError as err:\n print \"File Error: \" + str(err)", "def generate_files(self):\n\t\tapply_stemmer, xml_file, query_file, expected_file = self.read_config_file()\n\t\tself.generate_query_file(query_file, xml_file, apply_stemmer)\n\t\tself.generate_expected_file(expected_file, xml_file)\n\t\tlogging.info('FINALIZADO: MÓDULO PROCESSADOR DE CONSULTAS')", "def pre_gen(self, spec_config, output_dir):", "def create_files(project_name, root_dir):\r\n root_dir = projectfolders.create_path(root_dir, project_name) #Modify the root\r\n \r\n write_setup(project_name, root_dir)\r\n write_inits(project_name, root_dir)\r\n write_tests(project_name, root_dir)", "def generate_all_files():\n for (name, fn) in lang_module.targets.items():\n path = of_g.options.install_dir + '/' + name\n os.system(\"mkdir -p %s\" % os.path.dirname(path))\n with open(path, \"w\") as outfile:\n fn(outfile, os.path.basename(name))\n print(\"Wrote contents for \" + name)", "def set_up(self, project_name):\n \n BaseProject.set_up(self, project_name) \n \n generate_template(\"models\", project_name, self.project_dir)\n generate_template(\"crawlers\", project_name, self.project_dir)", "def cmd_generate_requirements(): \n \n for env in ('dev', 'test'):\n source = Path(ROOT, \"requirements\", f\"{env}.txt\")\n target = Path(ROOT, \"requirements\", f\"{env}.in\")\n os.system(f\"pip-compile --output-file={source} {target}\")", "def prepare_files(npod, typ):\n os.chdir('/home/li/Dropbox/KTH/numerical_analysis/small-scale-multiobj')\n \n file_path = 'pod'+str(npod)+'_milp'\n \n # # remove directory if it exists\n # if os.path.exists(file_path):\n # shutil.rmtree(file_path)\n \n # # create new directory\n # os.mkdir(file_path)\n \n # # copy all files from pod100_milp/\n # os.system('cp -rf pod100_milp/* '+file_path+'/')\n \n # generate traffic matrices\n os.chdir(file_path+'/'+typ+'/')\n \n # modify generate_traffic_matrices.cpp\n file_path = 'generate_traffic_matrices.cpp'\n line_number = 13\n newline = ' int num_pods = '+str(npod)+';\\n'\n change_line(file_path, line_number, newline)\n \n line_number = 0\n newline = '#include \"../../../heuristics/sa_sdm.h\"\\n'\n change_line(file_path, line_number, newline)\n \n # run generate_traffic_matrices.cpp\n 
os.system('g++ -std=c++11 -pthread generate_traffic_matrices.cpp -o main; ./main')\n\n # modify the python template\n file_path = 'template_runsimu_'+typ+'.py'\n line_number = 29\n if npod == 150:\n timelimit = 18000\n elif npod == 200:\n timelimit = 25200\n newline = 'time_limit_sa = '+str(timelimit)+'\\n' # change gurobi running time\n change_line(file_path, line_number, newline)\n \n line_number = 6\n newline = 'optimize '+typ+'\\n' # change comment\n change_line(file_path, line_number, newline)\n\n # change the bash template\n file_path = 'template_runsimu_'+typ+'.sh'\n old_pattern = 'pod100'\n new_pattern = 'pod'+str(npod)\n replace(file_path, old_pattern, new_pattern)\n \n file_path = 'copyfile.py'\n replace(file_path, old_pattern, new_pattern)\n\n # replicate the template\n os.system('python copyfile.py')", "def setupRunDir(self):\n\n pass", "def setup_for_compilation_testcase(self):\n os.chdir(self.tmp_work)\n\n for container in self.containers:\n self._setup_single_directory_for_compilation(container.directory)\n # Run any necessary pre_commands\n self._run_pre_commands(container.directory)", "def generate_build_files(ctx):\n\n project_dir = Path(__file__).parent\n\n directory_of_the_tests = project_dir / \"tests/plugins\"\n directory_to_build_tests = project_dir / \"build/build_directory_for_tests\"\n\n # Clean UP\n if directory_to_build_tests.exists():\n shutil.rmtree(directory_to_build_tests)\n os.makedirs(directory_to_build_tests)\n\n # Finding hook_specs.py, each hook_specs represent a different project with different hooks\n hook_spec_paths = [\n path for path in directory_of_the_tests.glob(\"**/hook_specs.py\") if \"tmp\" not in path.parts\n ]\n\n # CMakeList.txt that includes all sub_directory with tests to be compiled\n root_cmake_list = directory_to_build_tests / \"CMakeLists.txt\"\n cmake_file_of_test_build_dir = [\n f\"add_subdirectory({i.parent.name })\\n\" for i in hook_spec_paths\n ]\n root_cmake_list.write_text(\"\".join(cmake_file_of_test_build_dir))\n\n # For each hook_specs, create a directory for the compilation and generate the files\n for project_hook_spec_path in hook_spec_paths:\n project_dir_for_build = directory_to_build_tests / project_hook_spec_path.parent.name\n project_dir_for_build.mkdir(parents=True)\n\n hm_generator = HookManGenerator(hook_spec_file_path=project_hook_spec_path)\n hm_generator.generate_project_files(dst_path=project_dir_for_build)\n\n # Find folder with Plugins\n plugins_dirs = [\n x\n for x in project_hook_spec_path.parent.iterdir()\n if x.is_dir() and (x / \"assets\").exists()\n ]\n\n # Copy all the plugins to the build dir\n for plugin in plugins_dirs:\n plugin_dir_build = project_dir_for_build / f\"plugin/{plugin.name}\"\n shutil.copytree(src=plugin, dst=plugin_dir_build)\n (plugin_dir_build / \"src/hook_specs.h\").write_text(\n hm_generator._hook_specs_header_content(plugin.stem)\n )\n\n # Create the CMakeFile on root of the project to include others CMake files.\n main_cmakelist = project_dir_for_build / \"CMakeLists.txt\"\n main_cmakelist_content = []\n main_cmakelist_content.append(\"add_subdirectory(cpp)\\nadd_subdirectory(binding)\\n\")\n main_cmakelist_content += [\n f\"add_subdirectory(plugin/{plugin.name}/src)\\n\" for plugin in plugins_dirs\n ]\n main_cmakelist.write_text(\"\".join(main_cmakelist_content))", "def make_files_and_run(self):\n self.write_feats()\n self.mk_outputdir()\n self.make_filtered_func()\n self.cp_mask_file()\n self.run_feat_files()\n self.thresh_zstats()\n self.webpage_for_thresh_zstats()", "def 
create_project_structure(project_name=generate_project_name(),\n run_id=generate_run_id(),\n sample_name=generate_sample_name(),\n empty_files=False):\n raise NotImplementedError", "def setups():\n setups = []\n\n # If you run this in detailed mode, you need to set --t8 to 1e8\n kotani2017_F2 = dict()\n kotani2017_F2['name'] = 'kotani2017_F2'\n kotani2017_F2['piltemplate'] = kotani2017_F2_pil\n kotani2017_F2['pilparams'] = [None]\n kotani2017_F2['pepperargs'] = {'condensed': True, 'conc': 'nM', 'release_cutoff': 10}\n kotani2017_F2['simulation'] = [\n ('pilsimulator', '--nxy', '--atol', '1e-13', '--rtol', '1e-13', '--mxstep', '10000', '--t8', '36000', '--p0', 'S1=10', 'S2=10', 'R=20', 'C1=1'),\n ('pilsimulator', '--nxy', '--atol', '1e-13', '--rtol', '1e-13', '--mxstep', '10000', '--t8', '36000', '--p0', 'S1=10', 'S2=10', 'R=20', 'C1=0.5'),\n ('pilsimulator', '--nxy', '--atol', '1e-13', '--rtol', '1e-13', '--mxstep', '10000', '--t8', '36000', '--p0', 'S1=10', 'S2=10', 'R=20', 'C1=0.05')]\n kotani2017_F2['reporter'] = 'D'\n kotani2017_F2['exp_results'] = [(7733, 7.42), (11333, 6.18), (25533, 1.40)]\n setups.append(kotani2017_F2)\n\n\n\n # If you run this in detailed mode, you need to set --t8 to 1e8\n kotani2017_F3 = dict()\n kotani2017_F3['name'] = 'kotani2017_F3'\n kotani2017_F3['piltemplate'] = kotani2017_F3_pil\n kotani2017_F3['pilparams'] = [None]\n kotani2017_F3['pepperargs'] = {'condensed': True, 'conc': 'nM', 'release_cutoff': 10}\n kotani2017_F3['simulation'] = [\n ('pilsimulator', '--nxy', '--atol', '1e-10', '--rtol', '1e-10', '--mxstep', '10000', '--t8', '360000', '--p0', 'S1=10', 'S2=10', 'S3=10', 'S4=10', 'R=20', 'C1=0.1'),\n ('pilsimulator', '--nxy', '--atol', '1e-10', '--rtol', '1e-10', '--mxstep', '10000', '--t8', '360000', '--p0', 'S1=10', 'S2=10', 'S3=10', 'S4=10', 'R=20', 'C1=0.01'),\n ('pilsimulator', '--nxy', '--atol', '1e-10', '--rtol', '1e-10', '--mxstep', '10000', '--t8', '360000', '--p0', 'S1=10', 'S2=10', 'S3=10', 'S4=10', 'R=20', 'C1=0.001')]\n kotani2017_F3['reporter'] = 'D'\n kotani2017_F3['exp_results'] = [(21220, 7.72), (64203, 3.12), (86996, 0.69)]\n setups.append(kotani2017_F3)\n\n # If you run this in detailed mode, you need to set --t8 to 1e8\n kotani2017_F4 = dict()\n kotani2017_F4['name'] = 'kotani2017_F4'\n kotani2017_F4['piltemplate'] = kotani2017_F4_pil\n kotani2017_F4['pilparams'] = [None]\n kotani2017_F4['pepperargs'] = {'condensed': True, 'conc': 'nM', 'release_cutoff': 10}\n kotani2017_F4['simulation'] = [\n ('pilsimulator', '--nxy', '--atol', '1e-10', '--rtol', '1e-10', '--mxstep', '10000', '--t8', '360000', '--p0', 'S5au=10', 'S6au=10', 'R=20', 'C1x=0.1'),\n ('pilsimulator', '--nxy', '--atol', '1e-10', '--rtol', '1e-10', '--mxstep', '10000', '--t8', '360000', '--p0', 'S5au=10', 'S6au=10', 'R=20', 'C1x=0.01'),\n ('pilsimulator', '--nxy', '--atol', '1e-10', '--rtol', '1e-10', '--mxstep', '10000', '--t8', '360000', '--p0', 'S5au=10', 'S6au=10', 'R=20', 'C1x=0.001'),\n ('pilsimulator', '--nxy', '--atol', '1e-10', '--rtol', '1e-10', '--mxstep', '10000', '--t8', '360000', '--p0', 'S5au=10', 'S6au=10', 'R=20', 'C1x=0')]\n kotani2017_F4['reporter'] = 'D'\n kotani2017_F4['exp_results'] = [(6815, 6.06), (9004, 4.78), (10278, 4.03), (10795, 3.73)]\n setups.append(kotani2017_F4)\n\n return setups", "def postprocess_project(output_type, exclude_types, stitch_only, clean_outputs,\n project_path):\n project_path_short = os.path.basename(os.path.normpath(project_path))\n if clean_outputs:\n 
bripipetools.postprocessing.OutputCleaner(project_path).clean_outputs()\n logger.info(\"Output files cleaned for '{}\".format(project_path_short))\n\n combined_paths = []\n if output_type in ['c', 'a'] and 'c' not in exclude_types:\n logger.debug(\"generating combined counts file\")\n path = os.path.join(project_path, 'counts')\n # Determine whether the path exists before calling write_table\n # important for eg: ChIPseq, which doesn't contain a 'counts' folder.\n if not os.path.exists(path):\n proceed = input(\"{} not found & will be skipped. Proceed? (y/[n]): \"\n .format(path))\n if proceed != 'y':\n logger.info(\"Exiting program.\")\n sys.exit(1)\n else:\n bripipetools.postprocessing.OutputStitcher(path).write_table()\n\n if output_type in ['m', 'a'] and 'm' not in exclude_types:\n logger.debug(\"generating combined metrics file\")\n path = os.path.join(project_path, 'metrics')\n combined_paths.append(\n bripipetools.postprocessing.OutputStitcher(path).write_table())\n\n if output_type in ['q', 'a'] and 'q' not in exclude_types:\n logger.debug(\"generating combined QC file(s)\")\n path = os.path.join(project_path, 'QC')\n bripipetools.postprocessing.OutputStitcher(\n path).write_overrepresented_seq_table()\n combined_paths.append(\n bripipetools.postprocessing.OutputStitcher(path).write_table())\n\n if output_type in ['v', 'a'] and 'v' not in exclude_types:\n logger.debug(\"generating combined validation file(s)\")\n path = os.path.join(project_path, 'validation')\n try:\n combined_paths.append(\n bripipetools.postprocessing.OutputStitcher(path).write_table()\n )\n except OSError:\n logger.warning((\"no validation files found \"\n \"for project {}; skiproject_pathing\")\n .format(project_path))\n logger.info(\"Combined output files generated for '{}' with option '{}'\"\n .format(project_path_short, output_type))\n\n if not stitch_only:\n bripipetools.postprocessing.OutputCompiler(combined_paths).write_table()\n logger.info(\"Merged all combined summary data tables for '{}'\"\n .format(project_path_short))", "def make_project(c_file, no_synth_directives, add_synth_directives, no_sync_hardware, pipeline):\n\n # create project name based on input file\n project_name = c_file.split(\".\")[0]\n verilog_file = project_name+'.v'\n project_folder = os.path.dirname(os.path.abspath(c_file))\n output_directory = project_name+\"_files\"\n\n # Create folder and generate Makefile\n print(\"INFO: Creating project...\") \n if os.path.exists(output_directory):\n shutil.rmtree(output_directory, ignore_errors=True)\n os.makedirs(output_directory)\n # remove pll, verilog, tcl...\n if os.path.exists(os.path.join(project_folder,\"pll\")):\n shutil.rmtree(os.path.join(project_folder,\"pll\"))\n if os.path.exists(os.path.join(project_folder,\"sdc\")):\n shutil.rmtree(os.path.join(project_folder,\"sdc\"))\n if os.path.exists(os.path.join(project_folder,\"dynamic_clock.v\")):\n os.remove(os.path.join(project_folder,\"dynamic_clock.v\"))\n if os.path.exists(os.path.join(project_folder,\"setup_sync_proj.tcl\")):\n os.remove(os.path.join(project_folder,\"setup_sync_proj.tcl\"))\n if os.path.exists(os.path.join(project_folder,project_name+\"_record.v\")):\n os.remove(os.path.join(project_folder,project_name+\"_record.v\"))\n if os.path.exists(os.path.join(project_folder,\"board_top.v\")):\n os.remove(os.path.join(project_folder,\"board_top.v\"))\n if os.path.exists(os.path.join(project_folder,\"connect_top.v\")):\n os.remove(os.path.join(project_folder,\"connect_top.v\"))\n\n # make:\n print(\"INFO: Cleaning 
previous files...\")\n execute([\"make\",\"clean\"], cd=project_folder, wait=True)\n execute([\"make\"], cd=project_folder, wait=True)\n # backup copy of verilog\n shutil.copy(verilog_file,project_name+\"_record.v\")\n\n # Clean away sdc timing constraint files\n # enhanced synthesis constraints\n sdc_file = os.path.join(project_folder, 'sdc', \"path_delays.sdc\")\n clean_file(sdc_file)\n # debug constraints\n sdc_file_debug = os.path.join(project_folder, 'sdc', \"path_delays_debug.sdc\")\n clean_file(sdc_file_debug)\n sdc_file_fmax = os.path.join(project_folder, 'sdc', \"fmax_delay.sdc\")\n clean_file(sdc_file_fmax)\n\n # make p:\n print(\"INFO: Generating Quartus Project...\")\n # project constraints\n sdc_file_project = project_name+\".sdc\"\n clean_file(sdc_file_project) # to be repopulated later\n\n # move plls\n shutil.copytree(os.path.join(TOOL_PATH,\"pll\",str(int(PLL_CLOCK))), os.path.join(project_folder,\"pll\"))\n # move clock file\n shutil.copy(os.path.join(TOOL_PATH, \"verilog\", \"dynamic_clock.v\"), os.path.join(project_folder, \"dynamic_clock.v\"))\n # move tcl file\n shutil.copy(os.path.join(TOOL_PATH, \"tcl\", \"setup_sync_proj.tcl\"), os.path.join(project_folder, \"setup_sync_proj.tcl\"))\n\n execute([\"make\",\"p\"], cd=project_folder, wait=True)\n\n # add synthesis directives\n add_directives = bool(no_sync_hardware==False and no_synth_directives==False) or bool(no_sync_hardware==True and add_synth_directives==True)\n if add_directives: \n print(\"INFO: Adding synthesis directives\")\n add_synthesis_directives(verilog_file)\n\n if no_sync_hardware: # make project without syncopation hardware\n # custom sdc\n with open(sdc_file_project, 'w') as out_f:\n out_f.write(\"create_clock -period 2.000 -name CLOCK_50 [get_ports CLOCK_50]\\n\")\n out_f.write(\"derive_pll_clocks\\n\")\n out_f.write(\"derive_clock_uncertainty\\n\")\n shutil.copy(os.path.join(TOOL_PATH, \"verilog\", \"de1_top.v\"), os.path.join(project_folder, \"connect_top.v\"))\n else: \n with open(sdc_file_project, 'w') as out_f:\n print(\"INFO: Saving project sdc file \"+sdc_file_project)\n out_f.write(\"create_clock -period 20.000 -name CLOCK_50 [get_ports CLOCK_50]\\n\")\n out_f.write(\"derive_pll_clocks\\n\")\n out_f.write(\"create_clock -period 2 -name dyn_clk [get_nodes connect_top_INST|dynamic_clock:CLOCK_GEN|clk]\\n\")\n out_f.write(\"set_false_path -from dyn_clk -to {connect_top_INST|PLL_INST|pll_inst|altera_pll_i|general[0].gpll~PLL_OUTPUT_COUNTER|divclk}\\n\")\n out_f.write(\"derive_clock_uncertainty\")\n shutil.copy(os.path.join(TOOL_PATH, \"verilog\", \"syncopation_top.v\"), os.path.join(project_folder, \"connect_top.v\"))\n \n # Profile rtl to determine parameters\n profile_rtl(verilog_file)\n\n # Insert Syncopation hardware\n insert_syncoption_hardware(pipeline, verilog_file)\n\n generate_top_module(no_sync_hardware, pipeline, verilog_file)\n execute([\"quartus_sh\", \"-t\", \"setup_sync_proj.tcl\"], cd=project_folder, wait=True)\n\n # save settings such as pipeline/synth directives\n settings = {\n 'pipeline':pipeline,\n 'no_synth_directives':no_synth_directives,\n 'no_sync_hardware':no_sync_hardware\n }\n settings = save(os.path.join(output_directory, \"settings.json\"), settings)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Return tokenized list of strings from raw text input using keras functionality
def tokenize_keras(raw_data):\n    from keras.preprocessing.text import text_to_word_sequence\n    return [text_to_word_sequence(d) for d in raw_data]
[ "def _text_tokenize(input_data: Mapping[str, tf.Tensor]) -> tf.Tensor:\n input_str = tf.reshape(input_data['translation'], shape=[1])\n standard_text = _text_standardization(input_str)\n return tf.strings.split(standard_text)", "def _batch_tokenize(self, text: List[str]) -> List[List[str]]:\n return self.bert_model.batch_tokenize([t.strip() for t in text])", "def tokenize(lang):\n lang_tokenizer = tf.keras.preprocessing.text.Tokenizer(filters='')\n lang_tokenizer.fit_on_texts(lang)\n tensor = lang_tokenizer.texts_to_sequences(lang)\n # pad zero after sequences for the same length.\n tensor = tf.keras.preprocessing.sequence.pad_sequences(tensor,\n padding='post')\n return tensor, lang_tokenizer", "def preprocess(text_raw, tk):\n text_tokenized = word_tokenize(text_raw)\n text_encoded = tk.texts_to_sequences([text_tokenized])\n text_array = pad_sequences(text_encoded, maxlen=200, padding='post')\n return text_array", "def _tokenize_tensor(self, text):\n def _python_wrapper(string_t):\n string = tf.compat.as_text(string_t.numpy())\n tokens = self._tokenize_string(string)\n return tf.constant(tokens)\n tokens = tf.py_function(_python_wrapper, [text], tf.string)\n tokens.set_shape([None])\n return tokens", "def tokenize(self, text):\r\n return list(text)", "def create_tokenizer(dataset):\n lang_tokenizer = tf.keras.preprocessing.text.Tokenizer(char_level=True)\n lang_tokenizer.fit_on_texts([x['input'] for x in dataset])\n return lang_tokenizer", "def text_to_tokens(self, text, reverse=False, padding=False):", "def generate(fit_model):\n pred_indices = []\n pred_words = []\n # Replace start_index with actual start token\n start_index = random.randint(0, len(text) - maxlen - 1)\n current_vec = glove_matrix.get_vec(start_index)\n\n for iteration in range(NUM_PRED_WORDS):\n preds = fit_model.predict(current_vec, verbose=0)\n pred_index = sample(preds)\n pred_indices = pred_indices + [next_index]\n pred_words = pred_words + [glove_matrix.get_word(pred_index)]\n current_vec = glove_matrix.get_vec(pred_index)\n\n assert NUM_PRED_WORDS == len(pred_words)\n return pred_words", "def tokenize(self, examples: List[Example]) -> List[TokenizedExample]:", "def get_text_features() -> np.array:\r\n # Universal sentence encoder model\r\n # Original model by Google could be loaded from: https://tfhub.dev/google/universal-sentence-encoder/4\r\n # In this notebook the model is loaded from a public dataset on Kaggle\r\n # at https://www.kaggle.com/dimitreoliveira/universalsentenceencodermodels\r\n text_model = tf.keras.Sequential(\r\n [KerasLayer(txt_model_path, input_shape=[], dtype=tf.string, # Pretrained model\r\n output_shape=[512], trainable=False),\r\n tf.keras.layers.Layer(512, dtype='float16')] # This layer reduces precision of float numbers\r\n )\r\n\r\n # Convert all texts to vectors\r\n features = text_model.predict(data['title'],\r\n batch_size=BATCH_SIZE,\r\n use_multiprocessing=True,\r\n workers=-1)\r\n print('Text features extracted. 
Shape:', features.shape)\r\n\r\n return features", "def _tokenize_tensor(self, text):\n if compat.tf_supports(\"py_function\"):\n def _python_wrapper(string_t):\n string = tf.compat.as_text(string_t.numpy())\n tokens = self._tokenize_string(string)\n return tf.constant(tokens)\n tokens = tf.py_function(_python_wrapper, [text], tf.string)\n tokens.set_shape([None])\n return tokens\n\n text = tf.py_func(\n lambda x: tf.compat.as_bytes(\"\\0\".join(self.tokenize(x))), [text], tf.string)\n tokens = tf.string_split([text], delimiter=\"\\0\").values\n return tokens", "def _preprocess(self, txt_seq):\n input = []\n for token in txt_seq.split():\n input.append(self.word2id.get(token, self.word2id[\"<UNK>\"]))\n input.append(self.word2id[\"<END>\"])\n input = torch.LongTensor(input)\n return input", "def tokenize(self, inputs):\n if hasattr(self.tokenizer, \"batch_encode\"):\n return self.tokenizer.batch_encode(inputs)\n else:\n return [self.tokenizer.encode(x) for x in inputs]", "def pre_process(text):\n # replace (,.'\") with ''\n text = text.replace(',', '')\n text = text.replace('.', '')\n text = text.replace(\"'\", '')\n text = text.replace(\"\\\"\", '')\n\n # tokenize into words\n tokens = [word for sent in sent_tokenize(text) for word in word_tokenize(sent)]\n\n # remove stopwords\n stop = stopwords.words('english')\n tokens = [token for token in tokens if token not in stop]\n\n # remove words less than three letters\n tokens = [word for word in tokens if len(word) >= 3]\n\n # lower capitalization\n tokens = [word.lower() for word in tokens]\n\n # lemmatize\n lmtzr = WordNetLemmatizer()\n tokens = [lmtzr.lemmatize(word) for word in tokens]\n\n return tokens", "def text_to_tokens(self, text, reverse=False, padding=False):\r\n tokens = self.texts_to_sequences([text])\r\n tokens = np.array(tokens)\r\n\r\n if reverse: \r\n tokens = np.flip(tokens, axis=1)\r\n truncating = 'pre'\r\n else:\r\n truncating = 'post'\r\n\r\n if padding:\r\n tokens = pad_sequences(tokens,\r\n maxlen=self.max_tokens,\r\n padding='pre',\r\n truncating=truncating)\r\n return tokens", "def tokenize_english(self, text):\n return [tok.text for tok in self.english_model.tokenizer(text)]", "def text_preprocessing(self, texts, max_length=128):\n input_ids, attention_mask = self.tokenize_and_encode(texts, max_length)\n input_ids = torch.tensor(input_ids)\n attention_mask = torch.tensor(attention_mask)\n return input_ids, attention_mask", "def get_token_list(text):\n return text.split()" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Return True if word passes filter
def filter1(word):\n    if not word:\n        return False\n    w = word.lower()\n    if w in STOPWORDS:\n        return False\n    return True
[ "async def wordfilter(self, ctx):\n pass", "async def wordfilter_test(self, ctx, *, message):\n found = self.test_sentence(message)\n if found:\n await ctx.send(f\"Message contains `{found}`\")\n else:\n await ctx.send(\"Couldn't detect any filtered words\")", "def filter(self, value, filter_values):\n for word in filter_values:\n if word in value.lower():\n return True\n\n return False", "def accepts(self, word : str) -> bool:\n pass", "def match(self, filter_text):\n\n return filter_text.lower() in self.artist.lower() or \\\n super().match(filter_text)", "def match(self, filter_text):\n\n return filter_text.lower() in self.director.lower() or \\\n filter_text.lower() in self.actor.lower() or \\\n super().match(filter_text)", "def match(self, filter_text):\n\n return filter_text.lower() in self.author.lower() or \\\n super().match(filter_text)", "def _sentence_contains_food_word(self, sentence):\n sentence = list(map(lambda word: word.lower(), sentence))\n return reduce(lambda prev, word: prev or (word in self.food_words),\n sentence,\n False)", "def accepts(self, word):\n \n if word == '':\n return self.accepting\n \n else:\n for ranges in self.successors:\n if word[0] in ranges:\n if self.successors[ranges].accepts(word[1:]):\n return True\n \n return False", "def __contains__(self, word):\n return self.wv.__contains__(word)", "def match_all(self, text, words):\r\n text_lower = text.lower()\r\n result = all(w in text_lower for w in words)\r\n return result", "def token_filter_li2014(token):\n return (TT_FILTER.match(token.word) is not None and\n TT_FILTER.match(token.tag) is not None)", "def cleaning_filter(text):\n try:# A REFAIRE\n if \"This article has been retracted\" in text:\n text =\"retracted\"\n return False\n if \"Cette article\" in text:\n text =\"retracted\"\n return False\n if len(text) < 20:\n return False\n except:\n return False\n return True", "def inWord(letter, word):\n\n return False", "def text_is_relevant(self, text):\n for word in text:\n if word in self.relevant_words:\n return True\n return False", "def __contains__(self, word):\n if word in self.vocab:\n return True\n else:\n char_ngrams = compute_ngrams(word, self.min_n, self.max_n)\n return any(ng in self.ngrams for ng in char_ngrams)", "def custom_filter(text):\n\tregex = re.compile('[^a-zA-Z]{2,}')\n\ttext = regex.sub(' ', text.lower())\n\t# text = re.sub('[^a-zA-Z]$', ' ', text.lower())\n\ttokens = nltk.word_tokenize(text)\n\ttext = nltk.Text(tokens)\n\ttags = nltk.pos_tag(text)\n\n\tverbs_and_nouns = ['VB', 'VBD', 'VBG', 'VBN', 'VBP', 'VBZ', \n\t\t'NN', 'NNS', 'NNP', 'NNPS']\n\tcounts = Counter(word for word, tag in tags \n\t\tif tag in verbs_and_nouns and len(word) > 2)\n\treturn counts.most_common(100)", "def is_unimportant(word):\n return word in ['.', '!', ',', ] or '\\'' in word or word in stop_words", "def word_matches(word):\n if self.ignore_case:\n word = word.lower()\n\n if self.match_middle:\n return last_words in word\n else:\n return word.startswith(last_words)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Return dict of word-to-id from raw text data. If max_size is specified, vocab is truncated to set of highest frequency words within size.
def build_vocab(raw_data, max_size=None):\n    data = [w for doc in tokenize_keras(raw_data) for w in doc]\n    counter = collections.Counter(data)\n    count_pairs = sorted(counter.items(), key=lambda x: (-x[1], x[0]))\n    if max_size:\n        count_pairs = count_pairs[:max_size]\n    words, _ = list(zip(*count_pairs))\n    word_to_id = dict(zip(words, range(len(words))))\n    word_to_id[UNKNOWN_WORD] = len(word_to_id)\n    word_to_id[PAD_WORD] = len(word_to_id)\n    return word_to_id
[ "def embeddings_to_dict(path, max_words=None):\n w2v = {} \n with codecs.open(path,\"r\",\"utf-8\") as fid:\n #ignore first line\n fid.readline() \n #avoid extra comparisons if we want load all the words\n if max_words is None:\n for line in fid:\n entry = line.split()\n if len(entry) > 2:\n w2v[entry[0]] = np.array(entry[1:]).astype('float32')\n else:\n for i, line in enumerate(fid):\n entry = line.split()\n if len(entry) > 2:\n w2v[entry[0]] = np.array(entry[1:]).astype('float32')\n if i >= max_words:break\n return w2v", "def construct_vocab(lines, vocab_size):\n vocab = {}\n for line in lines:\n for word in line:\n if word not in vocab:\n vocab[word] = 1\n else:\n vocab[word] += 1\n \n word2id = {}\n id2word = {}\n word2id['<pad>'] = 0\n word2id['<unk>'] = 1\n id2word[0] = '<pad>'\n id2word[1] = '<pad>'\n \n sorted_word2id = sorted(\n vocab.items(),\n key=operator.itemgetter(1),\n reverse=True\n )\n\n sorted_words = [x[0] for x in sorted_word2id[:vocab_size]]\n\n for ind, word in enumerate(sorted_words):\n word2id[word] = ind + 2\n\n for ind, word in enumerate(sorted_words):\n id2word[ind + 2] = word\n\n return word2id, id2word", "def gen_vocab(data, max_tokens = 200000):\n vectorizer = TextVectorization(max_tokens=max_tokens, output_sequence_length=200)\n text_ds = tf.data.Dataset.from_tensor_slices(data).batch(128)\n vectorizer.adapt(text_ds)\n vocab = vectorizer.get_vocabulary()\n return vocab, vectorizer", "def create_vocabulary(vocabulary_path, data_path, max_vocabulary_size,\n tokenizer=None, normalize_digits=True):\n if not gfile.Exists(vocabulary_path):\n print(\"Creating vocabulary %s from data %s\" % (vocabulary_path, data_path))\n vocab = {}\n with gfile.GFile(data_path, mode=\"r\") as f:\n counter = 0\n for line in f:\n counter += 1\n line = line.strip().split('\\t')[0]\n if counter % 100000 == 0:\n print(\" processing line %d\" % counter)\n tokens = tokenizer(line) if tokenizer else basic_tokenizer(line)\n for w in tokens:\n word = re.sub(_DIGIT_RE, \"0\", w) if normalize_digits else w\n if word in vocab:\n vocab[word] += 1\n else:\n vocab[word] = 1\n sorted_vocab = sorted(vocab, key=vocab.get, reverse=True)\n vocab_list = _START_VOCAB + sorted_vocab\n if len(vocab_list) > max_vocabulary_size:\n vocab_list = vocab_list[:max_vocabulary_size]\n print(\"Corpus %s has %d tokens, %d uniq words, %d vocab at cutoff %d.\" % (\n data_path, sum(vocab.values()), len(vocab), max_vocabulary_size, vocab[sorted_vocab[max_vocabulary_size - len(_START_VOCAB)]] ) )\n else:\n print(\"Corpus %s has %d tokens, %d uniq words, %d vocab at cutoff %d.\" % (\n data_path, sum(vocab.values()), len(vocab), len(vocab), 0))\n\n with gfile.GFile(vocabulary_path, mode=\"wb\") as vocab_file:\n for w in vocab_list:\n vocab_file.write(w + \"\\n\")", "def build_vocab(data, vocab_dir, vocab_size=400001):\n print('Building vocabulary...')\n\n all_data = [] # group all data\n for content in data:\n all_data.extend(content.split())\n\n counter = Counter(all_data) # count and get the most common words\n count_pairs = counter.most_common(vocab_size - 1)\n words, _ = list(zip(*count_pairs))\n\n words = ['<PAD>'] + list(words) # add a padding with id 0 to pad the sentence to same length\n open_file(vocab_dir, 'w').write('\\n'.join(words) + '\\n')", "def create_token_vocab(vocab_size):\n vocab_dict = tff.simulation.datasets.stackoverflow.load_word_counts()\n return list(vocab_dict.keys())[:vocab_size]", "def create_vocab(vocab_size):\n vocab_dict = tff.simulation.datasets.stackoverflow.load_word_counts(\n 
cache_dir='/tmp')\n return list(vocab_dict.keys())[:vocab_size]", "def create_vocabulary(vocabulary_path, data_path, max_vocabulary_size,\n tokenizer=None, normalize_digits=True):\n print(\"creating vocab from\",data_path)\n if not gfile.Exists(vocabulary_path):\n print(\"Creating vocabulary %s from data %s\" % (vocabulary_path, data_path))\n vocab = {}\n with gfile.GFile(data_path, mode=\"r\") as f:\n counter = 0\n for line in f:\n counter += 1\n if counter % 10000 == 0:\n print(\" processing line %d\" % counter)\n tokens = tokenizer(line) if tokenizer else basic_tokenizer(line)\n for w in tokens:\n word = w\n if word in vocab:\n vocab[word] += 1\n else:\n vocab[word] = 1\n \n vocab_list = _START_VOCAB + sorted(vocab, key=vocab.get, reverse=True)\n if len(vocab_list) > max_vocabulary_size:\n vocab_list = vocab_list[:max_vocabulary_size]\n\n with gfile.GFile(vocabulary_path, mode=\"w\") as vocab_file:\n for w in vocab_list:\n vocab_file.write(w + \"\\n\")", "def _create_id_map(self, word_list, max_list_length):\n\n ############ 1.5 TODO\n from collections import Counter\n \n # import pdb; pdb.set_trace()\n word_rank_list = Counter(word_list).most_common(max_list_length)\n \n id_map = {}\n for idx, (word,_) in enumerate(word_rank_list):\n id_map[word] = idx\n\n ############\n # raise NotImplementedError()\n return id_map", "def load_vocab(vocab_file, t=0, vocab_size=None):\n thisvocab2id = {PAD_WORD: 0, BOS_WORD: 1, UNK_WORD: 2, EOS_WORD: 3, SEP_WORD: 4, CLS_WORD: 5, MASK_WORD: 6}\n thisid2vocab = [PAD_WORD, BOS_WORD, UNK_WORD, EOS_WORD, SEP_WORD, CLS_WORD, MASK_WORD]\n id2freq = [0 for _ in range(7)]\n with codecs.open(vocab_file, 'r') as f:\n for line in f:\n try:\n name, freq = line.strip('\\n').strip('\\r').split('\\t')\n except:\n continue\n if int(freq) >= t:\n idx = len(thisid2vocab)\n thisvocab2id[name] = idx\n thisid2vocab.append(name)\n id2freq.append(int(freq))\n if vocab_size is not None and len(thisid2vocab) == vocab_size:\n break\n id2freq[0] = sum(id2freq) // len(id2freq)\n id2freq[1] = id2freq[0]\n id2freq[2] = id2freq[0]\n id2freq[3] = id2freq[0]\n\n print('item size: ', len(thisvocab2id))\n\n return thisvocab2id, thisid2vocab, id2freq", "def create_vocabulary(vocabulary_path, data_path, max_vocabulary_size,\n tokenizer=None, normalize_digits=True,\n _DIGIT_RE=re.compile(br\"\\d\"),\n _START_VOCAB=[b\"_PAD\", b\"_GO\", b\"_EOS\", b\"_UNK\"]):\n if not gfile.Exists(vocabulary_path):\n print(\"Creating vocabulary %s from data %s\" % (vocabulary_path, data_path))\n vocab = {}\n with gfile.GFile(data_path, mode=\"rb\") as f:\n counter = 0\n for line in f:\n counter += 1\n if counter % 100000 == 0:\n print(\" processing line %d\" % counter)\n tokens = tokenizer(line) if tokenizer else basic_tokenizer(line)\n for w in tokens:\n word = re.sub(_DIGIT_RE, b\"0\", w) if normalize_digits else w\n if word in vocab:\n vocab[word] += 1\n else:\n vocab[word] = 1\n vocab_list = _START_VOCAB + sorted(vocab, key=vocab.get, reverse=True)\n if len(vocab_list) > max_vocabulary_size:\n vocab_list = vocab_list[:max_vocabulary_size]\n with gfile.GFile(vocabulary_path, mode=\"wb\") as vocab_file:\n for w in vocab_list:\n vocab_file.write(w + b\"\\n\")\n else:\n print(\"Vocabulary %s from data %s exists\" % (vocabulary_path, data_path))", "def create_vocabulary(vocabulary_path, data_path, max_vocabulary_size,\n tokenizer=None, normalize_digits=True):\n if not gfile.Exists(vocabulary_path):\n print(\"Creating vocabulary %s from data %s\" % (vocabulary_path, data_path))\n vocab = {}\n with 
gfile.GFile(data_path, mode=\"rb\") as f:\n counter = 0\n for line in f:\n counter += 1\n if counter % 100000 == 0:\n print(\" processing line %d\" % counter)\n tokens = tokenizer(line) if tokenizer else basic_tokenizer(line)\n for w in tokens:\n word = re.sub(_DIGIT_RE, b\"0\", w) if normalize_digits else w\n if word in vocab:\n vocab[word] += 1\n else:\n vocab[word] = 1\n vocab_list = _START_VOCAB + sorted(vocab, key=vocab.get, reverse=True)\n if len(vocab_list) > max_vocabulary_size:\n print(\"vocab too big\")\n vocab_list = vocab_list[:max_vocabulary_size]\n with gfile.GFile(vocabulary_path, mode=\"wb\") as vocab_file:\n for w in vocab_list:\n vocab_file.write(w + b\"\\n\")", "def make_word2id():\r\n with open(\"public_data/stats/stats_train.pkl\", 'rb') as stats:\r\n stats = pickle.load(stats)\r\n vocab = stats[\"VOCAB\"]\r\n word2id = {word: id for id, word in enumerate([\"PAD\"] + [\"UNK\"] + vocab)}\r\n with open('public_data/vocab/word2id.pkl', 'wb') as out:\r\n pickle.dump(word2id, out, protocol=4)", "def create_vocab(vocab_size: int) -> list[str]:\n return list(\n tff.simulation.datasets.stackoverflow.load_word_counts(\n vocab_size=vocab_size\n ).keys()\n )", "def build_vocab(self, min_count=3):\n word2count = defaultdict(int)\n for sentence in self.tokenized_corpus:\n for word in sentence:\n word2count[word] += 1\n\n word2dict = {}\n word2dict['PAD'] = {'id': 0}\n word2dict['UNK'] = {'id': 1}\n for word in word2count:\n if word2count[word] >= min_count:\n word2dict[word] = {'id': len(word2dict), 'count': word2count[word]}\n self.vocab = word2dict", "def get_vocab(self):\n vocab = {self.convert_ids_to_tokens(i): i for i in range(self.vocab_size)}\n vocab.update(self.added_tokens_encoder)\n return vocab", "def create_tag_vocab(vocab_size):\n tag_dict = tff.simulation.datasets.stackoverflow.load_tag_counts()\n return list(tag_dict.keys())[:vocab_size]", "def build_dict(sentences, max_words=50000):\n word_count = Counter()\n for sent in sentences:\n for w in sent.split(' '):\n word_count[w] += 1\n max_words = int(len(word_count.keys()) * 0.95)\n word_count['unk'] = MAX_NUM\n ls = word_count.most_common(max_words)\n logging.info('#Words: %d -> %d' % (len(word_count), len(ls)))\n for key in ls[:5]:\n logging.info(key)\n logging.info('...')\n for key in ls[-5:]:\n logging.info(key)\n # leave 0 to UNK\n # leave 1 to delimiter |||\n return {w[0]: index for (index, w) in enumerate(ls)}, {index:w[0] for (index, w) in enumerate(ls)}", "def _build_vocabulary(words, vocabulary_size):\n\n # create dictionary with the most common heroes\n count = [['UNK', -1]]\n count.extend(collections.Counter(words).most_common(vocabulary_size - 1))\n dictionary = dict()\n\n for word, _ in count:\n dictionary[word] = len(dictionary)\n\n data = list()\n unk_count = 0\n for word in words:\n if word in dictionary:\n index = dictionary[word]\n else:\n # the word is unknown\n index = 0\n unk_count = unk_count + 1\n data.append(index)\n\n count[0][1] = unk_count\n\n # save the dictionary's reversed version for later usage\n reverse_dictionary = dict(zip(dictionary.values(), dictionary.keys()))\n return data, count, dictionary, reverse_dictionary" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Convert raw text data into integer ids
def raw_to_ids(raw_data, word_to_id):
    docs = tokenize_keras(raw_data)
    uid = word_to_id[UNKNOWN_WORD]
    return [[word_to_id.get(w, uid) for w in doc] for doc in docs]
[ "def map_text_to_id(self, text: str) -> List[int]:\n return self.map_token_to_id(self.map_text_to_token(text))", "def text_to_ids(source_text, target_text, source_vocab_to_int, target_vocab_to_int):\n source_id_text = []\n target_id_text = []\n for text in source_text.split('\\n'):\n source_id_text.append([source_vocab_to_int[w] for w in text.split()])\n \n for text in target_text.split('\\n'):\n target_id_text.append([target_vocab_to_int[w] for w in text.split()] + [target_vocab_to_int['<EOS>']])\n\n return source_id_text, target_id_text", "def text_to_ids(source_text, target_text, source_vocab_to_int, target_vocab_to_int):\n # 找不到就用<UNK>代替,在target每句的句末加上结束标识\n source_text_to_int = [[source_vocab_to_int.get(word, source_vocab_to_int['<UNK>']) for word in sentence.split()] for\n sentence in source_text.split('\\n')]\n target_text_to_int = [[target_vocab_to_int.get(word, source_vocab_to_int['<UNK>']) for word in sentence.split()] + [\n target_vocab_to_int['<EOS>']] for sentence in target_text.split('\\n')]\n\n return source_text_to_int, target_text_to_int", "def text2ids(self, text: str, length: int):\n # Tokenize\n tokens = self.tokenizer.tokenize(text)\n token_ids = self.tokenizer.tokens2ids(tokens)\n # Padding\n while len(token_ids) < length:\n token_ids.append(0)\n # Truncate\n if len(token_ids) > length:\n token_ids = token_ids[:length]\n assert len(token_ids) == length\n return token_ids", "def data_to_ids(data, mappings):\n\n def strQ2B(ustring):\n rstring = \"\"\n for uchar in ustring:\n inside_code = ord(uchar)\n if inside_code == 12288:\n inside_code = 32\n elif 65281 <= inside_code <= 65374:\n inside_code -= 65248\n rstring += unichr(inside_code)\n return rstring\n\n def strB2Q(ustring):\n rstring = \"\"\n for uchar in ustring:\n inside_code = ord(uchar)\n if inside_code == 32:\n inside_code = 12288\n elif 32 <= inside_code <= 126:\n inside_code += 65248\n rstring += unichr(inside_code)\n return rstring\n\n def map(item, mapping):\n if item in mapping:\n return mapping[item]\n item = strB2Q(item)\n if item in mapping:\n return mapping[item]\n item = strQ2B(item)\n if item in mapping:\n return mapping[item]\n return mapping['<UNK>']\n\n def map_seq(seqs, mapping):\n return [[map(item, mapping) for item in seq] for seq in seqs]\n\n ret = []\n for d, m in izip(data, mappings):\n ret.append(map_seq(d, m))\n return tuple(ret)", "def sent_to_ids(sent, tokenizer):\n tokens = sent_to_tokens(sent, tokenizer)\n ids = tokenizer.convert_tokens_to_ids(tokens)\n return ids", "def str_to_int(data_with_str):\n data = list(map(lambda x: ord(x) if isinstance(x, str) else x, data_with_str))\n return data", "def parse_ints(text):\n return [int(x) for x in re.findall(r'\\d+', text)]", "def tokens_to_ids(self,\n tokens):\n #tokens = convert_to_unicode(tokens)\n return [self.sp_processor.PieceToId(token) for token in tokens]", "def text_to_int(self, text):\n int_sequence = []\n for c in text:\n if c == ' ':\n ch = self.char_map['']\n else:\n ch = self.char_map[c]\n int_sequence.append(ch)\n return int_sequence", "def batches2IDs(batches):\n l = [ np.array( [ char2id(x) for x in characters(b) ] ) for b in batches ]\n return l", "def bytes2integers(data):\n return list(data)", "def text_to_id(self, text):\n text = self.strip_accents(text.lower())\n text = re.sub('[ ]+', '_', text)\n text = re.sub('[^0-9a-zA-Z_-]', '', text)\n return text", "def encode(self, s):\n return [int(w) + self._num_reserved_ids for w in s.split()]", "def _preprocess(self, txt_seq):\n input = []\n for token in txt_seq.split():\n 
input.append(self.word2id.get(token, self.word2id[\"<UNK>\"]))\n input.append(self.word2id[\"<END>\"])\n input = torch.LongTensor(input)\n return input", "def convert_texts_to_ids(self, batch_text):\n max_len = self.field_config.max_seq_len\n batch_fea_list = []\n name_block_len = []\n name_block_begin = []\n name_block_end = []\n for idx_batch, text in enumerate(batch_text):\n fea_str = text.split(' [SEP] ')\n fea_list = [[float(y) for y in x.split(' ')] for x in fea_str]\n\n # 加上截断策略\n if len(fea_list) > self.field_config.max_seq_len:\n logging.warn('input instance is to long: %s', text)\n fea_list = truncation_words(fea_list, self.field_config.max_seq_len, self.field_config.truncation_type)\n batch_fea_list.append(fea_list)\n\n return_list = []\n\n padded = [0] * self._feature_dim\n padded_ids = np.array([inst + list([padded] * (max_len - len(inst))) for inst in batch_fea_list])\n padded_ids = padded_ids.astype('float32').reshape([-1, max_len, self._feature_dim])\n\n return_list.append(padded_ids)\n\n return return_list", "def text_to_id(tweets_dict):\n text_to_id_dict = {}\n for key in tweets_dict:\n # we assume that there are no retweets as this has been preprocessed before\n text_to_id_dict[key] = tweets_dict[key][\"text\"]\n return text_to_id_dict", "def makeWordId(dic, data):\r\n ids = []\r\n for itr in data:\r\n id = []\r\n for s in itr:\r\n try:\r\n id.append(dic[s])\r\n except:\r\n id.append(4789)\r\n ids.append(id)\r\n return ids", "def text_to_id(text, word_to_id_dict):\n return [word_to_id_dict[word] for word in text.split(\" \") if word in word_to_id_dict]" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Callback for when the detector has found a stop sign. Note that a distance of 0 can mean that the lidar did not pick up the stop sign at all.
def stop_sign_detected_callback(self, msg):
    # distance of the stop sign
    corners = msg.corners
    dx = corners[3] - corners[1]
    dy = corners[2] - corners[0]
    r = dx/dy # aspect ratio
    rdist = np.array([.15, .20, .25, .30, .35, .40, .45, .50])
    pixelheight = np.array([139, 102, 82, 64, 56, 50, 44, 40])
    if dy > pixelheight[-1] and dy < pixelheight[0]:
        dist = np.interp(dy, pixelheight[::-1], rdist[::-1])
    else:
        return

    # Get location of camera with respect to the map
    try:
        (translation,rotation) = self.tf_listener.lookupTransform('/map', '/camera', rospy.Time(0))
        xcam = translation[0]
        ycam = translation[1]
        zcam = translation[2]
        euler = tf.transformations.euler_from_quaternion(rotation)
        thetacam = euler[2]
    except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException):
        return

    # Get angle of robot with respect to the map
    try:
        (translation,rotation) = self.tf_listener.lookupTransform('/map', '/base_footprint', rospy.Time(0))
        euler = tf.transformations.euler_from_quaternion(rotation)
        thetarobot = euler[2]
    except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException):
        return

    # Now we have pose of robot, we want to determine stop sign angle relative
    # to camera frame
    thstopsign = (wrapToPi(msg.thetaright) + wrapToPi(msg.thetaleft))/2.
    zstopsign = dist*np.cos(-thstopsign)
    xstopsign = dist*np.sin(-thstopsign)
    x = xcam + xstopsign*np.cos(thetacam) - zstopsign*np.sin(thetacam)
    y = ycam + xstopsign*np.sin(thetacam) + zstopsign*np.cos(thetacam)

    # Now that we have x and y coord of stop sign in world frame, append coord
    found = False
    for i in range(len(self.stopSigns[0])):
        xcur = self.stopSigns[0][i]
        ycur = self.stopSigns[1][i]
        thetarobotcur = self.stopSigns[2][i]
        distance = np.sqrt((x - xcur)**2 + (y - ycur)**2)
        n = self.stopSignCounts[i]
        if distance < .2:
            if n < 100:
                # We have found the same stop sign as before
                xnew = (n/(n+1.))*xcur + (1./(n+1))*x
                ynew = (n/(n+1.))*ycur + (1./(n+1))*y
                thetarobotnew = (n/(n+1.))*thetarobotcur + (1./(n+1))*thetarobot
                self.stopSigns[0][i] = xnew
                self.stopSigns[1][i] = ynew
                self.stopSigns[2][i] = thetarobotnew
                self.stopSignCounts[i] += 1
            found = True
    if not found:
        # Found a new one, append it
        self.stopSigns[0].append(x)
        self.stopSigns[1].append(y)
        self.stopSigns[2].append(thetarobot)
        self.stopSignCounts.append(1)
[ "def stop_sign_detected_callback(self, msg):\n\n # distance of the stop sign\n # print \"Stop Sign Destected\"\n dist = msg.distance\n # if self.mode==Mode.TRACK:\n # if close enough and in nav mode, stop\n if dist > 0 and dist < self.stop_min_dist and self.mode == Mode.TRACK:\n print \"Stop Sign Countdown\"\n self.init_stop_sign()", "def stop_sign_dist_callback(self, msg):\n self.stop_sign_distance = msg.data", "def check_stop(data, direction_data):\n idx = 0\n point_data = []\n # Loop while you don't reach end of data\n while idx < len(direction_data)and idx+1 < len(direction_data):\n # Get current point\n curr_point = (float(direction_data[idx][0]),float(direction_data[idx][1]))\n # Get next point\n next_point = (float(direction_data[idx+1][0]),float(direction_data[idx+1][1]))\n # Calculate haversine distance\n dist = haversine(curr_point, next_point)\n # Initialize thresholds\n threshold_speed = 0.01\n threshold_distance = 0.002\n threshold_time = 0.4\n # Get current time\n curr_time = float(data[idx][0])\n # Get next time\n next_time = float(data[idx+1][0])\n # Get time difference\n time_diff = next_time - curr_time\n # Get current speed\n curr_speed = float(data[idx][5])\n # Append stop data if speed is less than threshold speed and time greater tha threshold time\n if curr_speed < threshold_speed and time_diff > threshold_time:\n # Append data if distance less than threshold distance\n if dist < threshold_distance:\n point_data.append(data[idx])\n\n idx += 1\n\n # Get latitude and longitude for stop data\n stop_list = process_directions(point_data)\n\n return stop_list", "def _detect_stop(func):\n def wrapper(*args,**kwargs):\n self = args[0]\n self.episode_length -= 1\n if self.episode_length <=0:\n \"\"\"if the episode is end\"\"\"\n self.end = True\n else:\n if self.adsorption:\n \"\"\"just stop moving and wait until the end of episode\"\"\"\n self.state = self.previous_state\n else:\n func(*args,**kwargs)\n self._detect_obstacles()\n\n # func(*args,**kwargs)\n # self._detect_obstacles()\n # if self.adsorption:\n # \"\"\"if this step update is invalid, the point will rebond\"\"\"\n # self.state = self.previous_state\n\n if self.distance <= 0.02:\n \"\"\"if the point reached the boundary around the goal, let it stop and reset the punishment(self.reward)\"\"\"\n self.end = True\n self.reward = 0\n if self.state[0] <0 or self.state[0] > 10 or self.state[1] <0 or self.state[1] > 10:\n # self.end = True\n self.reward = -800\n return np.array(self.state), self.reward, self.end, self.distance\n return wrapper", "def detect_stopsign(self, img):\n gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)\n\n # Detect any stop signs in the image using the classifier at various scales.\n stop_signs = self.classifier.detectMultiScale(gray, 1.02, 10)\n\n # if stop signs found, draw rectange, calc area and return True\n if stop_signs.any():\n # get position and dimensions\n x, y, w, h = stop_signs[0]\n\n # draw rectangle around stop sign\n cv2.rectangle(img, (x, y), (x+w , y+h), (255, 0, 0), 2)\n\n # calculate area of stop sign\n stopsign_area = w * h\n print(\"Stopsign found:\", stopsign_area)\n\n # return True for stopsign found\n return True\n\n # no stop sign found so return False\n return False", "def stop(self):\n return _my_preamble_pilot_swig.my_preamble_detector_sptr_stop(self)", "def init_stop_sign(self):\n\n self.stop_sign_start = rospy.get_rostime()\n self.mode = Mode.STOP\n # print \"initial stop sign\"\n # print self.stop_sign_start", "def _change_seg_stop(self, seg_img, depth_img, 
stop_signs, cam, _region_size=6): \r\n for stop in stop_signs:\r\n\r\n _dist = self._get_distance(stop.get_transform().location)\r\n \r\n _region = np.abs(depth_img - _dist)\r\n\r\n seg_img[(_region < _region_size) & (seg_img == 12)] = 26\r\n\r\n # lane markings\r\n trigger = stop.trigger_volume\r\n\r\n _trig_loc_world = self._trig_to_world(np.array([[0], [0], [0], [1.0]]).T, stop, trigger)\r\n _x = self._world_to_sensor(_trig_loc_world, self._get_sensor_position(cam))[0,0]\r\n\r\n if _x > 0: # stop is in front of camera\r\n\r\n bb = self._create_2d_bb_points(trigger, 4)\r\n trig_loc_world = self._trig_to_world(bb, stop, trigger)\r\n cords_x_y_z = self._world_to_sensor(trig_loc_world, self._get_sensor_position(cam), True)\r\n\r\n #if cords_x_y_z.size: \r\n cords_x_y_z = cords_x_y_z[:3, :]\r\n cords_y_minus_z_x = np.concatenate([cords_x_y_z[1, :], -cords_x_y_z[2, :], cords_x_y_z[0, :]])\r\n bbox = (self._sensor_data['calibration'] @ cords_y_minus_z_x).T\r\n\r\n camera_bbox = np.concatenate([bbox[:, 0] / bbox[:, 2], bbox[:, 1] / bbox[:, 2], bbox[:, 2]], axis=1)\r\n\r\n if np.any(camera_bbox[:,2] > 0):\r\n\r\n camera_bbox = np.array(camera_bbox)\r\n\r\n polygon = [(camera_bbox[i, 0], camera_bbox[i, 1]) for i in range(len(camera_bbox))]\r\n\r\n img = Image.new('L', (self._sensor_data['width'], self._sensor_data['height']), 0)\r\n ImageDraw.Draw(img).polygon(polygon, outline=1, fill=1)\r\n _region = np.array(img)\r\n\r\n seg_img[(_region == 1) & (seg_img == 6)] = 27", "def update_trailing_stop(self, trade, instrument, distance, local=True, distance_in_percent=True):\n close_exec_price = instrument.close_exec_price(trade.direction)\n stop_loss = trade.sl\n\n if trade.direction > 0:\n # long case\n ratio = close_exec_price / trade.entry_price\n sl_ratio = (trade.entry_price - trade.sl) / trade.entry_price\n dist = (close_exec_price - trade.sl) / trade.entry_price\n step = distance\n\n if distance_in_percent:\n # @todo\n if dist > (sl_ratio + step):\n stop_loss = close_exec_price * (1.0 - distance)\n else:\n # @todo\n pass\n\n # # if dist > (sl_ratio + step):\n # # stop_loss = close_exec_price * (1.0 - sl_ratio)\n # # logger.debug(\"update SL from %s to %s\" % (trade.sl, stop_loss))\n\n # # # alternative @todo how to trigger\n # # if ratio >= 1.10:\n # # stop_loss = max(trade.sl, close_exec_price - (close_exec_price/trade.entry_price*(close_exec_price-trade.entry_price)*0.33))\n\n # # ultra large and based on the distance of the price\n # # if dist > 0.25:\n # # stop_loss = trade.entry_price + (trade.entry_price * (dist * 0.5))\n\n elif trade.direction < 0:\n # short case\n ratio = close_exec_price / trade.entry_price\n sl_ratio = (trade.sl - trade.entry_price) / trade.entry_price\n dist = (trade.sl - close_exec_price) / trade.entry_price\n step = distance\n\n if distance_in_percent:\n # @todo\n if dist > (sl_ratio - step):\n stop_loss = close_exec_price * (1.0 - distance)\n pass\n else:\n # @todo\n pass\n\n if stop_loss != trade.sl:\n if local:\n trade.sl = stop_loss\n else:\n trade.modify_stop_loss(trader, instrument, stop_loss)", "def __stop_loss_dist_rsi(rsi):\n return (100 - rsi)/1000.0 # return value between 0 - 0.1", "def _early_stop(self, meters):\n return False", "def stop(self):\n return _ncofdm_swig.ShortPNdetector_sptr_stop(self)", "def linear_track(self, dist):\n\t\tglobal estop_flag, move_state\n\n\t\t#Disable timer interrupt, reset halfway flag, set target distance\n\t\tsignal.alarm(0) \n\t\thalfway_flag = False\n\n\t\t#Set starting position\n\t\twith 
self.move_state_lock:\n\t\t\tstart_x, start_y, start_z = move_state['x'], move_state['y'], move_state['z']\n\t\t#Set current position initially to start position\n\t\tcurrent_x, current_y, current_z = start_x, start_y, start_z\n\t\t#Check if the distance travelled is greater than the goal distance\n\t\twhile math.sqrt((current_x - start_x)**2 + (current_y - start_y)**2 + (current_z - start_z)**2) < abs(dist):\n\t\t\t#Check if the estop flag is set, if so, kill movement\n\t\t\tif estop_flag:\n\t\t\t\tself.publisher.publish(Mover.stop_msg)\n\t\t\telse:\n\t\t\t\ttwist_msg = Twist()\n\t\t\t\tif dist < 0:\n\t\t\t\t\tif self.correction == riu.no_correction:\n\t\t\t\t\t\ttwist_msg.linear.x = -1 * riu.move_rate\n\t\t\t\t\telse:\n\t\t\t\t\t\ttwist_msg.linear.x = -1 * riu.move_rate/2\n\t\t\t\t\tif self.correction == \"left\":\n\t\t\t\t\t\ttwist_msg.angular.z = -1 * riu.turn_rate/2\n\t\t\t\t\telif self.correction == \"right\":\n\t\t\t\t\t\ttwist_msg.angular.z = riu.turn_rate/2\n\t\t\t\t#If distance goal is positive, move forward\n\t\t\t\telif dist > 0:\n\t\t\t\t\tif self.correction == riu.no_correction:\n\t\t\t\t\t\ttwist_msg.linear.x = riu.move_rate\n\t\t\t\t\telse:\n\t\t\t\t\t\ttwist_msg.linear.x = riu.move_rate/2\n\t\t\t\t\tif self.correction == \"left\":\n\t\t\t\t\t\ttwist_msg.angular.z = riu.turn_rate/2\n\t\t\t\t\telif self.correction == \"right\":\n\t\t\t\t\t\ttwist_msg.angular.z = -1 * riu.turn_rate/2\n\n\t\t\t\tself.publisher.publish(twist_msg)\n\t\t\t\t#Check if the current movement is half completed, if so, send a Half message and set flag to avoid message duplication\n\t\t\t\tif (math.sqrt((current_x - start_x)**2 + (current_y - start_y)**2 + (current_z - start_z)**2) >= abs(dist)/2\n\t\t\t\t\tand not halfway_flag):\n\t\t\t\t\thalfway_flag = True\n\t\t\t\t\tself.status_pub.publish(String(\"half\"))\n\n\t\t\t\t#update current_x, current_y, and current_z (using local variables to be thread safe)\n\t\t\t\twith self.move_state_lock:\n\t\t\t\t\tcurrent_x = move_state['x']\n\t\t\t\t\tcurrent_y = move_state['y']\n\t\t\t\t\tcurrent_z = move_state['z']\n\t\t\trospy.sleep(.2)\n\t\tself.publisher.publish(Mover.stop_msg)\n\t\tself.status_pub.publish(String(\"done\"))\n\t\tsignal.alarm(Mover.ready_message_interval)", "def getstop(seq):\n pos = 0\n stop = -1 # Output - initialise as -1\n while pos + 3 <= len(seq):\n if seq[pos:pos + 3] in STOP: # Read by frame; if true, stop codon found\n stop = pos # Set stop position\n break\n pos += 3\n return stop", "def _drive_until_stop_signal(self, stop_signal_number):\n\n self.camera.reset()\n\n logger.info(\"Drive until the STOP signal %d is found\", stop_signal_number)\n\n signal_to_detect = [SignalType.STOP_SIGNAL]\n remaining_distance_until_stop = 0\n\n for image in self.camera.stream():\n try:\n signal = self.signal_detector.crop_and_detect(image, signal_types=signal_to_detect)\n except Exception as exc:\n logger.error(\"Error occured during signal detection: '%s'\", str(exc))\n continue\n\n if signal is None:\n # drop the frame\n logger.debug(\"Dropping frame because no signal detected\")\n continue\n\n try:\n digit = self.digit_detector.detect(signal.image)\n except Exception as exc:\n logger.error(\"Error occured during digit detection: '%s'\", str(exc))\n continue\n if digit is None:\n # false alarm, not a signal\n logger.debug(\"Dropping frame because no digit in signal detected\")\n continue\n\n logger.info(\"Found a Stop Signal with digit: %d\", digit)\n\n if digit == stop_signal_number:\n logger.info(\"Detected correct digit %d to stop and wait 
until stopped\", digit)\n self.comm.set_target_speed(0) # stop before a STOP Signal\n while self.comm.get_status()[\"current speed\"] != 0:\n logger.debug(\"wait for approach to complete\")\n\n time.sleep(1)\n logger.info(\"Successfully stopped\")\n break\n\n self.camera.reset()\n image_stream = self.camera.stream()\n image = next(image_stream)\n\n signal = self.signal_detector.crop_and_detect(image, signal_types=[SignalType.STOP_SIGNAL])\n distance = self.distance_estimator.estimate(signal.image)\n logger.info(\n \"Found the Stop Signal %d where we need to stop in %fcmd\",\n digit, distance)\n\n remaining_distance_until_stop = distance\n logger.info(\n \"We are in distance to stop. Distance remaining: %f\",\n remaining_distance_until_stop)\n\n self.comm.set_distance_to_go(remaining_distance_until_stop)\n time.sleep(1)\n logger.info(\"Completed stop drive!!\")", "def stop_curvature(self, trial, curvature_ref = None):\r\n \r\n if curvature_ref != None:\r\n self.curvature_ref = curvature_ref\r\n\r\n d_threshold = 0 #BM: cuts this much off start and end of saccade to analyze. Originally was self.px_per_deg() / 2.\r\n min_length = 10 #BM: Minimum # of samples required in saccade, in order to analyze it.\r\n \r\n self.determine_curvature = False\r\n\r\n x = self.deg_to_px_x(self.curvature_ref[0])\r\n y = self.deg_to_px_y(self.curvature_ref[1])\r\n tx = self.deg_to_px_x(self.curvature_ref[2])\r\n ty = self.deg_to_px_y(self.curvature_ref[3])\r\n\r\n a = math.atan2(ty - y, tx - x) # BM : angle (in radians) between saccade start and end with respect to due north\r\n da_store = []\r\n da_deviance = [] # BM : created for looking at peak deviance\r\n \r\n i = 0\r\n \r\n jump_size = float(math.sqrt( (tx - x)**2 + (ty - y)**2 )) # BM : distance from saccade start to end\r\n \r\n # Make an image with the saccade start and end point, connected by a line\r\n\r\n im = Image.new(\"RGB\", (int(jump_size) + 100, 100), (0, 0, 0))\r\n dr = ImageDraw.Draw(im)\r\n dr.ellipse( (48, 48, 52, 52), fill = (255, 0, 0) )\r\n dr.ellipse( (jump_size + 48, 48, jump_size + 52, 52), fill = (255, 0, 0) )\r\n dr.line( (50, 50, 50 + jump_size, 50), fill = (255, 0, 0) )\r\n\r\n for sx, sy in self.stored_samples:\r\n \r\n i += 1\r\n \r\n d = math.sqrt( (sx - x)**2 + (sy - y)**2 ) # BM : distance from current sample to starting point\r\n dt = math.sqrt( (sx - tx)**2 + (sy - ty)**2 ) # BM : distance from current sample to target\r\n \r\n t = math.atan2(sy - y, sx - x) # BM: angle (in radians) between current sample and starting point\r\n da = t - a # BM : relative angle (in radians) between current sample and straight line joining saccade start and endoint\r\n \r\n while da < -math.pi:\r\n da += 2 * math.pi\r\n\r\n while t > math.pi:\r\n da -= 2 * math.pi\r\n\r\n norm_x = d * math.cos(da) # current distance along the straight line joining starting point to target, for the given sample\r\n norm_y = d * math.sin(da) # current deviance from the straight line, for the given sample.\r\n\r\n if norm_x > d_threshold and norm_x < jump_size - d_threshold:\r\n \r\n dr.point( (50 + norm_x, 50 + norm_y), fill = (0, 255, 0) )\r\n da_store.append(da)\r\n da_deviance.append(norm_y) # BM : created this for looking at peak deviance\r\n self.process_curvature_points(trial, (norm_x, norm_y), (sx, sy), (x, y), (tx, ty), d, da)\r\n \r\n if i == 16:\r\n trial[\"EarlyX\"] = sx\r\n trial[\"EarlyY\"] = sy\r\n\r\n #print len(da_store)\r\n if len(da_store) > min_length:\r\n print 'da_store: ',len(da_store)\r\n # Median curvature\r\n #curvature = 
math.degrees(self.get_median(da_store)) #BM : Get median of whole saccade\r\n # Peak deviance curvature \r\n curvature = math.degrees(self.get_peak_deviance(da_deviance))/math.degrees(jump_size) # BM : Get peak deviance in either pos or neg direction, then divide by the saccade amplitude (Nummenmaaa, 2006)\r\n # Average curvature\r\n curvature_avg = math.degrees(sum(da_store[:(len(da_store)/2)]) / (len(da_store)/2)) #BM : Get average of first half of every saccade\r\n \r\n \r\n while (curvature < -180):\r\n curvature += 360 \r\n\r\n while (curvature > 180):\r\n curvature -= 180 \r\n\r\n while (curvature_avg < -180): \r\n curvature_avg += 360\r\n\r\n while (curvature_avg > 180): \r\n curvature_avg -= 180 \r\n\r\n else:\r\n curvature = 0\r\n curvature_avg = 0\r\n \r\n if curvature != 0:\r\n print 'curvature: ',curvature\r\n \r\n return curvature", "def find_prem_stop(cds_ref_seq, gene_id, tran_id):\n\n cds_ref_seq_nostop = cds_ref_seq.extract(0, cds_ref_seq.ls - 3)\n\n if egglib.tools.has_stop(cds_ref_seq_nostop):\n print('Premature stop codon detected gene: %s transcript: %s' % (gene_id, tran_id))\n return True\n else:\n return False", "def on_stop(self):", "def stop(self):\n return _ncofdm_swig.FreqOffCalc_sptr_stop(self)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Custom collate_fn that is called with a list of multivariate samples to yield a minibatch. It preserves the data structure, e.g., if each sample is a dictionary, it outputs a dictionary with the same set of keys but batched Tensors as values (or lists if the values can not be converted into Tensors).
def collate_fn(sample_list):
    x_ref_batch = []
    x_pos_batch = []
    x_negs_batch = []
    label_batch = []
    for sample in sample_list:
        x_ref_batch.append(sample["x_ref"])
        x_pos_batch.append(sample["x_pos"])
        x_negs_batch.append(sample["x_negs"])
        label_batch.append(sample["label"])

    # Use torch API for RNNs to pad samples to fixed length, L, and stack them in batch-tensor of dim (B,n_dim,L).
    x_ref_batch = pad_sequence(x_ref_batch, batch_first=True, padding_value=0)  # (B,L,n_dim)
    x_ref_batch = x_ref_batch.transpose(1, 2)  # (B,n_dim,L)

    x_pos_batch = pad_sequence(x_pos_batch, batch_first=True, padding_value=0)  # (B,L,n_dim)
    x_pos_batch = x_pos_batch.transpose(1, 2)  # (B,n_dim,L)

    # Pad neg tensors with varying length of first dim L, and produce batch (B,K,n_dim,L') where L' is padded length
    x_negs_batch = pad_sequence(x_negs_batch, batch_first=True, padding_value=0)  # (B, L', K, n_dim)
    x_negs_batch = x_negs_batch.transpose(1, 2)  # (B, K, L', n_dim)
    x_negs_batch = x_negs_batch.transpose(2, 3)  # (B, K, n_dim, L')

    return {
        'x_ref': x_ref_batch,
        'x_pos': x_pos_batch,
        'x_negs': x_negs_batch,
        'label': label_batch
    }
[ "def collate_fn(batch):\n metadata = []\n for el in batch:\n metadata.append(el[\"metadata\"])\n del el[\"metadata\"]\n\n batch = default_collate(batch)\n\n batch[\"metadata\"] = metadata\n\n return batch", "def collate_fn(batch):\n data = [item[0] for item in batch]\n target = [item[1] for item in batch]\n return [data, target]", "def collate_fn(batch):\n # eliminate invalid data (where boxes is [] tensor)\n old_batch_len = len(batch)\n batch = [x for x in batch if x[1]['boxes'].shape[0] != 0]\n # try refill empty sample by other sample in current batch\n #print('batch len = ', old_batch_len)\n #print('new batch len = ', len(batch))\n new_batch_len = len(batch)\n for i in range(new_batch_len, old_batch_len):\n batch.append(copy.deepcopy(batch[i%new_batch_len]))\n #print('batch = ', batch)\n #print('filled batch len = ', len(batch))\n batch = list(zip(*batch)) # batch[0]: data tensor, batch[1]: targets dict\n\n batch[0] = nested_tensor_from_tensor_list(batch[0])\n return tuple(batch)", "def collate_fn(self, batches: List[Dict[str, np.ndarray]]) -> Dict[str, torch.Tensor]:\n if self.use_bucketing:\n batch = {k: torch.as_tensor(v) for k, v in batches[0].items() if k != 'audio_filepaths'}\n batch['segment_ids'] = batch['segment_ids'].int()\n batch['punct_labels'] = batch['punct_labels'].long()\n batch['capit_labels'] = batch['capit_labels'].long()\n if self.use_audio and self.preload_audios:\n batch['features'] = batch['features'].to(torch.float32)\n return batch\n else:\n for batch in batches:\n batch_segment_ids, batch_input_mask, batch_loss_mask = create_masks_and_segment_ids(\n batch['input_ids'],\n batch['subtokens_mask'],\n self.tokenizer.pad_id,\n self.tokenizer.cls_id,\n self.tokenizer.sep_id,\n self.ignore_start_end,\n self.ignore_extra_tokens,\n )\n batch['segment_ids'] = torch.as_tensor(batch_segment_ids, dtype=torch.int)\n batch['input_mask'] = torch.as_tensor(batch_input_mask)\n batch['loss_mask'] = torch.as_tensor(batch_loss_mask)\n batch['input_ids'] = torch.as_tensor(batch['input_ids'], dtype=torch.int)\n batch['subtokens_mask'] = torch.as_tensor(batch['subtokens_mask'])\n batch['punct_labels'] = torch.as_tensor(batch['punct_labels'], dtype=torch.long)\n batch['capit_labels'] = torch.as_tensor(batch['capit_labels'], dtype=torch.long)\n if 'features' in batch:\n batch['features'] = torch.as_tensor(batch['features'], dtype=torch.float)\n batch['features_length'] = torch.as_tensor(batch['features_length'], dtype=torch.long)\n elif self.use_audio:\n if ASR_AVAILABLE:\n waveform = AudioSegment.from_file(batch['audio_filepaths'], target_sr=self.sample_rate)\n batch['features'] = torch.as_tensor(waveform.samples, dtype=torch.float)\n batch['features_length'] = torch.as_tensor(waveform.num_samples, dtype=torch.long)\n else:\n raise ModuleNotFoundError(\n 'Nemo ASR was not installed, see https://github.com/NVIDIA/NeMo#installation for installation instructions'\n )\n\n segment_ids = pad_sequence([batch['segment_ids'] for batch in batches])\n input_mask = pad_sequence([batch['input_mask'] for batch in batches])\n loss_mask = pad_sequence([batch['loss_mask'] for batch in batches])\n input_ids = pad_sequence([batch['input_ids'] for batch in batches], padding_value=self.tokenizer.pad_id)\n subtokens_mask = pad_sequence([batch['subtokens_mask'] for batch in batches], padding_value=False)\n punct_labels = pad_sequence([batch['punct_labels'] for batch in batches], padding_value=0)\n capit_labels = pad_sequence([batch['capit_labels'] for batch in batches], padding_value=0)\n features = 
pad_sequence([batch['features'] for batch in batches], padding_value=0.0)\n features_length = torch.tensor([batch['features_length'] for batch in batches])\n return {\n 'input_ids': input_ids.T,\n 'subtokens_mask': subtokens_mask.T,\n 'punct_labels': punct_labels.T,\n 'capit_labels': capit_labels.T,\n 'features': features.T,\n 'features_length': features_length,\n 'segment_ids': segment_ids.T,\n 'input_mask': input_mask.T,\n 'loss_mask': loss_mask.T,\n }", "def collate_batch(dataset_list):\n # define the lists\n batch_atom_weights = []\n batch_atom_fea = []\n batch_sym_fea = []\n batch_self_fea_idx = []\n batch_nbr_fea_idx = []\n crystal_atom_idx = []\n aug_cry_idx = []\n batch_targets = []\n batch_cry_ids = []\n\n aug_count = 0\n cry_base_idx = 0\n for i, (inputs, target, *cry_ids) in enumerate(dataset_list):\n atom_weights, atom_fea, sym_fea, self_fea_idx, nbr_fea_idx = inputs\n\n # number of atoms for this crystal\n n_el = atom_fea.shape[0]\n n_i = sym_fea.shape[0]\n n_aug = int(float(n_i) / float(n_el))\n\n # batch the features together\n batch_atom_weights.append(atom_weights.repeat((n_aug, 1)))\n batch_atom_fea.append(atom_fea.repeat((n_aug, 1)))\n batch_sym_fea.append(sym_fea)\n\n # mappings from bonds to atoms\n batch_self_fea_idx.append(self_fea_idx + cry_base_idx)\n batch_nbr_fea_idx.append(nbr_fea_idx + cry_base_idx)\n\n # mapping from atoms to crystals\n # print(torch.tensor(range(i, i+n_aug)).size())\n crystal_atom_idx.append(\n torch.tensor(range(aug_count, aug_count + n_aug)).repeat_interleave(n_el)\n )\n aug_cry_idx.append(torch.tensor([i] * n_aug))\n\n # batch the targets and ids\n batch_targets.append(target)\n batch_cry_ids.append(cry_ids)\n\n # increment the id counter\n aug_count += n_aug\n cry_base_idx += n_i\n\n return (\n (\n torch.cat(batch_atom_weights, dim=0),\n torch.cat(batch_atom_fea, dim=0),\n torch.cat(batch_sym_fea, dim=0),\n torch.cat(batch_self_fea_idx, dim=0),\n torch.cat(batch_nbr_fea_idx, dim=0),\n torch.cat(crystal_atom_idx),\n torch.cat(aug_cry_idx),\n ),\n tuple(torch.stack(b_target, dim=0) for b_target in zip(*batch_targets)),\n *zip(*batch_cry_ids),\n )", "def minibatch_process(f, minibatch_size, mb_args = (), mb_kwargs = {}, fixed_kwargs={}):\n all_mb_args = list(mb_args) + list(mb_kwargs.values())\n assert len(all_mb_args)>0, 'Need some input.'\n assert callable(f), 'f must be a function'\n n_samples = len(mb_args[0])\n assert all(len(arg) == n_samples for arg in all_mb_args)\n mb_kwarg_list = mb_kwargs.items()\n fixed_kwarg_list = list(fixed_kwargs.items())\n index_generator = minibatch_index_generator(n_samples = n_samples, n_epochs=1, minibatch_size=minibatch_size, final_treatment='truncate')\n ix = next(index_generator)\n first_output = f(*(a[ix] for a in mb_args), **dict([(k, v[ix]) for k, v in mb_kwarg_list]+fixed_kwarg_list))\n if first_output is None:\n for ix in index_generator:\n f(*(a[ix] for a in mb_args), **dict([(k, v[ix]) for k, v in mb_kwarg_list]+fixed_kwarg_list))\n else:\n output_shape = first_output.shape if minibatch_size==SINGLE_MINIBATCH_SIZE else first_output.shape[1:]\n results = np.empty((n_samples, )+output_shape, dtype=first_output.dtype)\n results[:len(first_output)] = first_output\n for ix in index_generator:\n results[ix] = f(*(a[ix] for a in mb_args), **dict([(k, v[ix]) for k, v in mb_kwarg_list]+fixed_kwarg_list))\n return results", "def trivial_batch_collator(batch):\n return batch", "def collate_fn(dataBatch):\n inputBatch = pad_sequence([data[0] for data in dataBatch])\n if not any(data[1] is None for data in 
dataBatch):\n targetBatch = torch.cat([data[1] for data in dataBatch])\n else:\n targetBatch = None\n\n inputLenBatch = torch.stack([data[2] for data in dataBatch])\n if not any(data[3] is None for data in dataBatch):\n targetLenBatch = torch.stack([data[3] for data in dataBatch])\n else:\n targetLenBatch = None\n\n return inputBatch, targetBatch, inputLenBatch, targetLenBatch", "def collate_fn_no_master_node(batch):\n\tbatch_A, batch_nodes, batch_y = zip(*batch)\n\n\tn_graphs = len(batch_nodes)\n\tmax_n_nodes = max([nodes.shape[0] for nodes in batch_nodes])\n\n\tn_nodes = n_graphs * max_n_nodes\n\n\tadj_batch = lil_matrix((n_nodes, n_nodes))\n\tbatch_features = np.zeros(n_nodes)\n\tfor i, (A, features) in enumerate(zip(batch_A, batch_nodes)):\n\t\tstart_ix = i * max_n_nodes\n\t\tadj_batch[\n\t\tstart_ix: start_ix + A.shape[0], start_ix: start_ix + A.shape[0]\n\t\t] = A\n\t\tbatch_features[start_ix: start_ix + features.shape[0]] = features\n\n\tadj_batch = adj_batch.tocsr()\n\tbatch_A = adj_batch\n\tbatch_A = sparse_mx_to_torch_sparse_tensor(batch_A)\n\n\t# concatenate all features and labels to one long vector\n\tbatch_nodes = torch.cat(batch_nodes, dim=0)\n\tbatch_y = torch.cat(batch_y)\n\n\treturn batch_A, batch_nodes, batch_y, torch.LongTensor([n_graphs])", "def _collate_fn(batch):\r\n batch = list(zip(*batch))\r\n batch[0] = torch.stack(batch[0])\r\n batch[1] = list(batch[1])\r\n batch[2] = torch.stack(batch[2])\r\n return tuple(batch)", "def collate_fn(batch):\n batch = [data for data in batch if data is not None]\n imgs, bb_targets = list(zip(*batch))\n\n # Resize images to input shape\n imgs = torch.stack([img for img in imgs])\n\n # Add sample index to targets.\n # If each label is shape (N, 5), we concatenate along the 0th axis. To distinguish bboxes of different images,\n # we add an image index to the 1st axis.\n for i, boxes in enumerate(bb_targets):\n boxes[:, 0] = i\n\n bb_targets_cat = torch.cat(bb_targets, 0)\n\n return imgs, bb_targets_cat", "def id_collate_fn(batch):\n results = []\n\n for samples in zip(*batch):\n if isinstance(samples[0], UUID):\n # Turn into a tuple of strings\n samples = (*map(str, samples),)\n\n # Batch data\n results.append(default_collate(samples))\n return results", "def collate_pool(dataset_list):\n batch_atom_fea, batch_nbr_fea, batch_nbr_fea_idx = [], [], []\n crystal_atom_idx, batch_target = [], []\n batch_cif_ids = []\n base_idx = 0\n for i, ((atom_fea, nbr_fea, nbr_fea_idx), target, cif_id)\\\n in enumerate(dataset_list):\n n_i = atom_fea.shape[0] # number of atoms for this crystal\n batch_atom_fea.append(atom_fea)\n batch_nbr_fea.append(nbr_fea)\n batch_nbr_fea_idx.append(nbr_fea_idx+base_idx)\n new_idx = torch.LongTensor(np.arange(n_i)+base_idx)\n crystal_atom_idx.append(new_idx)\n batch_target.append(target)\n batch_cif_ids.append(cif_id)\n base_idx += n_i\n return (torch.cat(batch_atom_fea, dim=0),\n torch.cat(batch_nbr_fea, dim=0),\n torch.cat(batch_nbr_fea_idx, dim=0),\n crystal_atom_idx), \\\n torch.stack(batch_target, dim=0),\\\n batch_cif_ids", "def _collate_else(batch, collate_func):\n error_msg = \"batch must contain tensors, numbers, dicts or lists; found {}\"\n elem_type = type(batch[0])\n if elem_type.__module__ == 'numpy' and elem_type.__name__ != 'str_' \\\n and elem_type.__name__ != 'string_':\n elem = batch[0]\n if elem_type.__name__ == 'ndarray':\n # array of string classes and object\n if re.search('[SaUO]', elem.dtype.str) is not None:\n raise TypeError(error_msg.format(elem.dtype))\n\n return 
torch.stack([torch.from_numpy(b) for b in batch], 0)\n if elem.shape == (): # scalars\n py_type = float if elem.dtype.name.startswith('float') else int\n return numpy_type_map[elem.dtype.name](list(map(py_type, batch)))\n elif isinstance(batch[0], slice):\n batch = default_collate([{\n 'start': sl.start,\n 'stop': sl.stop,\n 'step': 1 if sl.step is None else sl.step\n } for sl in batch])\n return batch\n elif isinstance(batch[0], int_classes):\n return torch.LongTensor(batch)\n elif isinstance(batch[0], float):\n return torch.DoubleTensor(batch)\n elif isinstance(batch[0], string_classes):\n return batch\n elif isinstance(batch[0], container_abcs.Mapping):\n # Hack the mapping collation implementation to print error info\n if _DEBUG:\n collated = {}\n try:\n for key in batch[0]:\n collated[key] = collate_func([d[key] for d in batch])\n except Exception:\n print('\\n!!Error collating key = {!r}\\n'.format(key))\n raise\n return collated\n else:\n return {key: collate_func([d[key] for d in batch]) for key in batch[0]}\n elif isinstance(batch[0], tuple) and hasattr(batch[0], '_fields'): # namedtuple\n return type(batch[0])(*(default_collate(samples) for samples in zip(*batch)))\n elif isinstance(batch[0], container_abcs.Sequence):\n transposed = zip(*batch)\n return [collate_func(samples) for samples in transposed]\n else:\n raise TypeError((error_msg.format(type(batch[0]))))", "def collate_fn(self, batch):\n images = list()\n boxes = list()\n labels = list()\n difficulties = list()\n\n for b in batch:\n images.append(b[0])\n boxes.append(b[1])\n labels.append(b[2])\n difficulties.append(b[3])\n\n images = torch.stack(images, dim=0)\n\n return images, boxes, labels, difficulties # tensor (N, 3, 300, 300), 3 lists of N tensors each", "def _apply_chunked(func, chunk_generator, tensor_collator):\n processed_chunks = [func(*chunk_args, **chunk_kwargs) for chunk_args, chunk_kwargs in chunk_generator]\n return cat_dataclass(processed_chunks, tensor_collator)", "def collate_fn(self, batch):\n images = list()\n targets = list()\n\n for b in batch:\n images.append(b[0])\n targets.append(b[1])\n\n # images = torch.stack(images, dim=0)\n\n return images, targets # tensor (N, 3, 300, 300), 3 lists of N tensors each", "def customize_collate_from_batch(batch):\n\n elem = batch[0]\n elem_type = type(elem)\n if isinstance(elem, torch.Tensor):\n batch_new = pad_sequence(batch) \n out = None\n if torch.utils.data.get_worker_info() is not None:\n numel = max([x.numel() for x in batch_new]) * len(batch_new)\n storage = elem.storage()._new_shared(numel)\n out = elem.new(storage)\n # here is the difference\n return torch.cat(batch_new, 0, out=out)\n\n elif elem_type.__module__ == 'numpy' and elem_type.__name__ != 'str_' \\\n and elem_type.__name__ != 'string_':\n if elem_type.__name__ == 'ndarray' or elem_type.__name__ == 'memmap':\n if np_str_obj_array_pattern.search(elem.dtype.str) is not None:\n raise TypeError(customize_collate_err_msg.format(elem.dtype))\n return customize_collate_from_batch(\n [torch.as_tensor(b) for b in batch])\n elif elem.shape == (): # scalars\n return torch.as_tensor(batch)\n elif isinstance(elem, float):\n return torch.tensor(batch, dtype=torch.float64)\n elif isinstance(elem, int_classes):\n return torch.tensor(batch)\n elif isinstance(elem, string_classes):\n return batch\n elif isinstance(elem, tuple):\n # concatenate two tuples\n tmp = elem\n for tmp_elem in batch[1:]:\n tmp += tmp_elem \n return tmp\n elif isinstance(elem, container_abcs.Sequence):\n it = iter(batch)\n elem_size = 
len(next(it))\n if not all(len(elem) == elem_size for elem in it):\n raise RuntimeError('each element in batch should be of equal size')\n transposed = zip(*batch)\n return [customize_collate_from_batch(samples) for samples in transposed]\n\n raise TypeError(customize_collate_err_msg.format(elem_type))", "def collate(samples):\n maxlen = 0\n l_idx = -1\n if isinstance(samples, tuple):\n samples = [samples]\n for i, X, y, seq_id in samples:\n if maxlen < X.shape[l_idx]:\n maxlen = X.shape[l_idx]\n X_ret = list()\n y_ret = list()\n idx_ret = list()\n size_ret = list()\n seq_id_ret = list()\n for i, X, y, seq_id in samples:\n dif = maxlen - X.shape[l_idx]\n X_ = X\n if dif > 0:\n X_ = F.pad(X, (0, dif))\n X_ret.append(X_)\n y_ret.append(y)\n size_ret.append(X.shape[l_idx])\n idx_ret.append(i)\n seq_id_ret.append(seq_id)\n X_ret = torch.stack(X_ret)\n y_ret = torch.stack(y_ret)\n size_ret = torch.tensor(size_ret)\n idx_ret = torch.tensor(idx_ret)\n seq_id_ret = torch.tensor(seq_id_ret)\n return (idx_ret, X_ret, y_ret, size_ret, seq_id_ret)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Import a function from a full module path
def import_from(full_name):
    module_name, function_name = full_name.rsplit('.', 1)
    mod = import_module(module_name)
    return getattr(mod, function_name)
[ "def import_function(func_ref):\n module_name, _, func_name = func_ref.rpartition('.')\n module = importlib.import_module(module_name)\n return getattr(module, func_name)", "def import_function(name):\n module_name, function_name = name.rsplit(\".\", 1)\n module = importlib.import_module(module_name)\n return getattr(module, function_name)", "def load_function(path):\r\n module_path, _, name = path.rpartition('.')\r\n return getattr(import_module(module_path), name)", "def import_function(name: str):\n module_name, function_name = name.rsplit(\".\", 1)\n module = importlib.import_module(module_name)\n return getattr(module, function_name)", "def import_path(modulename, path='.'):\n\tpath = _osp.abspath(path)\n\tif not _osp.isdir(path):\n\t\traise ImportError(f'no directory: {path}')\n\twith _EnsureSysPath(_osp.abspath(path)):\n\t\treturn _il.import_module(modulename)", "def import_path(fullpath, strict=True):\n path, filename = os.path.split(fullpath)\n filename, ext = os.path.splitext(filename)\n sys.path.insert(0, path)\n try:\n module = __import__(filename)\n except ImportError:\n del sys.path[0]\n raise\n del sys.path[0]\n #\n if strict:\n path = os.path.split(module.__file__)[0]\n # FIXME: doesn't *startswith* allow room for errors ?\n if not fullpath.startswith(path):\n raise ImportError(\"Module '%s' found, but not in '%s'\" % (\n filename, fullpath))\n #\n return module", "def import_from_path(module: str, path: str, name: str):\n\n spec = importlib.util.spec_from_file_location(module, path)\n foo = importlib.util.module_from_spec(spec)\n spec.loader.exec_module(foo)\n return getattr(foo, name)", "def importFromPath(filename):\n try:\n path, name = os.path.split(filename)\n name, ext = os.path.splitext(name)\n file, filename, data = imp.find_module(name, [path])\n importedModule = imp.load_module(name, file, filename, data)\n except Exception as ae:\n raise Exception('Importing module '+ filename + ' at ' + path + os.sep + name + ' failed with error '+ str(ae))\n return importedModule", "def __import_from(module: str, name: str):\n module = __import__(module, fromlist=[name])\n return getattr(module, name)", "def dynamic_import(path, module=None):\n imp = importlib.import_module(path)\n if module:\n return getattr(imp, module)\n else:\n return imp", "def import_module(module, from_where):\n from_module = __import__(from_where, globals(), locals(), [module])\n return getattr(from_module, module)", "def import_from(path: str) -> t.Any:\n module, item = path.split(\":\")\n return getattr(import_module(module), item)", "def load_func_path(funcpath):\n modname, objname = funcpath.split(':', 1)\n\n __import__(modname)\n module = modules[modname]\n _func = eval(objname, module.__dict__)\n \n if _func is None:\n raise Exception('eval(%(objname)s) in %(modname)s came up None' % locals())\n\n return _func", "def import_module_from_module_path(path):\n return SourceFileLoader('', path).load_module()", "def _import_function(cwd, function_location, function_name, setup_name):\n sys.path.append(cwd)\n try:\n imported = __import__(function_location.strip(\".py\"))\n except ImportError:\n raise\n\n try:\n users_amplitude = getattr(imported, function_name)\n except:\n raise\n\n try:\n setup_function = getattr(imported, setup_name)\n except AttributeError:\n warnings.warn((\"Setup function {0} was not found in {1},\"\n \"going without setup function\").format(setup_name, function_location), UserWarning)\n\n def empty():\n pass\n setup_function = empty\n\n return [users_amplitude, setup_function]", 
"def import_callable(module_path_name):\n try:\n module_path, name = module_path_name.rsplit('.', 1)\n except ValueError:\n raise exceptions.ImproperlyConfigured('%s is not a valid module path' % module_path_name)\n try:\n mod = importlib.import_module(module_path)\n except ImportError as e:\n raise exceptions.ImproperlyConfigured('Error importing module %s: \"%s\"' % (module_path, e))\n try:\n item_callable = getattr(mod, name)\n except AttributeError:\n raise exceptions.ImproperlyConfigured('Module \"%s\" does not define a \"%s\" callable' % (module_path, name))\n\n return item_callable", "def local_import(x):\n import os\n import sys\n\n path, fileName = os.path.split(x)\n if not path:\n path = \".\"\n base, ext = os.path.splitext(fileName)\n save = sys.dont_write_bytecode\n save_path = list(sys.path)\n sys.dont_write_bytecode = True\n sys.path = [path]\n rv = __import__(base)\n sys.dont_write_bytecode = save\n sys.path = save_path\n sys.path.pop()\n return rv", "def import_from_script(path, name):\n from polygraphy.logger import G_LOGGER\n\n dir = os.path.dirname(path)\n modname = os.path.splitext(os.path.basename(path))[0]\n\n sys.path.insert(0, dir)\n\n with contextlib.ExitStack() as stack:\n\n def reset_sys_path():\n del sys.path[0]\n\n stack.callback(reset_sys_path)\n\n try:\n mod = importlib.import_module(modname)\n return getattr(mod, name)\n except Exception as err:\n ext = os.path.splitext(path)[1]\n err_msg = \"Could not import symbol: {:} from script: {:}\".format(name, path)\n if ext != \".py\":\n err_msg += \"\\nThis could be because the extension of the file is not '.py'. Note: The extension is: {:}\".format(\n ext\n )\n err_msg += \"\\nNote: Error was: {:}\".format(err)\n err_msg += \"\\nNote: sys.path was: {:}\".format(sys.path)\n G_LOGGER.critical(err_msg)", "def _import_module(name):\r\n __import__(name)\r\n return sys.modules[name]" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Creates a new Bloom Filter ``key`` with desired probability of false positives ``errorRate`` and expected entries to be inserted as ``capacity``. Default expansion value is 2. By default, the filter is autoscaling.
def bfCreate(self, key, errorRate, capacity, expansion=None, noScale=None):
    params = [key, errorRate, capacity]
    self.appendExpansion(params, expansion)
    self.appendNoScale(params, noScale)

    return self.execute_command(self.BF_RESERVE, *params)
[ "def cfCreate(self, key, capacity, expansion=None, bucket_size=None, max_iterations=None):\n params = [key, capacity]\n self.appendExpansion(params, expansion)\n self.appendBucketSize(params, bucket_size)\n self.appendMaxIterations(params, max_iterations)\n\n return self.execute_command(self.CF_RESERVE, *params)", "def __init__(self, initial_capacity, error_rate, scale_factor = 2, scale_mode = SCALE_MODE_LINEAR):\n\t\tif not initial_capacity > 0: raise ValueError(\"initial_capacity must be > 0.\")\n\t\tif not (0 < error_rate < 1): raise ValueError(\"error_rate must be between 0 and 1.\")\n\t\tif not scale_factor > 0: raise ValueError(\"scale_factor must be > 0.\")\n\t\tif not scale_mode in [1, 2]: raise ValueError(\"Invalid scale_mode, use one of the SCALE_MODE_* constants.\")\n\t\t# The capacity of the first filter.\n\t\tself.initial_capacity = initial_capacity\n\t\t# Total number of elements added.\n\t\tself.element_count = 0\n\t\t# The error rate shared by all filters.\n\t\tself.error_rate = error_rate\n\t\t# The scaling factor.\n\t\tself.scale_factor = scale_factor\n\t\t# The scaling mode.\n\t\tself.scale_mode = scale_mode\n\t\t# The filters array.\n\t\tself.filters = []\n\t\tself.filters.append(BloomFilter(self.initial_capacity, self.error_rate))", "def bfInsert(self, key, items, capacity=None, error=None, noCreate=None, expansion=None, noScale=None):\n params = [key]\n self.appendCapacity(params, capacity)\n self.appendError(params, error)\n self.appendExpansion(params, expansion)\n self.appendNoCreate(params, noCreate)\n self.appendNoScale(params, noScale)\n self.appendItems(params, items)\n\n return self.execute_command(self.BF_INSERT, *params)", "def __init__(self, capacity, error_rate):\n\t\tif not capacity > 0: raise ValueError(\"capacity must be > 0\")\n\t\tif not (0 < error_rate < 1): raise ValueError(\"error_rate must be between 0 and 1.\")\n\t\t# The expected maximum capacity. 
The number of stored elements may exceed this capacity.\n\t\tself.capacity = capacity\n\t\t# The number of stored elements.\n\t\tself.element_count = 0\n\t\t# The maximum probability of false positives allowed for the specified capacity.\n\t\tself.error_rate = error_rate\n\t\t# Size of the bit array.\n\t\tself.size = self.calc_size()\n\t\t# The number of hashes used to store/check an element.\n\t\tself.hash_count = self.calc_hash_count()\n\t\t# The bit array.\n\t\tself.bit_array = bitarray(self.size)\n\t\tself.bit_array.setall(0)", "def bf_counter(file_name, k, n, capacity, error_rate, verbose=False):\n if verbose:\n start = time.time()\n print('BFCounter started.')\n\n heap = []\n for i in range(n):\n heap.append((0, ''))\n\n bf = BloomFilter(capacity, error_rate, 'kmer_bf')\n\n kmer_counter = defaultdict(lambda: 1)\n\n # Assign functions to local variables for performance improvement\n add_to_bf = bf.add\n heap_pushpop = heapq.heappushpop\n\n with open(file_name, 'r') as f:\n line_num = 0\n for line in f:\n if line_num % 4 == 1: # dna sequence\n kmer_count = len(line) - k\n for i in range(kmer_count):\n kmer = line[i:i + k]\n if kmer not in bf: # not in Bloom Filter\n add_to_bf(kmer)\n else: # in Bloom Filter\n kmer_counter[kmer] += 1\n line_num += 1\n if verbose:\n end_hash = time.time()\n hash_table_size = sys.getsizeof(kmer_counter) / (1024 ** 2)\n print('Hash table is created in {:.2f} seconds.'.format(\n end_hash - start))\n print('Hash table size: {:.2f} MB.'.format(hash_table_size))\n start_populate = time.time()\n print('Populating the heap...')\n\n for count, kmer in kmer_counter.items():\n # insert to the heap if count is bigger than minimum\n if count > heap[0][0]:\n heap_pushpop(heap, (count, kmer))\n\n if verbose:\n end_populate = time.time()\n print('Heap is populated in {:.2f} seconds.'.format(\n end_populate - start_populate\n ))\n\n os.remove('kmer_bf')\n if verbose:\n end = time.time()\n print('BFCounter is completed in {:.2f} seconds.'.format(end - start))\n\n return heap", "def __init__(self, capacity, initial):\n\t\tself.capacity = capacity\n\t\tself.amount = initial", "def __init__(self, capacity, fillValue=None):\r\n self._items = list()\r\n for count in range(capacity):\r\n self._items.append(fillValue)", "def __init__(self, capacity, fillValue = None):\n\n self._logicalSize = 0\n self._items = list()\n for count in range(capacity):\n self._items.append(fillValue)\n self._logicalSize += 1", "def new_capacity(self, new_capacity):\n self._new_capacity = new_capacity", "def __init__(self, capacity, fillValue = None):\n \n self._items = list() \n self._fillValue = fillValue\n self._DEFAULT_CAPACITY = capacity\n self._logicalSize = 0 #as required by exercise 1\n \n \n for count in xrange(capacity):\n self._items.append(self._fillValue)", "def create_capacity_limiter(total_tokens: float) -> abc.CapacityLimiter:\n return get_asynclib().CapacityLimiter(total_tokens)", "def cfInsert(self, key, items, capacity=None, nocreate=None):\n params = [key]\n self.appendCapacity(params, capacity)\n self.appendNoCreate(params, nocreate)\n self.appendItems(params, items)\n\n return self.execute_command(self.CF_INSERT, *params)", "def create_capacity_limiter(total_tokens: float) -> CapacityLimiter:\n return _get_asynclib().CapacityLimiter(total_tokens)", "def expandable_capacity(self, expandable_capacity):\n\n self._expandable_capacity = expandable_capacity", "def _generate_table(self):\n for i in xrange(32):\n self._table.append(\n BloomFilter(\n capacity=self.__capacity,\n 
error_rate=self.__error_rate\n )\n )", "def from_key(cls, key, quantity=1):\n\n return cls(key[0], key[1], quantity)", "def __init__(self, crypto_keys, precision, **kwargs):\n self.NEGATIVE_THRESHOLD = sys.maxsize\n self.keys = crypto_keys\n self.precision = precision", "def FixedWidthBucketer(width, num_finite_buckets=100):\n return Bucketer(width=width, growth_factor=0.0,\n num_finite_buckets=num_finite_buckets)", "def __init__(self, size=DEFAULT_SIZE, alpha=DEFAULT_ALPHA, rescale_threshold=DEFAULT_RESCALE_THRESHOLD, clock=getClock()):\r\n super(ExponentiallyDecayingReservoir, self).__init__()\r\n\r\n self.values = {}\r\n self.alpha = alpha\r\n self.size = size\r\n self.clock = clock\r\n self.rescale_threshold = rescale_threshold\r\n self.count = 0\r\n self.startTime = self.clock.time()\r\n self.nextScaleTime = self.clock.time() + self.rescale_threshold" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Adds an ``item`` to a Bloom Filter ``key``.
def bfAdd(self, key, item): params = [key, item] return self.execute_command(self.BF_ADD, *params)
[ "def add(key, item):\n hash_key = hash_function(key)\n hash_table[hash_key - 1] = item", "def cfAdd(self, key, item):\n params = [key, item]\n \n return self.execute_command(self.CF_ADD, *params)", "def add_item(self, key, item):\n self.dict[key] = item\n self.is_empty = False", "def add(self, key):\n\t\t#super(CountingBloomFilter, self).add(key)\n\t\t#super(CountingBloomFilter, self).generateStats()\n\t\tfor i in self.getBitArrayIndices(key):\n\t\t\tself.ba[i] += 1\n\t\tself.n += 1", "def add(self, key, timestamp=None):\n cur_bloom = self.get_active_bloom()\n cur_bloom.add(key, timestamp)", "def add(self, item):\n self.num_item += 1\n indexs = self.__get_indexs(item)\n for index in indexs:\n self.filter_bitarray[index] = True", "def add(self, key):\n filter_ = self._filter\n h = self._salt.copy()\n h.update(key)\n for pos in self._fmt_unpack(h.digest()):\n filter_ |= 1 << (pos % self._m_size)\n self._filter = filter_", "def add_item(self, key, item):\n self[key].add(item)\n try:\n self._reverse_store[item].add(key)\n except KeyError:\n self._reverse_store[item] = set([key])", "def add(self,key,value):\n index = self.hash(key)\n if not self._buckets[index] :\n self._buckets[index] = [[key,value]]\n else:\n self._buckets[index].append([key,value])", "def add(self, key):\n if key in self:\n return True\n if not self.filters:\n filter = RedisLocalBloomFilter(\n server=self.server, \n bfkeypreffix = self.FILTER_KEY_FMT % (self.bfkeypreffix, self.filter_count),\n capacity=self.initial_capacity,\n error_rate=self.error_rate * (1.0 - self.ratio))\n self.filter_count += 1\n self.filters.append(filter)\n else:\n filter = self.filters[-1]\n if filter.count >= filter.capacity:\n capacity = filter.capacity * self.scale\n if capacity > MAX_PER_SLICE_SIZE:\n capacity = MAX_PER_SLICE_SIZE\n filter = RedisLocalBloomFilter(\n server=self.server,\n bfkeypreffix = self.FILTER_KEY_FMT % (self.bfkeypreffix, self.filter_count),\n capacity=capacity,\n error_rate=self.error_rate * (1.0 - self.ratio))\n self.filter_count += 1\n self.filters.append(filter)\n if self.max_filters > 0 and len(self.filters) >= self.max_filters:\n f = self.filters[0]\n f.clear()\n del self.filters[0]\n filter.add(key, skip_check=True)\n return False", "def update_item(self, key, item):\n self.dict[key]= item", "def add_item (self, item):\n new_item = CacheItem (item)\n cached = self.cache.get(hash(item))\n if cached is None:\n self.evict_or_add (new_item)\n cached.hits += 1", "def add_to_bag(self, item):\n self._bag.append(item)", "def insert(self, item):\n for h_num in xrange(self.k):\n val = self.hash_value(item, h_num)\n self.arr[val] = True", "def add(self, item):\n self.cache.append(item)", "def add(self, key, value):\n self._data.append(self._Item(key, value))\n self._upheap(len(self._data) - 1) # upheap newly added position", "def add(self, key, value):\n new_item = self._Item(key, value)\n self._data.add_last(new_item) # Since priority queue is unsorted, we just add element at the end.", "def add(self, key, value):\n self._data.append(self._Item(key, value))\n self._upheap(len(self._data) - 1) # upheap newly added position", "def replace(self, key, item):\n # If the item already existed in the list, we need to replace it.\n # Otherwise, it will be added to the list at the appropriate\n # position.\n if len(self.items) >= MAXBITMAPDISPATCH:\n new = self.to_listdispatch(BRANCH)\n return new._ireplace(key, item)\n \n notnew = bool(self.bitmap & 1 << key)\n newmap = self.bitmap | 1 << key\n idx = bit_count(self.bitmap & ((1 << 
key) - 1))\n return BitMapDispatch(\n newmap,\n # If notnew is True, the item that is replaced by the new item\n # is left out, otherwise the new item is inserted. Refer to\n # _ireplace for a more concise explanation.\n self.items[:idx] + [item] + self.items[idx+notnew:]\n )" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
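The record above pairs the BF.ADD docstring with its wrapper. As an illustrative aside (not part of the dataset), a minimal usage sketch follows; the import path, connection details, key names, and item values are all assumptions, and `bfCreate` (whose wrapper appears elsewhere in this file) is used only to reserve the filter first.

```python
# Hedged usage sketch: assumes the redisbloom package's Client exposes the
# methods shown in these records and that a RedisBloom-enabled server is local.
from redisbloom.client import Client

client = Client(host='localhost', port=6379)
client.bfCreate('bf:seen-urls', 0.01, 100_000)              # ~1% error rate, ~100k expected items
print(client.bfAdd('bf:seen-urls', 'https://example.com'))  # 1 -> newly added
print(client.bfAdd('bf:seen-urls', 'https://example.com'))  # 0 -> (probably) already present
```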
Adds multiple ``items`` to a Bloom Filter ``key``. If ``noCreate`` remains ``None`` and ``key`` does not exist, a new Bloom Filter ``key`` is created with the desired false-positive probability ``error`` and the expected number of entries ``capacity``.
def bfInsert(self, key, items, capacity=None, error=None, noCreate=None, expansion=None, noScale=None): params = [key] self.appendCapacity(params, capacity) self.appendError(params, error) self.appendExpansion(params, expansion) self.appendNoCreate(params, noCreate) self.appendNoScale(params, noScale) self.appendItems(params, items) return self.execute_command(self.BF_INSERT, *params)
[ "def bfAdd(self, key, item):\n params = [key, item]\n \n return self.execute_command(self.BF_ADD, *params)", "def add(self, key):\n\t\t#super(CountingBloomFilter, self).add(key)\n\t\t#super(CountingBloomFilter, self).generateStats()\n\t\tfor i in self.getBitArrayIndices(key):\n\t\t\tself.ba[i] += 1\n\t\tself.n += 1", "def add(self, key):\n if key in self:\n return True\n if not self.filters:\n filter = RedisLocalBloomFilter(\n server=self.server, \n bfkeypreffix = self.FILTER_KEY_FMT % (self.bfkeypreffix, self.filter_count),\n capacity=self.initial_capacity,\n error_rate=self.error_rate * (1.0 - self.ratio))\n self.filter_count += 1\n self.filters.append(filter)\n else:\n filter = self.filters[-1]\n if filter.count >= filter.capacity:\n capacity = filter.capacity * self.scale\n if capacity > MAX_PER_SLICE_SIZE:\n capacity = MAX_PER_SLICE_SIZE\n filter = RedisLocalBloomFilter(\n server=self.server,\n bfkeypreffix = self.FILTER_KEY_FMT % (self.bfkeypreffix, self.filter_count),\n capacity=capacity,\n error_rate=self.error_rate * (1.0 - self.ratio))\n self.filter_count += 1\n self.filters.append(filter)\n if self.max_filters > 0 and len(self.filters) >= self.max_filters:\n f = self.filters[0]\n f.clear()\n del self.filters[0]\n filter.add(key, skip_check=True)\n return False", "def cfInsert(self, key, items, capacity=None, nocreate=None):\n params = [key]\n self.appendCapacity(params, capacity)\n self.appendNoCreate(params, nocreate)\n self.appendItems(params, items)\n\n return self.execute_command(self.CF_INSERT, *params)", "def add(self,key,value):\n index = self.hash(key)\n if not self._buckets[index] :\n self._buckets[index] = [[key,value]]\n else:\n self._buckets[index].append([key,value])", "def cfAdd(self, key, item):\n params = [key, item]\n \n return self.execute_command(self.CF_ADD, *params)", "def add(key, item):\n hash_key = hash_function(key)\n hash_table[hash_key - 1] = item", "def bfCreate(self, key, errorRate, capacity, expansion=None, noScale=None):\n params = [key, errorRate, capacity]\n self.appendExpansion(params, expansion)\n self.appendNoScale(params, noScale)\n\n return self.execute_command(self.BF_RESERVE, *params)", "def _save_new_item(self, key, value, is_add=False):\n if not is_add:\n if len(value) <= ITEM_MAX_SIZE:\n self.data[key] = value\n self.existence_and_usage[key] = item.InformationOfItem(True)\n else:\n self.existence_and_usage[key] = item.InformationOfItem(False)\n path_to_item = os.path.join(self.path, str(key))\n if is_add:\n self.existence_and_usage[key].use()\n if key in self.data.keys():\n if len(self.data[key]) + len(value) > ITEM_MAX_SIZE:\n self.existence_and_usage[key] = item\\\n .InformationOfItem(False)\n del self.data[key]\n else:\n self.data[key] += value\n with open(path_to_item, 'a', encoding='utf-8') as f:\n f.write(value)\n else:\n with open(path_to_item, 'w', encoding='utf-8') as f:\n f.write(value)", "def add_item(self, newKey, newData=None):\n \n if self.size() == 0:\n (self.k1, self.d1) = (newKey, newData)\n elif newKey < self.k1:\n (self.k1, self.k2, self.k3) = (newKey, self.k1, self.k2)\n (self.d1, self.d2, self.d3) = (newData, self.d1, self.d2)\n # Kinderen een plaats naar rechts opschuiven\n (self.c2, self.c3, self.c4) = (self.c1, self.c2, self.c3)\n elif self.size() == 1 or newKey < self.k2:\n (self.k2, self.k3) = (newKey, self.k2)\n (self.d2, self.d3) = (newData, self.d2)\n # Kinderen een plaats naar rechts opschuiven, vanaf key #2\n (self.c3, self.c4) = (self.c2, self.c3)\n else:\n self.k3 = newKey\n self.d3 = newData", "def 
add_item(self, key, item):\n self.dict[key] = item\n self.is_empty = False", "def test_sample_container_add_exceeds_limit(self):\n self.assertEqual(self.container._data, defaultdict(list))\n\n retval = self.container.add(\"key1\", [\"1\", \"2\", \"3\", ], 2)\n\n self.assertEqual(retval, [\"1\", \"2\", \"3\", ])\n self.assertEqual([], self.container._data[\"key1\"])", "def add(self, key):\n filter_ = self._filter\n h = self._salt.copy()\n h.update(key)\n for pos in self._fmt_unpack(h.digest()):\n filter_ |= 1 << (pos % self._m_size)\n self._filter = filter_", "def addItem(self, key):\n if key in self.dictionary:\n raise Exception(\"Key already exist in dictionary\")\n self.dictionary[key] = WordInformation(self.MAX_RATING)", "def add(self, key, value):\n self._data.append(self._Item(key, value))\n self._upheap(len(self._data) - 1) # upheap newly added position", "def add(self, key, value):\n self._data.append(self._Item(key, value))\n self._upheap(len(self._data) - 1) # upheap newly added position", "def appenddictitemsize(self, key, numents):\n self._dentsvertsdata[key].appendsize(numents * self._multFactor)", "def add(self, item):\n self.num_item += 1\n indexs = self.__get_indexs(item)\n for index in indexs:\n self.filter_bitarray[index] = True", "def add_element_to_deque(self, new_key, new_value):\n self.update_deque_node_key_and_value(self.deque_index_to_overwrite_next, new_key, new_value)\n self.update_number_experiences_in_deque()\n self.update_deque_index_to_overwrite_next()" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
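A hedged sketch of the bulk-insert path documented above: BF.INSERT can create the filter on first use with the given `capacity`/`error`, while `noCreate=True` insists the filter already exists. The `client` object and key names continue the assumptions from the first sketch.

```python
# BF.INSERT: add several items, creating the filter on demand (illustrative values).
flags = client.bfInsert('bf:tags', ['red', 'green', 'blue'],
                        capacity=10_000, error=0.001)
# The reply is a list with one 1/0 flag per item (1 = newly added).
client.bfInsert('bf:tags', ['yellow'], noCreate=True)  # errors out if 'bf:tags' is missing
```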
Checks whether an ``item`` exists in Bloom Filter ``key``.
def bfExists(self, key, item): params = [key, item] return self.execute_command(self.BF_EXISTS, *params)
[ "def item_has_key(self, item, key):\n if key in self._reverse_store[item]:\n return True\n else:\n return False", "def has_item(self, item):\n return item in self.set", "def cfExists(self, key, item):\n params = [key, item]\n \n return self.execute_command(self.CF_EXISTS, *params)", "def has_item(self, item):\n return item in self.cache", "def is_in(self, item, key=None):\n if self.root_hash == None:\n return False\n\n if key is None:\n key = item\n\n item_key = h(item)\n head_element = self.store[self.root_hash]\n return head_element.is_in(self.store, item_key, key)", "def is_item_exists(self) -> bool:\n return (\n self.get_one_or_none(\n task_id=self.task_id,\n attributes_to_get=[\n \"key\",\n ],\n )\n is not None\n )", "def has(cls, item):\n return item in cls.values()", "def bfMExists(self, key, *items):\n params = [key]\n params += items\n\n return self.execute_command(self.BF_MEXISTS, *params)", "def __contains__(self, item):\n return item in self._data", "def check_item_exists(self, item_qualifier: str) -> bool:\n pass", "def has(self, item):\n return item in self.mut", "def item_exists(self, call_number):\n return call_number in self.item_list.keys()", "def exists(self, key):\n r = self.mc.get(key)\n if r not in (None, _empty_slot):\n return True\n else:\n return self.db.exists(key)", "def contains(self, key):\n return any(bloom.contains(key) for bloom in self.blooms)", "def has_item(self, item: Inventory) -> bool:\n return (item.pk,) in self.orderitem_set.values_list('item')", "def exists(self, key):\n return key in self.store", "def has(self, item, issue):\n try:\n if issue in self[item]: return True\n else: return False\n except KeyError: pass", "def contains(self, item):\n\n if self.tree_is_empty() is True:\n return False\n return self.search(item)", "def __contains__(self, key):\n bits_per_slice = self.bits_per_slice\n hashes = self.make_hashes(key)\n pipe = self.server.pipeline(transaction=False) \n sliceIdx = 0\n for k in hashes:\n sliceKey = self.SLICE_KEY_FMT % (self.bfkeypreffix, sliceIdx)\n pipe.getbit(sliceKey, k)\n sliceIdx += 1\n getbits = pipe.execute() \n for bit in getbits:\n if not bit:\n return False\n return True" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Checks whether ``items`` exist in Bloom Filter ``key``.
def bfMExists(self, key, *items): params = [key] params += items return self.execute_command(self.BF_MEXISTS, *params)
[ "def bfExists(self, key, item):\n params = [key, item]\n \n return self.execute_command(self.BF_EXISTS, *params)", "def item_has_key(self, item, key):\n if key in self._reverse_store[item]:\n return True\n else:\n return False", "def has_item(self, item):\n return item in self.set", "def _validate_item_filter(self):\n item_list = json.load(open(os.path.join(_base_dir, 'data', 'items.json')))\n for config_item_name, bag_count in self.items_filter.iteritems():\n if config_item_name not in item_list.viewvalues():\n if config_item_name not in item_list:\n raise ConfigException(\n \"item {} does not exist, spelling mistake? (check for valid item names in data/items.json)\".format(\n config_item_name))", "def item_exists(self, call_number):\n return call_number in self.item_list.keys()", "def is_in(self, item, key=None):\n if self.root_hash == None:\n return False\n\n if key is None:\n key = item\n\n item_key = h(item)\n head_element = self.store[self.root_hash]\n return head_element.is_in(self.store, item_key, key)", "def has(cls, item):\n return item in cls.values()", "def contains(self, key):\n return any(bloom.contains(key) for bloom in self.blooms)", "def has_items(self):\n return len(self.items) != 0", "def check_item_in(self, url):\n item_hash = tools.url_hash(url)\n if item_hash not in self.__items:\n self.__item_lock.acquire()\n self.__items.add(item_hash)\n self.__item_lock.release()\n return False\n else:\n return True", "def has_item(self, item):\n return item in self.cache", "def exists(self, key):\n return key in self.store", "def __contains__(self, key):\n bits_per_slice = self.bits_per_slice\n hashes = self.make_hashes(key)\n pipe = self.server.pipeline(transaction=False) \n sliceIdx = 0\n for k in hashes:\n sliceKey = self.SLICE_KEY_FMT % (self.bfkeypreffix, sliceIdx)\n pipe.getbit(sliceKey, k)\n sliceIdx += 1\n getbits = pipe.execute() \n for bit in getbits:\n if not bit:\n return False\n return True", "def cfExists(self, key, item):\n params = [key, item]\n \n return self.execute_command(self.CF_EXISTS, *params)", "def __contains__(self, item):\n return item in self._data", "def does_contain(self, key):\n print \"Iterating over components...\"\n for key in self.componentDict:\n print \"Checking keys...\"\n if key in self.componentDict:\n return True\n\n else:\n print \"No key found\"", "def itemAvailable(self, item):\n return self.cache.has_key(item.pkgname)", "def check_processed_item(self, tender_id, item_id):\n return item_key(tender_id, item_id) in self.processed_items.keys()", "def hasItem(self, path): \n\t\treturn (path in self.items and self.items[path])" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
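The two records above cover single and batched membership checks. The sketch below (same assumed `client` as before) shows the expected replies; a 1 means "possibly present", while 0 is a definite negative.

```python
client.bfAdd('bf:users', 'alice')
print(client.bfExists('bf:users', 'alice'))          # 1 -> possibly present (false positives allowed)
print(client.bfExists('bf:users', 'mallory'))        # 0 -> definitely absent
print(client.bfMExists('bf:users', 'alice', 'bob'))  # e.g. [1, 0], one flag per item
```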
Begins an incremental save of the bloom filter ``key``. This is useful for large bloom filters which cannot fit into the normal SAVE and RESTORE model. The first time this command is called, the value of ``iter`` should be 0. This command will return successive (iter, data) pairs until (0, NULL) to indicate completion.
def bfScandump(self, key, iter): params = [key, iter] return self.execute_command(self.BF_SCANDUMP, *params)
[ "def cfScandump(self, key, iter):\n params = [key, iter]\n \n return self.execute_command(self.CF_SCANDUMP, *params)", "def flush(self, key=None):\n raise NotImplementedError", "def save(self) -> dict:\n for pair in self._buffer:\n yield pair.save()", "def _iter(self, key, count, increment=1):\n key %= self.size\n while count > 0:\n try:\n yield self.db[key]\n except KeyError:\n # This shouldn't happen, but there's really nothing we can do if it does.\n # Skip over the damaged part of our database, ignoring the missing item.\n pass\n key = (key + increment) % self.size\n count -= 1", "def _inf_iter(key):\n while True:\n for i in key:\n yield i", "def add(self, key):\n\t\t#super(CountingBloomFilter, self).add(key)\n\t\t#super(CountingBloomFilter, self).generateStats()\n\t\tfor i in self.getBitArrayIndices(key):\n\t\t\tself.ba[i] += 1\n\t\tself.n += 1", "def add(self, key):\n filter_ = self._filter\n h = self._salt.copy()\n h.update(key)\n for pos in self._fmt_unpack(h.digest()):\n filter_ |= 1 << (pos % self._m_size)\n self._filter = filter_", "def __next_key(self):\n self.current_kf_idx += 1\n if self.current_kf_idx >= len(self.key_frames)-1:\n self.__next_loop()", "def keys_fetch(self):\n with self.env.begin(write=False) as txn:\n cursor = txn.cursor()\n tot = txn.stat()['entries']\n i = 0\n\n path = self.db_path\n base_name = self.base_path\n cache_file_path = os.path.join(path, '_cache_' + base_name + '.pkl')\n print('cache_file_path = ', cache_file_path) # DEBUG\n\n if os.path.isfile(cache_file_path):\n self.keys = pickle.load(open(cache_file_path, 'rb'))\n self._num_examples = tot\n else:\n keys = []\n for key, _ in cursor:\n i += 1\n if i % 1000 == 0 or i == tot:\n print('Fetching {:>8d} /{:>8d} keys'.format(i, tot),\n end='\\r')\n keys.append(key)\n print('\\nDone.')\n self._num_examples = tot\n self.keys = np.asarray(keys)\n pickle.dump(self.keys, open(cache_file_path, 'wb'))", "def add(self, key):\n if key in self:\n return True\n if not self.filters:\n filter = RedisLocalBloomFilter(\n server=self.server, \n bfkeypreffix = self.FILTER_KEY_FMT % (self.bfkeypreffix, self.filter_count),\n capacity=self.initial_capacity,\n error_rate=self.error_rate * (1.0 - self.ratio))\n self.filter_count += 1\n self.filters.append(filter)\n else:\n filter = self.filters[-1]\n if filter.count >= filter.capacity:\n capacity = filter.capacity * self.scale\n if capacity > MAX_PER_SLICE_SIZE:\n capacity = MAX_PER_SLICE_SIZE\n filter = RedisLocalBloomFilter(\n server=self.server,\n bfkeypreffix = self.FILTER_KEY_FMT % (self.bfkeypreffix, self.filter_count),\n capacity=capacity,\n error_rate=self.error_rate * (1.0 - self.ratio))\n self.filter_count += 1\n self.filters.append(filter)\n if self.max_filters > 0 and len(self.filters) >= self.max_filters:\n f = self.filters[0]\n f.clear()\n del self.filters[0]\n filter.add(key, skip_check=True)\n return False", "def done(self, key):\n self.d[key] = 1", "def save_buffer(self, file_path='', iteration=None):\n if(iteration is not None):\n assert isinstance(iteration, int), \"iteration should be an integer\"\n else:\n iteration = 0\n with open(\"{}/buffer_{:04d}\".format(file_path, iteration), 'wb') as f:\n pickle.dump(self._buffer, f)", "def store(self,key,start,end,data):\n\n pass", "def _push_next(self):\n r = next(self._iter, None)\n if r is None:\n return\n async_ret = self._worker_pool.apply_async(\n self._worker_fn, (r, self._batchify_fn, self._dataset))\n self._data_buffer[self._sent_idx] = async_ret\n self._sent_idx += 1", "def train_bloom_filter(self, 
train_data):\n for val in train_data:\n if self.debug:\n print('val: ', val)\n for i in range(0, self.hash_size):\n k = self.hashes[i](val[0])\n if self.debug:\n print('k: ', k)\n self.bitarray[k] = 1\n if self.debug:\n print('___end training____')", "def add(self, key, timestamp=None):\n cur_bloom = self.get_active_bloom()\n cur_bloom.add(key, timestamp)", "def inc(self, key):\n # insert key or update its count\n if key in self.d:\n self.d[key] += 1\n else:\n self.d[key] = 1", "def increment(self, key: str) -> None:\n self._counter.update(Counter({key: 1}))", "def _update_append_key(self):\n self.append_key += 1" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
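The SCANDUMP record above describes an iterator protocol: start at 0 and keep calling until the server returns (0, NULL). A hedged sketch of that loop follows; the restore side (BF.LOADCHUNK) is mentioned only as context, since its wrapper does not appear in this slice of the dataset.

```python
# Dump a large Bloom filter in chunks (sketch; key name and client are assumptions).
chunks = []
cursor = 0
while True:
    cursor, data = client.bfScandump('bf:users', cursor)
    if cursor == 0:          # (0, NULL) signals completion
        break
    chunks.append((cursor, data))
# Each (iter, data) pair would later be replayed against a fresh key via BF.LOADCHUNK.
```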
Creates a new Cuckoo Filter ``key`` with an initial ``capacity`` of items.
def cfCreate(self, key, capacity, expansion=None, bucket_size=None, max_iterations=None): params = [key, capacity] self.appendExpansion(params, expansion) self.appendBucketSize(params, bucket_size) self.appendMaxIterations(params, max_iterations) return self.execute_command(self.CF_RESERVE, *params)
[ "def from_key(cls, key, quantity=1):\n\n return cls(key[0], key[1], quantity)", "def bfCreate(self, key, errorRate, capacity, expansion=None, noScale=None):\n params = [key, errorRate, capacity]\n self.appendExpansion(params, expansion)\n self.appendNoScale(params, noScale)\n\n return self.execute_command(self.BF_RESERVE, *params)", "def __init__(self, capacity, fillValue=None):\r\n self._items = list()\r\n for count in range(capacity):\r\n self._items.append(fillValue)", "def __init__(self, capacity, fillValue = None):\n\n self._logicalSize = 0\n self._items = list()\n for count in range(capacity):\n self._items.append(fillValue)\n self._logicalSize += 1", "def __init__(self, initial_capacity, error_rate, scale_factor = 2, scale_mode = SCALE_MODE_LINEAR):\n\t\tif not initial_capacity > 0: raise ValueError(\"initial_capacity must be > 0.\")\n\t\tif not (0 < error_rate < 1): raise ValueError(\"error_rate must be between 0 and 1.\")\n\t\tif not scale_factor > 0: raise ValueError(\"scale_factor must be > 0.\")\n\t\tif not scale_mode in [1, 2]: raise ValueError(\"Invalid scale_mode, use one of the SCALE_MODE_* constants.\")\n\t\t# The capacity of the first filter.\n\t\tself.initial_capacity = initial_capacity\n\t\t# Total number of elements added.\n\t\tself.element_count = 0\n\t\t# The error rate shared by all filters.\n\t\tself.error_rate = error_rate\n\t\t# The scaling factor.\n\t\tself.scale_factor = scale_factor\n\t\t# The scaling mode.\n\t\tself.scale_mode = scale_mode\n\t\t# The filters array.\n\t\tself.filters = []\n\t\tself.filters.append(BloomFilter(self.initial_capacity, self.error_rate))", "def __init__(self, capacity, initial):\n\t\tself.capacity = capacity\n\t\tself.amount = initial", "def __init__(self, capacity, fillValue = None):\n \n self._items = list() \n self._fillValue = fillValue\n self._DEFAULT_CAPACITY = capacity\n self._logicalSize = 0 #as required by exercise 1\n \n \n for count in xrange(capacity):\n self._items.append(self._fillValue)", "def __init__(self, key):\n\n def keys(key, num_rounds):\n \"\"\"Yields the permuted key bitstring for i = 1..num_rounds\"\"\"\n C, D = key[:28], key[28:]\n # Rounds are 1-indexed, so shift array over by one\n left_shifts = [None, 1, 1, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 1]\n for i in range(1, num_rounds + 1):\n # Negate each rotation to rotate left.\n C, D = rotate(C, -left_shifts[i]), rotate(D, -left_shifts[i])\n yield self.permute(C + D, self._CD_permutation)\n\n self.key = list(bits_of(key, 64))\n # Permute the key. The permutation discards the parity bits...\n self.key = self.permute(self.key, self._key_permutation)\n self.number_of_rounds = 16\n # A list of the 16 keys K1 .. 
K16, shifted over by one to allow 1-indexing.\n self.keys = [None] + list(keys(self.key, self.number_of_rounds))", "def from_key(cls, key):\n return cls(key)", "def __init__(self, k: int):\r\n self.capacity = k\r\n self.frontIndex = 0\r\n self.lastIndex = 1\r\n self.deque = [0] * self.capacity\r\n self.size = 0 # current size\r", "def new_key(self, key_name=None):\r\n return self.key_class(self, key_name)", "def __init__(self, key_len):\n self.key = [random.randint(0, 255) for i in range(key_len)]\n self.s = [i for i in range(256)]\n self.k = [self.key[i % key_len] for i in range(256)]\n j = 0\n for i in range(256):\n j = (j + self.s[i] + self.k[i]) % 256\n self.s[i], self.s[j] = self.s[j], self.s[i]\n self.key_stream = self._get_key_stream()", "def __init__(self, capacity):\n self.capacity = capacity\n # empty cache\n self.cache = {}\n self.head = None\n self.tail = None", "def __init__(self, key, default=NOT_GIVEN):\n self.key = adapt(key,IComponentKey)\n self.default = default", "def __init__(self, knapsack_size, items):\n self.knapsack_size = knapsack_size\n self.items = items\n self._cache = dict()\n # fill-in the cache with base cases' (subproblems') solutions\n for size in range(knapsack_size + 1):\n # if there are no items, the max value is 0\n self._cache[(0, size)] = 0\n for end in range(len(items) + 1):\n # if the knapsack's size is 0 no items fit, the max value is 0\n self._cache[(end, 0)] = 0", "def __init__(self, key):\n self.block_size = 16\n self.cipher = Cipher(algorithms.AES(key), modes.ECB(), default_backend())", "def __init__(self, capacity: int) -> None:\n self.capacity = capacity # 해시 테이블의 크기를 지정\n self.table = [None] * self.capacity # 해시 테이블(리스트)을 선언", "def _newKey(self, key):\n pass", "def create_filter(key, value):\n\n collection = NameValueCollection()\n collection.Add(key, value)\n if value:\n return collection\n else:\n return None" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Adds an ``item`` to a Cuckoo Filter ``key``.
def cfAdd(self, key, item): params = [key, item] return self.execute_command(self.CF_ADD, *params)
[ "def bfAdd(self, key, item):\n params = [key, item]\n \n return self.execute_command(self.BF_ADD, *params)", "def add_item(self, key, item):\n self.dict[key] = item\n self.is_empty = False", "def _m_add_items_filter(filter):", "def add_item(self, key, item):\n self[key].add(item)\n try:\n self._reverse_store[item].add(key)\n except KeyError:\n self._reverse_store[item] = set([key])", "def add(key, item):\n hash_key = hash_function(key)\n hash_table[hash_key - 1] = item", "def add(self, key):\n filter_ = self._filter\n h = self._salt.copy()\n h.update(key)\n for pos in self._fmt_unpack(h.digest()):\n filter_ |= 1 << (pos % self._m_size)\n self._filter = filter_", "def add(self, item):\n self.num_item += 1\n indexs = self.__get_indexs(item)\n for index in indexs:\n self.filter_bitarray[index] = True", "def update_item(self, key, item):\n self.dict[key]= item", "def add_item (self, item):\n new_item = CacheItem (item)\n cached = self.cache.get(hash(item))\n if cached is None:\n self.evict_or_add (new_item)\n cached.hits += 1", "def add(self, item):\n self.cache.append(item)", "def _ireplace(self, key, item):\n self.items[key] = item\n return self", "def add(self, key):\n if key in self:\n return True\n if not self.filters:\n filter = RedisLocalBloomFilter(\n server=self.server, \n bfkeypreffix = self.FILTER_KEY_FMT % (self.bfkeypreffix, self.filter_count),\n capacity=self.initial_capacity,\n error_rate=self.error_rate * (1.0 - self.ratio))\n self.filter_count += 1\n self.filters.append(filter)\n else:\n filter = self.filters[-1]\n if filter.count >= filter.capacity:\n capacity = filter.capacity * self.scale\n if capacity > MAX_PER_SLICE_SIZE:\n capacity = MAX_PER_SLICE_SIZE\n filter = RedisLocalBloomFilter(\n server=self.server,\n bfkeypreffix = self.FILTER_KEY_FMT % (self.bfkeypreffix, self.filter_count),\n capacity=capacity,\n error_rate=self.error_rate * (1.0 - self.ratio))\n self.filter_count += 1\n self.filters.append(filter)\n if self.max_filters > 0 and len(self.filters) >= self.max_filters:\n f = self.filters[0]\n f.clear()\n del self.filters[0]\n filter.add(key, skip_check=True)\n return False", "def add_to_inventory(self, item, quantity):\n\t\tincreaseQuantity = None\n\t\taddToDict = True\n\t\tfor key in self.inventoryDictionary:\n\t\t\tif key.name == item.name:\n\t\t\t\taddToDict = False\n\t\t\t\tincreaseQuantity = key\n\t\t\t\tbreak\n\t\t\t\t\n\n\t\t\telse:\n\t\t\t\taddToDict = True\n\t\t\t\t\n\n\t\tif addToDict:\n\t\t\tself.inventoryDictionary[item] = quantity\n\t\telse:\n\t\t\tself.inventoryDictionary[increaseQuantity] += quantity", "def addItem(self, item):\n\n # create new ItemScore and dictionary entries from ItemScore\n newEntry = self.ItemScore(item)\n newDictEntry = corpora.Dictionary([self.preprocess(str(newEntry))])\n\n # add new entries to items and dictionary, update flags\n self.itemScores.append(newEntry)\n self.dictionary.merge_with(newDictEntry)", "def add(self, item, issue):\n if self.has_key(item):\n self[item].append(issue)\n else:\n self[item] = [issue]\n return 1", "def append(self, key, value):\n pass", "def add(self, item, issue):\n if self.has_key(item): self[item].append(issue)\n else: self[item] = [issue]\n return True", "def replace(self, key, item):\n return ListDispatch(\n None,\n self.items[:key] +\n [item] +\n self.items[key + 1:]\n )", "def put(self, key, item):\n if key is None or item is None:\n return\n self.cache_data[key] = item" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
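The two preceding records document CF.RESERVE and CF.ADD. A small sketch under the same client assumptions as the earlier examples:

```python
client.cfCreate('cf:sessions', 10_000)     # reserve a cuckoo filter sized for ~10k items
client.cfAdd('cf:sessions', 'session:42')  # 1 on success
```

Unlike Bloom filters, cuckoo filters also support deletion, which is covered by the CF.DEL record further down.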
Adds multiple ``items`` to a Cuckoo Filter ``key``, allowing the filter to be created with a custom ``capacity`` if it does not yet exist. ``items`` must be provided as a list.
def cfInsert(self, key, items, capacity=None, nocreate=None): params = [key] self.appendCapacity(params, capacity) self.appendNoCreate(params, nocreate) self.appendItems(params, items) return self.execute_command(self.CF_INSERT, *params)
[ "def _m_add_items_filter(filter):", "def add_items(self, *items):\n self.items.extend(items)", "def append_items(params, items):\n params.extend([\"ITEMS\"])\n params += items", "def cfAdd(self, key, item):\n params = [key, item]\n \n return self.execute_command(self.CF_ADD, *params)", "def bfAdd(self, key, item):\n params = [key, item]\n \n return self.execute_command(self.BF_ADD, *params)", "def add_items(self, items):\n for item in items:\n self.add(item)", "def addItems(c, items):\n\t\tcontainer.containersToSave[c['id_item_container']] = item.inventory.addItems(\n\t\t\titem.inventory.fromStr(c['items']),\n\t\t\titems\n\t\t)", "def bfInsert(self, key, items, capacity=None, error=None, noCreate=None, expansion=None, noScale=None):\n params = [key]\n self.appendCapacity(params, capacity)\n self.appendError(params, error)\n self.appendExpansion(params, expansion)\n self.appendNoCreate(params, noCreate)\n self.appendNoScale(params, noScale)\n self.appendItems(params, items)\n\n return self.execute_command(self.BF_INSERT, *params)", "def add(self, *items):\n for item in items:\n self.unsorted.append(item)\n key = item[0]\n self.index[key] = item\n return self", "def add_items(self, *items: Item) -> \"Channel\":\n self._items.extend(items)\n return self", "def addItems(self, items):\n\n # create new ItemScores and dictionary entries from ItemScore\n newEntries = [self.ItemScore(item) for item in items]\n newDictEntries = corpora.Dictionary(\n [self.preprocess(str(newScore)) for newScore in newEntries])\n\n # add new entries to items and dictionary, update flags\n self.itemScores = self.itemScores + newEntries\n self.dictionary.merge_with(newDictEntries)", "def add_items(self, items):\n for item in items:\n self.addItem(item)\n # end for item in items", "def append(self, *items: BOSminer) -> None:\n for item in items:\n self.miners[item.ip] = item", "def add(self, item):\n self.num_item += 1\n indexs = self.__get_indexs(item)\n for index in indexs:\n self.filter_bitarray[index] = True", "def update(self, items: Mapping[Any, Any]) -> None:\n self.extend(list(items.values()))\n return", "def addToWatchlist(self, items):\n if not isinstance(items, list):\n items = [items]\n\n for item in items:\n if self.onWatchlist(item):\n raise BadRequest(f'\"{item.title}\" is already on the watchlist')\n ratingKey = item.guid.rsplit('/', 1)[-1]\n self.query(f'{self.METADATA}/actions/addToWatchlist?ratingKey={ratingKey}', method=self._session.put)\n return self", "def add(self, key):\n if key in self:\n return True\n if not self.filters:\n filter = RedisLocalBloomFilter(\n server=self.server, \n bfkeypreffix = self.FILTER_KEY_FMT % (self.bfkeypreffix, self.filter_count),\n capacity=self.initial_capacity,\n error_rate=self.error_rate * (1.0 - self.ratio))\n self.filter_count += 1\n self.filters.append(filter)\n else:\n filter = self.filters[-1]\n if filter.count >= filter.capacity:\n capacity = filter.capacity * self.scale\n if capacity > MAX_PER_SLICE_SIZE:\n capacity = MAX_PER_SLICE_SIZE\n filter = RedisLocalBloomFilter(\n server=self.server,\n bfkeypreffix = self.FILTER_KEY_FMT % (self.bfkeypreffix, self.filter_count),\n capacity=capacity,\n error_rate=self.error_rate * (1.0 - self.ratio))\n self.filter_count += 1\n self.filters.append(filter)\n if self.max_filters > 0 and len(self.filters) >= self.max_filters:\n f = self.filters[0]\n f.clear()\n del self.filters[0]\n filter.add(key, skip_check=True)\n return False", "def add_keys(self, keys):\n filter_ = self._filter\n salt_copy = self._salt.copy\n m_size 
= self._m_size\n fmt_unpack = self._fmt_unpack\n\n for key in keys:\n assert isinstance(key, str)\n h = salt_copy()\n h.update(key)\n\n # 04/05/12 Boudewijn: using a list instead of a generator is significantly faster.\n # while generators are more memory efficient, this list will be relatively short.\n # 07/05/12 Niels: using no list at all is even more efficient/faster\n for pos in fmt_unpack(h.digest()):\n filter_ |= 1 << (pos % m_size)\n\n self._filter = filter_", "def addFilter(self, **kwargs):\n if not kwargs:\n return\n newFilt = Filter()\n for name, options in kwargs.items():\n for oper, val in options:\n newFilt.addRequirement(name, oper, val)\n self.append(newFilt)\n log.debug(\"Current filters: {s}\".format(s=str(self)))\n self._runCallbacks()" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
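A sketch of the bulk path documented above; as with BF.INSERT, the filter is created on demand unless `nocreate` is set. The key and capacity values are illustrative.

```python
replies = client.cfInsert('cf:ips', ['10.0.0.1', '10.0.0.2'], capacity=50_000)
# `replies` holds one status flag per item; passing nocreate=True instead would
# make the command error out when the filter does not already exist.
```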
Checks whether an ``item`` exists in Cuckoo Filter ``key``.
def cfExists(self, key, item): params = [key, item] return self.execute_command(self.CF_EXISTS, *params)
[ "def bfExists(self, key, item):\n params = [key, item]\n \n return self.execute_command(self.BF_EXISTS, *params)", "def item_has_key(self, item, key):\n if key in self._reverse_store[item]:\n return True\n else:\n return False", "def has_item(self, item):\n return item in self.cache", "def has_item(self, item):\n return item in self.set", "def is_item_exists(self) -> bool:\n return (\n self.get_one_or_none(\n task_id=self.task_id,\n attributes_to_get=[\n \"key\",\n ],\n )\n is not None\n )", "def __contains__(self, item):\n return item in self._data", "def has(cls, item):\n return item in cls.values()", "def is_in(self, item, key=None):\n if self.root_hash == None:\n return False\n\n if key is None:\n key = item\n\n item_key = h(item)\n head_element = self.store[self.root_hash]\n return head_element.is_in(self.store, item_key, key)", "def __contains__(self, key):\n try: self._item(key)\n except KeyValuePair.DoesNotExist: \n if self._parent != None: return self.parent.__contains__(key)\n else: return False\n return True", "def exists(self, key):\n return key in self.store", "def does_contain(self, key):\n print \"Iterating over components...\"\n for key in self.componentDict:\n print \"Checking keys...\"\n if key in self.componentDict:\n return True\n\n else:\n print \"No key found\"", "def hasItem(self, path): \n\t\treturn (path in self.items and self.items[path])", "def check_item_exists(self, item_qualifier: str) -> bool:\n pass", "def item_exists(self, call_number):\n return call_number in self.item_list.keys()", "def has(self, item):\n return item in self.mut", "def data_exists( self, key ):\n assert type( key ) == str, 'Key expected type {0}, got {1}'.format( str, type( key ) )\n\n return key in self._data", "def exists(self, key):\n r = self.mc.get(key)\n if r not in (None, _empty_slot):\n return True\n else:\n return self.db.exists(key)", "def has(self, item, issue):\n try:\n if issue in self[item]: return True\n else: return False\n except KeyError: pass", "def has(self, key):\n return False" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Deletes ``item`` from ``key``.
def cfDel(self, key, item): params = [key, item] return self.execute_command(self.CF_DEL, *params)
[ "def delete_item(self, item_id):\n pass", "def delete_item(self, item):\r\n item.delete_item_from_room(self)", "def __delitem__(self, key):\r\n self.client.delete(id=key, ignore=[404], **self.kwargs)", "def delete_dynamo_item(dynamo_client, *, table_name, key):\n dynamo_client.delete_item(TableName=table_name, Key=key)", "def __delitem__(self, key):\n temp = self.find(key)\n if temp is None:\n raise KeyError(repr(key))\n self.remove(temp)", "def del_item(self, item):\n index = self.board[item.pos[0]][item.pos[1]].index(item)\n del self.board[item.pos[0]][item.pos[1]][index]", "def delete_item(self, key):\n self.is_empty = True if len(self.dict) is 1 else False\n return self.dict.pop(key)", "def __delitem__(self, key):\n i, kv_pair = self._lookup(key, self._backing)\n if kv_pair and not kv_pair.value is Hashmap.absent:\n self._backing[i] = KeyValue(key, Hashmap.absent)\n self._deleted += 1\n\n size = len(self._backing)\n utilization = (self._used - self._deleted)/size \n if utilization < 0.16:\n self._resize(self._decr_size(size))\n else:\n raise KeyError('no such item!')", "def delete(self, item):\r\n self.fetch()\r\n t = self.make_item_tuple(item)\r\n changed = False\r\n while t in self.data:\r\n self.data.remove(t)\r\n changed = True\r\n \r\n if changed:\r\n query_cache.set(self.iden, self.data)", "def __delitem__(self, key):\n key = to_bytestring(key)\n indices = []\n for (i, (k, v)) in enumerate(self._items):\n if _keys_equal(k, key):\n indices.append(i)\n\n if not indices:\n raise KeyError(\"Nonexistent header key: {}\".format(key))\n\n for i in indices[::-1]:\n self._items.pop(i)", "def __delitem__(self, key: Union[int, str, Tuple[Union[str, int], ...]]):\n logger.debug(f'Deleting key {key}')\n if isinstance(key, (int, str)):\n key = (key,)\n logger.debug(f'Key normalized to {key}')\n if not isinstance(key, tuple):\n raise ValueError(f'Invalid key type: {type(key)}')\n if len(key) == 0:\n raise ValueError(f'An empty tuple cannot be a key')\n elif len(key) > 1:\n # delegate the deletion to a subConfig one level below us in the hierarchy\n if not isinstance(self._data[key[0]], Config):\n raise ValueError(f'Invalid path in key {key}')\n del self._data[key[0]][key[1:]]\n else:\n assert len(key) == 1\n if isinstance(self._data[key[0]], Config):\n # deleting a sub-config. Disconnect the changed signal first.\n try:\n self._data[key[0]].changed.disconnect(self._subConfigChanged)\n except RuntimeError:\n # sometimes the underlying C/C++ object vanishes first. Why???\n pass\n del self._data[key[0]]\n self.autosave()", "def delete(self, item):\n # eg. 
node=item to attrs, telling item type to Graphviz._setattr\n self.graph._del(self.parent.handle, **{self.type: item})", "def __delitem__(self, key):\n\n # Has the key already been deleted?\n if key in self._deleted:\n raise KeyError(key)\n\n # If we have a local override, delete that; otherwise, verify\n # it exists in the parent (if we have one)\n if key in self._values:\n del self._values[key]\n elif not self._parent or key not in self._parent:\n raise KeyError(key)\n\n # Add the key to the deleted set\n self._deleted.add(key)", "def delete(self, key):\n path_to_item = os.path.join(self.path, str(key))\n os.remove(path_to_item)\n if self.existence_and_usage[key].is_data_in_mem:\n del self.data[key]\n del self.existence_and_usage[key]", "def __delitem__(self, key:str) -> None:\n\t\tself.delAttribute(key)", "def remove_item(self, item):\n self.items.pop(item.name)", "def remove_item(self, assessment_id, item_id):\n pass", "def __delitem__(self, key):\n if key not in EntryListColumn.list():\n dict.__delitem__(key)\n elif key == EntryListColumn.ID:\n raise KeyError('Cannot delete key: ' + EntryListColumn.ID)\n else:\n self[key] = ''", "def _bucket_delitem(self, j, k):\n pass" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
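Taken together with the CF.EXISTS record above, the delete wrapper completes the cuckoo-filter lifecycle. A hedged sketch:

```python
client.cfAdd('cf:sessions', 'session:42')
print(client.cfExists('cf:sessions', 'session:42'))  # 1
client.cfDel('cf:sessions', 'session:42')            # removes one copy of the item
print(client.cfExists('cf:sessions', 'session:42'))  # 0
```

Deletion should only be applied to items that were actually added; deleting a never-added item can evict another item's fingerprint and introduce false negatives.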
Begins an incremental save of the Cuckoo filter ``key``. This is useful for large Cuckoo filters which cannot fit into the normal SAVE and RESTORE model. The first time this command is called, the value of ``iter`` should be 0. This command will return successive (iter, data) pairs until (0, NULL) to indicate completion.
def cfScandump(self, key, iter): params = [key, iter] return self.execute_command(self.CF_SCANDUMP, *params)
[ "def bfScandump(self, key, iter):\n params = [key, iter]\n \n return self.execute_command(self.BF_SCANDUMP, *params)", "def flush(self, key=None):\n raise NotImplementedError", "def save(self) -> dict:\n for pair in self._buffer:\n yield pair.save()", "def _iter(self, key, count, increment=1):\n key %= self.size\n while count > 0:\n try:\n yield self.db[key]\n except KeyError:\n # This shouldn't happen, but there's really nothing we can do if it does.\n # Skip over the damaged part of our database, ignoring the missing item.\n pass\n key = (key + increment) % self.size\n count -= 1", "def _inf_iter(key):\n while True:\n for i in key:\n yield i", "def __next_key(self):\n self.current_kf_idx += 1\n if self.current_kf_idx >= len(self.key_frames)-1:\n self.__next_loop()", "def done(self, key):\n self.d[key] = 1", "def add(self, key):\n\t\t#super(CountingBloomFilter, self).add(key)\n\t\t#super(CountingBloomFilter, self).generateStats()\n\t\tfor i in self.getBitArrayIndices(key):\n\t\t\tself.ba[i] += 1\n\t\tself.n += 1", "def add(self, key):\n filter_ = self._filter\n h = self._salt.copy()\n h.update(key)\n for pos in self._fmt_unpack(h.digest()):\n filter_ |= 1 << (pos % self._m_size)\n self._filter = filter_", "def keys_fetch(self):\n with self.env.begin(write=False) as txn:\n cursor = txn.cursor()\n tot = txn.stat()['entries']\n i = 0\n\n path = self.db_path\n base_name = self.base_path\n cache_file_path = os.path.join(path, '_cache_' + base_name + '.pkl')\n print('cache_file_path = ', cache_file_path) # DEBUG\n\n if os.path.isfile(cache_file_path):\n self.keys = pickle.load(open(cache_file_path, 'rb'))\n self._num_examples = tot\n else:\n keys = []\n for key, _ in cursor:\n i += 1\n if i % 1000 == 0 or i == tot:\n print('Fetching {:>8d} /{:>8d} keys'.format(i, tot),\n end='\\r')\n keys.append(key)\n print('\\nDone.')\n self._num_examples = tot\n self.keys = np.asarray(keys)\n pickle.dump(self.keys, open(cache_file_path, 'wb'))", "def add(self, key):\n if key in self:\n return True\n if not self.filters:\n filter = RedisLocalBloomFilter(\n server=self.server, \n bfkeypreffix = self.FILTER_KEY_FMT % (self.bfkeypreffix, self.filter_count),\n capacity=self.initial_capacity,\n error_rate=self.error_rate * (1.0 - self.ratio))\n self.filter_count += 1\n self.filters.append(filter)\n else:\n filter = self.filters[-1]\n if filter.count >= filter.capacity:\n capacity = filter.capacity * self.scale\n if capacity > MAX_PER_SLICE_SIZE:\n capacity = MAX_PER_SLICE_SIZE\n filter = RedisLocalBloomFilter(\n server=self.server,\n bfkeypreffix = self.FILTER_KEY_FMT % (self.bfkeypreffix, self.filter_count),\n capacity=capacity,\n error_rate=self.error_rate * (1.0 - self.ratio))\n self.filter_count += 1\n self.filters.append(filter)\n if self.max_filters > 0 and len(self.filters) >= self.max_filters:\n f = self.filters[0]\n f.clear()\n del self.filters[0]\n filter.add(key, skip_check=True)\n return False", "def next_window(self) -> Iterator[Optional[np.ndarray]]:\n while self._count >= self._window_width:\n # Preserve what we want to return by copying it.\n p1 = np.copy(self._data_store[:self._window_width, :])\n\n # Remove the data we don't need any more from the front of the buffer.\n frames_to_keep = self._count - self._window_step\n self._data_store[:frames_to_keep,\n :] = self._data_store[self._window_step:self._count, :]\n self._count -= self._window_step\n yield p1", "def store(self,key,start,end,data):\n\n pass", "def increment(self, key: str) -> None:\n self._counter.update(Counter({key: 1}))", "def 
filter_keys(self):\n filters = self.args.keyfilter.split('.')\n self.logger.info(u'Filtering with:{f}'.format(f=filters))\n data = self.inputdata\n newdata = {}\n for key, value in data.items():\n self.logger.info(u'\\nProcessing Key:{k}'.format(k=key))\n returned_data = dict_key_filter(key, value, filters, self.logger)\n if bool(returned_data):\n newdata[key] = returned_data\n self.logger.info(u'Data After filter:{d}'.format(d=newdata))\n self.outputdata = newdata", "def post_prepared_commit(self, key, prepared):\n docs = self.__splitprepared(prepared)\n docs[0][\"key\"] = key\n return self.client.post_commit(docs[0], docs[1])", "def key_lookup_batch(self, batchiter):\n pass", "def test_cache_incremented_by_key(self, cache):\n from furious.batcher import bump_batch\n\n cache.incr.return_value = 2\n\n val = bump_batch('group')\n\n self.assertEqual(val, 2)\n\n cache.incr.assert_called_once_with('agg-batch-group')", "def dry_run_iter(self, prepare=False, save_ungeocoded_addresses=False):\n import pprint\n self.is_dry_run = True\n self.cache_retriever = CacheRetriever(self)\n self.start_time = datetime.datetime.now()\n self.start_date = self.start_time.date()\n if save_ungeocoded_addresses:\n self.ungeocoded_addresses = {}\n self.geocode = self.geocode_and_log\n\n try:\n for datadict in self.data():\n if prepare:\n datadict = self.prepare_data(datadict)\n pprint.pprint(datadict)\n yield datadict\n finally:\n self.clear_cache()\n\n self.logger.info('Geocoding succeeded/attempted: {0}/{1}'.format(self.num_geocode_succeeded, self.num_geocode_attempted))\n if save_ungeocoded_addresses:\n self.create_geocoding_report()" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Initializes a CountMin Sketch ``key`` to the dimensions (``width``, ``depth``) specified by the user.
def cmsInitByDim(self, key, width, depth): params = [key, width, depth] return self.execute_command(self.CMS_INITBYDIM, *params)
[ "def __init__(self, ks=None):\n self.ks = ks\n self.N = ks.N if ks is not None else None\n self.L = ks.d if ks is not None else None\n # pass", "def __init__(self, size, fill = 0):\n dict.__init__(self)\n\n if fill != -1:\n for x in xrange(size):\n for y in xrange(size):\n self[x,y] = fill\n\n self.size = size\n self.water_z = 0", "def __init__(self, width, height):\n super().__init__(width, height)\n self.initialize_dungeon(tile = 0) # Initializes a dungeon arr of 0's", "def __init__(self, W_, H_, C_):\n self.width = W_\n self.height = H_\n self.C = C_\n\n #self.sketch = np.random.random((C_, H_, W_))\n #self.sketch = self.sketch / np.sum(self.sketch)\n self.sketch = np.zeros((C_, H_, W_))", "def minKey(key=None):", "def __init__(self, minKey: long, maxKey: long):\n ...", "def DEFAULT_MIN_DEPTH(self): # real signature unknown; restored from __doc__\n pass", "def __init__(self, elements, density, width, phase=0, name=None):\n self.width = width\n self.name = name\n super(Layer, self).__init__(elements, density, phase)", "def initialize_unbounded(obj, dimensions, key):\n select = dict(zip([d.name for d in dimensions], key))\n try:\n obj.select(selection_specs=[DynamicMap], **select)\n except KeyError:\n pass", "def __init__(self, key, owner, **kwargs):\n super(PathGeneratorStage, self).__init__(\n key, owner,\n label='Path Generation',\n **kwargs)\n self._paths = []\n self._points = []\n self._minStrideLength = 1.0e8", "def __init__(self, width, height):\n self.integer_validator(\"width\", width)\n self.__width = width\n self.integer_validator(\"height\", height)\n self.__height = height", "def __init__(self, key):\n\n def keys(key, num_rounds):\n \"\"\"Yields the permuted key bitstring for i = 1..num_rounds\"\"\"\n C, D = key[:28], key[28:]\n # Rounds are 1-indexed, so shift array over by one\n left_shifts = [None, 1, 1, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 1]\n for i in range(1, num_rounds + 1):\n # Negate each rotation to rotate left.\n C, D = rotate(C, -left_shifts[i]), rotate(D, -left_shifts[i])\n yield self.permute(C + D, self._CD_permutation)\n\n self.key = list(bits_of(key, 64))\n # Permute the key. The permutation discards the parity bits...\n self.key = self.permute(self.key, self._key_permutation)\n self.number_of_rounds = 16\n # A list of the 16 keys K1 .. K16, shifted over by one to allow 1-indexing.\n self.keys = [None] + list(keys(self.key, self.number_of_rounds))", "def __init__(self, kernel_size):\r\n super().__init__()\r\n self.kernel_size = kernel_size", "def __init__( self, name, problem, kwargs ):\n MiniAppBase.__init__( self, name, problem, kwargs )\n self.set_default_attr( 'dim', problem.get_dim() )", "def MinHks(N): \n return EntropyKS(nx.Graph([(i,i+1) for i in range(N-1)]))", "def __init__(self,Nx=24,Ny=24,kx0=-pi/3.,ky0=0.,kxmax=pi,kymax=2.*pi/np.sqrt(3.)):\n self.Nx=Nx\n self.Ny=Ny\n self.kx0=kx0\n self.ky0=ky0\n self.kxmax=kxmax\n self.kymax=kymax\n self.dkx=(kxmax-kx0)/float(Nx)\n self.dky=(kymax-ky0)/float(Ny)", "def __init__(self, k: int):\n self.length: int = k\n self.left = self.right = -1\n self.deque = [None] * self.length", "def __init__(self, hash_name, projection_count, bin_width):\r\n super(RandomDiscretizedProjections, self).__init__(hash_name)\r\n self.projection_count = projection_count\r\n self.dim = None\r\n self.vectors = None\r\n self.bin_width = bin_width", "def __init__(self, k: int):\n self.__start = 0\n self.__size = 0\n self.__buffer = [0] * k\n self.__capacity = k\n # self.queueLock = Lock()" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Initializes a CountMin Sketch ``key`` to the characteristics (``error``, ``probability``) specified by the user.
def cmsInitByProb(self, key, error, probability): params = [key, error, probability] return self.execute_command(self.CMS_INITBYPROB, *params)
[ "def __init__(self, probability, nodeKeys):\n self.probability = float(probability)\n self.nodeKeys = nodeKeys", "def set_min_prob(self, disease, probability):\n self.min_probs[disease] = probability", "def __init__(self, ks=None):\n self.ks = ks\n self.N = ks.N if ks is not None else None\n self.L = ks.d if ks is not None else None\n # pass", "def minKey(key=None):", "def __init__(self, k: int) -> None:\n\n assert k > 2, \"for k = 2 use Bernoulli distribution.\"\n\n self.k = k", "def __init__(self, keypoints, monotonicity, missing_value=11.11):\n super(Calibrator, self).__init__()\n xp = torch.tensor(keypoints, dtype=torch.float32)\n self.register_buffer(\"offset\", xp[:, :1].clone().detach())\n self.register_buffer(\"scale\", (xp[:, -1:] - self.offset).clone().detach())\n xp = (xp - self.offset) / self.scale\n self.register_buffer(\"keypoints\", xp)\n self.register_buffer(\"monotonicity\", torch.tensor(monotonicity, dtype=torch.float32).unsqueeze(0))\n self.missing_value = missing_value\n yp = xp[:, 1:] - xp[:, :-1]\n # [C, K - 1]\n self.yp = torch.nn.Parameter(yp, requires_grad=True)\n # [1, C]\n self.missing_y = torch.nn.Parameter(torch.zeros_like(xp[:, 0]).unsqueeze(0), requires_grad=True)", "def __init__(self, top_k: int):\n self._topk_acc_dict: Dict[int, Mean] = defaultdict(Mean)\n self.top_k: int = top_k\n\n self.__torchmetrics_requires_task = version.parse(\n torchmetrics.__version__\n ) >= version.parse(\"0.11.0\")", "def initialize(self):\r\n self.answered_queries = 0\r\n self.key = random_string(self.key_len)\r\n self.ciphertexts = []\r\n self.win = False", "def initialize(self, n_topics, prior):\n print(\"Initializing...\")\n\n # Coverage distribution P(l|k) initialization\n self.coverage_dist = np.random.random_sample((self.n_docs, n_topics))\n self.coverage_dist = normalize(self.coverage_dist)\n\n # Topic distribution P(w|theta) initialization\n self.topic_dist = np.random.random_sample((n_topics, self.n_words, self.n_views))\n self.topic_dist = normalize(self.topic_dist)\n\n # View probability P(v|D,C) initialization\n self.view_prob = np.random.random_sample((self.n_docs, self.n_views))\n\n # Account for view features by setting probabilities not related to document to zero (using bit array)\n self.view_prob = np.einsum('ij,ij->ij', self.view_prob, self.views)\n\n # Set the global prior artificially high (based on prior arg) then normalize\n self.view_prob[:,0] = np.repeat([prior], self.n_docs)\n self.view_prob = normalize(self.view_prob)", "def __init__(self, key: str, name: str, *probability_values: float, **probability_dependents: Node) -> None:\n if probability_dependents:\n assert len(probability_values) >= 2 ** len(probability_dependents.keys()\n ), f\"Not enough truth table values given for amount of dependencies: {len(probability_values)} of {2 ** len(probability_dependents.keys())}\"\n self.key = key\n self.name = name\n self.probability_links = probability_dependents\n self.probability_values = probability_values\n self.used_by = list()", "def initializeFromDict(self, inputDict):\n self.strategy = inputDict['strategy']\n self.categoricalDist = Categorical()\n self.categoricalDist.initializeFromDict(inputDict)\n initialPerm = randomUtils.randomPermutation(inputDict['outcome'].tolist(),self)\n self.pot = np.asarray(initialPerm)", "def initilize(self, mu, sig, p0):\n init = gaussian(self.x, mu, sig, p0)\n init /= np.linalg.norm(init)\n tab = {}\n for i in range(len(self.x)):\n tab[binary(i, self.n)] = init[i]\n self.initial = tab\n # initilize\n 
self.circ.initialize(init, self.q)", "def test_prediction_key_required(self):\n self._config['Prediction key'] = ''\n with self.assertRaisesRegex(ValueError,\n 'Please provide the prediction key'):\n self._gen.generate(\n example=self._example,\n model=self._model,\n dataset=self._dataset,\n config=self._config)", "def __init__(self, H, k):\n self._H = H\n self._k = k", "def initialize(self):\n # FIX: INITIALIZE PROCESS INPUTS??\n for mech, value in self.initial_values.items():\n mech.initialize(value)", "def __init__(self, prior_dict):\n\n self.prior_dict = prior_dict.copy()\n \n self.n_params = len(self.prior_dict.keys()) # number of parameters\n\n self.ordered_keys = prior_dict.keys()\n self.ordered_keys.sort()\n\n self.priorz = self.prior()", "def __init__(self, key, default=NOT_GIVEN):\n self.key = adapt(key,IComponentKey)\n self.default = default", "def __init__(self, key, method_name):\n Determinant.check_init(key, method_name)\n self.key = key\n self.method_name = method_name", "def __init__(self, goal=0, kP=1, kI=1, kD=1, init_pt=0):\n self._pid_lock = threading.Lock()\n\n self.set_goal(goal)\n self.reset(init_pt)\n self.set_gains({\n PIDController.KP_KEY: kP,\n PIDController.KI_KEY: kI,\n PIDController.KD_KEY: kD\n })" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
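The two initializers above size a Count-Min sketch either explicitly or from an accuracy target. A sketch with assumed key names and parameter values:

```python
client.cmsInitByDim('cms:clicks', 2000, 5)      # 2000 counters per row, 5 rows
client.cmsInitByProb('cms:views', 0.001, 0.01)  # target over-count error ~0.1%, exceeded with ~1% probability
```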
Adds/increases ``items`` in a CountMin Sketch ``key`` by ``increments``. Both ``items`` and ``increments`` are lists. Example: cmsIncrBy('A', ['foo'], [1])
def cmsIncrBy(self, key, items, increments): params = [key] self.appendItemsAndIncrements(params, items, increments) return self.execute_command(self.CMS_INCRBY, *params)
[ "def append_items_and_increments(params, items, increments):\n for i in range(len(items)):\n params.append(items[i])\n params.append(increments[i])", "def inc(self, key, value=1):\n return self.increment({key:value})[key]", "def incr(x_c, x, inc=1):\n x_c[x] = x_c.get(x, 0) + inc", "def GetContinuousIncrements(self, p_int=..., p_int=..., p_int=..., p_int=..., p_int=..., p_int=..., *args, **kwargs):\n ...", "def incr(self, key, delta=1):\n\t\treturn self._incrdecr(\"incr\", key, delta)", "def inc(self, key):\n # insert key or update its count\n if key in self.d:\n self.d[key] += 1\n else:\n self.d[key] = 1", "def hincrby(self, *args):\n if self._cluster:\n return self.execute(u'HINCRBY', *args, shard_key=args[0])\n return self.execute(u'HINCRBY', *args)", "def increment(self, key: str) -> None:\n self._counter.update(Counter({key: 1}))", "def GetIncrements(self):\n ...", "def increment(self, key):\n self.counters[self.name][key] += 1", "def incr_proof_item(item, start, n):\n item.id = incr_id_after(item.id, start, n)\n item.prevs = [incr_id_after(id, start, n) for id in item.prevs]\n if item.subproof:\n for subitem in item.subproof.items:\n incr_proof_item(subitem, start, n)", "def inc(arg):\n return Shuffle(ShuffleType.Increment, arg)", "def increment(self, a: int, b: int):", "def __iadd__(self, increment):\n self.update(self.val + increment)\n return self", "async def increment_for(self, item: DocItem) -> int:\n key = f\"{self.namespace}:{item_key(item)}:{item.symbol_id}\"\n await self.redis_session.client.expire(key, WEEK_SECONDS * 3)\n return int(await self.redis_session.client.incr(key))", "async def increment_for(self, item: DocItem) -> int:\n key = f\"{self.namespace}:{item_key(item)}:{item.symbol_id}\"\n with await self._get_pool_connection() as connection:\n await connection.expire(key, WEEK_SECONDS * 3)\n return int(await connection.incr(key))", "def count_items(xs, x_c=None):\n if x_c == None:\n x_c = {}\n for x in xs:\n incr(x_c, x)\n return x_c", "def zincrby(self, *args):\n if self._cluster:\n return self.execute(u'ZINCRBY', *args, shard_key=args[0])\n return self.execute(u'ZINCRBY', *args)", "def incr(self, n = 1):\n return _almathswig.SwigPyIterator_incr(self, n)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Merges ``numKeys`` of sketches into ``destKey``. Sketches specified in ``srcKeys``. All sketches must have identical width and depth. ``Weights`` can be used to multiply certain sketches. Default weight is 1. Both ``srcKeys`` and ``weights`` are lists.
def cmsMerge(self, destKey, numKeys, srcKeys, weights=[]): params = [destKey, numKeys] params += srcKeys self.appendWeights(params, weights) return self.execute_command(self.CMS_MERGE, *params)
[ "def __merge_keys(\n self, kv_src_bucket, kv_dest_bucket, kvs_num=1, filter_exp=None):\n valid_keys_src, deleted_keys_src = kv_src_bucket[\n kvs_num].key_set()\n valid_keys_dest, deleted_keys_dest = kv_dest_bucket[\n kvs_num].key_set()\n\n self.log.info(\"src_kvstore has %s valid and %s deleted keys\"\n % (len(valid_keys_src), len(deleted_keys_src)))\n self.log.info(\"dest kvstore has %s valid and %s deleted keys\"\n % (len(valid_keys_dest), len(deleted_keys_dest)))\n\n if filter_exp:\n # If key based adv filter\n if \"META().id\" in filter_exp:\n filter_exp = filter_exp.split('\\'')[1]\n\n filtered_src_keys = [key for key in valid_keys_src if re.search(str(filter_exp), key) is not None]\n valid_keys_src = filtered_src_keys\n self.log.info(\n \"{0} keys matched the filter expression {1}\".format(\n len(valid_keys_src),\n filter_exp))\n\n for key in valid_keys_src:\n # replace/add the values for each key in src kvs\n if key not in deleted_keys_dest:\n partition1 = kv_src_bucket[kvs_num].acquire_partition(key)\n partition2 = kv_dest_bucket[kvs_num].acquire_partition(key)\n # In case of lww, if source's key timestamp is lower than\n # destination than no need to set.\n if self.__lww and partition1.get_timestamp(\n key) < partition2.get_timestamp(key):\n continue\n key_add = partition1.get_key(key)\n partition2.set(\n key,\n key_add[\"value\"],\n key_add[\"expires\"],\n key_add[\"flag\"])\n kv_src_bucket[kvs_num].release_partition(key)\n kv_dest_bucket[kvs_num].release_partition(key)\n\n for key in deleted_keys_src:\n if key not in deleted_keys_dest:\n partition1 = kv_src_bucket[kvs_num].acquire_partition(key)\n partition2 = kv_dest_bucket[kvs_num].acquire_partition(key)\n # In case of lww, if source's key timestamp is lower than\n # destination than no need to delete.\n if self.__lww and partition1.get_timestamp(\n key) < partition2.get_timestamp(key):\n continue\n partition2.delete(key)\n kv_src_bucket[kvs_num].release_partition(key)\n kv_dest_bucket[kvs_num].release_partition(key)\n\n valid_keys_dest, deleted_keys_dest = kv_dest_bucket[\n kvs_num].key_set()\n self.log.info(\"After merging: destination bucket's kv_store now has {0}\"\n \" valid keys and {1} deleted keys\".\n format(len(valid_keys_dest), len(deleted_keys_dest)))", "def copySkinWeights(*args, destinationSkin: Union[AnyStr, bool]=\"\", influenceAssociation:\n Union[AnyStr, List[AnyStr], bool]=\"\", mirrorInverse: bool=True, mirrorMode:\n Union[AnyStr, bool]=\"\", noBlendWeight: bool=True, noMirror: bool=True,\n normalize: bool=True, sampleSpace: Union[int, bool]=0, smooth: bool=True,\n sourceSkin: Union[AnyStr, bool]=\"\", surfaceAssociation: Union[AnyStr,\n bool]=\"\", uvSpace: Union[List[AnyStr, AnyStr], bool]=None, q=True,\n query=True, e=True, edit=True, **kwargs)->Union[None, Any]:\n pass", "def join_w(targs, srcs, ws):\n # convert targs/srcs to dicts if given as arrays\n if not isinstance(targs, dict):\n targs_ = copy(targs)\n targs = {\n cell_type: targs_ == cell_type for cell_type in set(targs_)\n }\n if not isinstance(srcs, dict):\n srcs_ = copy(srcs)\n srcs = {\n cell_type: srcs_ == cell_type for cell_type in set(srcs_)\n }\n \n # make sure all targ/src masks have same shape\n targ_shapes = [mask.shape for mask in targs.values()]\n src_shapes = [mask.shape for mask in srcs.values()]\n \n if len(set(targ_shapes)) > 1:\n raise Exception('All targ masks must have same shape.')\n \n if len(set(src_shapes)) > 1:\n raise Exception('All targ masks must have same shape.')\n \n n_targ = targ_shapes[0][0]\n n_src = 
src_shapes[0][0]\n \n # make sure weight matrix dimensions match sizes\n # of targ/src classes\n for syn, ws_ in ws.items():\n for (targ, src), w_ in ws_.items():\n if not w_.shape == (targs[targ].sum(), srcs[src].sum()):\n raise Exception(\n 'Weight matrix for {}: ({}, {}) does not match '\n 'dimensionality specified by targ/src masks.')\n \n # loop through synapse types\n dtype = list(list(ws.values())[0].values())[0].dtype\n ws_full = {}\n \n for syn, ws_ in ws.items():\n \n w = np.zeros((n_targ, n_src), dtype=dtype)\n \n # loop through population pairs\n for (targ, src), w_ in ws_.items():\n \n # get mask of all cxns from src to targ\n mask = np.outer(targs[targ], srcs[src])\n \n assert mask.sum() == w_.size\n \n w[mask] = w_.flatten()\n \n ws_full[syn] = w\n \n return ws_full", "def transfer_weights(src_model, dest_model):\r\n # ingore the first layer Input()\r\n # layer 1-24 to 1-24\r\n for i in range(1, 24):\r\n dest_model.layers[i].set_weights(src_model.layers[i].get_weights())\r\n print(\"Partially load weights from layer 1-24 successfully!\")\r\n\r\n # layer 25-45 to 65-85\r\n for i in range(25, 45):\r\n dest_model.layers[i+40].set_weights(src_model.layers[i].get_weights())\r\n print(\"Partially load weights from layer 25-45 successfully!\")\r\n\r\n # layer 46-65 to 126-145\r\n for i in range(46, 65):\r\n dest_model.layers[i+80].set_weights(src_model.layers[i].get_weights())\r\n print(\"Partially load weights from layer 46-65 successfully!\")\r\n\r\n # 69 to 189\r\n dest_model.layers[69+120].set_weights(src_model.layers[69].get_weights())\r\n print(\"Partially load weights from layer 69 successfully!\")", "def add_images_weighted(summand1:Image, summand2:Image, destination :Image = None, factor1:float=1, factor2:float=1) -> Image:\n\n parameters = {\n \"src\":summand1,\n \"src1\":summand2,\n \"dst\":destination,\n \"factor\":float(factor1),\n \"factor1\":float(factor2)\n }\n\n execute(__file__, '../clij-opencl-kernels/kernels/add_images_weighted_' + str(len(destination.shape)) + 'd_x.cl', 'add_images_weighted_' + str(len(destination.shape)) + 'd', destination.shape, parameters)\n\n return destination", "def fill_weights(src_net, dest_net):\n\n # then fill up weights from src to dest.\n\n assert set(dest_net.params.keys()) <= set(src_net.params.keys()), \"some layers non existent in src net!\"\n\n # changing value in place is safe.\n for layer_name, param in dest_net.params.items():\n assert len(param) == len(src_net.params[layer_name])\n for idx in range(len(param)):\n param[idx].data[...] 
= src_net.params[layer_name][idx].data\n return dest_net", "def _weights_for_num_sources(source_waveforms, num_sources):\n source_norms = tf.sqrt(tf.reduce_mean(tf.square(source_waveforms), axis=-1))\n max_sources = signal_util.static_or_dynamic_dim_size(source_waveforms, 1)\n num_sources_per_example = tf.reduce_sum(\n tf.cast(tf.greater(source_norms, 1e-8), tf.float32),\n axis=1, keepdims=True)\n has_num_sources = tf.equal(num_sources_per_example, num_sources)\n return tf.tile(has_num_sources, (1, max_sources))", "def weightKmers(self, weightDict):\n for k, w in weightDict.iteritems():\n assert k in self.kmers\n self.G.edge[k + \"_L\"][k + \"_R\"]['weight'] = w", "def add_edge(self, src_key, dest_key, weight=1):\n self.vertices[src_key].add_neighbour(self.vertices[dest_key], weight)", "def set_weight_key(self, weight_key = None):", "def add_edge(self, src_key, dest_key, weight=1):\n\t\tself.vertices[src_key].add_neighbour(self.vertices[dest_key], weight)", "def copyDeformerWeights(*args, destinationDeformer: Union[AnyStr, bool]=\"\", destinationShape:\n Union[AnyStr, bool]=\"\", mirrorInverse: bool=True, mirrorMode:\n Union[AnyStr, bool]=\"\", noMirror: bool=True, smooth: bool=True,\n sourceDeformer: Union[AnyStr, bool]=\"\", sourceShape: Union[AnyStr,\n bool]=\"\", surfaceAssociation: Union[AnyStr, bool]=\"\", uvSpace:\n Union[List[AnyStr, AnyStr], bool]=None, q=True, query=True, e=True,\n edit=True, **kwargs)->Union[None, Any]:\n pass", "def copy_skin_weights(source_skin, target_skin):\n\n # gets the shape back from the source_skin and target_skin\n # need to do this as providing the sourceSkin and destinationSkin arguments\n # to the copySkinWeights command does not update correctly the shapes\n\n source_shape = cmds.ls(cmds.listHistory(\"{}.outputGeometry\".format(\n source_skin), pdo=False, future=True), dag=True,\n noIntermediate=True)\n target_shape = cmds.ls(cmds.listHistory(\n \"{}.outputGeometry\".format(target_skin),\n pdo=False, future=True), dag=True,\n noIntermediate=True)\n\n # checks if source and target shapes list are bigger than 1\n if len(source_shape) > 1:\n source_shape = source_shape[0]\n if len(target_shape) > 1:\n target_shape = target_shape[0]\n\n cmds.select(source_shape, target_shape)\n\n # copy skin command\n cmds.copySkinWeights(surfaceAssociation=\"closestPoint\", noMirror=True,\n influenceAssociation=(\"label\",\n \"closestJoint\",\n \"oneToOne\"))\n\n # forces refresh\n cmds.refresh()", "def hard_copy_weights(self, target, source):\n for target_param, param in zip(target.parameters(), source.parameters()):\n target_param.data.copy_(param.data)", "def set_pixel_coeffs_from_dicts(self, coeffs, weights=None):\n keys = list(coeffs.keys())\n if weights is None:\n weights_ = None\n else:\n weights_ = [weights[key] for key in keys]\n \n for i in range(self.n_sat):\n for j in range(self.cpm_sources[i].n_pixels):\n data = [coeffs[key][i][j] for key in keys]\n average = np.average(np.array(data), 0, weights=weights_)\n self.cpm_sources[i].set_pixel_coeffs(j, average.reshape((-1, 1)))", "def distribute_by_weights(path: Tensor, nimages: int, path_target: Tensor = None, weights: Tensor = None, climbing_pivots: list = None):\n # Ensure storage for coordinates\n if path_target is None:\n path_target = path.new(nimages, path.shape[1])\n else:\n assert path_target is not path, \"Source must be unequal to target for redistribution\"\n assert path_target.shape[0] == nimages\n # Ensure weights\n if weights is None:\n weights = path.new(nimages - 1).fill_(1)\n else:\n assert 
len(weights.shape) == 1\n assert weights.shape[0] == nimages - 1\n\n # In climbing mode, reinterpolate only between the climbing images\n if climbing_pivots is not None:\n assert path.shape[0] == nimages, \"Cannot change number of items when reinterpolating with respect to climbing images.\"\n assert len(climbing_pivots) == nimages\n assert all(isinstance(b, bool) for b in climbing_pivots), \"Image must be climbing or not.\"\n start = 0\n for i, is_climbing in enumerate(climbing_pivots):\n if is_climbing or i == nimages - 1:\n distribute_by_weights(path[start:i + 1], i + 1 - start, path_target[start:i + 1], weights[start:i])\n start = i\n return path_target\n\n if path is path_target:\n # For the computation the original path is necessary\n path_source = path.clone()\n else:\n path_source = path\n\n # The current distances between elements on chain\n current_distances = (path_source[:-1] - path_source[1:]).norm(2, 1)\n target_positions = (weights / weights.sum()).cumsum(0) * current_distances.sum() # Target positions of elements (spaced by weights)\n\n # Put each new item spaced by weights (measured along line) on the line\n last_idx = 0 # Index of previous pivot\n pos_prev = 0. # Position of previous pivot on chain\n pos_next = current_distances[last_idx].item() # Position of next pivot on chain\n path_target[0] = path_source[0]\n for i in range(1, nimages - 1):\n position = target_positions[i - 1].item()\n while position > pos_next:\n last_idx += 1\n pos_prev = pos_next\n pos_next += current_distances[last_idx].item()\n\n t = (position - pos_prev) / (pos_next - pos_prev)\n path_target[i] = (t * path_source[last_idx + 1] + (1 - t) * path_source[last_idx])\n path_target[nimages - 1] = path_source[-1]\n\n return path_target", "def linear_interpolation_keys(self, keys):\n if len(keys) != len(self.dims):\n raise ValueError(\"Number of keys must be equal to the number of\" +\n \" dimensions. (Got \" + str(len(keys)) + \"/\"\n + str(len(self.dims)) + \")\")\n \n weightedKeys = []\n for key, dim in zip(keys, self.dims):\n weightedKeys.append(dim.linear_interpolation_indexes(key))\n \n while len(weightedKeys) > 1:\n newKeys = []\n for key1 in weightedKeys[-2]:\n for key2 in weightedKeys[-1]:\n newKeys.append({'key':key1['key'] + key2['key'],\n 'weight':key1['weight']*key2['weight']})\n weightedKeys.pop(-1)\n weightedKeys[-1] = newKeys\n\n return weightedKeys[0]", "def _copy_weights(self, source_network, target_network): \n for target_param, source_param in zip(target_network.parameters(), source_network.parameters()):\n target_param.data.copy_(source_param.data)", "def combine_cache_keys(cache_keys):\r\n if len(cache_keys) == 1:\r\n return cache_keys[0]\r\n else:\r\n combined_id = Target.maybe_readable_combine_ids(cache_key.id for cache_key in cache_keys)\r\n combined_hash = hash_all(sorted(cache_key.hash for cache_key in cache_keys))\r\n combined_num_sources = sum(cache_key.num_sources for cache_key in cache_keys)\r\n combined_sources = \\\r\n sorted(list(itertools.chain(*[cache_key.sources for cache_key in cache_keys])))\r\n return CacheKey(combined_id, combined_hash, combined_num_sources, combined_sources)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Return full list of items in Top-K list of ``key```.
def topkList(self, key): return self.execute_command(self.TOPK_LIST, key)
[ "def sub_key_list(self, keyword, filter = False):\n\n assert keyword in self.key_list(), 'keyword not present: ' + keyword\n sub_head_place = self.pointers[self.keywords.index(keyword)]\n sub_kp = KP(self.fp, sub_head_place)\n return sub_kp.key_list(filter = filter)", "def get_list(key):\n ret = hookenv.action_get(key)\n return ret.split() if ret else []", "def list_keys(keyfile):\n totp = TOTP(keyfile)\n totp.list_keys()", "def key_list(dict):\n list = []\n for key in dict:\n list.append(key)\n return list", "def getKeyList(self):\r\n\t\tDict = self.getKeyDict()\r\n\t\tList = Dict.values()\r\n\t\treturn List", "def gather_slice_list_items(slices, key):\n return list(itertools.chain(*[s[key] for s in slices if key in s]))", "def _extract_topk(alist, k):\r\n scores = [t.score for t in alist]\r\n indices = np.argsort(alist)[:k]\r\n return [alist[idx] for idx in indices]", "def list_values(key):\n return meta.list_values(key=key)", "def get_keytab_keys(keytab):\n klist = get_var('KLIST')\n entries = []\n command = \"%s -e -k -t %s\" % (klist, keytab)\n logger.info(\"Running: %s \" % (command))\n pipe = os.popen(command)\n for line in pipe.readlines():\n logger.info(line.rstrip())\n if line.startswith('Keytab name:'):\n continue\n if line.startswith('KVNO'):\n continue\n if line.startswith('----'):\n continue\n m = re.match(r'^\\s*(\\d+)\\s+(\\S+)\\s+(\\S+)\\s+(\\S+)\\s+\\((.+)\\)', line)\n if m:\n kvno = int(m.group(1))\n principal = m.group(4)\n enctype = normalize_enctype(m.group(5))\n else:\n raise AssertionError(\"Unexpected klist line: %s\" % (line))\n entries.append({'kvno':kvno, 'principal':principal, 'enctype':enctype})\n rc = pipe.close()\n if rc:\n raise AssertionError(\"klist failed: exit code=%d\" % (rc))\n return entries", "def getKeys(self): #Find out why memory does not like adding itself to a list\r\n\r\n those_Keys = []\r\n runner = self.headptr\r\n\r\n while runner != None:\r\n those_Keys.append(runner.key)\r\n runner = runner.next\r\n\r\n \r\n quicksort(those_Keys, 0 ,len(those_Keys)-1)\r\n return those_Keys", "def topk(vec, k):\n vec = torch.topk(vec, k)\n return vec.view(-1).data.tolist()", "def get_session_list(key):\n return session.get(key, [])", "def GetSubkeys(self):", "def list_object_search_key(self, key_part: str) -> list:\n\n return [i.key for i in self.client.objects.all() if key_part in i.key]", "def _get_stored_list(self, key):\n return self._persistence_redis.get_stored_list(key=key)", "def uplink_buys_by_key(self, key):\n buys = []\n for buy in self.uplinkbuys:\n if buy.mindkey == key:\n buys.append(buy)\n return buys", "def get_descendants(self, key: str) -> Sequence[str]:\n raise NotImplementedError", "def getall(self, key):\n return self.values.get(key, [])", "def get_keys(self, key):\n\t\ttry:\n\t\t\tkey = self.make_key(key + \"*\")\n\t\t\treturn self.keys(key)\n\n\t\texcept (ConnectionError, TimeoutError):\n\t\t\tregex = re.compile(cstr(key).replace(\"|\", \"\\|\").replace(\"*\", \"[\\w]*\"))\n\t\t\treturn [k for k in list(local.cache) if regex.match(k.decode())]" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Return a new pipeline object that can queue multiple commands for later execution. ``transaction`` indicates whether all commands should be executed atomically. Apart from making a group of operations atomic, pipelines are useful for reducing the back-and-forth overhead between the client and server. Overridden in order to provide the right client through the pipeline.
def pipeline(self, transaction=True, shard_hint=None): p = Pipeline( connection_pool=self.connection_pool, response_callbacks=self.response_callbacks, transaction=transaction, shard_hint=shard_hint) return p
[ "def pipeline(self, transaction=True, shard_hint=None):\r\n return AsyncStrictPipeline(\r\n self.connection_pool,\r\n self.response_callbacks,\r\n transaction,\r\n shard_hint)", "def createPipe(self, transaction):\n pipe = detectPipeClass(transaction.dev, transaction.endpt)(self)\n name = \"Dev %s, %s\" % (transaction.dev, transaction.getTransferString())\n self.appendCanvas(name, pipe.stack)\n return pipe", "def pipelines(self):\n return PipelineManager(session=self._session)", "def pipeline(self) -> Pipeline:\n if self._to_pipeline is None:\n raise AttributeError(\n \"pipeline not available because `to_pipeline` was not set on __init__.\"\n )\n return self._to_pipeline(self)", "def pipe(self, **kwargs):\n return SyncPipe(source=self.fetch(), **kwargs)", "def wrap_transaction(self):\n new_script = self.__class__()\n new_script.append(\n [BeginStatement()] + self.statements + [CommitStatement()])\n\n return new_script", "def pipeline(self):\n pipeline = self._redis_client.pipeline()\n members = {key: val.clone(redis_client=pipeline)\n for key, val in self._members.items()}\n return RedisPipeline(pipeline, members)", "def _pipeline(self):\n try:\n b = self._pipeline_cache\n except AttributeError:\n r = open_redis_connection()\n b = self._pipeline_cache = r.pipeline()\n return b", "def from_pipeline(cls, pipeline, proba=None, repeat=None):\n if proba is None:\n if repeat is None:\n new_p = cls(pipeline=pipeline)\n else:\n if pipeline.num_actions == 1 and pipeline.get_last_action_proba() is None:\n new_p = cls(pipeline=pipeline, repeat=repeat)\n else:\n new_p = cls()\n new_p.append_pipeline(pipeline, repeat=repeat)\n else:\n if pipeline.num_actions == 1 and pipeline.get_last_action_repeat() is None:\n new_p = cls(pipeline=pipeline, proba=proba)\n else:\n new_p = cls()\n new_p.append_pipeline(pipeline, proba=proba)\n return new_p", "def pipeline(self):\n # gotta avoid circular imports by deferring\n from .pipeline import Pipeline\n return Pipeline().from_source(self._collection)", "def create_single_task(self, with_pipe:bool=False, *args, **kwargs) -> TaskContainer:\n task = self.create_base_container()\n task.pos_args = args\n task.kw_args = kwargs\n task.parallel = False\n task.thread_pool = None\n\n if not with_pipe:\n task.merge_fn = None\n\n return task", "def compose(self, other):\r\n return ComposedPipeline(self, other)", "def make_pipeline(self, steps=None):\n pipe_steps = self.custom_steps + steps if steps else self.custom_steps\n\n return Pipeline(pipe_steps)", "def _open_pipeline(self) -> StreamConsumer:\n # Create a stream consumer for the first operator in the pipeline. This\n # consumer is the one that will receive all dataset rows first.\n pipeline = self.pipeline[0].open(schema=self.source.columns)\n # Create consumer for downstream operators and connect the consumer\n # with each other. 
This assumes that all operaotrs (except the last\n # one) yield consumer that are also producer.\n producer = pipeline\n for op in self.pipeline[1:]:\n consumer = op.open(producer.columns)\n producer.set_consumer(consumer)\n producer = consumer\n return pipeline", "def create_pipeline(*args):\n\t\tconsumer = Pipeline.create_consumer(args[-1])\n\t\tnext(consumer)\n\n\t\ttemp = consumer\n\t\tfor func in reversed(args[1:-1]):\n\t\t\tstage = Pipeline.create_stage(func, temp)\n\t\t\tnext(stage)\n\t\t\ttemp = stage\n\n\t\tproducer = Pipeline.create_producer(args[0], temp)\n\t\tnext(producer)\n\n\t\treturn producer", "async def connect_pipeline(\n *, connect=None, bind=None, loop=None, translation_table=None\n):\n if loop is None:\n loop = asyncio.get_event_loop()\n\n transp, proto = await create_zmq_connection(\n lambda: _ClientProtocol(loop, translation_table=translation_table),\n zmq.PUSH,\n connect=connect,\n bind=bind,\n loop=loop,\n )\n return PipelineClient(loop, proto)", "def delay_pipeline(pipeline, pipe):\n _pipeline = delayed(pipeline[0].curry())(pipe)\n for task in pipeline[1:]:\n _pipeline = delayed(task.curry())(_pipeline)\n\n return _pipeline", "def PipeConnection(incoming, outgoing):\r\n return Connection(Channel(PipeStream(incoming, outgoing)))", "def make_pipeline(context):\n \n # Base universe of top 500 US stocks.\n base_universe_filter = Q500US()\n\n # Stocks of only tech sector.\n tech_sector = Sector(mask=base_universe_filter)\n tech_universe_filter = base_universe_filter & tech_sector.eq(311)\n\n # Top 10 tech stocks with largest market cap.\n mkt_cap_filter = morningstar.valuation.market_cap.latest\n top_mkt_cap_tech_filter = mkt_cap_filter.top(context.NUM_SYMBOLS, mask=tech_universe_filter)\n\n # Bollinger band factor with Stdev factor 2.\n lower_band_factor, middle_factor, upper_band_factor = BollingerBands(window_length=22, k=2, mask=top_mkt_cap_tech_filter)\n\n # Percent difference between (price, lower_band) and (price, upper_band).\n price = USEquityPricing.close.latest\n buy_percent_factor = ((lower_band_factor - price)*100)/price\n sell_percent_factor = ((price - upper_band_factor)*100)/price\n\n # Mean reversion buy and sell filters.\n # Sell when price exceeds upper-band and buy when price is below lower-band.\n buy_filter = buy_percent_factor > 0\n sell_filter = sell_percent_factor > 0\n\n # Build and return the Pipeline.\n pipe_bbands = Pipeline(columns={'buy_percent': buy_percent_factor,\n 'lower_band': lower_band_factor,\n 'buy': buy_filter,\n 'price': price,\n 'sell': sell_filter,\n 'upper_band': upper_band_factor,\n 'sell_percent': sell_percent_factor}, screen=top_mkt_cap_tech_filter)\n \n return pipe_bbands" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
This will return the graph data for the outage module
def get_outage(self): try: assert self._db_connection, { STATUS_KEY: HTTP_500_INTERNAL_SERVER_ERROR, MESSAGE_KEY: DB_ERROR} if self.equipment == COKE_DRUM_VALUE and self.module == OUTAGE_VALUE: """ This will return the graph data for the selected outage module """ query_params = { TAG_NAME_REQUEST: self.query_params.GET[TAG_NAME_REQUEST], START_DATE_REQUEST: self.query_params.GET[START_DATE_REQUEST], END_DATE_REQUEST: self.query_params.GET[END_DATE_REQUEST] } MODULE_LEVEL_MULTILINE_TAG = tuple(LIST_OF_OUTAGE_MODULE_LEVEL_MULTILINE_TAGS_GRAPH) if MULTILINE_REQUEST in self.query_params.GET: """ This will return the graph data for the actual and predicted tags for the selected outage module """ query_params[MULTILINE_REQUEST] = self.query_params.GET[MULTILINE_REQUEST] if query_params: if START_DATE_REQUEST not in query_params or not query_params[START_DATE_REQUEST] and \ MULTILINE_REQUEST not in query_params: graph_data = django_search_query_all( DETAILED_OUTAGE_GRAPH_NULL_START_DATE.format( self.module, query_params[TAG_NAME_REQUEST], query_params[END_DATE_REQUEST])) elif query_params[START_DATE_REQUEST] and MULTILINE_REQUEST not in query_params: graph_data = django_search_query_all( DETAILED_OUTAGE_GRAPH.format( self.module, query_params[TAG_NAME_REQUEST], query_params[START_DATE_REQUEST], query_params[END_DATE_REQUEST])) elif query_params[START_DATE_REQUEST] and query_params[MULTILINE_REQUEST]: if query_params[TAG_NAME_REQUEST] in LIST_OF_OUTAGE_MODULE_LEVEL_MULTILINE_TAGS_GRAPH: graph_data = django_search_query_all( DETAILED_OUTAGE_MODULE_MULTILINE_GRAPH.format( self.module, MODULE_LEVEL_MULTILINE_TAG, query_params[START_DATE_REQUEST], query_params[END_DATE_REQUEST])) else: graph_data = django_search_query_all( DETAILED_OUTAGE_GRAPH.format( self.module, query_params[TAG_NAME_REQUEST], query_params[START_DATE_REQUEST], query_params[END_DATE_REQUEST])) df_data = pd.DataFrame(graph_data) min_max = django_search_query_all( MIN_MAX_DATA.format( self.module, query_params[TAG_NAME_REQUEST] )) df_min_max_data = pd.DataFrame(min_max) graph = [] if not df_data.empty: df_data = df_data.where(pd.notnull(df_data) == True, None) df_data.sort_values(TIMESTAMP_KEY, ascending=True, inplace=True) df_unit = df_data[UNIT].iloc[0] df_description = df_data[DESCRIPTION].iloc[0] df_timestamp = list(dict.fromkeys(list(df_data[TIMESTAMP_KEY]))) if query_params[TAG_NAME_REQUEST] in LIST_OF_OUTAGE_MODULE_LEVEL_MULTILINE_TAGS_GRAPH: df_result = df_data.groupby(TAG_NAME_REQUEST) actual_north_data = [] predicted_north_data = [] actual_south_data = [] predicted_south_data = [] if len(df_result) == 2: df_description = \ df_data[df_data[TAG_NAME_REQUEST] == query_params[TAG_NAME_REQUEST]][ DESCRIPTION].iloc[0] df_north_actual = df_result.get_group(OUTAGE_MODULE_LEVEL_ACTUAL_TAG) actual_north_data = list(df_north_actual['north_drum_tag_value']) df_north_predicted = df_result.get_group(OUTAGE_MODULE_LEVEL_PREDICTED_TAG) predicted_north_data = list(df_north_predicted['north_drum_tag_value']) df_south_actual = df_result.get_group(OUTAGE_MODULE_LEVEL_ACTUAL_TAG) actual_south_data = list(df_south_actual['south_drum_tag_value']) df_south_predicted = df_result.get_group(OUTAGE_MODULE_LEVEL_PREDICTED_TAG) predicted_south_data = list(df_south_predicted['south_drum_tag_value']) elif len(df_result) == 1: if df_result[TAG_NAME_REQUEST] == OUTAGE_MODULE_LEVEL_ACTUAL_TAG: df_description = \ df_data[df_data[TAG_NAME_REQUEST] == OUTAGE_MODULE_LEVEL_ACTUAL_TAG][ DESCRIPTION].iloc[0] df_north_actual = 
df_result.get_group(OUTAGE_MODULE_LEVEL_ACTUAL_TAG) actual_north_data = list(df_north_actual['north_drum_tag_value']) df_south_actual = df_result.get_group(OUTAGE_MODULE_LEVEL_ACTUAL_TAG) actual_south_data = list(df_south_actual['south_drum_tag_value']) elif df_result[TAG_NAME_REQUEST] != OUTAGE_MODULE_LEVEL_ACTUAL_TAG: df_description = \ df_data[df_data[TAG_NAME_REQUEST] == OUTAGE_MODULE_LEVEL_PREDICTED_TAG][ DESCRIPTION].iloc[0] df_north_predicted = df_result.get_group(OUTAGE_MODULE_LEVEL_PREDICTED_TAG) predicted_north_data = list(df_north_predicted['north_drum_tag_value']) df_south_predicted = df_result.get_group(OUTAGE_MODULE_LEVEL_PREDICTED_TAG) predicted_south_data = list(df_south_predicted['south_drum_tag_value']) temp = {"north_actual": actual_north_data, "north_predicted": predicted_north_data, "south_actual": actual_south_data, "south_predicted": predicted_south_data, "x_axis": df_timestamp, "unit": df_unit, "description": df_description} else: temp = {"y_axis": list(df_data[TAG_VALUE]), "x_axis": df_timestamp, "unit": df_unit, "description": df_description} if not df_min_max_data.empty: temp["min_data"] = df_min_max_data[MIN_VALUE].iloc[0] temp["max_data"] = df_min_max_data[MAX_VALUE].iloc[0] else: temp["min_data"] = None temp["max_data"] = None graph.append(temp) return graph except AssertionError as e: log_error("Exception due to : %s" + str(e)) return asert_res(e) except Exception as e: log_error("Exception due to : %s" + str(e)) return json_InternalServerError
[ "def get_graph(self) -> dict:\n response = requests.get(self.channel, params=\"get_graph\")\n return json_to_graph(response.content)", "def get_graph_summary(self):\n\n pass", "def export_graph(self, graph):\n pass", "def _graph(self):\n return self._anm.overlay_nx_graphs[self._overlay_id]", "def get_graph_data(self, poems):\n\n name = self.region\n\n iden = name.replace(\".\", \"\").replace(\"-\", \"\")\n iden = iden.replace(\"/\", \"\").replace(\",\", \"\").split(\" \")\n\n if iden[0].lower() != \"the\":\n iden = iden[0]\n else:\n iden = iden[1]\n\n total = len(poems)\n\n connected_data = Metrics.get_single_graph_data(poems_list=poems,\n name_of_context=\"region\",\n name=name,\n poems_backref=\"regions\")\n\n data = {\"name\": name,\n \"total\": total,\n \"iden\": iden,\n \"others\": connected_data}\n\n return data", "def get_graph(self):\n return self.graph", "def get_graph(self):\n return json.dumps(self.graph.get_edgelist(), separators=(',',':'))", "def default_graph(self):\n return self[\"data\"]", "def getGraph(self):\n return self._parent.getGraph()", "def getOutageHistory(self):\n return self._OutageHistory", "def graph(self):", "def get_graph_results(self):\n\n return graph_results", "def get_graph_data():\n pairings = session.get(\"pairings\")\n if not pairings:\n flash(\"User did not enter a query.\")\n return redirect(url_for(\"index\"))\n\n dataset = formatting.format_pairings(pairings)\n session.clear()\n return jsonify(dataset)", "def data(self):\n return OverlayGraphData(self._anm, self._overlay_id)", "def graph(self) -> dict:\n return self.flat_graph()", "def export_json_graph(self, destpath):\n export = {}\n export['vertices'] = self.vertices\n export['edges'] = self.edges\n export['_totals'] = {}\n export['_photo'] = {}\n export['_photo']['credit'] = self.photo['credit']\n export['_photo']['entity_max'] = self.photo['max']\n export['_totals']['media'] = len(self.media)\n export['_totals']['wilds'] = len(self.wilds)\n export['_totals']['zoos'] = len(self.zoos)\n export['_totals']['locations'] = len(self.wilds) + len(self.zoos)\n export['_totals']['pandas'] = self.sum_pandas()\n export['_totals']['last_born'] = self.summary['birthday']\n export['_totals']['last_died'] = self.summary['death']\n with open(destpath, 'wb') as wfh:\n wfh.write(json.dumps(export, \n ensure_ascii=False,\n indent=4,\n sort_keys=True).encode('utf8'))\n print(\"Dataset exported: %d pandas at %d locations (%d wild, %d zoo)\"\n % (export['_totals']['pandas'], export['_totals']['locations'],\n export['_totals']['wilds'], export['_totals']['zoos']))", "def _get_graph_to_dump(self) -> nx.DiGraph:\n out_graph = nx.DiGraph()\n for node_name, node in self._nx_graph.nodes.items():\n op_exec_context = node[NNCFGraph.OP_EXEC_CONTEXT_NODE_ATTR]\n scope_str = str(op_exec_context.scope_in_model)\n out_graph.add_node(node_name, type=op_exec_context.operator_name,\n id=node[NNCFGraph.ID_NODE_ATTR],\n scope=scope_str)\n for u, v in self._nx_graph.edges:\n out_graph.add_edge(u, v)\n return out_graph", "def graph():\n return jsonify(app.config[\"jsonified\"])", "def full_graph(verteces):\n \n raise Exception(\"TODO IMPLEMENT ME!\")" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Return a mock component of a general model.
def mock_component(): component = Mock() component.free_parameters = flex.double([1.0]) component.free_parameter_esds = None component.n_params = 1 component.var_cov_matrix = sparse.matrix(1, 1) return component
[ "def get_mock(self):\n return self.mock", "def test_get_model_method(self):\n # arrange\n model_manager = ModelManager()\n\n model_manager.load_model(\"tests.mocks.MLModelMock\")\n\n # act\n exception_raised = False\n model = None\n try:\n model = model_manager.get_model(qualified_name=\"qualified_name\")\n except Exception as e:\n exception_raised = True\n\n # assert\n self.assertFalse(exception_raised)\n self.assertTrue(type(model) is MLModelMock)", "def mockable(self):\r\n return _Mockable(self)", "def mock_core(decoy: Decoy) -> ProtocolCore:\n mock_core = decoy.mock(cls=ProtocolCore)\n decoy.when(mock_core.fixed_trash.get_name()).then_return(\"cool trash\")\n decoy.when(mock_core.fixed_trash.get_display_name()).then_return(\"Cool Trash\")\n decoy.when(mock_core.fixed_trash.get_well_columns()).then_return([])\n return mock_core", "def testGetReigsteredModel(self):\n from soc.models.student import Student\n model = models_logic.getModel('soc.models.student.Student')\n self.assertEqual(model, Student)", "def _get_internal_model(self):\n return self.internal_model", "def get_model_object(category: str) -> object:\n if category == 'seq':\n return IQTestSeqSample()\n elif category == 'diagram':\n return IQTestDiagramSample()\n elif category == 'verbal':\n return IQTestVerbalSample()\n return None", "def get_model(self):\n return None", "def model_base() -> Base:\n return Base", "def get_fake_model(fields=None, model_base=PostgresModel, meta_options={}):\n\n model = define_fake_model(fields, model_base, meta_options)\n\n with connection.schema_editor() as schema_editor:\n schema_editor.create_model(model)\n\n return model", "def _make_mock_registered_model(id: int, name: str):\n\n return MockRegisteredModel(\n mock_conn,\n mock_config,\n _RegistryService.RegisteredModel(\n id=id,\n name=name,\n ),\n )", "def get_main_model(self):\n return self", "def test_coupledmodels_get(self):\n pass", "def get_response_model_ctor(self):\n return self._response_model_ctor", "def test_init_optional_base_def_return(_mocked_init_model_factory: mock.MagicMock):\n # pylint: disable=protected-access\n spec = mock.MagicMock()\n base = mock.MagicMock()\n\n returned_base, _ = open_alchemy._init_optional_base(\n base=base, spec=spec, define_all=True\n )\n\n assert returned_base == base", "def _create_model(self):\n # load moduĺe\n model_module = importlib.import_module(self.model_name)\n\n # import model\n model = model_module.getModel()\n\n return model", "def make_mock_registered_model(mock_conn, mock_config) -> Callable:\n\n class MockRegisteredModel(RegisteredModel):\n def __repr__(self): # avoid network calls when displaying test results\n return object.__repr__(self)\n\n def _make_mock_registered_model(id: int, name: str):\n \"\"\"Return a mocked RegisteredModel object.\"\"\"\n\n return MockRegisteredModel(\n mock_conn,\n mock_config,\n _RegistryService.RegisteredModel(\n id=id,\n name=name,\n ),\n )\n\n return _make_mock_registered_model", "def setUp(self):\r\n self.mock_model = Mock()", "def test_construct_model(instance):\n construct_model(instance)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Return a mock data manager of a general model.
def mock_data_manager(components): dm = Mock() dm.components = components dm.fixed_components = [] return dm
[ "def setup_dummy_data_manager():\n import repoze.filesafe\n repoze.filesafe._local.manager = mgr = DummyDataManager()\n return mgr", "def _get_data_manager(self):\n\n ftype = self.conf['General']['save_as']\n if ftype == 'npz':\n return NPZDataManager(self.conf, self.log)\n elif ftype == 'hdf5':\n return HDF5DataManager(self.conf, self.log)\n else:\n raise ValueError('Invalid file type in config')", "def data_manager(self) -> PaydaySimDataManager:\r\n if self._data_manager is None:\r\n # noinspection PyTypeChecker\r\n self._data_manager: PaydaySimDataManager = CommonDataManagerRegistry().locate_data_manager(ModInfo.get_identity(),\r\n identifier=PaydaySimDataManager.IDENTIFIER)\r\n return self._data_manager", "def default_store_fixture(database) -> DefaultStore:\n return DefaultStore(model=ModelMock, database=database)", "def get_data_manager(self):\n\n return self._data_manager", "def setUp(self):\r\n self.mock_model = Mock()", "def _make_datamanager(self) -> boilr.data.BaseDatasetManager:\n raise NotImplementedError", "def create_model_manager(self, log=None):\n return self.model\n\n if (log is None) : log = self.log\n import mmtbx.restraints\n import mmtbx.model\n restraints_manager = mmtbx.restraints.manager(\n geometry=self.geometry,\n normalization=True)\n return mmtbx.model.manager(\n xray_structure=self.xray_structure,\n pdb_hierarchy=self.pdb_hierarchy,\n restraints_manager=restraints_manager,\n log=log)", "def setUp(self):\n self.mock_model = Mock()", "def get_mock(self):\n return self.mock", "def get_fake_model(fields=None, model_base=PostgresModel, meta_options={}):\n\n model = define_fake_model(fields, model_base, meta_options)\n\n with connection.schema_editor() as schema_editor:\n schema_editor.create_model(model)\n\n return model", "def test_get_model_method(self):\n # arrange\n model_manager = ModelManager()\n\n model_manager.load_model(\"tests.mocks.MLModelMock\")\n\n # act\n exception_raised = False\n model = None\n try:\n model = model_manager.get_model(qualified_name=\"qualified_name\")\n except Exception as e:\n exception_raised = True\n\n # assert\n self.assertFalse(exception_raised)\n self.assertTrue(type(model) is MLModelMock)", "def test_default_manager(self):\n\n class Book(RestObject):\n pass\n\n class Author(RestObject):\n pass\n \n self.assertTrue(isinstance(Book.objects, RestManager))\n self.assertTrue(Book.objects.object_class, Book)\n\n self.assertTrue(isinstance(Author.objects, RestManager))\n self.assertTrue(Author.objects.object_class, Author)\n\n self.assertNotEqual(Book.objects, Author.objects)\n \n book = Book()\n # Cannot test AttributeError with self.assertRaises\n try:\n book.objects.all()\n except AttributeError, e:\n self.assertEqual('%s' % e, 'Manager is not accessible via Book instances')", "def _get_base_manager(self, model_class, manager_name):\r\n if hasattr(model_class, manager_name):\r\n base_manager = getattr(model_class, manager_name).__class__\r\n else:\r\n base_manager = Manager\r\n\r\n return base_manager", "def mockable(self):\r\n return _Mockable(self)", "def manager() -> Manager:\n global _defaultManager\n return _defaultManager", "def mock_setting_data() -> SettingsData:\n return SettingsData(\n {\n \"default\": None,\n \"min\": None,\n \"access\": None,\n \"max\": None,\n \"unit\": None,\n \"type\": None,\n \"id\": \"data_id\",\n }\n )", "def test_init(self):\n rec_dao, rel_dao = (\"fake record dao\", \"fake relationship dao\")\n expected_factory = Mock()\n expected_factory.create_record_dao = Mock(return_value=rec_dao)\n 
expected_factory.create_relationship_dao = Mock(return_value=rel_dao)\n test_ds = DataStore(expected_factory)\n self.assertEqual(test_ds.records._record_dao, rec_dao) # pylint: disable=protected-access\n self.assertEqual(\n test_ds.relationships._relationship_dao, # pylint: disable=protected-access\n rel_dao)", "def as_manager(cls):\n manager = DefaultManager.from_queryset(cls)()\n manager._built_with_as_manager = True\n return manager" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Test for the general multi_active_parameter_manager class.
def test_multi_apm(): components_1 = { "scale": mock_component(), "decay": mock_component(), "absorption": mock_component(), } components_2 = {"scale": mock_component(), "decay": mock_component()} multi_apm = multi_active_parameter_manager( ScalingTarget(), [components_1, components_2], [["scale", "decay"], ["scale"]], active_parameter_manager, ) # Test correct setup of apm_list attribute. for apm in multi_apm.apm_list: assert isinstance(apm, active_parameter_manager) assert len(multi_apm.apm_list) == 2 assert multi_apm.components_list == ["scale", "decay", "scale"] assert multi_apm.n_active_params == 3 assert multi_apm.apm_data[0] == {"start_idx": 0, "end_idx": 2} assert multi_apm.apm_data[1] == {"start_idx": 2, "end_idx": 3} # Test parameter selection. multi_apm.set_param_vals(flex.double([3.0, 2.5, 2.0])) assert multi_apm.get_param_vals() == flex.double([3.0, 2.5, 2.0]) assert multi_apm.select_parameters(0) == flex.double([3.0, 2.5]) assert multi_apm.select_parameters(1) == flex.double([2.0]) # Test setting parameter esds. multi_apm.set_param_esds(flex.double([0.1, 0.2, 0.3])) assert components_1["scale"].free_parameter_esds == flex.double([0.1]) assert components_1["decay"].free_parameter_esds == flex.double([0.2]) assert components_2["scale"].free_parameter_esds == flex.double([0.3]) # Test setting var_cov matrices for each component. var_cov = flex.double([1.0, 0.5, 0.5, 0.5, 2.0, 0.5, 0.5, 0.5, 3.0]) var_cov.reshape(flex.grid(3, 3)) multi_apm.calculate_model_state_uncertainties(var_cov) assert components_1["scale"].var_cov_matrix[0, 0] == 1.0 assert components_1["decay"].var_cov_matrix[0, 0] == 2.0 assert components_2["scale"].var_cov_matrix[0, 0] == 3.0
[ "def test_scaling_active_parameter_manager():\n components_2 = {\"1\": mock_scaling_component(2), \"2\": mock_scaling_component(2)}\n scaling_apm = scaling_active_parameter_manager(components_2, [\"1\"])\n assert list(scaling_apm.constant_g_values[0]) == list(\n components_2[\"2\"].calculate_scales()\n )\n assert len(scaling_apm.constant_g_values) == 1\n assert scaling_apm.n_obs == [2]\n\n # Test that no constant_g_values if both components selected\n scaling_apm = scaling_active_parameter_manager(components_2, [\"1\", \"2\"])\n assert scaling_apm.constant_g_values is None\n\n # Check that one can't initialise with an unequal number of reflections,\n # either within the selection or overall.\n with pytest.raises(AssertionError):\n components_2 = {\"1\": mock_scaling_component(2), \"2\": mock_scaling_component(1)}\n scaling_apm = scaling_active_parameter_manager(components_2, [\"1\", \"2\"])\n with pytest.raises(AssertionError):\n components_2 = {\"1\": mock_scaling_component(2), \"2\": mock_scaling_component(1)}\n scaling_apm = scaling_active_parameter_manager(components_2, [\"1\"])\n\n data_manager = mock_data_manager(components_2)\n pmg = ScalingParameterManagerGenerator(\n [data_manager], target=ScalingTarget(), mode=\"concurrent\"\n )\n assert isinstance(pmg.apm_type, type(scaling_active_parameter_manager))", "def test_multi(self):\n self.assertEqual(6, foo.multi(2, 3))", "def test_class_callparams(self):\n\n @Configurable(\n conf=[\n Parameter('test0', value=True),\n Parameter('test1', value=False)\n ]\n )\n class Test(object):\n\n def __init__(self, test0=None):\n\n super(Test, self).__init__()\n\n self.test0 = test0\n\n test = Test()\n\n self.assertTrue(test.test0)\n self.assertFalse(test.test1)", "def test_test_group_parameters(self):\n pass", "def test_all_params(self):\n persistence_helper = PersistenceHelper(use_riak=True, is_sync=True)\n self.assertEqual(persistence_helper.use_riak, True)\n self.assertEqual(persistence_helper.is_sync, True)", "def test_overridable_parameter() -> None:\n param_dict = ParamClass.get_overridable_parameters()\n assert \"name\" in param_dict\n assert \"flag\" in param_dict\n assert \"not_flag\" in param_dict\n assert \"seed\" in param_dict\n assert \"number\" in param_dict\n assert \"integers\" in param_dict\n assert \"optional_int\" in param_dict\n assert \"optional_float\" in param_dict\n assert \"tuple1\" in param_dict\n assert \"int_tuple\" in param_dict\n assert \"enum\" in param_dict\n assert \"readonly\" not in param_dict\n assert \"_non_override\" not in param_dict\n assert \"constant\" not in param_dict", "def test_configure_to_reconfigure_param(self):\n\n class ToConfigure(object):\n \"\"\"Class to configure.\"\"\"\n\n def __init__(self):\n super(ToConfigure, self).__init__()\n self.test = None\n\n target = ToConfigure()\n\n param = 'test'\n\n conf = configuration(category('TEST', Parameter(param, value=True)))\n\n self.configurable.configure(conf=conf, targets=[target])\n self.assertTrue(target.test)", "def _validate_params(self, request_set, target_set=None, context=None):\n\n # Perform first-pass validation in Function.__init__():\n # - returns full set of params based on subclass paramClassDefaults\n super(Mechanism, self)._validate_params(request_set,target_set,context)\n\n params = target_set\n\n #region VALIDATE TIME SCALE\n try:\n param_value = params[TIME_SCALE]\n except KeyError:\n if COMMAND_LINE in context:\n pass\n else:\n self.timeScale = timeScaleSystemDefault\n else:\n if isinstance(param_value, TimeScale):\n 
self.timeScale = params[TIME_SCALE]\n else:\n if self.prefs.verbosePref:\n print(\"Value for {0} ({1}) param of {2} must be of type {3}; default will be used: {4}\".\n format(TIME_SCALE, param_value, self.name, type(TimeScale), timeScaleSystemDefault))\n #endregion\n\n #region VALIDATE INPUT STATE(S)\n\n # MODIFIED 6/10/16\n # FIX: SHOULD CHECK LENGTH OF INPUT_STATES PARAM (LIST OF NAMES OR SPECIFICATION DICT) AGAINST LENGTH OF\n # FIX: self.variable 2D ARRAY AND COMPARE variable SPECS, IF PROVIDED, WITH CORRESPONDING ELEMENTS OF\n # FIX: self.variable 2D ARRAY\n try:\n param_value = params[INPUT_STATES]\n\n except KeyError:\n if COMMAND_LINE in context:\n pass\n else:\n # INPUT_STATES not specified:\n # - set to None, so that it is set to default (self.variable) in instantiate_inputState\n # - if in VERBOSE mode, warn in instantiate_inputState, where default value is known\n params[INPUT_STATES] = None\n\n else:\n # INPUT_STATES is specified, so validate:\n # If it is a single item or a non-OrderedDict, place in a list (for use here and in instantiate_inputState)\n if not isinstance(param_value, (list, OrderedDict)):\n param_value = [param_value]\n # Validate each item in the list or OrderedDict\n # Note:\n # * number of inputStates is validated against length of the owner mechanism's execute method variable (EMV)\n # in instantiate_inputState, where an inputState is assigned to each item (value) of the EMV\n i = 0\n for key, item in param_value if isinstance(param_value, dict) else enumerate(param_value):\n from PsyNeuLink.Components.States.InputState import InputState\n # If not valid...\n if not ((isclass(item) and (issubclass(item, InputState) or # InputState class ref\n issubclass(item, Projection))) or # Project class ref\n isinstance(item, InputState) or # InputState object\n isinstance(item, dict) or # InputState specification dict\n isinstance(item, ParamValueProjection) or # ParamValueProjection tuple\n isinstance(item, str) or # Name (to be used as key in inputStates dict)\n iscompatible(item, **{kwCompatibilityNumeric: True})): # value\n # set to None, so it is set to default (self.variable) in instantiate_inputState\n param_value[key] = None\n if self.prefs.verbosePref:\n print(\"Item {0} of {1} param ({2}) in {3} is not a\"\n \" InputState, specification dict or value, nor a list of dict of them; \"\n \"variable ({4}) of execute method for {5} will be used\"\n \" to create a default outputState for {3}\".\n format(i,\n INPUT_STATES,\n param_value,\n self.__class__.__name__,\n self.variable,\n self.execute.__self__.name))\n i += 1\n params[INPUT_STATES] = param_value\n #endregion\n\n #region VALIDATE EXECUTE METHOD PARAMS\n try:\n function_param_specs = params[FUNCTION_PARAMS]\n except KeyError:\n if COMMAND_LINE in context:\n pass\n elif self.prefs.verbosePref:\n print(\"No params specified for {0}\".format(self.__class__.__name__))\n else:\n if not (isinstance(function_param_specs, dict)):\n raise MechanismError(\"{0} in {1} must be a dict of param specifications\".\n format(FUNCTION_PARAMS, self.__class__.__name__))\n # Validate params\n from PsyNeuLink.Components.States.ParameterState import ParameterState\n for param_name, param_value in function_param_specs.items():\n try:\n default_value = self.paramInstanceDefaults[FUNCTION_PARAMS][param_name]\n except KeyError:\n raise MechanismError(\"{0} not recognized as a param of execute method for {1}\".\n format(param_name, self.__class__.__name__))\n if not ((isclass(param_value) and\n (issubclass(param_value, ParameterState) 
or\n issubclass(param_value, Projection))) or\n isinstance(param_value, ParameterState) or\n isinstance(param_value, Projection) or\n isinstance(param_value, dict) or\n isinstance(param_value, ParamValueProjection) or\n iscompatible(param_value, default_value)):\n params[FUNCTION_PARAMS][param_name] = default_value\n if self.prefs.verbosePref:\n print(\"{0} param ({1}) for execute method {2} of {3} is not a ParameterState, \"\n \"projection, ParamValueProjection, or value; default value ({4}) will be used\".\n format(param_name,\n param_value,\n self.execute.__self__.componentName,\n self.__class__.__name__,\n default_value))\n #endregion\n # FIX: MAKE SURE OUTPUT OF EXECUTE FUNCTION / SELF.VALUE IS 2D ARRAY, WITH LENGTH == NUM OUTPUT STATES\n\n #region VALIDATE OUTPUT STATE(S)\n\n # FIX: MAKE SURE # OF OUTPUTS == LENGTH OF OUTPUT OF EXECUTE FUNCTION / SELF.VALUE\n try:\n param_value = params[OUTPUT_STATES]\n\n except KeyError:\n if COMMAND_LINE in context:\n pass\n else:\n # OUTPUT_STATES not specified:\n # - set to None, so that it is set to default (self.value) in instantiate_outputState\n # Notes:\n # * if in VERBOSE mode, warning will be issued in instantiate_outputState, where default value is known\n # * number of outputStates is validated against length of owner mechanism's execute method output (EMO)\n # in instantiate_outputState, where an outputState is assigned to each item (value) of the EMO\n params[OUTPUT_STATES] = None\n\n else:\n # OUTPUT_STATES is specified, so validate:\n # If it is a single item or a non-OrderedDict, place in a list (for use here and in instantiate_outputState)\n if not isinstance(param_value, (list, OrderedDict)):\n param_value = [param_value]\n # Validate each item in the list or OrderedDict\n i = 0\n for key, item in param_value if isinstance(param_value, dict) else enumerate(param_value):\n from PsyNeuLink.Components.States.OutputState import OutputState\n # If not valid...\n if not ((isclass(item) and issubclass(item, OutputState)) or # OutputState class ref\n isinstance(item, OutputState) or # OutputState object\n isinstance(item, dict) or # OutputState specification dict\n isinstance(item, str) or # Name (to be used as key in outputStates dict)\n iscompatible(item, **{kwCompatibilityNumeric: True})): # value\n # set to None, so it is set to default (self.value) in instantiate_outputState\n param_value[key] = None\n if self.prefs.verbosePref:\n print(\"Item {0} of {1} param ({2}) in {3} is not a\"\n \" OutputState, specification dict or value, nor a list of dict of them; \"\n \"output ({4}) of execute method for {5} will be used\"\n \" to create a default outputState for {3}\".\n format(i,\n OUTPUT_STATES,\n param_value,\n self.__class__.__name__,\n self.value,\n self.execute.__self__.name))\n i += 1\n params[OUTPUT_STATES] = param_value", "def test_params(self):\n self.assertEqual(self.oneport_example1.params, ['Cm'])\n self.assertEqual(self.twoport_example1.params, ['Cm'])\n self.assertEqual(self.fourport_example1.params, ['Cm'])", "def check_params(self):\n raise NotImplementedError", "def test_put_parameter(self):\n pass", "def test_multi(self):\n self.assertEqual(6, multi(2, 3))", "def test_ParameterManagerGenerator_concurrent():\n components_1 = {\n \"scale\": mock_component(),\n \"decay\": mock_component(),\n \"absorption\": mock_component(),\n }\n data_manager = mock_data_manager(components_1)\n\n pmg = ParameterManagerGenerator(\n [data_manager],\n apm_type=active_parameter_manager,\n target=ScalingTarget(),\n mode=\"concurrent\",\n )\n 
apms = pmg.parameter_managers()\n assert len(apms) == 1\n apm = apms[0]\n assert isinstance(apm, multi_active_parameter_manager)\n assert \"scale\" in apm.components_list\n assert \"decay\" in apm.components_list\n assert \"absorption\" in apm.components_list\n\n components_1 = {\n \"scale\": mock_component(),\n \"decay\": mock_component(),\n \"absorption\": mock_component(),\n }\n components_2 = {\"1\": mock_component(), \"2\": mock_component()}\n data_manager_1 = mock_data_manager(components_1)\n data_manager_2 = mock_data_manager(components_2)\n\n pmg = ParameterManagerGenerator(\n [data_manager_1, data_manager_2],\n apm_type=active_parameter_manager,\n target=ScalingTarget(),\n mode=\"concurrent\",\n )\n multi_apms = pmg.parameter_managers()\n assert len(multi_apms) == 1\n multi_apm = multi_apms[0]\n assert isinstance(multi_apm, multi_active_parameter_manager)\n for apm in multi_apm.apm_list:\n assert isinstance(apm, active_parameter_manager)\n assert \"scale\" in multi_apm.apm_list[0].components_list\n assert \"decay\" in multi_apm.apm_list[0].components_list\n assert \"absorption\" in multi_apm.apm_list[0].components_list\n assert \"1\" in multi_apm.apm_list[1].components_list\n assert \"2\" in multi_apm.apm_list[1].components_list\n\n # now try fixing a component\n data_manager.fixed_components = [\"absorption\"]\n pmg = ParameterManagerGenerator(\n [data_manager],\n apm_type=active_parameter_manager,\n target=ScalingTarget(),\n mode=\"concurrent\",\n )\n apms = pmg.parameter_managers()\n assert len(apms) == 1\n apm = apms[0]\n assert isinstance(apm, multi_active_parameter_manager)\n assert \"scale\" in apm.components_list\n assert \"decay\" in apm.components_list\n assert \"absorption\" not in apm.components_list", "def test_verify_set_multi(self):\n self._verify([self.applied_commands['setm']])", "def test_get_mt_settings(self):\n pass", "def test_set_params():\n\n tpot_obj = TPOTClassifier()\n assert tpot_obj.set_params() is tpot_obj", "def test_parameters(db):\n\n t = db.query(db.Parameters).astropy()\n assert len(t) > 0, 'Parameters table is empty'\n\n # Check usage of Parameters\n param_list = db.query(db.ModeledParameters.c.parameter).astropy()\n if len(param_list) > 0:\n # Get unique values\n param_list = list(param_list['parameter'])\n param_list = list(set(param_list))\n t = db.query(db.Parameters).filter(db.Parameters.c.parameter.notin_(param_list)).astropy()\n if len(t) > 0:\n print('The following parameters are not being used:')\n print(t)\n # Skipping actual assertion test\n # assert len(t) == 0, f'{len(t)} unused parameters'", "def test_checkCustoms(self):\n self.failUnlessEqual(self.nice.opts['myflag'], \"PONY!\")\n self.failUnlessEqual(self.nice.opts['myparam'], \"Tofu WITH A PONY!\")", "def test_set_params_2():\n tpot_obj = TPOTClassifier(generations=2)\n tpot_obj.set_params(generations=3)\n\n assert tpot_obj.generations == 3" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Test the apm factory for concurrent refinement.
def test_ParameterManagerGenerator_concurrent(): components_1 = { "scale": mock_component(), "decay": mock_component(), "absorption": mock_component(), } data_manager = mock_data_manager(components_1) pmg = ParameterManagerGenerator( [data_manager], apm_type=active_parameter_manager, target=ScalingTarget(), mode="concurrent", ) apms = pmg.parameter_managers() assert len(apms) == 1 apm = apms[0] assert isinstance(apm, multi_active_parameter_manager) assert "scale" in apm.components_list assert "decay" in apm.components_list assert "absorption" in apm.components_list components_1 = { "scale": mock_component(), "decay": mock_component(), "absorption": mock_component(), } components_2 = {"1": mock_component(), "2": mock_component()} data_manager_1 = mock_data_manager(components_1) data_manager_2 = mock_data_manager(components_2) pmg = ParameterManagerGenerator( [data_manager_1, data_manager_2], apm_type=active_parameter_manager, target=ScalingTarget(), mode="concurrent", ) multi_apms = pmg.parameter_managers() assert len(multi_apms) == 1 multi_apm = multi_apms[0] assert isinstance(multi_apm, multi_active_parameter_manager) for apm in multi_apm.apm_list: assert isinstance(apm, active_parameter_manager) assert "scale" in multi_apm.apm_list[0].components_list assert "decay" in multi_apm.apm_list[0].components_list assert "absorption" in multi_apm.apm_list[0].components_list assert "1" in multi_apm.apm_list[1].components_list assert "2" in multi_apm.apm_list[1].components_list # now try fixing a component data_manager.fixed_components = ["absorption"] pmg = ParameterManagerGenerator( [data_manager], apm_type=active_parameter_manager, target=ScalingTarget(), mode="concurrent", ) apms = pmg.parameter_managers() assert len(apms) == 1 apm = apms[0] assert isinstance(apm, multi_active_parameter_manager) assert "scale" in apm.components_list assert "decay" in apm.components_list assert "absorption" not in apm.components_list
[ "def test_multiple_factories(self, mocker):\n sdk_ready_flag = threading.Event()\n\n def _init(self, ready_flag, some, auth_api, streaming_enabled, telemetry_runtime_producer, telemetry_init_consumer, sse_url=None):\n self._ready_flag = ready_flag\n self._synchronizer = mocker.Mock(spec=Synchronizer)\n self._streaming_enabled = False\n self._telemetry_runtime_producer = telemetry_runtime_producer\n self._telemetry_init_consumer = telemetry_init_consumer\n mocker.patch('splitio.sync.manager.Manager.__init__', new=_init)\n\n def _start(self, *args, **kwargs):\n sdk_ready_flag.set()\n mocker.patch('splitio.sync.manager.Manager.start', new=_start)\n\n def _stop(self, *args, **kwargs):\n pass\n mocker.patch('splitio.sync.manager.Manager.stop', new=_stop)\n\n mockManager = Manager(sdk_ready_flag, mocker.Mock(), mocker.Mock(), False, mocker.Mock(), mocker.Mock())\n\n def _make_factory_with_apikey(apikey, *_, **__):\n return SplitFactory(apikey, {}, True, mocker.Mock(spec=ImpressionsManager), mockManager, mocker.Mock(), mocker.Mock(), mocker.Mock())\n\n factory_module_logger = mocker.Mock()\n build_in_memory = mocker.Mock()\n build_in_memory.side_effect = _make_factory_with_apikey\n build_redis = mocker.Mock()\n build_redis.side_effect = _make_factory_with_apikey\n build_localhost = mocker.Mock()\n build_localhost.side_effect = _make_factory_with_apikey\n mocker.patch('splitio.client.factory._LOGGER', new=factory_module_logger)\n mocker.patch('splitio.client.factory._build_in_memory_factory', new=build_in_memory)\n mocker.patch('splitio.client.factory._build_redis_factory', new=build_redis)\n mocker.patch('splitio.client.factory._build_localhost_factory', new=build_localhost)\n\n _INSTANTIATED_FACTORIES.clear() # Clear all factory counters for testing purposes\n\n factory1 = get_factory('some_api_key')\n assert _INSTANTIATED_FACTORIES['some_api_key'] == 1\n assert factory_module_logger.warning.mock_calls == []\n\n factory2 = get_factory('some_api_key')\n assert _INSTANTIATED_FACTORIES['some_api_key'] == 2\n assert factory_module_logger.warning.mock_calls == [mocker.call(\n \"factory instantiation: You already have %d %s with this SDK Key. \"\n \"We recommend keeping only one instance of the factory at all times \"\n \"(Singleton pattern) and reusing it throughout your application.\",\n 1,\n 'factory'\n )]\n\n factory_module_logger.reset_mock()\n factory3 = get_factory('some_api_key')\n assert _INSTANTIATED_FACTORIES['some_api_key'] == 3\n assert factory_module_logger.warning.mock_calls == [mocker.call(\n \"factory instantiation: You already have %d %s with this SDK Key. \"\n \"We recommend keeping only one instance of the factory at all times \"\n \"(Singleton pattern) and reusing it throughout your application.\",\n 2,\n 'factories'\n )]\n\n factory_module_logger.reset_mock()\n factory4 = get_factory('some_other_api_key')\n assert _INSTANTIATED_FACTORIES['some_api_key'] == 3\n assert _INSTANTIATED_FACTORIES['some_other_api_key'] == 1\n assert factory_module_logger.warning.mock_calls == [mocker.call(\n \"factory instantiation: You already have an instance of the Split factory. \"\n \"Make sure you definitely want this additional instance. 
\"\n \"We recommend keeping only one instance of the factory at all times \"\n \"(Singleton pattern) and reusing it throughout your application.\"\n )]\n\n event = threading.Event()\n factory1.destroy(event)\n event.wait()\n assert _INSTANTIATED_FACTORIES['some_other_api_key'] == 1\n assert _INSTANTIATED_FACTORIES['some_api_key'] == 2\n factory2.destroy()\n factory3.destroy()\n factory4.destroy()", "def test_update_auto_allocate_run(self):\n pass", "def test_run_qiime_alpha_rarefaction_parallel(self):\n \n run_qiime_alpha_rarefaction(\n self.fasting_otu_table_fp, \n self.fasting_mapping_fp,\n self.wf_out, \n call_commands_serially,\n self.params,\n self.qiime_config,\n tree_fp=self.fasting_tree_fp,\n num_steps=10, \n parallel=True, \n min_seqs_per_sample=10,\\\n status_update_callback=no_status_updates)\n \n pd_control_plot_fp = join(self.wf_out,'alpha_rarefaction_plots',\n 'html_plots','PD_whole_treeTreatmentControl_ave.png')\n pd_treatment_plot_fp = join(self.wf_out,'alpha_rarefaction_plots',\n 'average_plots','PD_whole_treeTreatment.png')\n pd_averages_fp = join(self.wf_out,'alpha_rarefaction_plots',\n 'average_tables','PD_whole_treeTreatment.txt')\n \n # check that final output files have non-zero size\n self.assertTrue(getsize(pd_control_plot_fp) > 0)\n self.assertTrue(getsize(pd_treatment_plot_fp) > 0)\n self.assertTrue(getsize(pd_averages_fp) > 0)\n \n # Check that the log file is created and has size > 0\n log_fp = glob(join(self.wf_out,'log*.txt'))[0]\n self.assertTrue(getsize(log_fp) > 0)", "def test_communicator_manager(self):\n communicator_manager = None\n try:\n args = {'workflow': 'eventservice_hpc',\n 'queue': 'BNL_CLOUD_MCORE',\n 'site': 'BNL_CLOUD_MCORE',\n 'port': 25443,\n 'url': 'https://aipanda007.cern.ch',\n 'job_label': 'ptest',\n 'pilot_user': 'ATLAS',\n 'node': socket.getfqdn(),\n 'mem': 16000,\n 'disk_space': 160000,\n 'working_group': '',\n 'cpu': 2601.0,\n 'info': None}\n\n communicator_manager = CommunicationManager()\n communicator_manager.start()\n self.assertTrue(communicator_manager.is_alive())\n\n jobs = communicator_manager.get_jobs(njobs=2, args=args)\n self.assertEqual(len(jobs), 2)\n\n jobs = communicator_manager.get_jobs(njobs=1, args=args)\n self.assertEqual(len(jobs), 1)\n\n job_list = []\n for job in jobs:\n job_data = {'node': socket.getfqdn(),\n 'pilotErrorCode': 0,\n 'startTime': time.time(),\n 'jobMetrics': 'coreCount=8',\n 'schedulerID': 'unknown',\n 'timestamp': time_stamp(),\n 'exeErrorCode': 0,\n 'pilotID': 'unknown|PR|2.0.0 (80)',\n 'transExitCode': 0,\n 'pilotErrorDiag': '',\n 'exeErrorDiag': ''}\n job_data['jobId'] = job['PandaID']\n job_data['siteName'] = 'BNL_CLOUD_MCORE'\n job_data['state'] = 'running'\n job_data['attemptNr'] = job['attemptNr'] + 1\n job_list.append(job_data)\n status = communicator_manager.update_jobs(jobs=job_list)\n self.assertEqual(status[0], True)\n\n events = communicator_manager.get_event_ranges(num_event_ranges=1, job=jobs[0])\n self.assertEqual(len(events), 1)\n\n for event in events:\n event_range_status = {\"errorCode\": 1220, \"eventRangeID\": event['eventRangeID'], \"eventStatus\": 'failed'}\n event_range_message = {'version': 0, 'eventRanges': json.dumps(event_range_status)}\n res = communicator_manager.update_events(update_events=event_range_message)\n self.assertEqual(res['StatusCode'], 0)\n\n events = communicator_manager.get_event_ranges(num_event_ranges=2, job=jobs[0])\n self.assertEqual(len(events), 2)\n\n update_events = []\n for event in events:\n event_range = {\"eventRangeID\": 
event['eventRangeID'], \"eventStatus\": 'finished'}\n update_events.append(event_range)\n event_range_status = [{\"zipFile\": {\"numEvents\": len(update_events),\n \"objstoreID\": 1318,\n \"adler32\": '000000',\n \"lfn\": 'test_file',\n \"fsize\": 100,\n \"pathConvention\": 1000},\n \"eventRanges\": update_events}]\n\n event_range_message = {'version': 1, 'eventRanges': json.dumps(event_range_status)}\n res = communicator_manager.update_events(update_events=event_range_message)\n self.assertEqual(res['StatusCode'], 0)\n\n communicator_manager.stop()\n time.sleep(2)\n self.assertFalse(communicator_manager.is_alive())\n except Exception as ex:\n if communicator_manager:\n communicator_manager.stop()\n raise ex", "def test_run_qiime_alpha_rarefaction_parallel(self):\n \n run_qiime_alpha_rarefaction(\n self.fasting_otu_table_fp, \n self.fasting_mapping_fp,\n self.wf_out, \n call_commands_serially,\n self.params,\n self.qiime_config,\n tree_fp=self.fasting_tree_fp,\n num_steps=10, \n parallel=True, \n min_seqs_per_sample=10,\\\n status_update_callback=no_status_updates)\n \n pd_control_plot_fp = join(self.wf_out,'alpha_rarefaction_plots',\n 'html_plots','PD_whole_treecol_3_row_4_ave.png')\n pd_treatment_plot_fp = join(self.wf_out,'alpha_rarefaction_plots',\n 'average_plots','PD_whole_treeTreatment.png')\n pd_averages_fp = join(self.wf_out,'alpha_rarefaction_plots',\n 'average_tables','PD_whole_treeTreatment.txt')\n pd_collated_fp = join(self.wf_out,'alpha_div_collated',\n 'PD_whole_tree.txt')\n \n # For all samples, test that PD generally increases \n # with more sequences -- this may occasionally fail, but\n # it should be rare (because rarefaction is done randomly)\n pd_collated_f = list(open(pd_collated_fp))\n for col in range(3,11):\n pd_step1 = pd_collated_f[1].strip().split()[col]\n pd_step5 = pd_collated_f[5].strip().split()[col]\n pd_step11 = pd_collated_f[11].strip().split()[col]\n self.assertTrue(pd_step1 < pd_step5 < pd_step11,\n \"PD did not increase with more sequences.\")\n \n # check that final output files have non-zero size\n self.assertTrue(getsize(pd_control_plot_fp) > 0)\n self.assertTrue(getsize(pd_treatment_plot_fp) > 0)\n self.assertTrue(getsize(pd_averages_fp) > 0)\n \n # Check that the log file is created and has size > 0\n log_fp = glob(join(self.wf_out,'log*.txt'))[0]\n self.assertTrue(getsize(log_fp) > 0)", "def test_unique_reaper(self):\n with accept(ServiceRunner(accept_delay=0.1), name=\"outer\"):\n with pytest.raises(RuntimeError):\n ServiceRunner(accept_delay=0.1).accept()", "def test_multi_apm():\n\n components_1 = {\n \"scale\": mock_component(),\n \"decay\": mock_component(),\n \"absorption\": mock_component(),\n }\n components_2 = {\"scale\": mock_component(), \"decay\": mock_component()}\n\n multi_apm = multi_active_parameter_manager(\n ScalingTarget(),\n [components_1, components_2],\n [[\"scale\", \"decay\"], [\"scale\"]],\n active_parameter_manager,\n )\n\n # Test correct setup of apm_list attribute.\n for apm in multi_apm.apm_list:\n assert isinstance(apm, active_parameter_manager)\n assert len(multi_apm.apm_list) == 2\n assert multi_apm.components_list == [\"scale\", \"decay\", \"scale\"]\n assert multi_apm.n_active_params == 3\n assert multi_apm.apm_data[0] == {\"start_idx\": 0, \"end_idx\": 2}\n assert multi_apm.apm_data[1] == {\"start_idx\": 2, \"end_idx\": 3}\n\n # Test parameter selection.\n multi_apm.set_param_vals(flex.double([3.0, 2.5, 2.0]))\n assert multi_apm.get_param_vals() == flex.double([3.0, 2.5, 2.0])\n assert 
multi_apm.select_parameters(0) == flex.double([3.0, 2.5])\n assert multi_apm.select_parameters(1) == flex.double([2.0])\n\n # Test setting parameter esds.\n multi_apm.set_param_esds(flex.double([0.1, 0.2, 0.3]))\n assert components_1[\"scale\"].free_parameter_esds == flex.double([0.1])\n assert components_1[\"decay\"].free_parameter_esds == flex.double([0.2])\n assert components_2[\"scale\"].free_parameter_esds == flex.double([0.3])\n\n # Test setting var_cov matrices for each component.\n var_cov = flex.double([1.0, 0.5, 0.5, 0.5, 2.0, 0.5, 0.5, 0.5, 3.0])\n var_cov.reshape(flex.grid(3, 3))\n multi_apm.calculate_model_state_uncertainties(var_cov)\n assert components_1[\"scale\"].var_cov_matrix[0, 0] == 1.0\n assert components_1[\"decay\"].var_cov_matrix[0, 0] == 2.0\n assert components_2[\"scale\"].var_cov_matrix[0, 0] == 3.0", "def test_run_alpha_rarefaction_parallel(self):\r\n\r\n run_alpha_rarefaction(\r\n self.test_data['biom'][0],\r\n self.test_data['map'][0],\r\n self.test_out,\r\n call_commands_serially,\r\n self.params,\r\n self.qiime_config,\r\n tree_fp=self.test_data['tree'][0],\r\n num_steps=5,\r\n parallel=True,\r\n min_rare_depth=3,\r\n max_rare_depth=18,\r\n status_update_callback=no_status_updates)\r\n\r\n html_fp = join(self.test_out, 'alpha_rarefaction_plots',\r\n 'rarefaction_plots.html')\r\n pd_averages_fp = join(self.test_out, 'alpha_rarefaction_plots',\r\n 'average_tables', 'PD_whole_treeSampleType.txt')\r\n pd_collated_fp = join(self.test_out, 'alpha_div_collated',\r\n 'PD_whole_tree.txt')\r\n\r\n # Confirm that palm and gut alpha diversities are different,\r\n # and suggestive of statistical significance (we only have a\r\n # few sequences, so we don't get significant results)\r\n ttest_res, alpha_avg = compare_alpha_diversities(open(pd_collated_fp),\r\n open(\r\n self.test_data[\r\n 'map'][0]),\r\n 'SampleType',\r\n 18,\r\n test_type='parametric')\r\n feces_palm_t = ttest_res[('feces', 'L_palm')][0]\r\n self.assertTrue(feces_palm_t < 0,\r\n \"t-statistic too high: %1.3f, but should be less than 0\"\r\n % feces_palm_t)\r\n\r\n # check that final output files have non-zero size\r\n self.assertTrue(getsize(html_fp) > 0)\r\n\r\n # Check that the log file is created and has size > 0\r\n log_fp = glob(join(self.test_out, 'log*.txt'))[0]\r\n self.assertTrue(getsize(log_fp) > 0)", "def test_announce_routes(fib_t0):\n assert(True)", "def test_patch_tam_advisory_instance(self):\n pass", "def test_ipam_services_update(self):\n pass", "def test_FinancesAdvicesWorkflow(self):\n def _check_date(item, modified_date, volatile_date):\n '''Check that item modified date was updated.'''\n new_modified_date = item.modified()\n self.assertNotEqual(modified_date, new_modified_date)\n new_volatile_date = get_cachekey_volatile('Products.PloneMeeting.MeetingItem.modified')\n self.assertNotEqual(volatile_date, new_volatile_date)\n return new_modified_date, new_volatile_date\n\n cfg = self.meetingConfig\n self.changeUser('dgen')\n gic1_uid = cfg.getOrderedGroupsInCharge()[0]\n item = self.create('MeetingItem', groupsInCharge=(gic1_uid, ))\n item_uid = item.UID()\n self.assertEqual(self.transitions(item), ['proposeToValidationLevel1'])\n # ask finances advice\n fin_group_uid = finance_group_uid()\n item.setOptionalAdvisers((fin_group_uid + '__rowid__unique_id_002', ))\n item._update_after_edit()\n # advice still not askable, askable as level2 or level3\n self.assertEqual(self.transitions(item),\n ['proposeToValidationLevel1'])\n self.do(item, 'proposeToValidationLevel1')\n 
self.assertEqual(self.transitions(item),\n ['backToItemCreated', 'proposeToValidationLevel2'])\n self.do(item, 'proposeToValidationLevel2')\n self.assertEqual(self.transitions(item),\n ['backToProposedToValidationLevel1',\n 'proposeToValidationLevel3',\n 'wait_advices_from_proposedToValidationLevel2'])\n self.do(item, 'wait_advices_from_proposedToValidationLevel2')\n # a MeetingManager is able to send back but not a normal user\n self.assertEqual(\n self.transitions(item),\n ['backTo_proposedToValidationLevel2_from_waiting_advices',\n 'backTo_proposedToValidationLevel3_from_waiting_advices',\n 'backTo_validated_from_waiting_advices'])\n # but another user can not\n self._addPrincipalToGroup('bourgmestre', self.dirgen_creators)\n self._addPrincipalToGroup('bourgmestre', self.dirgen_level1reviewers)\n self._addPrincipalToGroup('bourgmestre', self.dirgen_level2reviewers)\n self._addPrincipalToGroup('bourgmestre', self.dirgen_level3reviewers)\n self.changeUser('bourgmestre')\n self.assertTrue(self.hasPermission(\"View\", item))\n self.assertEqual(self.transitions(item), [])\n\n # give advice\n self.changeUser('dfin')\n self.assertEqual(self.transitions(item),\n ['backTo_proposedToValidationLevel2_from_waiting_advices',\n 'backTo_proposedToValidationLevel3_from_waiting_advices'])\n # advice may be taken over\n self.assertTrue(item.adapted().mayTakeOver())\n # advice giveable when item complete\n self.assertFalse(item.adviceIndex[fin_group_uid]['advice_addable'])\n self.assertTrue(item.adapted().mayEvaluateCompleteness())\n # we will check that item modified date is invalidated when advice changed\n # this is responsible for updating collections counter in faceted portlet\n volatile_date = get_cachekey_volatile('Products.PloneMeeting.MeetingItem.modified')\n item_modified = item.modified()\n item.setCompleteness('completeness_complete')\n item._update_after_edit()\n item_modified, volatile_date = _check_date(item, item_modified, volatile_date)\n advice_portal_type = item._advicePortalTypeForAdviser(fin_group_uid)\n advice = self.addAdvice(item,\n advice_group=fin_group_uid,\n advice_type='positive_finance',\n advice_portal_type=advice_portal_type)\n # item modified date was updated\n item_modified, volatile_date = _check_date(item, item_modified, volatile_date)\n self.assertTrue(advice.advice_hide_during_redaction)\n self.assertEqual(self.transitions(advice),\n ['proposeToFinancialController'])\n # once advice given but hidden during redaction, item may no more be sent back\n self.assertEqual(self.transitions(item), [])\n # financial controller\n self.do(advice, 'proposeToFinancialController')\n self.assertEqual(self.transitions(item), [])\n self.assertEqual(self.transitions(advice),\n ['backToAdviceCreated',\n 'proposeToFinancialEditor'])\n # indexAdvisers is correctly reindexed\n advice_index_value = \"delay__{0}_proposed_to_financial_controller\".format(fin_group_uid)\n self.assertTrue(self.catalog(UID=item_uid, indexAdvisers=[advice_index_value]))\n # item modified date was updated\n item_modified, volatile_date = _check_date(item, item_modified, volatile_date)\n # financial editor\n self.do(advice, 'proposeToFinancialEditor')\n self.assertEqual(self.transitions(advice),\n ['backToProposedToFinancialController',\n 'proposeToFinancialReviewer'])\n # indexAdvisers is correctly reindexed\n advice_index_value = \"delay__{0}_proposed_to_financial_editor\".format(fin_group_uid)\n self.assertTrue(self.catalog(UID=item_uid, indexAdvisers=[advice_index_value]))\n # item modified date was updated\n 
item_modified, volatile_date = _check_date(item, item_modified, volatile_date)\n # financial reviewer\n self.do(advice, 'proposeToFinancialReviewer')\n self.assertEqual(self.transitions(item), [])\n self.assertEqual(self.transitions(advice),\n ['backToProposedToFinancialController',\n 'backToProposedToFinancialEditor',\n 'proposeToFinancialManager'])\n # indexAdvisers is correctly reindexed\n advice_index_value = \"delay__{0}_proposed_to_financial_reviewer\".format(fin_group_uid)\n self.assertTrue(self.catalog(UID=item_uid, indexAdvisers=[advice_index_value]))\n # item modified date was updated\n item_modified, volatile_date = _check_date(item, item_modified, volatile_date)\n # financial manager\n self.do(advice, 'proposeToFinancialManager')\n self.assertEqual(self.transitions(item), [])\n self.assertEqual(self.transitions(advice),\n ['backToProposedToFinancialController',\n 'backToProposedToFinancialReviewer',\n 'signFinancialAdvice'])\n # indexAdvisers is correctly reindexed\n advice_index_value = \"delay__{0}_proposed_to_financial_manager\".format(fin_group_uid)\n self.assertTrue(self.catalog(UID=item_uid, indexAdvisers=[advice_index_value]))\n # item modified date was updated\n item_modified, volatile_date = _check_date(item, item_modified, volatile_date)\n # sign advice\n self.do(advice, 'signFinancialAdvice')\n self.assertEqual(self.transitions(item),\n ['backTo_proposedToValidationLevel2_from_waiting_advices',\n 'backTo_proposedToValidationLevel3_from_waiting_advices',\n 'backTo_validated_from_waiting_advices'])\n self.assertEqual(self.transitions(advice),\n ['backToProposedToFinancialManager'])\n self.assertFalse(advice.advice_hide_during_redaction)\n # indexAdvisers is correctly reindexed\n advice_index_value = \"delay__{0}_financial_advice_signed\".format(fin_group_uid)\n self.assertTrue(self.catalog(UID=item_uid, indexAdvisers=[advice_index_value]))\n # item modified date was updated\n item_modified, volatile_date = _check_date(item, item_modified, volatile_date)\n # validate item\n self.do(item, 'backTo_validated_from_waiting_advices')\n self.assertEqual(item.query_state(), 'validated')\n # indexAdvisers is correctly reindexed\n advice_index_value = \"delay__{0}_advice_given\".format(fin_group_uid)\n self.assertTrue(self.catalog(UID=item_uid, indexAdvisers=[advice_index_value]))\n # item modified date was updated\n item_modified, volatile_date = _check_date(item, item_modified, volatile_date)", "async def test_par_delta2():\n await delta_tester(ParDelta2)", "def test_update_tam_advisory_instance(self):\n pass", "def test_efs_client_connections_alarm():\n pass", "def test_lama_job_runner():\n\n configs = registration_root.glob('*.toml')\n\n for cfg in configs:\n delete_previous_files()\n\n print(f\"\\n{'#'*8} Doing config {cfg.name} {'#'*8}\")\n\n lama_job_runner.lama_job_runner(cfg, wt_registration_dir, make_job_file=True, log_level=logging.ERROR)\n lama_job_runner.lama_job_runner(cfg, wt_registration_dir, log_level=logging.ERROR)\n\n lama_job_runner.lama_job_runner(cfg, mut_registration_dir, make_job_file=True, log_level=logging.ERROR)\n lama_job_runner.lama_job_runner(cfg, mut_registration_dir, log_level=logging.ERROR)\n # return # Just do the first", "def testPeriodic(self):", "def test_active(self):\n nodes = [create_node(\"a\", \"service1\"),\n create_node(\"b\", \"service2\")]\n static = StaticRoutes(nodes).create(self.disco, self.runtime)\n self.runtime.dispatcher.startActor(static)\n self.runtime.dispatcher.pump()\n\n self.assertEqual(knownNodes(self.disco, 
\"service1\", \"sandbox\"), [nodes[0]])\n self.assertEqual(knownNodes(self.disco, \"service2\", \"sandbox\"), [nodes[1]])", "def test_09_deploy_vm_multiple_aff_grps(self):\n\n self.create_aff_grp(aff_grp=self.services[\"host_anti_affinity\"], acc=self.account.name, domainid=self.domain.id)\n self.create_aff_grp(aff_grp=self.services[\"host_anti_affinity\"], acc=self.account.name, domainid=self.domain.id)\n\n vm1, hostid1 = self.create_vm_in_aff_grps(ag_list=[self.aff_grp[0].name,\n self.aff_grp[1].name], account_name=self.account.name, domain_id=self.domain.id)\n vm2, hostid2 = self.create_vm_in_aff_grps(ag_list=[self.aff_grp[0].name,\n self.aff_grp[1].name], account_name=self.account.name, domain_id=self.domain.id)\n\n aff_grps_names = [self.aff_grp[0].name, self.aff_grp[1].name]\n aff_grps_names.sort()\n\n for vm in [vm1, vm2]:\n list_aff_grps = AffinityGroup.list(self.api_client,\n virtualmachineid=vm.id)\n\n list_aff_grps_names = [list_aff_grps[0].name, list_aff_grps[1].name]\n\n list_aff_grps_names.sort()\n self.assertEqual(aff_grps_names, list_aff_grps_names,\n \"One of the Affinity Groups is missing %s\"\n %list_aff_grps_names)\n\n vm1.delete(self.api_client)\n vm2.delete(self.api_client)\n wait_for_cleanup(self.apiclient, [\"expunge.delay\", \"expunge.interval\"])\n\n self.aff_grp[0].delete(self.api_client)\n self.aff_grp[1].delete(self.api_client)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
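The tests in this row rely on mock_component() and mock_data_manager() fixtures that are not included in this excerpt. The sketch below is a hypothetical minimal version of those helpers built on unittest.mock; the attribute names (free_parameters, free_parameter_esds, n_params, components, fixed_components, consecutive_refinement_order) are assumptions inferred from how the tests use the objects, not the project's actual fixture code.

from unittest.mock import Mock

from dials.array_family import flex


def mock_component():
    # Hypothetical stand-in: a model component with a single free parameter.
    component = Mock()
    component.free_parameters = flex.double([1.0])
    component.free_parameter_esds = None
    component.n_params = 1
    return component


def mock_data_manager(components):
    # Hypothetical stand-in: a data manager that owns a dict of components.
    dm = Mock()
    dm.components = components
    dm.fixed_components = []
    dm.consecutive_refinement_order = None
    return dm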
Test the apm factory for consecutive refinement.
def test_ParameterManagerGenerator_consecutive():
    components_1 = {
        "scale": mock_component(),
        "decay": mock_component(),
        "absorption": mock_component(),
    }

    data_manager = mock_data_manager(components_1)
    data_manager.consecutive_refinement_order = [["scale", "decay"], ["absorption"]]

    # Test single dataset case.
    pmg = ParameterManagerGenerator(
        [data_manager],
        apm_type=active_parameter_manager,
        target=ScalingTarget(),
        mode="consecutive",
    )
    apms = list(pmg.parameter_managers())
    assert len(apms) == 2
    apm = apms[0]
    assert isinstance(apm, multi_active_parameter_manager)
    assert "scale" in apm.components_list
    assert "decay" in apm.components_list
    assert "absorption" not in apm.components_list
    apm = apms[1]
    assert isinstance(apm, multi_active_parameter_manager)
    assert "scale" not in apm.components_list
    assert "decay" not in apm.components_list
    assert "absorption" in apm.components_list

    # Test multi dataset case.
    components_2 = {"1": mock_component(), "2": mock_component()}
    data_manager_2 = mock_data_manager(components_2)
    data_manager_2.consecutive_refinement_order = [["1"], ["2"]]

    pmg = ParameterManagerGenerator(
        [data_manager, data_manager_2],
        apm_type=active_parameter_manager,
        target=ScalingTarget(),
        mode="consecutive",
    )
    apms = list(pmg.parameter_managers())
    assert len(apms) == 2
    multi_apm = apms[0]
    assert isinstance(multi_apm, multi_active_parameter_manager)
    apm_1 = multi_apm.apm_list[0]
    assert "scale" in apm_1.components_list
    assert "decay" in apm_1.components_list
    assert "absorption" not in apm_1.components_list
    assert multi_apm.apm_list[1].components_list == ["1"]
    multi_apm = apms[1]
    assert isinstance(multi_apm, multi_active_parameter_manager)
    assert multi_apm.apm_list[0].components_list == ["absorption"]
    assert multi_apm.apm_list[1].components_list == ["2"]

    # Test multi dataset case with different number of cycles for each data_manager.
    components_2 = {"1": mock_component()}
    data_manager_2 = mock_data_manager(components_2)
    data_manager_2.consecutive_refinement_order = [["1"], ["2"]]

    pmg = ParameterManagerGenerator(
        [data_manager, data_manager_2],
        apm_type=active_parameter_manager,
        target=ScalingTarget(),
        mode="consecutive",
    )
    assert pmg.param_lists[0] == [["scale", "decay"], ["absorption"]]
    assert pmg.param_lists[1] == [["1"]]

    apms = list(pmg.parameter_managers())
    assert len(apms) == 2
    multi_apm = apms[0]
    assert isinstance(multi_apm, multi_active_parameter_manager)
    apm_1 = multi_apm.apm_list[0]
    assert "scale" in apm_1.components_list
    assert "decay" in apm_1.components_list
    assert "absorption" not in apm_1.components_list
    assert multi_apm.apm_list[1].components_list == ["1"]
    multi_apm = apms[1]
    assert isinstance(multi_apm, multi_active_parameter_manager)
    assert multi_apm.apm_list[0].components_list == ["absorption"]
    # Only change relative to previous test case.
    assert multi_apm.apm_list[1].components_list == []

    # Test fixing the decay parameter.
    data_manager.fixed_components = ["decay"]
    pmg = ParameterManagerGenerator(
        [data_manager],
        apm_type=active_parameter_manager,
        target=ScalingTarget(),
        mode="consecutive",
    )
    apms = list(pmg.parameter_managers())
    assert len(apms) == 2
    apm = apms[0]
    assert isinstance(apm, multi_active_parameter_manager)
    assert "scale" in apm.components_list
    assert "decay" not in apm.components_list
    assert "absorption" not in apm.components_list
    apm = apms[1]
    assert isinstance(apm, multi_active_parameter_manager)
    assert "scale" not in apm.components_list
    assert "decay" not in apm.components_list
    assert "absorption" in apm.components_list
[ "def test_multi_apm():\n\n components_1 = {\n \"scale\": mock_component(),\n \"decay\": mock_component(),\n \"absorption\": mock_component(),\n }\n components_2 = {\"scale\": mock_component(), \"decay\": mock_component()}\n\n multi_apm = multi_active_parameter_manager(\n ScalingTarget(),\n [components_1, components_2],\n [[\"scale\", \"decay\"], [\"scale\"]],\n active_parameter_manager,\n )\n\n # Test correct setup of apm_list attribute.\n for apm in multi_apm.apm_list:\n assert isinstance(apm, active_parameter_manager)\n assert len(multi_apm.apm_list) == 2\n assert multi_apm.components_list == [\"scale\", \"decay\", \"scale\"]\n assert multi_apm.n_active_params == 3\n assert multi_apm.apm_data[0] == {\"start_idx\": 0, \"end_idx\": 2}\n assert multi_apm.apm_data[1] == {\"start_idx\": 2, \"end_idx\": 3}\n\n # Test parameter selection.\n multi_apm.set_param_vals(flex.double([3.0, 2.5, 2.0]))\n assert multi_apm.get_param_vals() == flex.double([3.0, 2.5, 2.0])\n assert multi_apm.select_parameters(0) == flex.double([3.0, 2.5])\n assert multi_apm.select_parameters(1) == flex.double([2.0])\n\n # Test setting parameter esds.\n multi_apm.set_param_esds(flex.double([0.1, 0.2, 0.3]))\n assert components_1[\"scale\"].free_parameter_esds == flex.double([0.1])\n assert components_1[\"decay\"].free_parameter_esds == flex.double([0.2])\n assert components_2[\"scale\"].free_parameter_esds == flex.double([0.3])\n\n # Test setting var_cov matrices for each component.\n var_cov = flex.double([1.0, 0.5, 0.5, 0.5, 2.0, 0.5, 0.5, 0.5, 3.0])\n var_cov.reshape(flex.grid(3, 3))\n multi_apm.calculate_model_state_uncertainties(var_cov)\n assert components_1[\"scale\"].var_cov_matrix[0, 0] == 1.0\n assert components_1[\"decay\"].var_cov_matrix[0, 0] == 2.0\n assert components_2[\"scale\"].var_cov_matrix[0, 0] == 3.0", "def test_create_pattern(self):\n pass", "def test_epm_reuse_rf(self):\n scen = Scenario(self.scen_fn, cmd_args={'run_obj':'quality'})\n scen.feature_array = None\n validator = Validator(scen, self.trajectory)\n old_rh = RunHistory(average_cost)\n for config in [e[\"incumbent\"] for e in self.trajectory]:\n old_rh.add(config, 1, 1, StatusType.SUCCESS, instance_id='0',\n seed=127)\n self.assertTrue(isinstance(validator.validate_epm(runhistory=old_rh),\n RunHistory))\n self.assertTrue(isinstance(validator.validate_epm(\n output_fn=\"test/test_files/validation/\"),\n RunHistory))\n self.assertRaises(ValueError, validator.validate_epm, reuse_epm=False)", "def test_announce_routes(fib_t0):\n assert(True)", "def test_FinancesAdvicesWorkflow(self):\n def _check_date(item, modified_date, volatile_date):\n '''Check that item modified date was updated.'''\n new_modified_date = item.modified()\n self.assertNotEqual(modified_date, new_modified_date)\n new_volatile_date = get_cachekey_volatile('Products.PloneMeeting.MeetingItem.modified')\n self.assertNotEqual(volatile_date, new_volatile_date)\n return new_modified_date, new_volatile_date\n\n cfg = self.meetingConfig\n self.changeUser('dgen')\n gic1_uid = cfg.getOrderedGroupsInCharge()[0]\n item = self.create('MeetingItem', groupsInCharge=(gic1_uid, ))\n item_uid = item.UID()\n self.assertEqual(self.transitions(item), ['proposeToValidationLevel1'])\n # ask finances advice\n fin_group_uid = finance_group_uid()\n item.setOptionalAdvisers((fin_group_uid + '__rowid__unique_id_002', ))\n item._update_after_edit()\n # advice still not askable, askable as level2 or level3\n self.assertEqual(self.transitions(item),\n ['proposeToValidationLevel1'])\n self.do(item, 
'proposeToValidationLevel1')\n self.assertEqual(self.transitions(item),\n ['backToItemCreated', 'proposeToValidationLevel2'])\n self.do(item, 'proposeToValidationLevel2')\n self.assertEqual(self.transitions(item),\n ['backToProposedToValidationLevel1',\n 'proposeToValidationLevel3',\n 'wait_advices_from_proposedToValidationLevel2'])\n self.do(item, 'wait_advices_from_proposedToValidationLevel2')\n # a MeetingManager is able to send back but not a normal user\n self.assertEqual(\n self.transitions(item),\n ['backTo_proposedToValidationLevel2_from_waiting_advices',\n 'backTo_proposedToValidationLevel3_from_waiting_advices',\n 'backTo_validated_from_waiting_advices'])\n # but another user can not\n self._addPrincipalToGroup('bourgmestre', self.dirgen_creators)\n self._addPrincipalToGroup('bourgmestre', self.dirgen_level1reviewers)\n self._addPrincipalToGroup('bourgmestre', self.dirgen_level2reviewers)\n self._addPrincipalToGroup('bourgmestre', self.dirgen_level3reviewers)\n self.changeUser('bourgmestre')\n self.assertTrue(self.hasPermission(\"View\", item))\n self.assertEqual(self.transitions(item), [])\n\n # give advice\n self.changeUser('dfin')\n self.assertEqual(self.transitions(item),\n ['backTo_proposedToValidationLevel2_from_waiting_advices',\n 'backTo_proposedToValidationLevel3_from_waiting_advices'])\n # advice may be taken over\n self.assertTrue(item.adapted().mayTakeOver())\n # advice giveable when item complete\n self.assertFalse(item.adviceIndex[fin_group_uid]['advice_addable'])\n self.assertTrue(item.adapted().mayEvaluateCompleteness())\n # we will check that item modified date is invalidated when advice changed\n # this is responsible for updating collections counter in faceted portlet\n volatile_date = get_cachekey_volatile('Products.PloneMeeting.MeetingItem.modified')\n item_modified = item.modified()\n item.setCompleteness('completeness_complete')\n item._update_after_edit()\n item_modified, volatile_date = _check_date(item, item_modified, volatile_date)\n advice_portal_type = item._advicePortalTypeForAdviser(fin_group_uid)\n advice = self.addAdvice(item,\n advice_group=fin_group_uid,\n advice_type='positive_finance',\n advice_portal_type=advice_portal_type)\n # item modified date was updated\n item_modified, volatile_date = _check_date(item, item_modified, volatile_date)\n self.assertTrue(advice.advice_hide_during_redaction)\n self.assertEqual(self.transitions(advice),\n ['proposeToFinancialController'])\n # once advice given but hidden during redaction, item may no more be sent back\n self.assertEqual(self.transitions(item), [])\n # financial controller\n self.do(advice, 'proposeToFinancialController')\n self.assertEqual(self.transitions(item), [])\n self.assertEqual(self.transitions(advice),\n ['backToAdviceCreated',\n 'proposeToFinancialEditor'])\n # indexAdvisers is correctly reindexed\n advice_index_value = \"delay__{0}_proposed_to_financial_controller\".format(fin_group_uid)\n self.assertTrue(self.catalog(UID=item_uid, indexAdvisers=[advice_index_value]))\n # item modified date was updated\n item_modified, volatile_date = _check_date(item, item_modified, volatile_date)\n # financial editor\n self.do(advice, 'proposeToFinancialEditor')\n self.assertEqual(self.transitions(advice),\n ['backToProposedToFinancialController',\n 'proposeToFinancialReviewer'])\n # indexAdvisers is correctly reindexed\n advice_index_value = \"delay__{0}_proposed_to_financial_editor\".format(fin_group_uid)\n self.assertTrue(self.catalog(UID=item_uid, indexAdvisers=[advice_index_value]))\n # item 
modified date was updated\n item_modified, volatile_date = _check_date(item, item_modified, volatile_date)\n # financial reviewer\n self.do(advice, 'proposeToFinancialReviewer')\n self.assertEqual(self.transitions(item), [])\n self.assertEqual(self.transitions(advice),\n ['backToProposedToFinancialController',\n 'backToProposedToFinancialEditor',\n 'proposeToFinancialManager'])\n # indexAdvisers is correctly reindexed\n advice_index_value = \"delay__{0}_proposed_to_financial_reviewer\".format(fin_group_uid)\n self.assertTrue(self.catalog(UID=item_uid, indexAdvisers=[advice_index_value]))\n # item modified date was updated\n item_modified, volatile_date = _check_date(item, item_modified, volatile_date)\n # financial manager\n self.do(advice, 'proposeToFinancialManager')\n self.assertEqual(self.transitions(item), [])\n self.assertEqual(self.transitions(advice),\n ['backToProposedToFinancialController',\n 'backToProposedToFinancialReviewer',\n 'signFinancialAdvice'])\n # indexAdvisers is correctly reindexed\n advice_index_value = \"delay__{0}_proposed_to_financial_manager\".format(fin_group_uid)\n self.assertTrue(self.catalog(UID=item_uid, indexAdvisers=[advice_index_value]))\n # item modified date was updated\n item_modified, volatile_date = _check_date(item, item_modified, volatile_date)\n # sign advice\n self.do(advice, 'signFinancialAdvice')\n self.assertEqual(self.transitions(item),\n ['backTo_proposedToValidationLevel2_from_waiting_advices',\n 'backTo_proposedToValidationLevel3_from_waiting_advices',\n 'backTo_validated_from_waiting_advices'])\n self.assertEqual(self.transitions(advice),\n ['backToProposedToFinancialManager'])\n self.assertFalse(advice.advice_hide_during_redaction)\n # indexAdvisers is correctly reindexed\n advice_index_value = \"delay__{0}_financial_advice_signed\".format(fin_group_uid)\n self.assertTrue(self.catalog(UID=item_uid, indexAdvisers=[advice_index_value]))\n # item modified date was updated\n item_modified, volatile_date = _check_date(item, item_modified, volatile_date)\n # validate item\n self.do(item, 'backTo_validated_from_waiting_advices')\n self.assertEqual(item.query_state(), 'validated')\n # indexAdvisers is correctly reindexed\n advice_index_value = \"delay__{0}_advice_given\".format(fin_group_uid)\n self.assertTrue(self.catalog(UID=item_uid, indexAdvisers=[advice_index_value]))\n # item modified date was updated\n item_modified, volatile_date = _check_date(item, item_modified, volatile_date)", "def test_update_pattern(self):\n pass", "def test_multipole_moments(self):\n printcool(\"Test GMX, OpenMM, and TINKER multipole moments using AMBER force field\")\n missing_pkgs = []\n for eng in ['TINKER', 'GMX', 'OpenMM']:\n if eng not in self.engines:\n missing_pkgs.append(eng)\n if len(missing_pkgs) > 0:\n pytest.skip(\"Missing packages: %s\" % ', '.join(missing_pkgs))\n Data = OrderedDict()\n for name, eng in self.engines.items():\n Data[name] = eng.multipole_moments(shot=5, optimize=False)\n datadir = os.path.join(self.cwd, 'files', 'test_engine', self.__class__.__name__)\n if SAVEDATA:\n fout = os.path.join(datadir, 'test_multipole_moments.dipole.dat')\n if not os.path.exists(os.path.dirname(fout)): os.makedirs(os.path.dirname(fout))\n np.savetxt(fout, np.array(list(Data[list(self.engines.keys())[0]]['dipole'].values())))\n fout = os.path.join(datadir, 'test_multipole_moments.quadrupole.dat')\n np.savetxt(fout, np.array(list(Data[list(self.engines.keys())[0]]['quadrupole'].values())))\n RefDip = np.loadtxt(os.path.join(datadir, 
'test_multipole_moments.dipole.dat'))\n RefQuad = np.loadtxt(os.path.join(datadir, 'test_multipole_moments.quadrupole.dat'))\n for n1 in self.engines.keys():\n d1 = np.array(list(Data[n1]['dipole'].values()))\n q1 = np.array(list(Data[n1]['quadrupole'].values()))\n np.testing.assert_allclose(d1, RefDip, rtol=0, atol=0.001, err_msg=\"%s dipole moments do not match the reference\" % n1)\n np.testing.assert_allclose(q1, RefQuad, rtol=0, atol=0.001, err_msg=\"%s quadrupole moments do not match the reference\" % n1)", "def test_alchemical_phase_factory_building(self):\n with mmtools.utils.temporary_directory() as tmp_dir:\n template_script = self.get_implicit_template_script(tmp_dir)\n\n # AbsoluteAlchemicalFactory options.\n template_script['options']['alchemical_pme_treatment'] = 'exact'\n\n # Test that options are passed to AlchemicalPhaseFactory correctly.\n exp_builder = ExperimentBuilder(script=template_script)\n for experiment in exp_builder.build_experiments():\n for phase_factory in experiment.phases:\n assert phase_factory.alchemical_factory.alchemical_pme_treatment == 'exact'\n # Overwrite AbsoluteAlchemicalFactory default for disable_alchemical_dispersion_correction.\n assert phase_factory.alchemical_factory.disable_alchemical_dispersion_correction == True", "def test_parallel_gate_different_length(self):\n qc = QuantumCircuit(2, 2)\n qc.x(0)\n qc.x(1)\n qc.measure(0, 0)\n qc.measure(1, 1)\n\n durations = InstructionDurations(\n [(\"x\", [0], 200), (\"x\", [1], 400), (\"measure\", None, 1000)]\n )\n pm = PassManager(ALAPSchedule(durations))\n qc_alap = pm.run(qc)\n\n alap_expected = QuantumCircuit(2, 2)\n alap_expected.delay(200, 0)\n alap_expected.x(0)\n alap_expected.x(1)\n alap_expected.measure(0, 0)\n alap_expected.measure(1, 1)\n\n self.assertEqual(qc_alap, alap_expected)\n\n pm = PassManager(ASAPSchedule(durations))\n qc_asap = pm.run(qc)\n\n asap_expected = QuantumCircuit(2, 2)\n asap_expected.x(0)\n asap_expected.x(1)\n asap_expected.measure(0, 0) # immediately start after X gate\n asap_expected.measure(1, 1)\n asap_expected.delay(200, 0)\n\n self.assertEqual(qc_asap, asap_expected)", "def test_create_flyers_phase2(self):\n # Use a unique date to avoid collision with other tests.\n send_date = datetime.date(2011, 11, 10)\n slots = SLOT_FACTORY.create_slots(create_count=15)\n coupons = SLOT_FACTORY.prepare_slot_coupons_for_flyer(slots, send_date)\n # 12550\n flyer_placement = FlyerPlacement.objects.create(slot=slots[0],\n site=self.site, send_date=send_date)\n FlyerPlacementSubdivision.objects.create(\n flyer_placement=flyer_placement,\n geolocation_type=self.zip_type,\n geolocation_id=23181)\n # Dutchess, Westchester\n flyer_placement = FlyerPlacement.objects.create(slot=slots[1],\n site=self.site, send_date=send_date)\n FlyerPlacementSubdivision.objects.create(\n flyer_placement=flyer_placement,\n geolocation_type=self.county_type,\n geolocation_id=1844)\n FlyerPlacementSubdivision.objects.create(\n flyer_placement=flyer_placement,\n geolocation_type=self.county_type,\n geolocation_id=1890)\n # 12601, 10570\n flyer_placement = FlyerPlacement.objects.create(slot=slots[2],\n site=self.site, send_date=send_date)\n FlyerPlacementSubdivision.objects.create(\n flyer_placement=flyer_placement,\n geolocation_type=self.zip_type,\n geolocation_id=16045)\n FlyerPlacementSubdivision.objects.create(\n flyer_placement=flyer_placement,\n geolocation_type=self.zip_type,\n geolocation_id=16142)\n # 12518\n flyer_placement = FlyerPlacement.objects.create(slot=slots[3],\n site=self.site, 
send_date=send_date)\n FlyerPlacementSubdivision.objects.create(\n flyer_placement=flyer_placement,\n geolocation_type=self.zip_type,\n geolocation_id=15145)\n # White Plains\n flyer_placement = FlyerPlacement.objects.create(slot=slots[4],\n site=self.site, send_date=send_date)\n FlyerPlacementSubdivision.objects.create(\n flyer_placement=flyer_placement,\n geolocation_type=self.city_type,\n geolocation_id=18258)\n pre_count = Flyer.objects.filter(send_date=send_date).count()\n admin_data = create_flyers_this_site_phase2(site=self.site,\n send_date=send_date, national_coupons=Coupon.objects.none())\n LOG.debug('admin_data: %s' % admin_data)\n self.assertEqual(admin_data[0], \n 'Hudson Valley, [<FlyerSubdivision: 12550>]')\n flyers = Flyer.objects.filter(send_date=send_date)\n LOG.debug([(\n flyer, flyer.flyer_coupons.all()) for flyer in flyers])\n # All of these flyers need at least one FlyerSubdivision.\n self.assertFalse(flyers.annotate(sub_count=Count(\n 'flyer_subdivisions')).filter(sub_count=0).count())\n LOG.debug([(flyer, flyer.flyer_subdivisions.all()) for flyer in flyers])\n self.assertEqual(flyers.count(), pre_count + 6)\n # Assert this flyer has correct paid coupon, extra coupons, and goes to \n # zip 12550.\n flyer = flyers.get(flyer_subdivisions__geolocation_id=23181)\n self.assertEqual(flyer.flyer_coupons.count(), 10)\n self.assertTrue(flyer.flyer_coupons.filter(coupon=coupons[0]).count())\n self.assertEqual(flyer.flyer_subdivisions.count(), 1)\n # Assert this flyer has another paid coupon too, and goes to zip 12601.\n # (10570 is a subset of Westchester and 12601 is a subset of Dutchess.)\n flyer = flyers.get(flyer_subdivisions__geolocation_id=16045)\n self.assertEqual(flyer.flyer_coupons.count(), 10)\n self.assertTrue(flyer.flyer_coupons.filter(coupon=coupons[1]).count())\n self.assertTrue(flyer.flyer_coupons.filter(coupon=coupons[2]).count())\n self.assertEqual(flyer.flyer_subdivisions.count(), 2)\n # Assert this flyer has the paid coupon and goes to 12518.\n flyer = flyers.get(flyer_subdivisions__geolocation_id=15145)\n self.assertEqual(flyer.flyer_subdivisions.count(), 1)\n self.assertEqual(flyer.flyer_coupons.count(), 10)\n self.assertTrue(flyer.flyer_coupons.filter(coupon=coupons[3]).count())\n # Assert this flyer for remaining zips of Dutchess.\n flyer = flyers.get(flyer_subdivisions__geolocation_id=30218)\n self.assertTrue(flyer.flyer_subdivisions.filter(\n geolocation_id=31367, geolocation_type__model='uszip').count())\n # Assert Westchester gets two flyers: one for 10570 and one without.\n # White Plains zip remains with the original Westchester flyer.\n flyer = flyers.get(flyer_subdivisions__geolocation_id=18258)\n self.assertTrue(flyer.flyer_coupons.filter(coupon=coupons[4]).count())\n # This Pleasantville zip, 10570, has a flyer with an extra coupon.\n flyer = flyers.get(flyer_subdivisions__geolocation_id=16142)\n self.assertTrue(flyer.flyer_coupons.filter(coupon=coupons[2]).count())\n self.assertTrue(flyer.flyer_coupons.filter(coupon=coupons[1]).count())", "def test_patch_tam_advisory_instance(self):\n pass", "def test_agent_infer_states(self):\n\n ''' VANILLA method (fixed point iteration) with one hidden state factor and one observation modality '''\n num_obs = [5]\n num_states = [3]\n num_controls = [1]\n A = utils.random_A_matrix(num_obs, num_states)\n B = utils.random_B_matrix(num_states, num_controls)\n\n agent = Agent(A=A, B=B, inference_algo = \"VANILLA\")\n\n o = tuple([np.random.randint(obs_dim) for obs_dim in num_obs])\n qs_out = 
agent.infer_states(o)\n\n qs_validation = inference.update_posterior_states(A, o, prior=agent.D)\n\n for f in range(len(num_states)):\n self.assertTrue(np.isclose(qs_validation[f], qs_out[f]).all())\n\n ''' VANILLA method (fixed point iteration) with multiple hidden state factors and multiple observation modalities '''\n num_obs = [2, 4]\n num_states = [2, 3]\n num_controls = [2, 3]\n A = utils.random_A_matrix(num_obs, num_states)\n B = utils.random_B_matrix(num_states, num_controls)\n\n agent = Agent(A=A, B=B, inference_algo = \"VANILLA\")\n\n o = tuple([np.random.randint(obs_dim) for obs_dim in num_obs])\n qs_out = agent.infer_states(o)\n\n qs_validation = inference.update_posterior_states(A, o, prior=agent.D)\n\n for f in range(len(num_states)):\n self.assertTrue(np.isclose(qs_validation[f], qs_out[f]).all())\n\n ''' Marginal message passing inference with multiple hidden state factors and multiple observation modalities '''\n num_obs = [5]\n num_states = [3]\n num_controls = [1]\n A = utils.random_A_matrix(num_obs, num_states)\n B = utils.random_B_matrix(num_states, num_controls)\n\n agent = Agent(A=A, B=B, inference_algo = \"MMP\")\n\n o = tuple([np.random.randint(obs_dim) for obs_dim in num_obs])\n qs_pi_out = agent.infer_states(o)\n\n policies = control.construct_policies(num_states, num_controls, policy_len = 1)\n\n qs_pi_validation, _ = inference.update_posterior_states_v2(A, B, [o], policies, prior = agent.D, policy_sep_prior = False)\n\n for p_idx in range(len(policies)):\n for f in range(len(num_states)):\n self.assertTrue(np.isclose(qs_pi_validation[p_idx][0][f], qs_pi_out[p_idx][0][f]).all())\n\n ''' Marginal message passing inference with multiple hidden state factors and multiple observation modalities '''\n num_obs = [2, 4]\n num_states = [2, 2]\n num_controls = [2, 2]\n A = utils.random_A_matrix(num_obs, num_states)\n B = utils.random_B_matrix(num_states, num_controls) \n\n planning_horizon = 3\n backwards_horizon = 1\n agent = Agent(A=A, B=B, inference_algo=\"MMP\", policy_len=planning_horizon, inference_horizon=backwards_horizon)\n o = [0, 2]\n qs_pi_out = agent.infer_states(o)\n\n policies = control.construct_policies(num_states, num_controls, policy_len = planning_horizon)\n\n qs_pi_validation, _ = inference.update_posterior_states_v2(A, B, [o], policies, prior = agent.D, policy_sep_prior = False)\n\n for p_idx in range(len(policies)):\n for t in range(planning_horizon+backwards_horizon):\n for f in range(len(num_states)):\n self.assertTrue(np.isclose(qs_pi_validation[p_idx][t][f], qs_pi_out[p_idx][t][f]).all())", "def run(self):\n\n from dials.algorithms.refinement.refiner import phil_scope\n params = phil_scope.fetch(source=phil.parse('')).extract()\n\n # disable outlier rejection for speed of refiner construction\n params.refinement.reflections.outlier.algorithm='null'\n\n refiner = RefinerFactory.from_parameters_data_experiments(params,\n self._reflections, self._experiments)\n\n d1 = self._experiments[0].detector\n d2 = refiner.get_experiments()[0].detector\n\n assert d1.is_similar_to(d2)\n print \"OK\"\n return", "def test_multipole_moments(self):\n printcool(\"Testing OpenMM and TINKER multipole moments with AMOEBA\")\n if not hasattr(self, 'T'):\n pytest.skip(\"TINKER programs are not in the PATH.\")\n MO = self.O.multipole_moments(optimize=False)\n DO = np.array(list(MO['dipole'].values()))\n QO = np.array(list(MO['quadrupole'].values()))\n MT = self.T.multipole_moments(optimize=False)\n DT = np.array(list(MT['dipole'].values()))\n QT = 
np.array(list(MT['quadrupole'].values()))\n datadir = os.path.join(self.cwd, 'files', 'test_engine', self.__class__.__name__)\n if SAVEDATA:\n fout = os.path.join(datadir, 'test_multipole_moments.dipole.dat')\n if not os.path.exists(os.path.dirname(fout)): os.makedirs(os.path.dirname(fout))\n np.savetxt(fout, DT)\n fout = os.path.join(datadir, 'test_multipole_moments.quadrupole.dat')\n np.savetxt(fout, QT)\n DR = np.loadtxt(os.path.join(datadir, 'test_multipole_moments.dipole.dat'))\n QR = np.loadtxt(os.path.join(datadir, 'test_multipole_moments.quadrupole.dat'))\n #self.logger.debug(\">ASSERT OpenMM and TINKER Engines give the correct dipole\\n\")\n np.testing.assert_allclose(DO, DR,\n err_msg=\"OpenMM dipoles do not match the reference\", rtol=0, atol=0.001)\n np.testing.assert_allclose(DT, DR,\n err_msg=\"TINKER dipoles do not match the reference\", rtol=0, atol=0.001)\n #self.logger.debug(\">ASSERT OpenMM and TINKER Engines give the correct quadrupole\\n\")\n np.testing.assert_allclose(QO, QR,\n err_msg=\"OpenMM quadrupoles do not match the reference\", rtol=0, atol=0.001)\n np.testing.assert_allclose(QT, QR,\n err_msg=\"TINKER quadrupoles do not match the reference\", rtol=0, atol=0.001)", "def test_second_matches(self):\n processor = MultiaggregationProcessor()\n processor.setup(\"test\", {\n \"ruleset\" : CFG_FILE,\n \"aggregator_class\" : MockAggregator,\n \"aggregate_on_clear\" : True\n })\n \n processor.aggregators[1].state = \"AGGR\"\n self.assertEqual(processor.process(\"test\"), \"AGGR\")", "def test_multiple_factories(self, mocker):\n sdk_ready_flag = threading.Event()\n\n def _init(self, ready_flag, some, auth_api, streaming_enabled, telemetry_runtime_producer, telemetry_init_consumer, sse_url=None):\n self._ready_flag = ready_flag\n self._synchronizer = mocker.Mock(spec=Synchronizer)\n self._streaming_enabled = False\n self._telemetry_runtime_producer = telemetry_runtime_producer\n self._telemetry_init_consumer = telemetry_init_consumer\n mocker.patch('splitio.sync.manager.Manager.__init__', new=_init)\n\n def _start(self, *args, **kwargs):\n sdk_ready_flag.set()\n mocker.patch('splitio.sync.manager.Manager.start', new=_start)\n\n def _stop(self, *args, **kwargs):\n pass\n mocker.patch('splitio.sync.manager.Manager.stop', new=_stop)\n\n mockManager = Manager(sdk_ready_flag, mocker.Mock(), mocker.Mock(), False, mocker.Mock(), mocker.Mock())\n\n def _make_factory_with_apikey(apikey, *_, **__):\n return SplitFactory(apikey, {}, True, mocker.Mock(spec=ImpressionsManager), mockManager, mocker.Mock(), mocker.Mock(), mocker.Mock())\n\n factory_module_logger = mocker.Mock()\n build_in_memory = mocker.Mock()\n build_in_memory.side_effect = _make_factory_with_apikey\n build_redis = mocker.Mock()\n build_redis.side_effect = _make_factory_with_apikey\n build_localhost = mocker.Mock()\n build_localhost.side_effect = _make_factory_with_apikey\n mocker.patch('splitio.client.factory._LOGGER', new=factory_module_logger)\n mocker.patch('splitio.client.factory._build_in_memory_factory', new=build_in_memory)\n mocker.patch('splitio.client.factory._build_redis_factory', new=build_redis)\n mocker.patch('splitio.client.factory._build_localhost_factory', new=build_localhost)\n\n _INSTANTIATED_FACTORIES.clear() # Clear all factory counters for testing purposes\n\n factory1 = get_factory('some_api_key')\n assert _INSTANTIATED_FACTORIES['some_api_key'] == 1\n assert factory_module_logger.warning.mock_calls == []\n\n factory2 = get_factory('some_api_key')\n assert 
_INSTANTIATED_FACTORIES['some_api_key'] == 2\n assert factory_module_logger.warning.mock_calls == [mocker.call(\n \"factory instantiation: You already have %d %s with this SDK Key. \"\n \"We recommend keeping only one instance of the factory at all times \"\n \"(Singleton pattern) and reusing it throughout your application.\",\n 1,\n 'factory'\n )]\n\n factory_module_logger.reset_mock()\n factory3 = get_factory('some_api_key')\n assert _INSTANTIATED_FACTORIES['some_api_key'] == 3\n assert factory_module_logger.warning.mock_calls == [mocker.call(\n \"factory instantiation: You already have %d %s with this SDK Key. \"\n \"We recommend keeping only one instance of the factory at all times \"\n \"(Singleton pattern) and reusing it throughout your application.\",\n 2,\n 'factories'\n )]\n\n factory_module_logger.reset_mock()\n factory4 = get_factory('some_other_api_key')\n assert _INSTANTIATED_FACTORIES['some_api_key'] == 3\n assert _INSTANTIATED_FACTORIES['some_other_api_key'] == 1\n assert factory_module_logger.warning.mock_calls == [mocker.call(\n \"factory instantiation: You already have an instance of the Split factory. \"\n \"Make sure you definitely want this additional instance. \"\n \"We recommend keeping only one instance of the factory at all times \"\n \"(Singleton pattern) and reusing it throughout your application.\"\n )]\n\n event = threading.Event()\n factory1.destroy(event)\n event.wait()\n assert _INSTANTIATED_FACTORIES['some_other_api_key'] == 1\n assert _INSTANTIATED_FACTORIES['some_api_key'] == 2\n factory2.destroy()\n factory3.destroy()\n factory4.destroy()", "def test_update_meal_plan_free(self):\n pass", "def test_bare_pass_manager_multiple(self):\n qc0 = QuantumCircuit(1)\n qc1 = QuantumCircuit(2)\n\n pm = PassManager([])\n result = pm.run([qc0, qc1])\n\n self.assertIsInstance(result, list)\n self.assertEqual(len(result), 2)\n\n for qc, new_qc in zip([qc0, qc1], result):\n self.assertIsInstance(new_qc, QuantumCircuit)\n self.assertEqual(new_qc, qc) # pm has no passes", "def test_search_meal_plan_free(self):\n pass" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
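The metadata above declares a "triplet" objective over the (query, document, negatives) fields. As an illustration only, the sketch below shows one plausible way a JSONL row of this shape could be expanded into (anchor, positive, negative) training triplets; the field names come from the rows shown here, while the loader itself is an assumption rather than an official part of the dataset.

import json


def row_to_triplets(json_line):
    # Each row pairs a docstring-style query with its matching function
    # (positive) and a list of non-matching functions (negatives).
    row = json.loads(json_line)
    anchor = row["query"]
    positive = row["document"]
    for negative in row["negatives"]:
        yield anchor, positive, negative


# Toy row with the same field layout as the rows in this dump (contents abbreviated).
example = json.dumps(
    {
        "query": "Test the scaling-specific parameter manager.",
        "document": "def test_scaling_active_parameter_manager(): ...",
        "negatives": ["def test_multi_apm(): ...", "def test_create_pattern(): ..."],
        "metadata": {"objective": {"triplet": [["query", "document", "negatives"]]}},
    }
)
for anchor, pos, neg in row_to_triplets(example):
    print(anchor, "->", len(pos), "chars positive,", len(neg), "chars negative")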
Test the scaling-specific parameter manager.
def test_scaling_active_parameter_manager():
    components_2 = {"1": mock_scaling_component(2), "2": mock_scaling_component(2)}
    scaling_apm = scaling_active_parameter_manager(components_2, ["1"])
    assert list(scaling_apm.constant_g_values[0]) == list(
        components_2["2"].calculate_scales()
    )
    assert len(scaling_apm.constant_g_values) == 1
    assert scaling_apm.n_obs == [2]

    # Test that no constant_g_values if both components selected
    scaling_apm = scaling_active_parameter_manager(components_2, ["1", "2"])
    assert scaling_apm.constant_g_values is None

    # Check that one can't initialise with an unequal number of reflections,
    # either within the selection or overall.
    with pytest.raises(AssertionError):
        components_2 = {"1": mock_scaling_component(2), "2": mock_scaling_component(1)}
        scaling_apm = scaling_active_parameter_manager(components_2, ["1", "2"])
    with pytest.raises(AssertionError):
        components_2 = {"1": mock_scaling_component(2), "2": mock_scaling_component(1)}
        scaling_apm = scaling_active_parameter_manager(components_2, ["1"])

    data_manager = mock_data_manager(components_2)
    pmg = ScalingParameterManagerGenerator(
        [data_manager], target=ScalingTarget(), mode="concurrent"
    )
    assert isinstance(pmg.apm_type, type(scaling_active_parameter_manager))
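The scaling test above additionally uses a mock_scaling_component(n) helper, again not defined in this excerpt. Extending the earlier fixture sketch, the version below is a hypothetical stand-in whose calculate_scales() return value and n_refl attribute are assumptions inferred from the assertions in the test, not the project's real fixture.

from unittest.mock import Mock

from dials.array_family import flex


def mock_scaling_component(n_refl):
    # Hypothetical stand-in: a scaling component that models n_refl reflections.
    component = Mock()
    component.n_params = 1
    component.free_parameters = flex.double([1.0])
    component.free_parameter_esds = None
    # One inverse scale factor per reflection; the apm builds constant_g_values
    # from calculate_scales() of the components that are *not* being refined.
    component.calculate_scales.return_value = flex.double(n_refl, 1.0)
    component.n_refl = [n_refl]
    return component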
[ "def test_get_measure_parameters(self):\n pass", "def test_parameters(self):\n self.assert_initialize_driver()\n #reply = self.driver_client.cmd_dvr('get_resource', Parameter.ALL)\n #self.assert_driver_parameters(reply, verify_sample_interval=True)", "def testingParameters(cal_file = None):\n params = parameters.ParametersSpliner()\n\n params.setAttr(\"max_frame\", \"int\", -1) \n params.setAttr(\"start_frame\", \"int\", -1) \n\n params.setAttr(\"background_sigma\", \"float\", 8.0)\n\n if cal_file is not None:\n params.setAttr(\"camera_calibration\", \"filename\", cal_file)\n else:\n params.setAttr(\"camera_gain\", \"float\", settings.camera_gain)\n params.setAttr(\"camera_offset\", \"float\", settings.camera_offset)\n\n params.setAttr(\"find_max_radius\", \"int\", 5)\n params.setAttr(\"fit_error_model\", \"string\", settings.fit_error_model)\n params.setAttr(\"iterations\", \"int\", settings.iterations)\n params.setAttr(\"no_fitting\", \"int\", 0)\n params.setAttr(\"pixel_size\", \"float\", settings.pixel_size)\n params.setAttr(\"sigma\", \"float\", 1.5)\n params.setAttr(\"spline\", \"filename\", \"psf.spline\")\n params.setAttr(\"threshold\", \"float\", 6.0)\n\n # Don't do tracking.\n params.setAttr(\"descriptor\", \"string\", \"1\")\n params.setAttr(\"radius\", \"float\", \"0.0\")\n\n # Don't do drift-correction.\n params.setAttr(\"d_scale\", \"int\", 2)\n params.setAttr(\"drift_correction\", \"int\", 0)\n params.setAttr(\"frame_step\", \"int\", 500)\n params.setAttr(\"z_correction\", \"int\", 0)\n\n # 'peak_locations' testing.\n if hasattr(settings, \"peak_locations\") and (settings.peak_locations is not None):\n params.setAttr(\"peak_locations\", \"filename\", settings.peak_locations)\n \n return params", "def test_change_parameter(self, generate_gaussian, generate_delta):\n\n # Get a reference to the gaussian and delta peaks\n gauss, delta = generate_gaussian, generate_delta\n\n # Assert that we can change the parameter limits correctly\n gauss.set_parameter_limit('A', [100, 200])\n delta.set_parameter_limit('M', [1000, 5000])\n\n assert gauss.limits['A'] == sorted([100, 200])\n assert delta.limits['M'] == sorted([1000, 5000])", "def test_biot_parameter_scaling(**kw):\n _this_file = Path(os.path.abspath(__file__)).parent\n _results_path = _this_file / \"results/test_biot_parameter_scaling/default\"\n _results_path.mkdir(parents=True, exist_ok=True) # Create path if not exists\n __setup_logging(_results_path)\n logger.info(f\"Path to results: {_results_path}\")\n\n # --- DOMAIN ARGUMENTS ---\n params = {\n 'mesh_args':\n {'mesh_size_frac': 10, 'mesh_size_min': .1*10, 'mesh_size_bound': 6*10},\n 'bounding_box':\n {'xmin': -20, 'xmax': 80, 'ymin': 50, 'ymax': 150, 'zmin': -25, 'zmax': 75},\n 'shearzone_names':\n [\"S1_1\", \"S1_2\", \"S1_3\", \"S3_1\", \"S3_2\"],\n 'folder_name':\n _results_path,\n 'solver':\n 'direct',\n 'stress':\n gts.isc_modelling.stress_tensor(),\n 'source_scalar_borehole_shearzone':\n {'borehole': 'INJ1', 'shearzone': 'S1_1'},\n # Initially: We calculate unscaled variables.\n 'length_scale':\n 1,\n 'scalar_scale':\n 1,\n }\n setup = gts.ContactMechanicsBiotISC(params)\n setup.create_grid()\n setup.well_cells()\n setup.set_parameters()\n\n # Save copies of the original data on the 3D grid\n gb = setup.gb\n g = gb.grids_of_dimension(3)[0]\n data = gb.node_props(g)\n mech_params = data['parameters']['mechanics']\n flow_params = data['parameters']['flow']\n mech = {\n 'bc_values':\n mech_params['bc_values'].copy(),\n 'source':\n mech_params['source'].copy(),\n 
'fourth_order_tensor':\n mech_params['fourth_order_tensor'].copy(),\n 'biot_alpha':\n mech_params['biot_alpha'], # Float\n }\n\n flow = {\n 'bc_values':\n flow_params['bc_values'].copy(),\n 'mass_weight':\n flow_params['mass_weight'], # Float\n 'source':\n flow_params['source'].copy(),\n 'second_order_tensor':\n flow_params['second_order_tensor'].copy(),\n 'biot_alpha':\n flow_params['biot_alpha'], # Float\n }\n\n # Scale grid:\n setup.scalar_scale = kw.get('ss', 1 * pp.GIGA)\n setup.length_scale = kw.get('ls', 100)\n setup.create_grid(overwrite_grid=True)\n ss = setup.scalar_scale\n ls = setup.length_scale\n\n # Recompute parameters\n setup.prepare_simulation()\n\n # Mimic NewtonSolver:\n setup.before_newton_iteration()\n\n # Check size of entries in matrix A.\n A, b = setup.assembler.assemble_matrix_rhs()\n logger.info(\"------------------------------------------\")\n logger.info(f\"Max element in A {np.max(np.abs(A)):.2e}\")\n logger.info(f\"Max {np.max(np.sum(np.abs(A), axis=1)):.2e} and min {np.min(np.sum(np.abs(A), axis=1)):.2e} A sum.\")\n\n # Find the new parameter dictionaries\n dim = 3\n gb = setup.gb\n g = gb.grids_of_dimension(dim)[0]\n data = gb.node_props(g)\n scaled_mech = data['parameters']['mechanics']\n scaled_flow = data['parameters']['flow']\n all_bf, east, west, north, south, top, bottom = setup.domain_boundary_sides(g)\n\n # --- Do the comparisons: ---\n test_cell = 0\n logger.info(f\"scalar_scale={setup.scalar_scale:.2e}. length_scale={setup.length_scale:.2e}\")\n # - FLOW -\n # Permeability [m2]\n # k_scl = k * scalar_scale / length_scale ** 2\n k = flow['second_order_tensor'].values[0, 0, :]\n k_scl = scaled_flow['second_order_tensor'].values[0, 0, :]\n logger.info(f\"unscaled k/mu={k[test_cell]:.2e}\")\n logger.info(f\"Scaled k/mu={k_scl[test_cell]:.2e}\")\n assert np.allclose(k * ss / ls ** 2, k_scl), \"k_scl = k * scalar_scale / length_scale ** 2\"\n\n # Mass weight / Effective Storage term (possibly aperture scaled) [1/Pa]\n # mw_scl = mw * scalar_scale\n mw = flow['mass_weight']\n mw_scl = scaled_flow['mass_weight']\n logger.info(f'mass_weight={mw:.2e}')\n logger.info(f'mass_weight scaled={mw_scl:.2e}')\n assert np.allclose(mw * ss, mw_scl), \"mw_scl = mw * scalar_scale\"\n\n # Source [m ** dim / s]\n # fs_scl = fs * length_scale ** dim\n fs = flow['source'][test_cell]\n fs_scl = scaled_flow['source'][test_cell]\n logger.info(f\"Unscaled flow source={fs:.2e}\")\n logger.info(f\"Scaled flow source={fs_scl:.2e}\")\n assert np.allclose(fs * ls ** dim, fs), \"fs_scl = fs * length_scale ** dim\"\n\n # Boundary conditions (FLOW)\n # Dirchlet [Pa]\n # fd_scl = fd / scalar_scale\n fd = flow['bc_values']\n fd_scl = scaled_flow['bc_values']\n logger.info(f\"bc flow={fd[all_bf][0]:.2e}\")\n logger.info(f\"bc flow scaled={fd_scl[all_bf][0]:.2e}\")\n assert np.allclose(fd / ss, fd_scl), \"fd_scl = fd / scalar_scale\"\n # Neumann [m2 / s] (integrated across 2D surfaces)\n\n\n # Biot alpha should remain unchanged\n assert flow['biot_alpha'] == scaled_flow['biot_alpha']\n\n # - MECHANICS -\n # Mu and Lambda [Pa]\n # m_scl = m / scalar_scale AND lm_scl = lm / scalar_scale\n mu = mech['fourth_order_tensor'].mu\n mu_scl = scaled_mech['fourth_order_tensor'].mu\n lmbda = mech['fourth_order_tensor'].lmbda\n lmbda_scl = scaled_mech['fourth_order_tensor'].lmbda\n\n logger.info(f\"mu={mu[test_cell]:.2e}. mu scaled = {mu_scl[test_cell]:.2e}\")\n logger.info(f\"lambda={lmbda[test_cell]:.2e}. 
lambda scaled={lmbda_scl[test_cell]:.2e}\")\n assert np.allclose(mu / ss, mu_scl)\n assert np.allclose(lmbda / ss, lmbda_scl)\n\n # Mechanics source [Pa m2] (integrated over 3D volume)\n # In the code: ms_scl = ms * length_scale / scalar_scale (assuming integration is scaled)\n # ms_scl = ms / (scalar_scale * length_scale ** 2)\n ms = mech['source'].reshape((3, -1), order='F')\n ms_scl = scaled_mech['source'].reshape((3, -1), order='F')\n logger.info(f\"Mechanics source={ms[:, test_cell]}\")\n logger.info(f\"Mechanics source scaled={ms_scl[:, test_cell]}\")\n assert np.allclose(ms / (ss * ls ** 2), ms_scl)\n\n\n # Boundary conditions (MECHANICS)\n # Neumann [Pa m2] (integrated across 2D surfaces)\n # Note: In the code, we divide by scalar_scale. length_scale**2 is incorporated by pre-scaled grid.\n # mn_scl = mn / ( scalar_scale * length_scale**(dim-1) )\n mn = mech['bc_values'].reshape((3, -1), order='F')\n mn_scl = scaled_mech['bc_values'].reshape((3, -1), order='F')\n logger.info(f\"mech neumann (3 faces on east) =\\n{mn[:, east][:, :3]}\")\n logger.info(f\"mech neumann scaled (3 faces on east) =\\n{mn_scl[:, east][:, :3]}\")\n assert np.allclose(mn[:, all_bf] / (ss * ls ** (dim - 1)), mn_scl[:, all_bf])\n\n return setup, mech, flow, scaled_mech, scaled_flow", "def scale_mode():\r\n pass", "def is_scale_enabled(self) -> bool:\r\n ...", "def test_param_changer(self):\n land = Landscape()\n landscape = \"J\"\n new_param = {\"f_max\": 700}\n land.param_changer(landscape, new_param)\n assert land.landscape_parameters[landscape][\"f_max\"] == new_param[\"f_max\"]", "async def test_floating_point_scale(hass, mock_hub):\n register_config = {\n CONF_COUNT: 1,\n CONF_DATA_TYPE: DATA_TYPE_INT,\n CONF_SCALE: 2.4,\n CONF_OFFSET: 0,\n CONF_PRECISION: 2,\n }\n await run_test(\n hass,\n mock_hub,\n register_config,\n SENSOR_DOMAIN,\n register_words=[1],\n expected=\"2.40\",\n )", "def test_multi_apm():\n\n components_1 = {\n \"scale\": mock_component(),\n \"decay\": mock_component(),\n \"absorption\": mock_component(),\n }\n components_2 = {\"scale\": mock_component(), \"decay\": mock_component()}\n\n multi_apm = multi_active_parameter_manager(\n ScalingTarget(),\n [components_1, components_2],\n [[\"scale\", \"decay\"], [\"scale\"]],\n active_parameter_manager,\n )\n\n # Test correct setup of apm_list attribute.\n for apm in multi_apm.apm_list:\n assert isinstance(apm, active_parameter_manager)\n assert len(multi_apm.apm_list) == 2\n assert multi_apm.components_list == [\"scale\", \"decay\", \"scale\"]\n assert multi_apm.n_active_params == 3\n assert multi_apm.apm_data[0] == {\"start_idx\": 0, \"end_idx\": 2}\n assert multi_apm.apm_data[1] == {\"start_idx\": 2, \"end_idx\": 3}\n\n # Test parameter selection.\n multi_apm.set_param_vals(flex.double([3.0, 2.5, 2.0]))\n assert multi_apm.get_param_vals() == flex.double([3.0, 2.5, 2.0])\n assert multi_apm.select_parameters(0) == flex.double([3.0, 2.5])\n assert multi_apm.select_parameters(1) == flex.double([2.0])\n\n # Test setting parameter esds.\n multi_apm.set_param_esds(flex.double([0.1, 0.2, 0.3]))\n assert components_1[\"scale\"].free_parameter_esds == flex.double([0.1])\n assert components_1[\"decay\"].free_parameter_esds == flex.double([0.2])\n assert components_2[\"scale\"].free_parameter_esds == flex.double([0.3])\n\n # Test setting var_cov matrices for each component.\n var_cov = flex.double([1.0, 0.5, 0.5, 0.5, 2.0, 0.5, 0.5, 0.5, 3.0])\n var_cov.reshape(flex.grid(3, 3))\n multi_apm.calculate_model_state_uncertainties(var_cov)\n assert 
components_1[\"scale\"].var_cov_matrix[0, 0] == 1.0\n assert components_1[\"decay\"].var_cov_matrix[0, 0] == 2.0\n assert components_2[\"scale\"].var_cov_matrix[0, 0] == 3.0", "def test_trans_parameters(spectrum_inst):\n # test initial parameters\n assert spectrum_inst.freq0 == 0\n assert spectrum_inst.wSize == 5\n scales = len(spectrum_inst.scales)\n assert scales == 48 # set for the specific spectrum\n mod = spectrum_inst.modify_parameters\n mod(48, 0)\n assert spectrum_inst.freq0 == 47\n assert spectrum_inst.wSize == 0\n mod(48, 10)\n assert spectrum_inst.freq0 == 47\n assert spectrum_inst.wSize == 0\n mod(48, 1)\n assert spectrum_inst.freq0 == 47\n assert spectrum_inst.wSize == 0\n mod(47, 1)\n assert spectrum_inst.freq0 == 47\n assert spectrum_inst.wSize == 0\n mod(46, 2)\n assert spectrum_inst.freq0 == 46\n assert spectrum_inst.wSize == 1\n mod(0, 48)\n assert spectrum_inst.freq0 == 0\n assert spectrum_inst.wSize == 47\n mod(0, 47)\n assert spectrum_inst.freq0 == 0\n assert spectrum_inst.wSize == 47\n mod(1, 47)\n assert spectrum_inst.freq0 == 1\n assert spectrum_inst.wSize == 46", "def _validate_params(self, request_set, target_set=None, context=None):\n\n # Perform first-pass validation in Function.__init__():\n # - returns full set of params based on subclass paramClassDefaults\n super(Mechanism, self)._validate_params(request_set,target_set,context)\n\n params = target_set\n\n #region VALIDATE TIME SCALE\n try:\n param_value = params[TIME_SCALE]\n except KeyError:\n if COMMAND_LINE in context:\n pass\n else:\n self.timeScale = timeScaleSystemDefault\n else:\n if isinstance(param_value, TimeScale):\n self.timeScale = params[TIME_SCALE]\n else:\n if self.prefs.verbosePref:\n print(\"Value for {0} ({1}) param of {2} must be of type {3}; default will be used: {4}\".\n format(TIME_SCALE, param_value, self.name, type(TimeScale), timeScaleSystemDefault))\n #endregion\n\n #region VALIDATE INPUT STATE(S)\n\n # MODIFIED 6/10/16\n # FIX: SHOULD CHECK LENGTH OF INPUT_STATES PARAM (LIST OF NAMES OR SPECIFICATION DICT) AGAINST LENGTH OF\n # FIX: self.variable 2D ARRAY AND COMPARE variable SPECS, IF PROVIDED, WITH CORRESPONDING ELEMENTS OF\n # FIX: self.variable 2D ARRAY\n try:\n param_value = params[INPUT_STATES]\n\n except KeyError:\n if COMMAND_LINE in context:\n pass\n else:\n # INPUT_STATES not specified:\n # - set to None, so that it is set to default (self.variable) in instantiate_inputState\n # - if in VERBOSE mode, warn in instantiate_inputState, where default value is known\n params[INPUT_STATES] = None\n\n else:\n # INPUT_STATES is specified, so validate:\n # If it is a single item or a non-OrderedDict, place in a list (for use here and in instantiate_inputState)\n if not isinstance(param_value, (list, OrderedDict)):\n param_value = [param_value]\n # Validate each item in the list or OrderedDict\n # Note:\n # * number of inputStates is validated against length of the owner mechanism's execute method variable (EMV)\n # in instantiate_inputState, where an inputState is assigned to each item (value) of the EMV\n i = 0\n for key, item in param_value if isinstance(param_value, dict) else enumerate(param_value):\n from PsyNeuLink.Components.States.InputState import InputState\n # If not valid...\n if not ((isclass(item) and (issubclass(item, InputState) or # InputState class ref\n issubclass(item, Projection))) or # Project class ref\n isinstance(item, InputState) or # InputState object\n isinstance(item, dict) or # InputState specification dict\n isinstance(item, ParamValueProjection) or 
# ParamValueProjection tuple\n isinstance(item, str) or # Name (to be used as key in inputStates dict)\n iscompatible(item, **{kwCompatibilityNumeric: True})): # value\n # set to None, so it is set to default (self.variable) in instantiate_inputState\n param_value[key] = None\n if self.prefs.verbosePref:\n print(\"Item {0} of {1} param ({2}) in {3} is not a\"\n \" InputState, specification dict or value, nor a list of dict of them; \"\n \"variable ({4}) of execute method for {5} will be used\"\n \" to create a default outputState for {3}\".\n format(i,\n INPUT_STATES,\n param_value,\n self.__class__.__name__,\n self.variable,\n self.execute.__self__.name))\n i += 1\n params[INPUT_STATES] = param_value\n #endregion\n\n #region VALIDATE EXECUTE METHOD PARAMS\n try:\n function_param_specs = params[FUNCTION_PARAMS]\n except KeyError:\n if COMMAND_LINE in context:\n pass\n elif self.prefs.verbosePref:\n print(\"No params specified for {0}\".format(self.__class__.__name__))\n else:\n if not (isinstance(function_param_specs, dict)):\n raise MechanismError(\"{0} in {1} must be a dict of param specifications\".\n format(FUNCTION_PARAMS, self.__class__.__name__))\n # Validate params\n from PsyNeuLink.Components.States.ParameterState import ParameterState\n for param_name, param_value in function_param_specs.items():\n try:\n default_value = self.paramInstanceDefaults[FUNCTION_PARAMS][param_name]\n except KeyError:\n raise MechanismError(\"{0} not recognized as a param of execute method for {1}\".\n format(param_name, self.__class__.__name__))\n if not ((isclass(param_value) and\n (issubclass(param_value, ParameterState) or\n issubclass(param_value, Projection))) or\n isinstance(param_value, ParameterState) or\n isinstance(param_value, Projection) or\n isinstance(param_value, dict) or\n isinstance(param_value, ParamValueProjection) or\n iscompatible(param_value, default_value)):\n params[FUNCTION_PARAMS][param_name] = default_value\n if self.prefs.verbosePref:\n print(\"{0} param ({1}) for execute method {2} of {3} is not a ParameterState, \"\n \"projection, ParamValueProjection, or value; default value ({4}) will be used\".\n format(param_name,\n param_value,\n self.execute.__self__.componentName,\n self.__class__.__name__,\n default_value))\n #endregion\n # FIX: MAKE SURE OUTPUT OF EXECUTE FUNCTION / SELF.VALUE IS 2D ARRAY, WITH LENGTH == NUM OUTPUT STATES\n\n #region VALIDATE OUTPUT STATE(S)\n\n # FIX: MAKE SURE # OF OUTPUTS == LENGTH OF OUTPUT OF EXECUTE FUNCTION / SELF.VALUE\n try:\n param_value = params[OUTPUT_STATES]\n\n except KeyError:\n if COMMAND_LINE in context:\n pass\n else:\n # OUTPUT_STATES not specified:\n # - set to None, so that it is set to default (self.value) in instantiate_outputState\n # Notes:\n # * if in VERBOSE mode, warning will be issued in instantiate_outputState, where default value is known\n # * number of outputStates is validated against length of owner mechanism's execute method output (EMO)\n # in instantiate_outputState, where an outputState is assigned to each item (value) of the EMO\n params[OUTPUT_STATES] = None\n\n else:\n # OUTPUT_STATES is specified, so validate:\n # If it is a single item or a non-OrderedDict, place in a list (for use here and in instantiate_outputState)\n if not isinstance(param_value, (list, OrderedDict)):\n param_value = [param_value]\n # Validate each item in the list or OrderedDict\n i = 0\n for key, item in param_value if isinstance(param_value, dict) else enumerate(param_value):\n from PsyNeuLink.Components.States.OutputState import 
OutputState\n # If not valid...\n if not ((isclass(item) and issubclass(item, OutputState)) or # OutputState class ref\n isinstance(item, OutputState) or # OutputState object\n isinstance(item, dict) or # OutputState specification dict\n isinstance(item, str) or # Name (to be used as key in outputStates dict)\n iscompatible(item, **{kwCompatibilityNumeric: True})): # value\n # set to None, so it is set to default (self.value) in instantiate_outputState\n param_value[key] = None\n if self.prefs.verbosePref:\n print(\"Item {0} of {1} param ({2}) in {3} is not a\"\n \" OutputState, specification dict or value, nor a list of dict of them; \"\n \"output ({4}) of execute method for {5} will be used\"\n \" to create a default outputState for {3}\".\n format(i,\n OUTPUT_STATES,\n param_value,\n self.__class__.__name__,\n self.value,\n self.execute.__self__.name))\n i += 1\n params[OUTPUT_STATES] = param_value", "def test_get_measure_parameters_by_id(self):\n pass", "def testGPUSettings(self):\n workload = newWorkload(\"UnitTests\")\n task = workload.newTask(\"CMSSWTemplate\")\n stepHelper = task.makeStep(\"TemplateTest\")\n step = stepHelper.data\n template = CMSSWTemplate()\n template(step)\n\n helper = template.helper(step)\n\n self.assertEqual(helper.getGPURequired(), \"forbidden\")\n self.assertIsNone(helper.getGPURequirements())\n helper.setGPUSettings(\"optional\", \"test 1 2 3\")\n self.assertEqual(helper.getGPURequired(), \"optional\")\n self.assertItemsEqual(helper.getGPURequirements(), \"test 1 2 3\")\n helper.setGPUSettings(\"required\", {\"key1\": \"value1\", \"key2\": \"value2\"})\n self.assertEqual(helper.getGPURequired(), \"required\")\n self.assertItemsEqual(helper.getGPURequirements(), {\"key1\": \"value1\", \"key2\": \"value2\"})", "def test_test_group_parameters(self):\n pass", "def validator_scale():\n scale = request.params.get('scale')\n if scale is None:\n return False\n else: \n try:\n scale = int(scale)\n except ValueError:\n return False\n c.scale = scale\n return True", "def test_scaling_rules_2():\n adp = Mock(require_backward_grad_sync=True)\n opm = Mock(param_groups=[1, 0, 2, -1])\n gns = Mock(optimizer=opm)\n adp.gns = gns\n linearscale = LinearScale()\n linearscale.initialize(adp, opm)\n input_scales = [0.5, 1, 2, 4, 10]\n expected_ans = [0.5, 1., 2., 4., 10.]\n for scale, ans in zip(input_scales, expected_ans):\n np.testing.assert_equal(linearscale.scale_lr(scale), ans)", "def describe_scaling_parameters(DomainName=None):\n pass", "def test_set_landscape_parameters():\n land = bl.Landscape()\n new_parameters = {\"f_max\": 150}\n land.set_landscape_parameters(new_parameters=new_parameters)\n for key in new_parameters.keys():\n assert new_parameters[key] >= 0" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Receive a request from the worker. work_socket - receive a request on this socket. timeout - if request isn't received by the timeout, raise six.moves.queue.Empty; default = blocks forever. This polls on both the worker and up_queue sockets and will throw an exception if there is anything available on the upqueue, as this indicates that nothing is running.
def recv(self, work_socket, timeout=None): poller = zmq.Poller() poller.register(self.up_queue_recv_socket, zmq.POLLIN) poller.register(work_socket, zmq.POLLIN) for socket, state in poller.poll(timeout): if socket == self.up_queue_recv_socket and state == zmq.POLLIN: result, e = self.up_queue.get() if e is not None: raise e else: raise cellprofiler_core.pipeline.event.CancelledException( "Unexpected exit during recv" ) if socket == work_socket and state == zmq.POLLIN: return cellprofiler_core.utilities.zmq.communicable.Communicable.recv( work_socket ) raise six.moves.queue.Empty
[ "def handle_request(self):\n # Support people who used socket.settimeout() to escape\n # handle_request before self.timeout was available.\n timeout = self.socket.gettimeout()\n if timeout is None:\n timeout = self.timeout\n elif self.timeout is not None:\n timeout = min(timeout, self.timeout)\n if timeout is not None:\n deadline = time() + timeout\n\n # Wait until a request arrives or the timeout expires - the loop is\n # necessary to accommodate early wakeups due to EINTR.\n with _ServerSelector() as selector:\n selector.register(self, selectors.EVENT_READ)\n\n while True:\n ready = selector.select(timeout)\n if ready:\n return self._handle_request_noblock()\n else:\n if timeout is not None:\n timeout = deadline - time()\n if timeout < 0:\n return self.handle_timeout()", "def read_worker(self):\n while True:\n if self.__connection.poll():\n msg = self.__connection.recv()\n self.__read_queue.put(msg)", "def listen_to_workers(self):\n while self.status != 'closed':\n # Wait on request\n try:\n if not self.to_workers.poll(100):\n continue\n except zmq.ZMQError:\n break\n\n with logerrors():\n address, header, payload = self.to_workers.recv_multipart()\n header = pickle.loads(header)\n if 'address' not in header:\n header['address'] = address\n log(self.address, 'Receive job from worker', address, header)\n\n try:\n function = self.worker_functions[header['function']]\n except KeyError:\n log(self.address, 'Unknown function', header)\n else:\n future = self.pool.apply_async(function, args=(header, payload))", "def _recv(self) -> None:\n if not self.connected or now() < self.next_poll:\n return\n self.next_poll += self.poll_interval\n data = []\n while True:\n try:\n data.append(self.endpoint.recv(BUFFSIZE))\n except BlockingIOError:\n break\n if data:\n stream = io.BytesIO(b\"\".join(data))\n while True:\n try:\n info = pickle.load(stream)\n msg = Message(*info)\n self.inq.append(msg)\n except EOFError:\n break", "def recv(self, pollTimeout = 500):\n # Check backlog \n with self.lock:\n if not self.backlog.empty():\n msg = self.backlog.get()\n else:\n nmsg = self.sock.poll(pollTimeout)\n if nmsg == 0:\n msg = None\n return msg\n # Process the oldest messages in the backlog first\n for i in range(nmsg):\n self.backlog.put(self.sock.recv_string())\n msg = self.backlog.get()\n\n return msg", "def run_forever(self):\n try:\n while True:\n # blocking untill a task comes available\n # timeout specified, else Keyboard interupts are ignored\n self.log.info(\"Waiting for new tasks....\")\n\n while True:\n try:\n task = self.queue.get(timeout=1)\n # if no item is returned, the Empty exception is\n # triggered, thus break statement is not reached\n break\n\n except queue.Empty:\n pass\n\n except Exception as e:\n self.log.debug(e)\n\n # if task comes available, attempt to execute it\n try:\n self.__start_task(task)\n except Exception as e:\n self.log.exception(e)\n\n except KeyboardInterrupt:\n self.log.debug(\"Caught a keyboard interupt, shutting down...\")\n self.socketIO.disconnect()\n sys.exit()", "def _thread_worker(self):\n while self._running:\n # Retrieve next cmd, or block\n packet = self._queue.get(True)\n if isinstance(packet, dict) and QS_CMD in packet:\n try:\n self._callback_listen(packet)\n except Exception as err: # pylint: disable=broad-except\n _LOGGER.error(\"Exception in callback\\nType: %s: %s\",\n type(err), err)\n self._queue.task_done()", "def run(self):\n while True:\n # Check to see if we should stop\n if self._stop.isSet():\n logger.debug(\"Worker thread stopping.\")\n 
break\n\n # Try to pull from the queue\n try:\n func, args, kwargs = self.queue.get_nowait()\n func(*args, **kwargs)\n except Queue.Empty:\n time.sleep(5)\n continue\n except Exception as e:\n logger.exception(e)", "def work():\n with rq.Connection(create_connection()):\n worker = rq.Worker(list(map(rq.Queue, listen)))\n worker.work()", "def receive(self, request_id, timeout=None):\n res = None\n start_time = time.time()\n while res is None:\n with self.connlock:\n res = self.conn.do_receive(request_id)\n if res is None:\n time.sleep(0.1)\n if timeout and (time.time() - start_time > timeout):\n raise RequestTimeout(request_id)\n\n if 'Error' in res:\n raise ServerError(res['Error'], res)\n\n try:\n return res['Response']\n except:\n raise BadResponseError(\"Failed to parse response: {}\".format(res))", "def XXhandle_request(self):\n\n fd_sets = SocketServer._eintr_retry(select.select, [self], [], [], self.timeout)\n if not fd_sets[0]:\n stillOK = self.handle_timeout()\n if not stillOK:\n return\n self._handle_request_noblock()", "def wait(self):\r\n if self._is_ready:\r\n return\r\n if self._ttl is None:\r\n while not self._is_ready:\r\n self._conn.serve()\r\n else:\r\n while True:\r\n timeout = self._ttl - time.time()\r\n self._conn.poll(timeout = max(timeout, 0))\r\n if self._is_ready:\r\n break\r\n if timeout <= 0:\r\n raise AsyncResultTimeout(\"result expired\")", "async def poll_event(self) -> None:\n try:\n msg = await self.socket.receive(timeout=self._max_heartbeat_timeout)\n if msg.type is aiohttp.WSMsgType.TEXT:\n await self.received_message(msg.data)\n elif msg.type is aiohttp.WSMsgType.BINARY:\n await self.received_message(msg.data)\n elif msg.type is aiohttp.WSMsgType.ERROR:\n _log.debug('Received %s', msg)\n raise msg.data\n elif msg.type in (aiohttp.WSMsgType.CLOSED, aiohttp.WSMsgType.CLOSING, aiohttp.WSMsgType.CLOSE):\n _log.debug('Received %s', msg)\n raise WebSocketClosure\n except (asyncio.TimeoutError, WebSocketClosure) as e:\n # Ensure the keep alive handler is closed\n if self._keep_alive:\n self._keep_alive.stop()\n self._keep_alive = None\n\n if isinstance(e, asyncio.TimeoutError):\n _log.debug('Timed out receiving packet. Attempting a reconnect.')\n raise ReconnectWebSocket(self.shard_id) from None\n\n code = self._close_code or self.socket.close_code\n if self._can_handle_close():\n _log.debug('Websocket closed with %s, attempting a reconnect.', code)\n raise ReconnectWebSocket(self.shard_id) from None\n else:\n _log.debug('Websocket closed with %s, cannot reconnect.', code)\n raise ConnectionClosed(self.socket, shard_id=self.shard_id, code=code) from None", "def run(self):\n queue_name = str(self._get_queue_name())\n sqs = self._get_sqs_wrapper(queue_name, JSONMessage)\n\n scanner_queue_name = str(self._get_scanner_queue_name())\n scanner_sqs = self._get_sqs_wrapper(scanner_queue_name, JSONMessage)\n dummy_message = {\"message\": \"dummy\"} # TODO: make this message meaningful\n\n while(not self._stop_requested): # Loop forever while this variable is set.\n try: # Main try-except\n for msg in sqs.get_messages_from_queue():\n msg_body = msg.get_body()\n log({\n \"status\": \"new message\",\n \"queue\": queue_name,\n \"msg\": msg_body,\n })\n\n results = None\n final_status = JOBS_ETL_STATUS_ERROR\n lsd = None\n try:\n self._update_scheduled_jobs_on_etl_start(msg_body)\n # safe to delete message. 
if worker dies, scanner will resubmit\n sqs.delete_message_from_queue(msg)\n\n try:\n # Execute etl\n results, action_dict = self._process_msg(msg)\n\n # Parse results\n final_status, lsd, extra_info = \\\n parse_results(results, msg_body['end_date'])\n if final_status != JOBS_ETL_STATUS_COMPLETE:\n if action_dict['delete_requested']:\n final_status = JOBS_ETL_STATUS_DELETED\n elif action_dict['cancel_requested']:\n final_status = JOBS_ETL_STATUS_CANCELLED\n elif action_dict['pause_requested']:\n final_status = JOBS_ETL_STATUS_PAUSED\n\n log({\n \"status\": \"processed message OK\",\n \"queue\": queue_name,\n \"msg\": msg_body,\n \"results\": results,\n \"job status\": final_status,\n \"last OK date\": lsd,\n })\n except Exception:\n final_status = JOBS_ETL_STATUS_ERROR\n log_exception(\n \"Exception in processing msg from queue: \" +\n queue_name + \" msg body:\" + str(msg_body)\n )\n if final_status != JOBS_ETL_STATUS_DELETED:\n self._update_scheduled_jobs_on_etl_complete(\n msg_body, final_status, lsd\n )\n scanner_sqs.write_message_to_queue(dummy_message)\n try:\n self.emailer.mail_result(\n final_status, msg_body, additional_info=extra_info\n )\n log(\n \"Sent emails to:\" + str(msg_body['contact_emails'])\n )\n except Exception:\n log_exception(\n \"Exception in sending emails of job:\" +\n str(msg_body)\n )\n except Exception:\n log_exception(\n \"Failed to update scheduled jobs on etl\"\n \" start/complete, msg body: \" + str(msg_body)\n )\n except Exception: # end of main try-except\n log_exception(\n \"Exception in fetching messages from queue:\"\n + queue_name\n )\n # if sqs queue fails, throttle retry\n time.sleep(sqs.get_wait_time())\n if self._run_once:\n break\n\n self._stop_requested = False", "def async_wait(self):\n if self._is_ready:\n return\n if self._ttl is None:\n while not self._is_ready:\n self._conn.serve()\n else:\n while True:\n timeout = self._ttl - time.time()\n self._conn.poll(timeout = max(timeout, 0))\n if self._is_ready:\n break\n if timeout <= 0:\n raise AsyncResultTimeout(\"result expired\")", "def _poll(self):\n return self.zmq_core.poll(10)", "def get_socket_non_empty_queue_test(self, q):\n socket = Mock()\n queue = Mock()\n queue.empty.return_value = False\n queue.get_nowait.return_value = socket\n q.return_value = queue\n context = Mock()\n p = stellr.pool.PoolManager(context)\n p.pools[ADDRESS] = queue\n\n s = p.get_socket(ADDRESS)\n self.assertEqual(s, socket)\n self.assertEqual(1, queue.empty.call_count)\n self.assertEqual(1, len(p.pools))", "def run(self):\n global _callback_thread\n\n self._ready.set()\n\n while self._operational:\n\n # qLen = self._work_q.qsize()\n\n while True:\n try:\n msg = self._topic_recvr.fetch(timeout=0)\n except Empty:\n break\n # TRACE:\n # log.error(\"!!! Console %s: msg on %s [%s]\" %\n # (self._name, self._topic_recvr.source, msg))\n self._dispatch(msg, _direct=False)\n\n while True:\n try:\n msg = self._direct_recvr.fetch(timeout = 0)\n except Empty:\n break\n # TRACE\n #log.error(\"!!! 
Console %s: msg on %s [%s]\" %\n # (self._name, self._direct_recvr.source, msg))\n self._dispatch(msg, _direct=True)\n\n self._expire_agents() # check for expired agents\n self._expire_mboxes() # check for expired async mailbox requests\n\n #if qLen == 0 and self._work_q.qsize() and self._notifier:\n if self._work_q_put and self._notifier:\n # new stuff on work queue, kick the the application...\n self._work_q_put = False\n _callback_thread = currentThread()\n trace.debug(\"Calling console notifier.indication\")\n self._notifier.indication()\n _callback_thread = None\n\n\n # wait for a message to arrive, or an agent\n # to expire, or a mailbox requrest to time out\n now = datetime.datetime.utcnow()\n next_expire = self._next_agent_expire\n\n self._lock.acquire()\n try:\n # the mailbox expire flag may be cleared by the\n # app thread(s) to force an immedate mailbox scan\n if self._next_mbox_expire is None:\n next_expire = now\n elif self._next_mbox_expire < next_expire:\n next_expire = self._next_mbox_expire\n finally:\n self._lock.release()\n\n timeout = timedelta_to_secs(next_expire - now)\n\n if self._operational and timeout > 0.0:\n try:\n trace.debug(\"waiting for next rcvr (timeout=%s)...\" % timeout)\n self._session.next_receiver(timeout = timeout)\n except Empty:\n pass\n\n trace.debug(\"Shutting down Console thread\")", "def get_socket_empty_queue_empty_error_test(self, q):\n queue = Mock()\n queue.empty.return_value = False\n queue.get_nowait.side_effect = gevent.queue.Empty\n q.return_value = queue\n context = Mock()\n p = stellr.pool.PoolManager(context)\n socket = Mock()\n socket.return_value = socket\n p._create_socket = socket\n\n s = p.get_socket(ADDRESS)\n self.assertEqual(s, socket)\n self.assertEqual(1, queue.empty.call_count)\n self.assertEqual(1, len(p.pools))" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Artificially set up the worker's work socket. This sets self.aw.work_socket so that methods other than "run" can be tested in the worker.
def set_work_socket(self): self.analysis_id = uuid.uuid4().hex def do_set_work_socket(aw): aw.work_socket = cellprofiler_core.constants.worker.the_zmq_context.socket( zmq.REQ ) aw.work_socket.connect(self.work_addr) aw.work_request_address = self.work_addr aw.current_analysis_id = self.analysis_id self.awthread.execute(do_set_work_socket, self.awthread.aw)
[ "def setup(self) -> None:\n self.running = True\n self.listen()\n self.start_workers()\n\n # Send server socket to workers.\n assert self.socket is not None\n for work_queue in self.work_queues:\n work_queue[0].send(self.family)\n send_handle(work_queue[0], self.socket.fileno(),\n self.workers[self.current_worker_id].pid)\n self.socket.close()", "def setWorker(self, worker):\n pass", "def setup_for_run(self):\n self.server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n self.server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)\n self.server.bind((self.ip_address, self.port))\n self.server.listen(100)", "def set_worker(self) -> None:\n self.__set_pycompss_context__(self.worker)", "def worker(self, worker):\n \n self._worker = worker", "def connect_to_worker (self):\n context = zmq.Context ()\n print (\"Connecting to worker at %s responsible for casu #%d...\" % (self.wrk_addr, self.casu_number))\n socket = context.socket (zmq.REQ)\n socket.connect (self.wrk_addr)\n return socket", "def _setup_socket(self, timeout_seconds=None):\n assert self.task_state >= self.STATE_INIT_RUN, \\\n 'socket cannot be set up until run is started'\n socket_server_url = self.server_url\n if (self.opt['local']): # skip some hops for local stuff\n socket_server_url = \"https://localhost\"\n self.socket_manager = SocketManager(\n socket_server_url,\n self.port,\n self._on_alive,\n self._on_new_message,\n self._on_socket_dead,\n self.task_group_id,\n socket_dead_timeout=timeout_seconds,\n server_death_callback=self.shutdown,\n )", "def worker(self, worker):\n\n self._worker = worker", "def set_worker(self, worker):\n\n self._worker = worker", "def _init_zmq_interface(self):\n if self._parsed_args.debug:\n self.logger.debug(\"**debug mode** no connection to manager\")\n return\n self._socket = self._context.socket(zmq.REQ)\n zmq_host = self.config[\"zmq\"][\"host\"]\n zmq_port = self.config[\"zmq\"][\"ports\"][\"workers\"]\n self._socket.setsockopt(zmq.TCP_KEEPALIVE, 1)\n self._socket.setsockopt(zmq.TCP_KEEPALIVE_IDLE, 900)\n self._socket.connect(f\"tcp://{zmq_host}:{zmq_port}\")\n self.logger.info(f\"connected to {zmq_host} port {zmq_port}\")", "def setup_socket(self):\n context = zmq.Context()\n socket = context.socket(zmq.REP)\n socket.bind('tcp://*:5555')\n\n self.prev_raw_frame = None\n self.socket = socket", "def setup(self):\n self.context = zmq.Context()\n self.sub_socket = self.context.socket(zmq.SUB)\n if self.filter:\n self.sub_socket.setsockopt(zmq.SUBSCRIBE, self.filter)\n self.sub_socket.connect('tcp://'+self.host+':'+str(self.com_port))\n return self", "def runtime_setup(self):\n self.context = zmq.Context()\n\n self.socket = self._setup_subscriber_socket(\n self.connection_addresses['subscriber'],\n self.context,\n self.model\n )", "def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\n self.setUpWorkers()", "def setDefaultWorker(self, worker):\n pass", "def connect_to_worker():\n socket = context.socket(zmq.REQ)\n socket.connect(\"tcp://localhost:5555\")\n return socket", "def setUp(self) :\n self.longMessage = True\n logger = corAna.makeLogger(isTestMode=True,isMaster=True,isViewer=True,isServer=True,rank=0)\n isFirstWorker = True\n self.numTimes = 5\n numDataPointsThisWorker = 1\n\n self.workerData = corAna.WorkerData(logger, isFirstWorker, self.numTimes,\n numDataPointsThisWorker, addRemoveCallbackObject = None)", "def initialize(self,init):\n logger.info('*** initialize: worker id=%d',self._agent.wid)\n self.commands = {'initialize':None, 
'before_do_work':None, 'after_do_work':None, 'finalize':None}\n self.commands.update(init.get(self._agent.wid,{}))\n exec_command(self.commands['initialize'])", "def init_socket_with_rety(self, worker_id):\n\n if self.mode == \"tcp\":\n # acquire lock for this socket in 100 ms or abandon, another thread is handling the socket reconnect\n with self.socket_locks[worker_id].acquire_timeout(0.1):\n connected = False\n while not connected:\n try:\n self._init_socket_tcp(worker_id)\n connected = True\n self.get_logger().info('Connection successful!')\n except Exception as e:\n self.get_logger().error(f\"Error initializing socket exception: {str(e)} worker id {worker_id}\")\n for i in range(1, 5):\n self.get_logger().info(f'Retrying in {5-i}')\n time.sleep(1)\n elif self.mode == \"udp\": \n self._init_socket_udp(worker_id)\n else:\n raise Exception(\"Mode must be one of 'udp' or 'tcp'\")" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Announce the work address until we get some sort of a request
def send_announcement_get_work_request(self): self.analysis_id = uuid.uuid4().hex while True: self.announce_socket.send_json(((self.analysis_id, self.work_addr),)) try: return self.awthread.recv(self.work_socket, 250) except six.moves.queue.Empty: continue
[ "def announceWork(self):\n if self.queens:\n for q in self.queens:\n try:\n q.proxyAnnounceWork(self.name, self.cobraname, self.cobrad.port)\n except Exception as e:\n logger.warning('Queen Error: %s', e)\n\n else:\n buf = \"cobra:%s:%s:%d\" % (self.name, self.cobraname, self.cobrad.port)\n self.sendsock.sendto(buf.encode('utf-8'), (cluster_ip, cluster_port))", "def sendInternetInquiry():\n pass", "def on_address_status(self, data):\n self.logger.info('got Address Status message: {}'.format(data))\n self.address_queue.put(data)", "def answer_waiting_call(self) -> None:", "def handle_request(self, t):\n self.make_work_request()", "async def send_referral(self) -> None:\n # pause logic\n if not self.running.is_set():\n self.add_to_output(\"Paused...\")\n await self.running.wait()\n # check if the referral file exists\n if os.path.exists(REFERRAL_FILE_S9):\n try:\n # tell the user we are sending the referral\n self.add_to_output(\"Sending referral IPK...\")\n # create ssh connection to miner\n await self.send_file(REFERRAL_FILE_S9, '/tmp/referral.ipk')\n await self.send_file(CONFIG_FILE, '/etc/bosminer.toml')\n\n await self.run_command(f'opkg install /tmp/referral.ipk && /etc/init.d/bosminer restart')\n # tell the user the referral completed\n self.add_to_output(f\"Referral configuration completed...\")\n except OSError as e:\n print(e)\n self.add_to_output(f\"Unknown error...\")\n else:\n self.add_to_output(\"No referral file, skipping referral install\")", "def run(self) -> None:\n self._logger_setup()\n for _ in range(3):\n ip = self.get_ip()\n if ip:\n self.ip = ip\n self.logger.info('IP fetched successfully')\n break\n else:\n self.logger.warning(f'IP fetch attempt unsuccessful, trying again in 2 seconds...')\n sleep(2)\n if not self.ip:\n raise AttributeError('Unable to fetch IP address')\n\n while True:\n old_ip = self.ip\n new_ip = self.get_ip()\n if old_ip != new_ip:\n successes = 0\n failures = []\n for domain in self.domains:\n resp = self._send_request(domain, new_ip)\n success = xml_errors(resp.text)\n if success:\n msg = f'IP address change for {domain} submitted to namecheap, ' \\\n 'change should be effective in 30 minutes.'\n self.logger.info(msg)\n successes += 1\n else:\n msg = f'IP address request failure, response: {resp.text}'\n failures.append(msg)\n self.logger.error(msg)\n if successes == len(self.domains):\n msg = f'IP address change for domains: {\" \".join(self.domains)}\\nsuccessful updated to {new_ip}.'\n send_message('Namecheap DynDNS IP address change successful', msg)\n self.ip = new_ip\n else:\n msg = f'Namecheap DynDNS IP address change failed!\\n' + \"\\n\".join(failures)\n send_message('Namecheap DynDNS IP address change Failure!', msg)\n else:\n self.logger.info('IP address same, sleeping')\n sleep(300)", "def work(self, work):\n\n self._work = work", "def test_answer_background(network):\n work_item_id = bfq.ipOwners().answer(background=True)\n bf_get_work_status(work_item_id)", "def gotNotificationReferral(self, host, port):\n pass", "def test_solicitation_no_reply_resend(self):\n waittime = self.autoconflayer._solicitation_timeout * 4.0\n self.autoconflayer.start_process()\n interest = Interest(Name('/foo/bar'))\n self.queue_from_higher.put([None, interest])\n\n # Catch all data the autoconfig layer sends downwards for 3 seconds\n deadline = datetime.utcnow() + timedelta(seconds=waittime)\n tolower = []\n while datetime.utcnow() < deadline:\n try:\n data = self.queue_to_lower.get(timeout=waittime/10)\n tolower.append(data)\n except queue.Empty:\n 
pass\n # Make sure the broadcast face was actually created and get its face id\n bcfid = self.faceidtable.get_or_create_faceid(AddressInfo(('127.255.255.255', 4242), 0))\n self.assertIsNotNone(bcfid)\n # Make sure the forwarder solicitation was sent more than once\n solictiation = Interest(Name('/autoconfig/forwarders'))\n solictiation_count = len([1 for data in tolower if data == [bcfid, solictiation]])\n self.assertGreater(solictiation_count, 1)", "def perform_tracker_request(self, url, info_hash, peer_id, port):\r\n\r\n while self.running:\r\n self.tracker_response = make_tracker_request(info_hash, port, peer_id, url)\r\n\r\n if b\"failure reason\" not in self.tracker_response:\r\n self.peers = decode_expanded_peers(self.tracker_response[b\"peers\"])\r\n sleep(self.tracker_response[b\"interval\"])", "def _request_info(self, callback=None):\n self._queue_request(\n NodeInfoRequest(self.mac),\n callback,\n )", "def work(self, task_id, task):\n pass", "def transition_update(self): # pragma: no cover\n Address = Pool().get('party.address')\n\n address = Address(Transaction().context.get('active_id'))\n Address.write([address], {\n 'street': self.start.street,\n 'zip': self.start.zip,\n 'city': self.start.city,\n 'country': self.start.country.id,\n 'subdivision': self.start.subdivision.id,\n })\n return 'done'", "def hook_request_assistance(self, data):\n request_id = data[\"request_id\"]\n log.info(\"NEW request for assistance %s\", request_id)\n volunteers_to_contact = data[\"volunteers\"]\n\n needs = \"\"\n for item in data[\"needs\"]:\n needs += f\"- {item}\\n\"\n\n assistance_request = c.MSG_REQUEST_ANNOUNCEMENT % (data[\"address\"], needs)\n\n for chat_id in volunteers_to_contact:\n if chat_id not in self.updater.persistence.user_data:\n log.debug(\"User %s hasn't added the updater to their contacts, skipping.\", chat_id)\n continue\n\n current_state = self.updater.persistence.user_data[chat_id].get(\"state\", None)\n\n if current_state in [c.State.REQUEST_IN_PROGRESS, c.State.REQUEST_ASSIGNED]:\n log.debug(\"Vol%s is already working on a request, skippint\")\n continue\n\n self.updater.bot.send_message(\n chat_id=chat_id,\n text=assistance_request,\n parse_mode=ParseMode.MARKDOWN,\n reply_markup=ReplyKeyboardMarkup(k.initial_responses, one_time_keyboard=True),\n )\n\n # update this user's state and keep the request_id as well, so we can use it later\n updated_state = {\"state\": c.State.REQUEST_SENT, \"reviewed_request\": request_id}\n self.updater.dispatcher.user_data[chat_id].update(updated_state)\n\n self.updater.dispatcher.bot_data.update({request_id: data})\n self.updater.dispatcher.update_persistence()", "def sendBonusEmails():\n return", "def feed(self, instruction):\n assert self.future_inst is None, 'BranchUnit fed when full'\n self.future_inst = instruction\n self.future_timer = max(0, instruction.DELAY - 1)", "def run(self):\n while True:\n try:\n target_url = self.TO_PROCESS.get(block=True, timeout=4)\n if target_url[\"url\"].startswith(\"mailto:\"):\n email = target_url[\"url\"][len(\"mailto:\") :]\n self.mailto_links.append(email)\n\n elif target_url[\"url\"] not in self.visited:\n self.visited.add(target_url[\"url\"])\n job = self.pool.submit(\n self.load_url, target_url, self.config.timeout\n )\n job.add_done_callback(self.handle_future)\n except Empty:\n return\n except Exception as e:\n print(e)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Returns the X window id of the window whose title matches regex `title_regex`
def get_window_id(title_regex): cmd = "wmctrl -l" logit(cmd) output = subprocess.check_output(cmd.split()).decode("utf-8").splitlines() logit(output) for line in output: w_id = line.split()[0] title = line.split(" ", 3)[3] if re.match(title_regex, title): return w_id raise Exception(f"Could not find window with title matching regex: {title_regex}")
[ "def find_window(title):\n return FindWindow(None, title)", "def _getWindowsByTitle(title, exact=False):\n matched = []\n windows = Quartz.CGWindowListCopyWindowInfo(Quartz.kCGWindowListExcludeDesktopElements | Quartz.kCGWindowListOptionOnScreenOnly, Quartz.kCGNullWindowID)\n for win in windows:\n if exact:\n if (title == win[Quartz.kCGWindowOwnerName]) or \\\n (title == win.get(Quartz.kCGWindowName, '')):\n matched.append(MacOSWindow(win['kCGWindowNumber']))\n if title in '%s %s' % (win[Quartz.kCGWindowOwnerName], win.get(Quartz.kCGWindowName, '')):\n matched.append(MacOSWindow(win['kCGWindowNumber']))\n if len(matched) > 0:\n return matched\n raise Exception('Could not find a matching window.') # HACK: Temporary hack.", "def GetProcessIdByWindowTitle(window_title: str) -> int:\n result = ctypes.c_uint32(0)\n\n string_buffer_size = len(window_title) + 2 # (+2) for the next possible character of a title and the NULL char.\n string_buffer = ctypes.create_unicode_buffer(string_buffer_size)\n\n def callback(hwnd, size):\n \"\"\"\n This callback is used to get a window handle and compare\n its title with the target window title.\n\n To continue enumeration, the callback function must return TRUE;\n to stop enumeration, it must return FALSE.\n \"\"\"\n nonlocal result, string_buffer\n\n user32.GetWindowTextW(hwnd, string_buffer, size)\n\n # Compare the window titles and get the process ID.\n if window_title == string_buffer.value:\n user32.GetWindowThreadProcessId(hwnd, ctypes.byref(result))\n return False\n\n # Indicate it must continue enumeration.\n return True\n\n # Enumerates all top-level windows on the screen by passing the handle to each window,\n # in turn, to an application-defined callback function.\n user32.EnumWindows(WNDENUMPROC(callback), string_buffer_size)\n\n return result.value", "def window(title: str = None) -> Window:\n return _obtain_window(title)", "def get_window_id(*args):\n return _ida_kernwin.get_window_id(*args)", "def getCurrentWindowId(*args):", "def FindWindowById(*args, **kwargs):\n return _core_.FindWindowById(*args, **kwargs)", "def getWindowsWithTitle(title):\n hWndsAndTitles = _getAllTitles()\n windowObjs = []\n for hWnd, winTitle in hWndsAndTitles:\n if title.upper() in winTitle.upper(): # do a case-insensitive match\n windowObjs.append(Win32Window(hWnd))\n return windowObjs", "def FindWindowByName(*args, **kwargs):\n return _core_.FindWindowByName(*args, **kwargs)", "def get_window_title(self): # real signature unknown; restored from __doc__\n return \"\"", "def _get_title_id(cursor, title):\n # run query to find title id for given title\n title_id_query = cursor.execute(dbq.SELECT_TITLE_ID, [title])\n\n if title_id_query:\n return _fetch_value(cursor)\n else:\n return None", "def get_window_title(self):\n\n return self.window_title", "def FindWindowByName(*args, **kwargs):\n return _core_.Window_FindWindowByName(*args, **kwargs)", "def get_window_title(self):\n return self.state.window.Title", "def getCurrentWindowDialogId(*args):", "def getApplicationwindowId(ReferenceID):\n try:\n ldtp.wait(5)\n window = ReferenceID.windows()[0]\n logging.info(\"Application id of the window : %s\" % window)\n except Exception as er:\n logging.info('Not able to get window name of Application')\n return False\n return window", "def id_by_title(self, title):\n logging.debug('id_by_title(%s)', title)\n if not self.list_loaded_:\n self.load_shows()\n\n for show_id in self.shows_data:\n next_show = self.shows_data[show_id]\n logging.debug('id_by_title(%s) = %s', 
next_show['title'], show_id)\n if next_show['title'] == title:\n logging.debug('Found id_by_title(%s) = %s', title, show_id)\n return show_id\n\n print('Unknown title - {0}'.format(title))\n sys.exit(1)", "def GetId(*args, **kwargs):\n return _core_.Window_GetId(*args, **kwargs)", "def get_stack_id(self, title):\n matching_ids = set()\n for stack in self._stacks():\n if stack[\"title\"] == title:\n matching_ids.add(stack[\"id\"])\n\n return self._ensure_one(matching_ids, title, \"stack\")" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Ensure we can't create a student user without academic_ fields.
def test_create_new_student_user_missing_field(self): data = { 'email': 'John@mailinator.com', 'password': 'test123!', } response = self.client.post( reverse('user-list'), data, format='json', ) self.assertEqual(response.status_code, status.HTTP_201_CREATED)
[ "def test_create_user_with_missing_attribute(self):\n pass", "def test_create_user_without_role(self):\n\n role = None\n self.user_data[\"role_id\"] = role\n\n with self.assertRaises(IntegrityError):\n User.objects.create_user(**self.user_data)", "def test_careers_invalid_student(self):\n student_id = '1234567890'\n result = self.ucuenca.careers(student_id)\n self.assertFalse(result)", "def test_create_user_without_first_name(self):\n\n first_name = \"\"\n self.user_data[\"first_name\"] = first_name\n\n with self.assertRaises(ValueError):\n User.objects.create_user(**self.user_data)", "def test_create_student_missing_role(self):\n response = self.client.post(self.url, data=json.dumps(self.payload_missing_role),\n content_type='application/json')\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST, msg=response.content.decode())\n self.assertEqual(Student.objects.count(), self.qty)", "def create_user_student_or_prof(sender, instance, created, **kwargs):\n if created:\n if not instance.is_staff:\n Student.objects.create(user=instance)\n elif not instance.is_superuser:\n Professor.objects.create(user=instance)", "def test_create_user_without_last_name(self):\n\n last_name = \"\"\n self.user_data[\"last_name\"] = last_name\n\n with self.assertRaises(ValueError):\n User.objects.create_user(**self.user_data)", "def test_staff_permission_required(self):\r\n with self.assertRaises(PermissionDenied):\r\n add_user_with_status_granted(self.user, self.user)\r\n\r\n with self.assertRaises(PermissionDenied):\r\n update_course_creator_group(self.user, self.user, True)", "def test_student_username_unique(self):\n try:\n Student.objects.create(index=\"1233\", first_name=\"test\",\n last_name=\"test\", username=\"abc\")\n self.fail(\"Student cannot be created with non unique username field\")\n except Exception:\n pass", "def test06_add_student_with_empty_fields(self):\n student_data = self.students_page.\\\n click_edit_students_list_button(). 
\\\n click_add_new_student_button()\n student_data.save_data_changes_button.click()\n actual_warnings = \\\n student_data.warnings_text_for_adding_student_with_empty_fields()\n self.assertEqual(actual_warnings, data['expected_warnings'])", "def test_create_user_without_phone_number(self):\n\n phone = \"\"\n self.user_data[\"phone\"] = phone\n\n with self.assertRaises(ValueError):\n User.objects.create_user(**self.user_data)", "def test_dont_create_user(self):\n self.assertFalse(User.objects.exists())", "def test_add_user_to_course_group_permission_denied(self):\r\n add_users(self.global_admin, CourseInstructorRole(self.course_key), self.creator)\r\n add_users(self.global_admin, CourseStaffRole(self.course_key), self.creator)\r\n with self.assertRaises(PermissionDenied):\r\n add_users(self.staff, CourseStaffRole(self.course_key), self.staff)", "def test_new_user_empty_email(self):\n with self.assertRaises(ValueError):\n get_user_model().objects.create_user(None, \"test1234\")", "def test_careers_invalid_student(self):\n student_id = '1234567890'\n result = self.ucuenca.schedule(student_id)\n self.assertFalse(result)", "def test_creator_group_not_enabled(self):\r\n self.assertTrue(has_access(self.user, CourseCreatorRole()))", "def test_creating_a_new_user_without_email(self):\n\n with self.assertRaises(ValueError):\n get_user_model().objects.create_user(None, \"Test1234\")", "def test_create_instructor_missing_role(self):\n response = self.client.post(self.url, data=json.dumps(self.payload_missing_role),\n content_type='application/json')\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST, msg=response.content.decode())\n self.assertEqual(Instructor.objects.count(), self.qty)", "def test_creating_user_with_no_email(self):\n with self.assertRaises(ValueError):\n get_user_model().objects.create_user(None, 'testpassword')" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Ensure we can't create a new user with an invalid phone number
def test_create_new_user_invalid_phone(self): data = { 'username': 'John', 'email': 'John@mailinator.com', 'password': '1fasd6dq#$%', 'phone': '12345', 'other_phone': '23445dfg', 'first_name': 'Chuck', 'last_name': 'Norris', 'university': { "name": "random_university" }, 'academic_field': {'name': "random_field"}, 'academic_level': {'name': "random_level"}, 'gender': "M", 'birthdate': "1999-11-11", } response = self.client.post( reverse('user-list'), data, format='json', ) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) content = { "phone": ['Invalid format.'], "other_phone": ['Invalid format.'] } self.assertEqual(json.loads(response.content), content)
[ "def test_new_user_invalid_phonenumber(self):\n phonenumber = None\n with self.assertRaises(ValueError):\n get_user_model().objects.create_user(\n 'TestFirstName',\n 'TestMiddleName',\n 'TestLastName',\n phonenumber,\n 'test@testmail.com',\n '1900-01-01',\n 'Not Disclosed',\n 'qwerty123456asdfgh0987'\n )", "def test_append_user_malformed_phone_number(self):\n print('(' + self.test_append_user_malformed_phone_number.__name__ + ')', \\\n self.test_append_user_malformed_phone_number.__doc__)\n\n with self.assertRaises(database.PhoneNumberFormatException):\n self.connection.create_user(NEW_USER_MALFORMED_PHONE_NUMBER)", "def test_create_user_without_phone_number(self):\n\n phone = \"\"\n self.user_data[\"phone\"] = phone\n\n with self.assertRaises(ValueError):\n User.objects.create_user(**self.user_data)", "def test_modify_user_malformed_phone_number(self):\n print('(' + self.test_modify_user_malformed_phone_number.__name__ + ')', \\\n self.test_modify_user_malformed_phone_number.__doc__)\n\n with self.assertRaises(database.PhoneNumberFormatException):\n self.connection.modify_user(USER1_ID, MODIFIED_USER1_MALFORMED_PHONE_NUMBER)", "def test_duplicate_phone_number(self):\n params = {\n 'first_name': \"David\",\n 'last_name': \"Smith\",\n 'password': '******',\n 'email': \"david.smith@mom.com\",\n 'phone_number': \"012-345-6789\"\n }\n self.register(params)\n response = self.register(params)\n self.assertEqual(response.status_code, 400)\n self.assertDictContainsSubset({'message': \"Phone number/email already exists\"}, response.json())", "def test_new_user_invalid_email(self):\n with self.assertRaises(ValueError):\n get_user_model().objects.create_user(None, 'testing321')", "def test_add_an_invalid_phone_number_to_a_contact(self):\n # Use the API endpoint to add a phone number to a contact\n response = self.add_phone_number(self.valid_contact_id, self.invalid_phone_number_data)\n\n self.assertTrue(len(response.data['phone']) > 0)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)", "def test_new_user_invalid_email(self):\n with self.assertRaises(ValueError):\n get_user_model().objects.create_user(None, '123')", "def test_verify_that_users_cannot_save_an_invalid_email():", "def test_new_user_empty_email(self):\n with self.assertRaises(ValueError):\n get_user_model().objects.create_user(None, \"test1234\")", "def test_create_user_invalid_id(self):\r\n print(\"Create user invalid id (already taken)\")\r\n u_id = 100\r\n username = \"newtestuser\"\r\n password = \"test9999\"\r\n u_type = 1\r\n\r\n prev_noUsers = len(User.query.all())\r\n self.assertEqual(self.system.create_user(u_id, username, password, u_type), 0)\r\n curr_noUsers = len(User.query.all())\r\n self.assertEqual(prev_noUsers, curr_noUsers)", "def test_new_user_without_email_raises_error(self):\n with self.assertRaises(ValueError):\n get_user_model().objects.create_user('', 'test123')", "def test_creating_a_new_user_without_email(self):\n\n with self.assertRaises(ValueError):\n get_user_model().objects.create_user(None, \"Test1234\")", "def test_append_user_malformed_email(self):\n print('(' + self.test_append_user_malformed_email.__name__ + ')', \\\n self.test_append_user_malformed_email.__doc__)\n\n with self.assertRaises(database.EmailFormatException):\n self.connection.create_user(NEW_USER_MALFORMED_EMAIL)", "def test_new_user_empty_password(self):\n with self.assertRaises(ValueError):\n get_user_model().objects.create_user(\"test3@gmail.com\", \"\")", "def test_user_register_bad_request(self):\n response = 
self.client.post(\n CONSTS.USER_REGISTER_URL,\n data=self.invalid_user_data,\n format='json'\n )\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)", "def test_signup_missing_first_name(self):\n\n invalid_u = User.signup(\"test@test.com\", \"testuser\", \"testpass\", None, \"User\", None)\n \n uid = 99999\n invalid_u.id = uid\n\n with self.assertRaises(exc.IntegrityError) as context:\n db.session.commit()", "def test_phone_validator(self):\n invalid_phone = 123\n result = phone_validator(phone=invalid_phone)\n self.assertFalse(result)\n\n invalid_phone = '123'\n result = phone_validator(phone=invalid_phone)\n self.assertFalse(result)\n\n test_phone = '+380123456789'\n result = phone_validator(phone=test_phone)\n self.assertTrue(result)", "def check_format_user_phone(phone):\n match = re.match(r'^\\+[0-9]{10,}$', phone)\n if not match:\n raise exceptions.ValidationError('phone is not valid!')\n return phone" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Ensure we can't list users without authentication.
def test_list_users_without_authenticate(self): response = self.client.get(reverse('user-list')) content = {"detail": "Authentication credentials were not provided."} self.assertEqual(json.loads(response.content), content) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
[ "def test_list_users_without_permissions(self):\n self.client.force_authenticate(user=self.user)\n\n response = self.client.get(reverse('user-list'))\n\n content = {\n 'detail': 'You do not have permission to perform this action.'\n }\n self.assertEqual(json.loads(response.content), content)\n\n self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)", "def test_user_access_to_user_list_not_permitted(self, user, testapp):\n with pytest.raises(AppError):\n testapp.get(url_for('user.members'))", "def test_users_list_fail_unauthenticated_user(self):\n self.url = reverse('authentication:users-list')\n response = self.client.get(self.url)\n self.assertEqual(response.status_code, 401)\n self.assertEqual(response.data['error'], 'NotAuthenticated')", "def test_unauthenticated_user_denial(self):\n\n self.response = self.client.get(\"/api/users/users_list/\")\n self.assertEqual(self.response.status_code, status.HTTP_403_FORBIDDEN)\n self.assertEqual(\n 'Authentication credentials were not provided.', self.response.data['detail'])", "def test_admin_user_list_all_users_permission_denied(self):\n self.client.logout()\n self.client.login(\n username=self.invalid_user.username,\n password=self.invalid_user.password\n )\n response = self.client.get(CONSTS.USER_ADMIN_LIST)\n self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)", "def test_user_list_as_user(self):\n request = self.factory.get('/api/users/')\n force_authenticate(request, user=self.testuser)\n response = UserListView.as_view()(request)\n\n self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)", "def test_retrieve_all_users_information_non_superuser(self):\n\n self._create_multiple_users()\n\n su = UserHelpers.create_authenticated_user(superuser=False)\n token = su.auth_token.key\n headers = UserHelpers.create_authentication_header(token)\n\n url = reverse('user_items')\n\n response = self.client.get(url, **headers)\n\n self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)", "def test_retrieve_all_users_information_unauth(self):\n\n self._create_multiple_users()\n\n url = reverse('user_items')\n\n response = self.client.get(url)\n\n self.assertTrue(response.status_code, status.HTTP_401_UNAUTHORIZED)", "def test_get_users_unauthenticated(client: FlaskClient) -> None:\n # Unauthenticated users are not allowed to make the request\n response = get_users(client)\n assert_error_response(response, HTTPStatus.UNAUTHORIZED)", "def test_user_get_all(self):\n response = self.app.get('/api/v3/users', headers=self.user_header)\n self.assertEqual(response.status_code, 401)", "def test_cannot_view_all_users_with_blacklisted_token(self):\n resp = self.admin_create_user()\n reply = self.admin_create_user2()\n resp = self.admin_login()\n token = resp['token']\n\n resp = self.client.delete(\n '/api/v1/logout',\n headers={'Authorization': 'Bearer {}'.format(token)}\n )\n reply = json.loads(resp.data.decode())\n self.assertEqual(reply['message'], 'You are successfully logged out!')\n self.assertEqual(resp.status_code, 200)\n\n resp = self.client.get(\n '/api/v1/users',\n headers={'Authorization': 'Bearer {}'.format(token)}\n )\n reply = json.loads(resp.data.decode())\n self.assertEqual(reply['message'], 'Invalid Authentication, Please Login!')\n self.assertEqual(resp.status_code, 401)", "def test_show_private_lists_invalid(self):\n with self.client as c:\n with c.session_transaction() as sess:\n sess[CURR_USER_KEY] = self.user2.id\n \n res = c.get(\"/users/tester1/private-lists\")\n\n 
self.assertEqual(res.status_code, 302)", "def test_anonymous_user_read(self):\r\n with self.flask_app.test_request_context('/'):\r\n for token in self.auth_providers:\r\n assert_raises(Unauthorized,\r\n getattr(require, 'token').read,\r\n token)", "def test_if_list_blog_using_unauthenticated_users(self):\n url = get_url('blog_list')\n response = self.client.get(url)\n self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)", "def test_v1alpha3_userlist(self):\n pass", "def test_no_token_get_all(self):\n response = self.app.get('/api/v3/users')\n self.assertEqual(response.status_code, 401)", "def test_unauthenticated_resource_allowed(self):\n raise NotImplementedError # FIXME", "def test_authenticated_user_read(self):\r\n with self.flask_app.test_request_context('/'):\r\n for token in self.auth_providers:\r\n assert_raises(Forbidden,\r\n getattr(require, 'token').read,\r\n token)", "def test_check_permission_list_non_authenticated(self):\n\n self.client.logout()\n\n error_message = \"Authentication credentials were not provided.\"\n\n url = reverse(\"item-list\")\n response = self.client.get(url, format=\"json\")\n\n assert response.status_code == status.HTTP_401_UNAUTHORIZED\n assert str(response.data[\"detail\"]) == error_message" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Ensure we can't list users without permissions.
def test_list_users_without_permissions(self): self.client.force_authenticate(user=self.user) response = self.client.get(reverse('user-list')) content = { 'detail': 'You do not have permission to perform this action.' } self.assertEqual(json.loads(response.content), content) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
[ "def test_admin_user_list_all_users_permission_denied(self):\n self.client.logout()\n self.client.login(\n username=self.invalid_user.username,\n password=self.invalid_user.password\n )\n response = self.client.get(CONSTS.USER_ADMIN_LIST)\n self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)", "def test_user_access_to_user_list_not_permitted(self, user, testapp):\n with pytest.raises(AppError):\n testapp.get(url_for('user.members'))", "def test_retrieve_all_users_information_non_superuser(self):\n\n self._create_multiple_users()\n\n su = UserHelpers.create_authenticated_user(superuser=False)\n token = su.auth_token.key\n headers = UserHelpers.create_authentication_header(token)\n\n url = reverse('user_items')\n\n response = self.client.get(url, **headers)\n\n self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)", "def DeniedPermissions(self) -> _n_6_t_0:", "def test_user_list_as_user(self):\n request = self.factory.get('/api/users/')\n force_authenticate(request, user=self.testuser)\n response = UserListView.as_view()(request)\n\n self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)", "def test_view_as_regular_user(self):\n response = self.client.get(self.url)\n self.assertEqual(403, response.status_code)", "def test_no_permissions_by_default(self):\n eq_(len(self.obj.permissions), 0)", "def test_list_display_has_no_username(self):\n self.assertFalse(contains_recursive(BaseUserAdmin.list_display, \"username\"))", "def test_not_logged_user_can_access(self):\n\n utils.test_can_access(self, self.url)", "def test_v1alpha3_userlist(self):\n pass", "def test_list_system_users(self):\n pass", "def test_unauthenticated_user_denial(self):\n\n self.response = self.client.get(\"/api/users/users_list/\")\n self.assertEqual(self.response.status_code, status.HTTP_403_FORBIDDEN)\n self.assertEqual(\n 'Authentication credentials were not provided.', self.response.data['detail'])", "def test_check_permission_list_non_authenticated(self):\n\n self.client.logout()\n\n error_message = \"Authentication credentials were not provided.\"\n\n url = reverse(\"item-list\")\n response = self.client.get(url, format=\"json\")\n\n assert response.status_code == status.HTTP_401_UNAUTHORIZED\n assert str(response.data[\"detail\"]) == error_message", "def test_user_without_team_returns_403(self):\n user = create_test_user()\n\n token = self.get_token(user=user)\n\n request = factory.get(\n '/',\n data={},\n content_type='application/json',\n HTTP_AUTHORIZATION=f'Bearer {token}',\n )\n my_view = PermissionModelViewset.as_view(\n actions={'get': 'list'},\n )\n response = my_view(request)\n\n assert response.status_code == status.HTTP_403_FORBIDDEN", "def test_has_no_permission(self):\n self.assertFalse(self.anonymous_user.has_perm(self.permission_string))", "def get_everyone_denied(self):", "def test_user_access_to_other_users_data_not_permitted(self, user, testapp):\n with pytest.raises(AppError):\n testapp.get(url_for('user.edit', name=user.username))", "def attendants_cannot_view_user_accounts(self):\n reply = self.admin_create_user()\n resp = self.attendant_login()\n token = resp['token']\n resp = self.client.get(\n '/api/v1/users',\n headers={'Authorization': 'Bearer {}'.format(token)}\n )\n reply = json.loads(resp.data.decode())\n self.assertEqual(reply['message'], 'Unauthorized Access!')\n self.assertEqual(resp.status_code, 401)", "def test_list_user(self):\n pass" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Ensure we can send a notification for membership end
def test_send_notification_end_membership(self): fixed_time = timezone.now() end_time_membership = fixed_time + relativedelta(days=28) self.user.membership = self.membership self.user.membership_end = end_time_membership self.user.save() with mock.patch( 'store.serializers.timezone.now', return_value=fixed_time ): response = self.client.get( reverse('user-execute-automatic-email-membership-end') ) content = { 'stop': False, 'email_send_count': 1 } self.assertEqual( response.status_code, status.HTTP_200_OK, response.content ) self.assertEqual( json.loads(response.content), content ) self.assertEqual(len(mail.outbox), 1) self.user.refresh_from_db() self.assertEqual(self.user.membership_end_notification, fixed_time) with mock.patch( 'store.serializers.timezone.now', return_value=fixed_time ): response = self.client.get( reverse('user-execute-automatic-email-membership-end') ) content = { 'stop': False, 'email_send_count': 0 } self.assertEqual( response.status_code, status.HTTP_200_OK, response.content ) self.assertEqual( json.loads(response.content), content ) # no new mail self.assertEqual(len(mail.outbox), 1)
[ "def test_admin_approval_complete_email(self):\n new_user = UserModel().objects.create_user(**self.user_info)\n profile = self.registration_profile.objects.create_profile(new_user)\n profile.send_admin_approve_complete_email(Site.objects.get_current())\n self.assertEqual(len(mail.outbox), 1)\n self.assertEqual(mail.outbox[0].to, [self.user_info['email']])", "def test_registered_no_notifications(self):\n msg = self._send(self.reg_conn, '1')\n self.assertEqual(len(msg.responses), 1)\n self.assertEqual(msg.responses[0].text,\n self.app.no_reminders)", "def test_admin_approval_email(self):\n new_user = UserModel().objects.create_user(**self.user_info)\n profile = self.registration_profile.objects.create_profile(new_user)\n profile.activated = True\n self.registration_profile.objects.send_admin_approve_email(\n new_user, Site.objects.get_current())\n self.assertEqual(len(mail.outbox), 1)\n admins_emails = [value[1] for value in settings.REGISTRATION_ADMINS]\n for email in mail.outbox[0].to:\n self.assertIn(email, admins_emails)", "def test_email_non_member_msg(self):\n from ..models import Brevet\n from ..models import BrevetRider\n brevet = Brevet.objects.get(\n region='LM', event=200, date=date(2012, 3, 17))\n rider = BrevetRider.objects.get(\n first_name='Fibber', last_name='McGee', brevet=brevet)\n self._send_one(brevet.pk, rider.pk, 'testserver')\n self.assertIn(\n 'indicated that you are NOT a member',\n mail.outbox[0].body)", "def send_reminder(self):\n pass", "async def check_notify(self) -> None:\n async with self.lock:\n # We loop through a list of keys because we are going to\n # mutate the dictionary as we loop through it.\n for message_id in copy.copy(list(self.upcoming_events.keys())):\n upcoming_event = self.upcoming_events[message_id]\n if not upcoming_event.time_to_notify():\n continue\n\n # Delete upcoming event if it's a member event\n if isinstance(upcoming_event, MemberEvent):\n # Delete upcoming if it's a member event\n await self.delete_upcoming_event(message_id)\n\n # Prepare message from the queue if it's recurring\n stop_notifying = False\n if isinstance(upcoming_event, RecurringEvent):\n stop_notifying = (\n upcoming_event.event_cancelled\n or upcoming_event.notified\n )\n\n if not stop_notifying:\n # Send ongoing event message\n ongoing_message = await upcoming_event.send_ongoing_message(\n notif_message=self.ongoing_template,\n channel=self.calendar_channel\n )\n\n # Distribute DM\n await upcoming_event.distribute_dm(\n self.dm_template,\n self.organizer_dm_template\n )\n\n # Create new ongoing event\n ongoing_event = OngoingEvent(\n countdown_time=upcoming_event.start_time,\n timeout_length=self.event_timeout,\n organizer_id=upcoming_event.organizer.id,\n message_text=ongoing_message.content,\n message_embed=ongoing_message.embeds[0]\n )\n\n self.ongoing_events[ongoing_message.id] = ongoing_event", "def test_post_faxes_send_notify(self):\n pass", "def notifyNewMember(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def test_resend_inactive(self):\n self.invite.active = False\n self.invite.save()\n url = reverse(\n 'projectroles:api_invite_resend',\n kwargs={'projectinvite': self.invite.sodar_uuid},\n )\n response = self.request_knox(url, method='POST')\n self.assertEqual(response.status_code, 400, msg=response.content)\n self.assertEqual(len(mail.outbox), 0)", "def test_notifyModerator(self):\n shelf = NewsShelf('example.com', 
self.mktemp(), 'alice@example.com')\n shelf.sendmail = self.sendmail\n shelf.notifyModerator('bob@example.org', Article('Foo: bar', 'Some text'))\n self.assertEqual(len(self._email), 1)", "def test_resend_delegate_no_perms(self):\n self.invite.role = self.role_delegate\n self.invite.save()\n delegate = self.make_user('delegate')\n self.make_assignment(self.project, delegate, self.role_delegate)\n\n url = reverse(\n 'projectroles:api_invite_resend',\n kwargs={'projectinvite': self.invite.sodar_uuid},\n )\n response = self.request_knox(\n url, method='POST', token=self.get_token(delegate)\n )\n self.assertEqual(response.status_code, 403, msg=response.content)\n self.assertEqual(len(mail.outbox), 0)", "def test_private_message_sends_email(self, get_current):\n get_current.return_value.domain = \"testserver\"\n\n s, c = Setting.objects.get_or_create(user=self.to, name=\"email_private_messages\")\n s.value = True\n s.save()\n # User has setting, and should recieve notification email.\n\n assert Setting.get_for_user(self.to, \"email_private_messages\")\n\n self.client.login(username=self.sender.username, password=\"testpass\")\n post(self.client, \"messages.new\", {\"to\": self.to, \"message\": \"a message\"})\n subject = \"[SUMO] You have a new private message from [{sender}]\"\n\n attrs_eq(\n mail.outbox[0],\n to=[self.to.email],\n subject=subject.format(sender=self.sender.profile.name),\n )\n starts_with(\n mail.outbox[0].body, PRIVATE_MESSAGE_EMAIL.format(sender=self.sender.profile.name)\n )", "def test_email_non_member_msg(self):\n from ..models import Brevet\n from ..models import BrevetRider\n brevet = Brevet.objects.get(\n region='LM', event=200, date=date(2012, 3, 17))\n rider = BrevetRider.objects.get(\n first_name='Fibber', last_name='McGee', brevet=brevet)\n self._send_one(brevet.pk, rider.pk, 'testserver')\n self.assertIn(\n 'has indicated that zhe is NOT a club member',\n mail.outbox[0].body)", "def test_set_send_email_notifications(self):\n # Setup scenario\n username = 'tester'\n password = 'secret'\n user = Account.objects.create_user(username=username, email='john.snow@gmail.com', password=password)\n\n self.assertTrue(self.client.login(username=username, password=password))\n\n # Verify initial assumptions\n self.assertTrue(user.send_email_notifications)\n\n # Run code\n resp = self.client.post(reverse('account.api.configure_email'), {\n 'send_email_notifications': False,\n }, format='json')\n\n # Verify expectations\n self.assertEquals(status.HTTP_201_CREATED, resp.status_code)\n self.assertTrue(user.send_email_notifications)", "def test_api_user_resend_confirmation_post(self):\n pass", "def can_notify(self, last_notification):\n return (\n features.is_enabled(features.EMAIL_NOTIFICATIONS)\n and self.notification_settings.via_email\n and api.can_email_user(self.user)\n and super().can_notify(last_notification)\n )", "def test_resend_activation_email_nonexistent_user(self):\n self.assertFalse(self.registration_profile.objects.resend_activation_mail(\n email=self.user_info['email'],\n site=Site.objects.get_current(),\n ))\n self.assertEqual(len(mail.outbox), 0)", "def notify(message):\n # TODO: clean up this ugly mess\n\n global notify_flag\n\n if not notify_flag:\n notify_flag = True\n message.reply(\":gear: Started expiration checking process; users will now \"\n \"be notified if their access is about to expire.\")\n else:\n message.reply(\"Cannot have more than one running instance of the notify \"\n \"function.\")\n return\n\n flag = \"tenmins\"\n while True:\n if flag is 
\"deleted\":\n info = sql.notify_users(\"hour\")\n flag = \"hour\"\n elif flag is \"hour\":\n info = sql.notify_users(\"tenmins\")\n flag = \"tenmins\"\n elif flag is \"tenmins\":\n info = sql.notify_users(\"deleted\")\n flag = \"deleted\"\n\n for person in info:\n if len(info[person]) == 0:\n continue\n try:\n users = hf.get_users()\n for user in users:\n if user[\"name\"] == person:\n dbs = []\n servers = []\n for grant in info[person]:\n dbs.append(grant[\"db\"])\n servers.append(grant[\"server\"])\n chan = hf.find_channel(message._client.channels, user[\"id\"])\n\n if flag is \"hour\":\n message._client.send_message(chan,\n Strings['NOTIFY_EXPIRE_HOUR'].format(\", \".join(dbs)) + \"\\n\"\n \"\" + Strings[\"NOTIFY_EXPIRE_INFO\"])\n for db, server in zip(dbs, servers):\n logging.info(\"{} reason=[NOTIFIED OF DATABASE ACCESS EXPIRING IN AN HOUR]\\n\".format(user[\"name\"]), server, db, \"notifyhour\")\n elif flag is \"tenmins\":\n message._client.send_message(chan,\n Strings['NOTIFY_EXPIRE_TENMINS'].format(\", \".join(dbs)) + \"\\n\"\n \"\" + Strings[\"NOTIFY_EXPIRE_INFO\"])\n for db, server in zip(dbs, servers):\n logging.info(\"{} reason=[NOTIFIED OF DATABASE ACCESS EXPIRING IN TEN MINUTES]\\n\".format(user[\"name\"]), server, db, \"notifyten\")\n elif flag is \"deleted\":\n message._client.send_message(chan,\n Strings['EXPIRE'].format(\", \".join(dbs)))\n message._client.send_message(public_channel,\n Strings[\"EXPIRE_PING\"].format(user[\"name\"],\n \", \".join(dbs)))\n for db, server in zip(dbs, servers):\n logging.info(\"{} reason=[NOTIFIED OF DATABASE ACCESS EXPIRING]\\n\".format(user[\"name\"]), server, db, \"notifyexpire\")\n\n except Exception as e:\n message._client.send_message(errors_channel, \"```{}```\".format(e))\n\n with open(\"data/jobs.json\") as f:\n jobs = json.load(f)\n\n new_jobs = []\n if len(jobs) > 0:\n for job in jobs:\n if not job.endswith(\"DONE\"):\n job_string = job.replace(\"10.132.140.160\", \"SQLCLUSTER02\").replace(\"10.132.140.150\", \"SQLCLUSTER01\")\n message._client.send_message(public_channel,\n Strings[\"LOGOUT_PLEASE\"].format(job_string.split(\":\")[0],\n job_string.split(\":\")[1]))\n new_jobs.append(job + \":DONE\")\n else:\n new_jobs.append(job)\n\n with open(\"data/jobs.json\", \"w\") as f:\n json.dump(new_jobs, f)\n\n # For use with Datadog\n with open(\"/opt/opsbot35/data/status.txt\", \"w\") as f:\n f.write(str(datetime.now()))\n\n time.sleep(5)", "def test_alert_create_for_site_members(self):\n pass" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Ensure admin can credit tickets to a user
def test_credit_ticket_as_admin(self): user = UserFactory() self.assertEqual(user.tickets, 1) nb_tickets_to_add = 5 data = { 'nb_tickets': nb_tickets_to_add, } self.client.force_authenticate(user=self.admin) response = self.client.post( reverse( 'user-credit-tickets', kwargs={'pk': user.id}, ), data, format='json', ) self.assertEqual( response.status_code, status.HTTP_200_OK, ) self.assertEqual( User.objects.get(pk=user.id).tickets, 1 + nb_tickets_to_add )
[ "def test_credit_ticket_as_user(self):\n user = UserFactory()\n self.assertEqual(user.tickets, 1)\n nb_tickets_to_add = 5\n data = {\n 'nb_tickets': nb_tickets_to_add,\n }\n\n self.client.force_authenticate(user=self.user)\n response = self.client.post(\n reverse(\n 'user-credit-tickets',\n kwargs={'pk': user.id},\n ),\n data,\n format='json',\n )\n self.assertEqual(\n response.status_code,\n status.HTTP_403_FORBIDDEN,\n )", "def test_ticket_update_by_admin(self):\n payload = {\n 'reserved': True\n }\n\n self.client.force_authenticate(self.user_admin)\n response = self.client.put(TICKETS_URL+'1/', payload)\n self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)", "def test_ticket_create_by_admin(self):\n payload = {\n 'seat': 1,\n 'passenger': 1,\n 'trip': 1\n }\n\n self.client.force_authenticate(self.user_admin)\n response = self.client.post(TICKETS_URL, payload)\n self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)", "def test_credit_ticket_negative_int(self):\n user = UserFactory()\n self.assertEqual(user.tickets, 1)\n nb_tickets_to_add = -5\n data = {\n 'nb_tickets': nb_tickets_to_add,\n }\n\n self.client.force_authenticate(user=self.admin)\n response = self.client.post(\n reverse(\n 'user-credit-tickets',\n kwargs={'pk': user.id},\n ),\n data,\n format='json',\n )\n self.assertEqual(\n response.status_code,\n status.HTTP_400_BAD_REQUEST,\n )", "def test_ticket_detail_by_admin(self):\n self.client.force_authenticate(self.user_admin)\n path = TICKETS_URL+'1/'\n response = self.client.get(path)\n self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)", "def validate_ticket(self, req, ticket):\n\n res = []\n self.env.log.debug('Validating ticket: %s' % ticket.id)\n\n enchants = self.config.get('blackmagic', 'tweaks', '')\n for e,v in self.enchants.items():\n editable = True\n self.env.log.debug('%s' % v)\n if ticket.values.get(e, None) is not None:\n if v[\"disable\"] or v[\"hide\"]:\n editable = False\n elif v[\"permission\"]!='':\n editable = False\n for perm in (x.strip() for x in v[\"permission\"].split(',')):\n self.env.log.debug(\"Checking permission %s\" % perm)\n #user has permission no denied\n if perm and perm in req.perm(ticket.resource):\n self.env.log.debug(\"Has %s permission\" % perm)\n editable = True\n \n \n #field is disabled or hidden, cannot be modified by user\n if not editable:\n self.env.log.debug('%s disabled or hidden ' % e)\n #get default ticket state or orginal ticket if being modified\n ot = model.Ticket(self.env, ticket.id)\n original = ot.values.get('%s' % e, None)\n new = ticket.values.get('%s' % e, None)\n self.env.log.debug('OT: %s' % original)\n self.env.log.debug('NEW: %s' % new)\n #field has been modified throw error\n if new != original:\n res.append(('%s' % e, 'Access denied to modifying %s' % e))\n self.env.log.debug('Denied access to: %s' % e)\n \n #check if user has perm to create ticket type\n ticketperm = self.config.get(\"blackmagic\",\"ticket_type.%s\" % ticket[\"type\"],None)\n if not ticketperm:\n ticketperm = None\n if ticketperm is not None and ticketperm not in req.perm:\n self.env.log.debug(\"Ticket validation failed type %s permission %s\"% (ticket[\"type\"], ticketperm))\n res.append(('type', \"Access denied to ticket type %s\" % ticket[\"type\"]))\n \n return res", "def user_requested_access(user):\r\n user = CourseCreator.objects.get(user=user)\r\n if user.state != CourseCreator.GRANTED:\r\n user.state = CourseCreator.PENDING\r\n user.save()", "def test_ticket_list_by_admin(self):\n 
self.client.force_authenticate(self.user_admin)\n response = self.client.get(TICKETS_URL)\n self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)", "def test_claim_admin(self):\n self.contract.transfer_admin(admin_1.address)\n self.assertIn(admin_1.address, self.contract.pending_admin())\n self.contract.change_account(admin_1)\n self.contract.claim_admin()\n self.assertEqual(self.contract.admin(), admin_1.address)\n\n self.contract.transfer_admin(deployer.address)\n self.contract.change_account(deployer)\n self.contract.claim_admin()\n self.assertEqual(self.contract.admin(), deployer.address)", "def AdminTicket(ticket):\n try:\n data, = xmlrpclib.loads(ticket)[0]\n name = data['slivers'][0]['name']\n if data != None:\n deliver_ticket(data)\n logger.log('api_calls: Admin Ticket delivered for %s' % name)\n Create(database.db.get(name))\n except Exception, err:\n raise xmlrpclib.Fault(102, 'Ticket error: ' + str(err))", "def test_create_purchase_insufficient_credit_contender(self):\n data = {\"user_id\": 3, \"product_id\": 3, \"amount\": 4}\n self.assertEqual(User.query.filter_by(id=3).first().rank_id, 1)\n\n res = self.post(url=\"/purchases\", data=data)\n self.assertEqual(res.status_code, 401)\n self.assertException(res, exc.InsufficientCredit)", "def user_allow_credit(self):\n try:\n return self.user.creditAllowed()\n except AttributeError:\n return False", "def test_user_can_change_admin(self):\n self.assertTrue(self.story.user_can_change(self.admin_user))", "def check_credit(self):\n self.ensure_one()\n getattr(self, '%s_check_credit' % self.provider, lambda: None)()", "def add_ticket(self, user):\n profile = user.get_profile()\n if profile.available_tickets() <= 0:\n raise Exception(\"This user does not have any tickets to allocate.\")\n \n ticket = RaffleTicket(raffle_prize=self, user=user)\n ticket.save()", "def test_automatically_offer_ticket(self):\n pass", "def issue_ticket(database, user):\n try:\n # check if user is an officer\n c = database.cursor()\n c.execute('SELECT utype FROM users WHERE uid = ?', (user, ))\n user_type = c.fetchone()[0]\n\n # If user is an officer \n if user_type == 'o':\n reg_num = int(input(\"Registration number: \"))\n c.execute(\"\"\"SELECT p.fname, p.lname, v.make, v.model, v.year, v.color FROM registrations r JOIN\n persons p ON (r.fname, r.lname) = (p.fname, p.lname) JOIN vehicles v ON r.vin = v.vin WHERE r.regno = ?\"\"\",(reg_num,))\n result = c.fetchone()\n fname = result[0]\n lname = result[1]\n make = result[2]\n model = result[3]\n year = result[4]\n color = result[5]\n print(\"\\n--------------------------\\nInformation\\n--------------------------\\n\")\n print(\"First Name: \", fname)\n print(\"Last Name: \", lname)\n print(\"Make: \", make)\n print(\"Model: \", model)\n print(\"Year: \", year)\n print(\"Color: \", color)\n\n print(\"\\n-------------------------\\nTicket the registra: \\n------------------------\\n\")\n violation_date = str(input(\"Violation Date: \")) # if not provided, today's date\n if violation_date == \"\":\n violation_date = datetime.today().strftime('%Y-%m-%d')\n violation_text = str(input(\"violation Text: \"))\n amount = str(input(\"Amount: \"))\n tno = randrange(1001, 9867699)\n\n c.execute(q.insert_into_tickets, (tno, reg_num, amount, violation_text, violation_date))\n\n database.commit()\n print(pm.all_done)\n # if user is not an officer\n else:\n print(pm.for_officers_only)\n sys.exit()\n except:\n print(pm.something_went_wrong)\n sys.exit()", "def test_modify_client_status_as_support(self):\n\n user 
= Staff.objects.get(username = 'StaffSupportA')\n self.client.force_authenticate(user)\n\n url = reverse('clients-change-status', kwargs = {'pk': 1})\n response = self.client.post(url, format='json')\n\n self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)", "def add_user(request, ticket_id):\n if \"user\" in request.POST:\n user = User.objects.get(id=request.POST[\"user\"])\n Ticket.objects.get(id=ticket_id).authorized_users.add(user)\n return HttpResponseRedirect( reverse('tickets.views.show_ticket', kwargs={'ticket_id': ticket_id}) )" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Ensure user can't credit tickets to a user
def test_credit_ticket_as_user(self): user = UserFactory() self.assertEqual(user.tickets, 1) nb_tickets_to_add = 5 data = { 'nb_tickets': nb_tickets_to_add, } self.client.force_authenticate(user=self.user) response = self.client.post( reverse( 'user-credit-tickets', kwargs={'pk': user.id}, ), data, format='json', ) self.assertEqual( response.status_code, status.HTTP_403_FORBIDDEN, )
[ "def test_credit_ticket_negative_int(self):\n user = UserFactory()\n self.assertEqual(user.tickets, 1)\n nb_tickets_to_add = -5\n data = {\n 'nb_tickets': nb_tickets_to_add,\n }\n\n self.client.force_authenticate(user=self.admin)\n response = self.client.post(\n reverse(\n 'user-credit-tickets',\n kwargs={'pk': user.id},\n ),\n data,\n format='json',\n )\n self.assertEqual(\n response.status_code,\n status.HTTP_400_BAD_REQUEST,\n )", "def test_lock_not_owned(self):\n response = self.app.get('/check/321', headers=self.auth_header(\"test@mail.com\", \"python\"))\n self.assertEqual(403, response.status_code)", "def test_cancelled_ticket_is_held(self):\n pass", "def test_credit_ticket_as_admin(self):\n user = UserFactory()\n self.assertEqual(user.tickets, 1)\n nb_tickets_to_add = 5\n data = {\n 'nb_tickets': nb_tickets_to_add,\n }\n\n self.client.force_authenticate(user=self.admin)\n response = self.client.post(\n reverse(\n 'user-credit-tickets',\n kwargs={'pk': user.id},\n ),\n data,\n format='json',\n )\n self.assertEqual(\n response.status_code,\n status.HTTP_200_OK,\n )\n\n self.assertEqual(\n User.objects.get(pk=user.id).tickets,\n 1 + nb_tickets_to_add\n )", "def check_credit(self):\n self.ensure_one()\n getattr(self, '%s_check_credit' % self.provider, lambda: None)()", "def credit_limit_validation(self):\n if self.credit_limit > 0:\n self.over_credit = False", "def cant(user, action):\n\n return not can(user, action)", "def test_validate_ticket_consumed_ticket(self):\n pgt = ProxyGrantingTicket.objects.create_ticket(self.pgt_url,\n user=self.user,\n validate=False)\n pgt.consume()\n self.assertRaises(InvalidTicket,\n ProxyGrantingTicket.objects.validate_ticket,\n pgt.ticket, self.pgt_url)", "def test_validate_ticket_no_ticket(self):\n self.assertRaises(InvalidRequest,\n ProxyGrantingTicket.objects.validate_ticket,\n False, False)", "def test_validate_ticket_no_ticket(self):\n with self.assertRaises(InvalidRequest):\n ProxyGrantingTicket.objects.validate_ticket(None, 'https://www.example.com')", "def test_validate_ticket(self):\n pgt = ProxyGrantingTicket.objects.create_ticket(self.pgt_url,\n user=self.user,\n validate=False)\n self.assertTrue(ProxyGrantingTicket.objects.validate_ticket(pgt.ticket,\n self.pgt_url), pgt)\n self.assertFalse(pgt.is_consumed())", "def test_noTicket():\n assert testUser1.buyTicket(None) == False", "def test_create_purchase_insufficient_credit_contender(self):\n data = {\"user_id\": 3, \"product_id\": 3, \"amount\": 4}\n self.assertEqual(User.query.filter_by(id=3).first().rank_id, 1)\n\n res = self.post(url=\"/purchases\", data=data)\n self.assertEqual(res.status_code, 401)\n self.assertException(res, exc.InsufficientCredit)", "def test_upgradeTicket_notforsale():\n\tassert not testUser3.upgradeTicket(testTicket3, testTicket2)", "def user_allow_credit(self):\n try:\n return self.user.creditAllowed()\n except AttributeError:\n return False", "def user_requested_access(user):\r\n user = CourseCreator.objects.get(user=user)\r\n if user.state != CourseCreator.GRANTED:\r\n user.state = CourseCreator.PENDING\r\n user.save()", "def test_validate_ticket_invalid_ticket(self):\n with self.assertRaises(InvalidTicket):\n ProxyGrantingTicket.objects.validate_ticket('12345', 'https://www.example.com')", "def validate_ticket(self, req, ticket):\n\n res = []\n self.env.log.debug('Validating ticket: %s' % ticket.id)\n\n enchants = self.config.get('blackmagic', 'tweaks', '')\n for e,v in self.enchants.items():\n editable = True\n self.env.log.debug('%s' % v)\n if 
ticket.values.get(e, None) is not None:\n if v[\"disable\"] or v[\"hide\"]:\n editable = False\n elif v[\"permission\"]!='':\n editable = False\n for perm in (x.strip() for x in v[\"permission\"].split(',')):\n self.env.log.debug(\"Checking permission %s\" % perm)\n #user has permission no denied\n if perm and perm in req.perm(ticket.resource):\n self.env.log.debug(\"Has %s permission\" % perm)\n editable = True\n \n \n #field is disabled or hidden, cannot be modified by user\n if not editable:\n self.env.log.debug('%s disabled or hidden ' % e)\n #get default ticket state or orginal ticket if being modified\n ot = model.Ticket(self.env, ticket.id)\n original = ot.values.get('%s' % e, None)\n new = ticket.values.get('%s' % e, None)\n self.env.log.debug('OT: %s' % original)\n self.env.log.debug('NEW: %s' % new)\n #field has been modified throw error\n if new != original:\n res.append(('%s' % e, 'Access denied to modifying %s' % e))\n self.env.log.debug('Denied access to: %s' % e)\n \n #check if user has perm to create ticket type\n ticketperm = self.config.get(\"blackmagic\",\"ticket_type.%s\" % ticket[\"type\"],None)\n if not ticketperm:\n ticketperm = None\n if ticketperm is not None and ticketperm not in req.perm:\n self.env.log.debug(\"Ticket validation failed type %s permission %s\"% (ticket[\"type\"], ticketperm))\n res.append(('type', \"Access denied to ticket type %s\" % ticket[\"type\"]))\n \n return res", "def test_validate_ticket_consumed_ticket(self):\n pt = ProxyTicket.objects.create_ticket(service=self.service_url,\n user=self.user,\n granted_by_pgt=self.pgt)\n pt.consume()\n self.assertRaises(InvalidTicket, ProxyTicket.objects.validate_ticket,\n pt.ticket, self.service_url)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Ensure admin can't credit negative tickets to a user
def test_credit_ticket_negative_int(self): user = UserFactory() self.assertEqual(user.tickets, 1) nb_tickets_to_add = -5 data = { 'nb_tickets': nb_tickets_to_add, } self.client.force_authenticate(user=self.admin) response = self.client.post( reverse( 'user-credit-tickets', kwargs={'pk': user.id}, ), data, format='json', ) self.assertEqual( response.status_code, status.HTTP_400_BAD_REQUEST, )
[ "def test_credit_ticket_as_admin(self):\n user = UserFactory()\n self.assertEqual(user.tickets, 1)\n nb_tickets_to_add = 5\n data = {\n 'nb_tickets': nb_tickets_to_add,\n }\n\n self.client.force_authenticate(user=self.admin)\n response = self.client.post(\n reverse(\n 'user-credit-tickets',\n kwargs={'pk': user.id},\n ),\n data,\n format='json',\n )\n self.assertEqual(\n response.status_code,\n status.HTTP_200_OK,\n )\n\n self.assertEqual(\n User.objects.get(pk=user.id).tickets,\n 1 + nb_tickets_to_add\n )", "def test_ticket_update_by_admin(self):\n payload = {\n 'reserved': True\n }\n\n self.client.force_authenticate(self.user_admin)\n response = self.client.put(TICKETS_URL+'1/', payload)\n self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)", "def test_credit_ticket_as_user(self):\n user = UserFactory()\n self.assertEqual(user.tickets, 1)\n nb_tickets_to_add = 5\n data = {\n 'nb_tickets': nb_tickets_to_add,\n }\n\n self.client.force_authenticate(user=self.user)\n response = self.client.post(\n reverse(\n 'user-credit-tickets',\n kwargs={'pk': user.id},\n ),\n data,\n format='json',\n )\n self.assertEqual(\n response.status_code,\n status.HTTP_403_FORBIDDEN,\n )", "def test_ticket_create_by_admin(self):\n payload = {\n 'seat': 1,\n 'passenger': 1,\n 'trip': 1\n }\n\n self.client.force_authenticate(self.user_admin)\n response = self.client.post(TICKETS_URL, payload)\n self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)", "def test_upgradeTicket_notforsale():\n\tassert not testUser3.upgradeTicket(testTicket3, testTicket2)", "def credit_limit_validation(self):\n if self.credit_limit > 0:\n self.over_credit = False", "def test_lock_not_owned(self):\n response = self.app.get('/check/321', headers=self.auth_header(\"test@mail.com\", \"python\"))\n self.assertEqual(403, response.status_code)", "def test_ticket_detail_by_admin(self):\n self.client.force_authenticate(self.user_admin)\n path = TICKETS_URL+'1/'\n response = self.client.get(path)\n self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)", "def test_create_purchase_insufficient_credit_contender(self):\n data = {\"user_id\": 3, \"product_id\": 3, \"amount\": 4}\n self.assertEqual(User.query.filter_by(id=3).first().rank_id, 1)\n\n res = self.post(url=\"/purchases\", data=data)\n self.assertEqual(res.status_code, 401)\n self.assertException(res, exc.InsufficientCredit)", "def test_ticket_list_by_admin(self):\n self.client.force_authenticate(self.user_admin)\n response = self.client.get(TICKETS_URL)\n self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)", "def cant(user, action):\n\n return not can(user, action)", "def validate_ticket(self, req, ticket):\n\n res = []\n self.env.log.debug('Validating ticket: %s' % ticket.id)\n\n enchants = self.config.get('blackmagic', 'tweaks', '')\n for e,v in self.enchants.items():\n editable = True\n self.env.log.debug('%s' % v)\n if ticket.values.get(e, None) is not None:\n if v[\"disable\"] or v[\"hide\"]:\n editable = False\n elif v[\"permission\"]!='':\n editable = False\n for perm in (x.strip() for x in v[\"permission\"].split(',')):\n self.env.log.debug(\"Checking permission %s\" % perm)\n #user has permission no denied\n if perm and perm in req.perm(ticket.resource):\n self.env.log.debug(\"Has %s permission\" % perm)\n editable = True\n \n \n #field is disabled or hidden, cannot be modified by user\n if not editable:\n self.env.log.debug('%s disabled or hidden ' % e)\n #get default ticket state or orginal ticket if being modified\n ot = 
model.Ticket(self.env, ticket.id)\n original = ot.values.get('%s' % e, None)\n new = ticket.values.get('%s' % e, None)\n self.env.log.debug('OT: %s' % original)\n self.env.log.debug('NEW: %s' % new)\n #field has been modified throw error\n if new != original:\n res.append(('%s' % e, 'Access denied to modifying %s' % e))\n self.env.log.debug('Denied access to: %s' % e)\n \n #check if user has perm to create ticket type\n ticketperm = self.config.get(\"blackmagic\",\"ticket_type.%s\" % ticket[\"type\"],None)\n if not ticketperm:\n ticketperm = None\n if ticketperm is not None and ticketperm not in req.perm:\n self.env.log.debug(\"Ticket validation failed type %s permission %s\"% (ticket[\"type\"], ticketperm))\n res.append(('type', \"Access denied to ticket type %s\" % ticket[\"type\"]))\n \n return res", "def test_only_admin_can_change_request_disapprove(self):\n\n resource_disapprove = self.client().put('api/v2/requests/1/approve',\n data=json.dumps(\n dict(status=\"disapproved\"\n )),\n headers=self.headers)\n self.assertEqual(resource_disapprove.status_code, 401)\n data = json.loads(resource_disapprove.data.decode())\n self.assertEqual(data['response'], \"This request is only for an admin\")", "def check_credit(self):\n self.ensure_one()\n getattr(self, '%s_check_credit' % self.provider, lambda: None)()", "def test_user_can_change_not_author(self):\n self.assertFalse(self.asset.user_can_change(self.user2))", "def test_cancelled_ticket_is_held(self):\n pass", "def user_allow_credit(self):\n try:\n return self.user.creditAllowed()\n except AttributeError:\n return False", "def get_everyone_denied(self):", "def test_deny_pending_payment(self):\n pass" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Used to generate the betas for simulation tests; num_part1 + num_part2 = the number of betas to be nonzero
def gen_beta(self,num_part1=54, num_part2=25,intercept=None): #intercept if intercept is not None: self.intercept = intercept #part1 num_p1_1 = num_part1 // 8 num_p2_2 = num_part1 - num_p1_1 sep_point = int(num_p2_2 * 0.8) tmp1 = np.sin([np.pi/(num_p2_2-6) * i for i in range(1,sep_point)]) * 3 tmp2 = np.ones(num_p1_1) * tmp1[-1] tmp3 = np.sin([np.pi/(num_p2_2-6) * i for i in range(sep_point,num_p2_2-3)]) * 3 tmp4 = np.linspace(tmp3[-1]+0.1,0,4,endpoint=False) part1 = np.concatenate([tmp1,tmp2,tmp3,tmp4]) #part2 num_p2_1 = num_part2 // 2 + 1 num_p2_2 = num_part2 - num_p2_1 slop = 5.5 / num_p2_1 tmp1 = np.array([slop*i for i in range(num_p2_1)]) - 0.2 tmp2 = np.array([5-slop*i for i in range(1,num_p2_2+1)]) - 0.2 part2 = np.concatenate([tmp1,tmp2]) self.beta[1:(len(part1)+1)] = part1 self.beta[(len(part1)+30):(len(part1)+30+len(part2))] = part2 return self.beta
[ "def evo_blanket(self,beta,alpha): \n evo_blanket = np.zeros(self.state_no)\n for i in range(evo_blanket.shape[0]):\n evo_blanket[i] = self.state_likelihood_markov_blanket(beta,alpha,i).sum()\n\n if self.dist in ['t']:\n evo_blanket = np.append([self.m_likelihood_markov_blanket(beta,alpha).sum()]*2,evo_blanket)\n if self.dist in ['skewt']:\n evo_blanket = np.append([self.m_likelihood_markov_blanket(beta,alpha).sum()]*3,evo_blanket) \n elif self.dist in ['Laplace']:\n evo_blanket = np.append([self.m_likelihood_markov_blanket(beta,alpha).sum()],evo_blanket)\n\n return evo_blanket", "def beta_defs(alpha,mu):\n\tbeta = alpha/mu - alpha\n\tval = np.random.beta(alpha, beta)\n\treturn val", "def gibbs_init(self, sigma2_s_param=None, sigma2_g_param=None):\n #Gibbs : Initialization step\n self.gibbs_init_step(self.nb_days, self.nb_particles, sigma2_s_param, sigma2_g_param)\n\n #Gibbs : step t > 0\n for j in range(1, self.nb_particles):\n if(j%(self.nb_particles/10)==0 or j==1):\n print(\"Gibbs sampling for particle \" + str(j) + \"/\" + str(self.nb_particles))\n\n\n self.s[:,j] = self.s[:,j-1]\n self.g_heat[:,j] = self.g_heat[:,j-1]\n self.sigma_s_star_2[:,j] = self.sigma_s_star_2[:,j-1]\n self.sigma_g_star_2[:,j] = self.sigma_g_star_2[:,j-1]\n\n # Compute s[0] for particle j (j>0)\n self.compute_s_0(j)\n\n # Compute s[n] for particle j (n>0 and j>0)\n for i in range(1, self.nb_days):\n self.compute_s(i,j)\n\n # Compute g_heat[O] for particle j (and j>0)\n self.compute_g_0(j)\n\n # Compute g_heat[n] for particle j (n>0 and j>0)\n for i in range(1, self.nb_days):\n self.compute_g(i,j)\n\n shape = 0.01 + ((self.nb_days - 1)/2)\n # Compute the new sigma_s_star2 for particle j (j>0) (follow Inverse Gamma)\n self.sigma_s_star_2[0, j] = self.compute_sigma_star_2(shape, self.s, j)\n\n # Compute the new sigma_g_star2 for particle j (j>0) (follow Inverse Gamma)\n self.sigma_s_star_2[0, j] = self.compute_sigma_star_2(shape, self.g_heat, j)\n\n #Compute x\n self.compute_x()\n #Compute w\n self.compute_w()", "def markov_blanket(self,beta,alpha): \n likelihood_blanket = self.m_likelihood_markov_blanket(beta,alpha)\n state_blanket = self.state_likelihood_markov_blanket(beta,alpha,0)\n for i in range(self.state_no-1):\n likelihood_blanket = np.append(likelihood_blanket,self.m_likelihood_markov_blanket(beta,alpha))\n state_blanket = np.append(state_blanket,self.state_likelihood_markov_blanket(beta,alpha,i+1))\n return likelihood_blanket + state_blanket", "def prior_beta(self, w1=1350, w2=1800, dw=100, sample=None, width_params={'k':-5, 'z_split':4, 'sigma0':20, 'sigma1':0.5, 'center':-1.5}): \n from scipy.stats import norm as normal_distribution\n \n wx = np.arange(-0.7*dw, 0.7*dw)\n wy = wx*0.\n wy[np.abs(wx) <= dw/2.] 
= 1\n \n f1 = filters_code.FilterDefinition(wave=wx+w1, throughput=wy)\n f2 = filters_code.FilterDefinition(wave=wx+w2, throughput=wy)\n \n y1 = [t.integrate_filter(f1, flam=True) for t in self.templates]\n y2 = [t.integrate_filter(f2, flam=True) for t in self.templates]\n ln_beta_x = np.log([w1, w2])\n beta_y = np.array([y1, y2]).T\n \n if sample is not None:\n fit_beta_y = np.dot(self.fit_coeffs[sample,:,:], beta_y)\n else:\n fit_beta_y = np.dot(self.fit_coeffs, beta_y)\n\n ln_fit_beta_y = np.log(fit_beta_y)\n out_beta_y = (np.squeeze(np.diff(ln_fit_beta_y, axis=2)) / \n np.diff(ln_beta_x)[0])\n \n # Width of beta distribution, logistic\n #k = -5\n wi = {'k':-5, 'z_split':4, 'sigma0':100, 'sigma1':1, 'center':-1.5}\n for k in width_params:\n wi[k] = width_params[k]\n \n sigma_z = 1./(1+np.exp(-wi['k']*(self.zgrid - wi['z_split']))) \n sigma_z = sigma_z*wi['sigma0'] + wi['sigma1']\n \n p_beta = np.zeros_like(out_beta_y)\n for i in range(self.NZ):\n n_i = normal_distribution(loc=wi['center'], scale=sigma_z[i])\n p_beta[:,i] = (1 - n_i.cdf(out_beta_y[:,i]))\n \n return p_beta", "def sample_utility(n, model, alpha, beta, bmax):\n A, b = matrix(0.0, (n,n)), matrix(ra.uniform(0,bmax,(n,1)))\n \n if model == 1: A = matrix(ra.uniform(0,beta,(n,n)))\n \n if model == 2:\n for i in range(n):\n for j in range(n/2):\n A[i, int(np.mod(i+j+1,n))] = beta**(j+1)\n A[i, int(np.mod(i-(j+1),n))] = beta**(j+1)\n \n if model == 3: A = 0.5*matrix(ra.binomial(1,beta,(n,n)))\n \n for i in range(n): A[i,i] = 1.0\n \n return Utility((alpha*A,b), 'sqrt')", "def sn(n, sk, fk):\n bd = beta(sk+1, fk+1) # create beta distribution\n assert n >= 0\n p = bd.rvs() # draw p from beta distribution\n sn = int(p*(n+1))\n if sn>n: # could only happen if p = 1.000 !\n sn = n\n fn = n - sn\n return (sn, fn)", "def test_negative_beta_p_lessthan_half(self):\n auc = 0.2\n for _ in range(self.Nreps):\n self._simulate_and_fit(auc=auc)\n self.assertTrue(self.cl.get_beta(self.N) < 0)", "def backwardVariableGeneration(self):\n self.beta = zeros((self.noOfEmmittingStates+2, self.T + 1))\n\n # initialisation\n for j in range(self.noOfEmmittingStates+1):\n self.beta[j,-1] = self.transitionMatrix[j,-1]\n self.beta[-1,-1] = 1.0\n\n # main recursion\n for t in range(self.T, 1, -1):\n for j in range(self.noOfEmmittingStates, 0, -1):\n partialSum = 0\n for k in range(1, self.noOfEmmittingStates+1):\n partialSum += (self.transitionMatrix[j,k-1] * self.b[k-1,t-1] * self.beta[k,t])\n self.beta[j,t-1] = partialSum\n\n # first column\n partialSum = 0\n for k in range(1, self.noOfEmmittingStates+1):\n partialSum += (self.transitionMatrix[0,k-1] * self.b[k-1,0] * self.beta[k,1])\n self.beta[0,0] = partialSum\n\n # likelihood of observed sequence, p(O|lambda)\n self.observationLikelihood = self.alpha[-1,-1]", "def _sedov_calc_beta(v, gamma, nu):\n\n beta = (nu + 2.0) * (gamma + 1.0) * np.array(\n (0.25, (gamma / (gamma - 1)) * 0.5, -(2.0 + nu * (gamma - 1.0)) / 2.0 /\n ((nu + 2.0) * (gamma + 1.0) - 2.0 *\n (2.0 + nu * (gamma - 1.0))), -0.5 / (gamma - 1.0)))\n\n beta = np.outer(beta, v)\n\n beta += (gamma + 1.0) * np.array(\n (0.0, -1.0 / (gamma - 1.0), (nu + 2.0) /\n ((nu + 2.0) * (gamma + 1.0) - 2.0 *\n (2.0 + nu * (gamma - 1.0))), 1.0 / (gamma - 1.0))).reshape((4, 1))\n\n return beta", "def test_negative_beta_p_greaterthan_half(self):\n auc = 0.2\n prevalence = 0.8\n for _ in range(self.Nreps):\n self._simulate_and_fit(auc=auc, prevalence=prevalence)\n self.assertTrue(self.cl.get_beta(self.N) < 0)", "def beta_ind(num_components):\n return 
temperature_ind(num_components)+1", "def beta_analysis(self,stream=None):\r\n\r\n def _beta_analysis1(stream=None):\r\n \"\"\"private function called by beta_analysis()\"\"\"\r\n if stream is None:\r\n stream=sys.stdout\r\n q2 = []\r\n for i in range(1,17):\r\n q_copy=copy.copy(self)\r\n q_copy.beta=2**(i/4.0)\r\n q_copy.dim=250\r\n q_copy.grain=0.02\r\n q_copy.recompute()\r\n q2.append(q_copy)\r\n na = num.array # shorthand\r\n t2 = na([q2i.mean() for q2i in q2])\r\n p2 = na([q2i.pdf_at(t2i) for q2i,t2i in zip(q2,t2)])\r\n sd2 = na([q2i.sd() for q2i in q2])\r\n beta2 = na([q2i.beta for q2i in q2])\r\n i=num.argsort(p2)[-1]\r\n t=t2[i]\r\n sd=q2[i].sd()\r\n p=num.sum(p2)\r\n betaMean=num.sum(p2*beta2)/p\r\n betaSd=math.sqrt(num.sum(p2*beta2**2)/p-(num.sum(p2*beta2)/p)**2)\r\n iBetaMean=num.sum(p2/beta2)/p\r\n iBetaSd=math.sqrt(num.sum(p2/beta2**2)/p-(num.sum(p2/beta2)/p)**2)\r\n stream.write('%5.2f\t%5.2f\t%4.1f\t%4.1f\t%6.3f\\n'%(t,sd,1/iBetaMean,betaSd,self.gamma))\r\n print 'Now re-analyzing with beta as a free parameter. . . .'\r\n if stream is None:\r\n stream=sys.stdout\r\n stream.write('logC \t sd \t beta\t sd\t gamma\\n');\r\n _beta_analysis1(stream)", "def __init__(self, n, sents, corpus='', beta=None, addone=True):\n self.n = n\n self.beta = beta\n self.corpus = corpus\n self.beta_flag = True\n self.addone = addone\n self.smoothingtechnique = 'Back Off (Katz) with Discounting Smoothing'\n self.counts = counts = defaultdict(int)\n self.A_set = defaultdict(set)\n voc = ['</s>']\n for s in sents:\n voc += s\n self.voc = set(voc)\n if beta is None:\n self.beta_flag = False\n\n # if no beta given, we compute it\n if not self.beta_flag:\n total_sents = len(sents)\n aux = int(total_sents * 90 / 100)\n # 90 per cent por training\n train_sents = sents[:aux]\n # 10 per cent for perplexity (held out data)\n held_out_sents = sents[-total_sents+aux:]\n\n train_sents = list(map((lambda x: ['<s>']*(n-1) + x), train_sents))\n train_sents = list(map((lambda x: x + ['</s>']), train_sents))\n for sent in train_sents:\n for j in range(n+1):\n for i in range(n-j, len(sent) - j + 1):\n ngram = tuple(sent[i: i + j])\n counts[ngram] += 1\n # for efficiency, we save the A set as a dict of sets\n if j:\n self.A_set[ngram[:-1]].add(ngram[-1])\n for i in range(1, n):\n counts[('<s>',)*i] += len(train_sents)\n counts[('</s>',)] = len(train_sents)\n\n self.tocounts = counts\n # search for the beta that gives lower perplexity\n beta_candidates = [i*0.1 for i in range(1, 10)]\n # xs is a list with (beta, perplexity)\n xs = []\n self.sents = train_sents\n for aux_beta in beta_candidates:\n self.beta = aux_beta\n aux_perx = self.perplexity(held_out_sents)\n xs.append((aux_beta, aux_perx))\n xs.sort(key=lambda x: x[1])\n self.beta = xs[0][0]\n with open('old-stuff/backoff_'+str(n)+'_parameters_'+corpus, 'a') as f:\n f.write('Order: {}\\n'.format(self.n))\n f.write('Beta: {}\\n'.format(self.beta))\n f.write('AddOne: {}\\n'.format(self.addone))\n f.write('Perplexity observed: {}\\n'.format(xs[0][1]))\n f.write('-------------------------------\\n')\n f.close()\n else:\n sents = list(map((lambda x: x + ['</s>']), sents))\n sents = list(map((lambda x: ['<s>']*(n-1) + x), sents))\n\n for sent in sents:\n for j in range(n+1):\n for i in range(n-j, len(sent) - j + 1):\n ngram = tuple(sent[i: i + j])\n counts[ngram] += 1\n # for efficiency, we save the A set as a dict of sets\n if j:\n self.A_set[ngram[:-1]].add(ngram[-1])\n for i in range(1, n):\n counts[('<s>',)*i] += len(sents)\n counts[('</s>',)] = len(sents)", "def 
test_prop_beta(self):\n # reproducible arbitrariness\n np.random.seed(1321)\n\n self.rule.alpha = 0\n self.rule.beta = 0.5\n\n self.conductor.out_step = np.random.randn(self.Nc)\n self.tutor.out_step = np.random.randn(self.Ns)\n\n factor = 1.5\n tmax = 7*self.dt\n\n W0 = np.copy(self.syns.W)\n\n sim = simulation.Simulation(self.conductor, self.student, self.tutor,\n self.syns, self.rule, dt=self.dt)\n sim.run(tmax)\n\n change1 = self.syns.W - W0\n\n self.syns.W = np.copy(W0)\n self.rule.beta *= factor\n sim.run(tmax)\n\n change2 = self.syns.W - W0\n\n self.assertTrue(np.allclose(change2, factor*change1))", "def nbeta(self) -> int:\n return self._core.nbeta()", "def betafix(self, ignore_exception=True, ts_beta_zero=9):\n for source in self.free_sources:\n print '----------------- %s (%.1f)-------------' % (source.name, source.ts)\n t=source.ts_beta = self.ts_beta(source.name, ignore_exception=ignore_exception)\n if t is None: continue\n print 'ts_beta ', t,\n if ts_beta_zero is None: \n print ' -- not checking'\n continue\n changed=True\n powerlaw = not source.model.free[2]\n if t<ts_beta_zero and not powerlaw:\n self.freeze('beta', source.name, 0.)\n print '--> PowerLaw'\n elif t>ts_beta_zero and powerlaw:\n self.thaw('beta', source.name)\n print '--> LogParabola'\n else:\n print ': OK'\n changed=False\n if changed:\n self.fit(source.name)\n self.fit(ignore_exception=ignore_exception) # seems necessary\n return False # nofollowup", "def get_MB_velocities(n_part, T):\n\n var_xy = kb_J*T/m # Define the variance of the distribution in the x,y planes\n var_z = 2*kb_J*T/m # Variance in z plane is twice that in the horizontal\n var_vs = np.asarray([var_xy,var_xy,var_z])\n mean = [0,0,0] # Each distribution has a native mean of 0.\n cov = np.multiply(var_vs,np.identity(3)) # distributions are assumed to be independent\n # mean_vz = np.sqrt(2*var/np.pi) # compute this from all-positive component of distribution\n\n # Additional values are computed so that tuples with negative vz can be discarded\n flag_array_full = False\n while not flag_array_full:\n output = np.random.multivariate_normal(mean,cov,int(np.round(n_part*3)))\n pos_output = output[np.where(output[:,2] > 0.)[0]]\n if pos_output.shape[0] >= n_part:\n flag_array_full = True\n\n return pos_output[:n_part]", "def sbootpars(Taus,Vs):\n#\n Tau1s,Tau2s,Tau3s=[],[],[]\n V1s,V2s,V3s=[],[],[]\n nb=len(Taus)\n bpars={}\n for k in range(nb):\n Tau1s.append(Taus[k][0])\n Tau2s.append(Taus[k][1])\n Tau3s.append(Taus[k][2])\n V1s.append(Vs[k][0])\n V2s.append(Vs[k][1])\n V3s.append(Vs[k][2])\n x,sig=gausspars(Tau1s) \n bpars[\"t1_sigma\"]=sig\n x,sig=gausspars(Tau2s) \n bpars[\"t2_sigma\"]=sig\n x,sig=gausspars(Tau3s) \n bpars[\"t3_sigma\"]=sig\n kpars=dokent(V1s,len(V1s))\n bpars[\"v1_dec\"]=kpars[\"dec\"]\n bpars[\"v1_inc\"]=kpars[\"inc\"]\n bpars[\"v1_zeta\"]=kpars[\"Zeta\"]*numpy.sqrt(nb)\n bpars[\"v1_eta\"]=kpars[\"Eta\"]*numpy.sqrt(nb)\n bpars[\"v1_zeta_dec\"]=kpars[\"Zdec\"]\n bpars[\"v1_zeta_inc\"]=kpars[\"Zinc\"]\n bpars[\"v1_eta_dec\"]=kpars[\"Edec\"]\n bpars[\"v1_eta_inc\"]=kpars[\"Einc\"]\n kpars=dokent(V2s,len(V2s))\n bpars[\"v2_dec\"]=kpars[\"dec\"]\n bpars[\"v2_inc\"]=kpars[\"inc\"]\n bpars[\"v2_zeta\"]=kpars[\"Zeta\"]*numpy.sqrt(nb)\n bpars[\"v2_eta\"]=kpars[\"Eta\"]*numpy.sqrt(nb)\n bpars[\"v2_zeta_dec\"]=kpars[\"Zdec\"]\n bpars[\"v2_zeta_inc\"]=kpars[\"Zinc\"]\n bpars[\"v2_eta_dec\"]=kpars[\"Edec\"]\n bpars[\"v2_eta_inc\"]=kpars[\"Einc\"]\n kpars=dokent(V3s,len(V3s))\n bpars[\"v3_dec\"]=kpars[\"dec\"]\n 
bpars[\"v3_inc\"]=kpars[\"inc\"]\n bpars[\"v3_zeta\"]=kpars[\"Zeta\"]*numpy.sqrt(nb)\n bpars[\"v3_eta\"]=kpars[\"Eta\"]*numpy.sqrt(nb)\n bpars[\"v3_zeta_dec\"]=kpars[\"Zdec\"]\n bpars[\"v3_zeta_inc\"]=kpars[\"Zinc\"]\n bpars[\"v3_eta_dec\"]=kpars[\"Edec\"]\n bpars[\"v3_eta_inc\"]=kpars[\"Einc\"]\n return bpars" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Returns a value in a nested associative structure, where `ks` is a sequence of keys. Returns `None` if the key is not present, or the `default` value if supplied.
def get_in(d, ks, default=None): *ks_, last = ks d_ = d for k in ks_: if type(d_) != dict or k not in d_: return default d_ = d_[k] if type(d_) == dict: return d_.get(last, default) return default
[ "def get(\n self, k: SeqStrType, default: Optional[T] = None\n ) -> Union[T, \"NestedDict[T]\"]:\n k = _flatten_index(k)\n\n if k not in self:\n if default is not None:\n return default\n else:\n raise KeyError(k)\n\n data_ptr = self._data\n for key in k:\n # This is to avoid the recursion on __getitem__\n if isinstance(data_ptr, NestedDict):\n data_ptr = data_ptr._data\n data_ptr = data_ptr[key]\n return data_ptr", "def _get_default(ddict, key, default):\n if ddict is None or key not in ddict or ddict[key] is None:\n return default\n return ddict[key]", "def get(dd, kk, default=0):\n if kk in dd.keys():\n return dd[kk]\n else:\n return default", "def fetch_default(params, key):\n for container in params:\n if container['id'] == key:\n if 'default' in container:\n return container['default']\n return None", "def getkey(d,key,default=None):\n if key in d:\n return d[key]\n else:\n return default", "def get(self, key_path, default=None):\n def _get(attr, key):\n if key in attr:\n ret = attr[key]\n if isinstance(ret, list):\n return list(ret)\n elif isintance(ret, dict):\n return ret.copy()\n else:\n return ret\n else:\n raise KeyError(\"Cannot found {0} in object\".format(key_path))\n\n try:\n ret = self._key_apply(key_path, _get)\n except KeyError as e:\n if default is not None:\n return default\n else:\n raise e\n else:\n return ret", "def value(\n self, key: _K = 0, default: t.Optional[object] = None\n ) -> t.Any:\n try:\n index = self.index(key)\n except (IndexError, KeyError):\n return default\n else:\n return self[index]", "def get_optional_key(json, key, default_value=None):\n try:\n return json[key]\n except KeyError:\n return default_value", "def lookup(my_dict, my_key, default_value=None):\n if my_key in my_dict:\n return my_dict[my_key]\n else:\n return default_value", "def peek(s, k=0, **kw):\n if not isinstance(s, dict):\n # try to index into the container\n try:\n return s[k]\n except (KeyError, IndexError, TypeError):\n pass\n # try iterating through the container\n for (i, x) in enumerate(s):\n if i == k:\n return x\n try:\n return kw['default']\n except KeyError:\n pass\n raise ValueError(str.format(\"invalid index {k}\", k=k))", "def get_default(section, option=\"\"):\n\tif not option:\n\t\tif defaults.has_key(section):\n\t\t\treturn defaults[section]\n\telse:\n\t\tif defaults.has_key(section):\n\t\t\tif defaults[section].has_key(option):\n\t\t\t\treturn defaults[section][option]\n\treturn None", "def _dict_value(dictnary: Dict, key: str, default):\n if key in dictnary:\n return dictnary[key]\n else:\n return default", "def get(self, key, default=None):\n try:\n return self[key]\n except KeyError:\n return default", "def get_or_default(mapping, key, default=None):\n try:\n values = mapping[key]\n except KeyError:\n return default\n return values[0]", "def get(self, key, default=None):\r\n try:\r\n return self[key]\r\n except KeyError:\r\n return default", "def get(self, key: str, default: t.Any = None) -> t.Any:\n if _guards.is_optional(self.data, list):\n return default\n # NB: `default` is provided as a positional because the native dict type\n # doesn't recognize a keyword argument `default`\n return self.data.get(key, default)", "def _nested_defaultdict(depth: int) -> Union[dict, defaultdict]:\n if depth < 0:\n raise ValueError('depth must be a nonnegative int')\n\n if not depth:\n return dict\n else:\n return defaultdict(lambda: _nested_defaultdict(depth-1))", "def getfirst(self, key, default=None):\n \n values = self.getlist(key)\n return values[0] if values else 
default", "def get_value_or_default(self, key: Union[str, ConfigurationVariable], default_value: Any) -> Any:\n try:\n return self.get_value(key)\n except Undefined as e:\n logger.debug(e)\n return default_value" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Associates a value in a nested associative structure, where `ks` is a sequence of keys and `v` is the new value, and returns a nested structure. If any levels do not exist, `dict`s will be created.
def assoc_in(d, ks, v): *ks_, last = ks d_ = d for k in ks_: if k not in d_: d_[k] = {} d_ = d_[k] d_[last] = v return d
[ "def createNestedDict(self, myDict, value, *path):\n for level in path[:-1]:\n myDict = myDict.setdefault(level, {})\n #for level -ends\n dict[path[-1]]=value\n return myDict", "def _set_nested(self, d, keys, value):\n if len(keys) > 1 and isinstance(d, dict):\n if dict(d).__contains__(keys[0]) and isinstance(d[keys[0]], dict):\n return self._set_nested(d[keys[0]], keys[1:], value)\n else:\n d[keys[0]] = {}\n return self._set_nested(d[keys[0]], keys[1:], value)\n else:\n dict.__setitem__(d, keys[0], value)", "def update_nested_dictionary(d, u: Mapping) -> dict:\n for k, v in u.items():\n # Check whether the value 'v' is a dictionary. In this case,\n # update_nested_dictionary is called again recursively. The\n # subdictionary d[k] will be updated with v.\n if isinstance(v, Mapping):\n d[k] = update_nested_dictionary(d.get(k, {}), v)\n else:\n d[k] = v\n return d", "def __setitem__(self, key, value): \n key, leaf = self.__build_sequence(self, key) \n super(NestedDict, leaf).__setitem__(key, value)", "def createDict(given_dict, words, value):\n\tresult_dict = given_dict\n\t# base case: if list is empty, add the value to the dict\n\tif not words:\n\t\tif '$value' in result_dict:\n\t\t\tresult_dict['$value'].append(value)\n\t\telse:\n\t\t\tresult_dict['$value'] = [value]\n\telse:\n\t\t# if the first word is already in dict, traverse through treemap with that word\n\t\t# call createDict with the tail of the words list\n\t\tif words[0] in result_dict:\n\t\t\tresult_dict[words[0]] = createDict(result_dict[words[0]], words[1:], value)\n\t\telse:\n\t\t\t# if the first word is not in the dict, create a new path\n\t\t\t# call createDict with the tail of the words list\n\t\t\tresult_dict[words[0]] = createDict({}, words[1:], value)\n\n\treturn result_dict", "def assoc(_d, key, value):\n d = deepcopy(_d)\n d[key] = value\n return d", "def recursive_mapping_update(d, u):\n if u is not None:\n for k, v in u.items():\n if isinstance(v, collections.Mapping):\n r = recursive_mapping_update(d.get(k, {}), v)\n d[k] = r\n else:\n d[k] = u[k]\n return d", "def flatten(d: MutableMapping, sep: str = \".\", parent_key: str = \"\") -> dict:\n items = []\n for k, v in d.items():\n new_key = parent_key + sep + k if parent_key else k\n if isinstance(v, MutableMapping):\n items.extend(flatten(v, sep=sep, parent_key=new_key).items())\n else:\n items.append((new_key, v))\n return dict(items)", "def dict_deep_update(d, u, handlers=None):\n if handlers is None:\n handlers = {}\n for k, v in u.items():\n if isinstance(v, collections.Mapping):\n r = dict_deep_update(d.get(k, {}), v, handlers)\n d[k] = r\n elif k in d:\n h = handlers.get(type(v), None)\n if h is not None:\n d[k] = h(d[k], u[k])\n else:\n d[k] = u[k]\n else:\n d[k] = u[k]\n return d", "def assign(self, k, v):\n if self.dict == []:\n self.dict.append([k, v])\n else:\n for tup in self.dict:\n if tup[0] == k:\n tup[1] = v\n else:\n self.dict.append([k, v])", "def nested_dict():\n return defaultdict(nested_dict)", "def nest_dict(dct, keys):\n nested_dict = dct\n for key in reversed(keys):\n nested_dict = RecursiveDict({key: nested_dict})\n return nested_dict", "def create_level(dict, path_list, value):\n\tif len(path_list) == 0:\n\t\treturn\n\n\tfor k in path_list[:-1]:\n\t\tdict = dict[k]\n\t\n\tdict[path_list[-1]] = value", "def add_or_append_dict_entry(main_dict, main_key, sub_key, value):\n # type: (dict, str, str, Any) -> dict\n if main_key not in main_dict:\n main_dict[main_key] = dict()\n if sub_key not in main_dict[main_key]:\n 
main_dict[main_key][sub_key] = [value]\n else:\n main_dict[main_key][sub_key].append(value)\n return main_dict", "def set_nested_item(data_dict: dict, key_list: tuple or list, value):\r\n reduce(getitem, key_list[:-1], data_dict)[key_list[-1]] = value\r\n return data_dict", "def deep_merge(d, u):\n stack = [(d, u)]\n while stack:\n d, u = stack.pop(0)\n for k, v in u.items():\n if not isinstance(v, collections.abc.Mapping):\n d[k] = v\n else:\n dv = d.setdefault(k, {})\n if not isinstance(dv, collections.abc.Mapping):\n d[k] = v\n else:\n stack.append((dv, v))", "def assoc(d, key, val):\n d = d.copy()\n d[key] = val\n return d", "def _nested_defaultdict(depth: int) -> Union[dict, defaultdict]:\n if depth < 0:\n raise ValueError('depth must be a nonnegative int')\n\n if not depth:\n return dict\n else:\n return defaultdict(lambda: _nested_defaultdict(depth-1))", "def dict_update_nested(p, q, max_nest=100000):\n if max_nest <= 0:\n dict.update(p, q)\n else:\n for (k,v) in dict.iteritems(q):\n if isinstance(v, dict):\n if (k not in p) or not isinstance(p[k], dict):\n p[k] = {}\n dict_update_nested(p[k], q[k], max_nest-1)\n else:\n p[k] = v" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Print a `middleware_name` with a right arrow if `_VERBOSE_MODE` is on.
def _print_inwards(middleware_name):
    if _VERBOSE_MODE:
        print('{}--->'.format(middleware_name))
[ "def _print_outwards(middleware_name):\n if _VERBOSE_MODE:\n print('<---{}'.format(middleware_name))", "def _verboseHeader(self):\n\n if verbose:\n name = self._getName()\n methodName = self._getMethodName()\n\n title = f\"Running {name}.{methodName}\"\n print('{}\\n{}'.format(title, '-' * len(title)))", "def print_debug(message):\n if current_app.debug:\n print(message)", "def verbose_print(msg: str = '') -> None:\n assert isinstance(msg, str)\n if __verbose:\n print(msg)", "def do_debug(self, line):\n fields = line.strip().split()\n n = len(fields)\n if n == 0 :\n self.help_debug()\n else:\n try:\n (opt, path, args) = self._debug_parse_args(fields)\n except ArgParseError:\n self.help_debug()\n return \n if path not in self.router.app_path:\n print \"invalid job name : %s\" % path\n print \"use \\\"ls\\\" to see all job name \"\n return \n # def route(self, func, mixed_args=[], mode=\"normal\", opt={}):\n self.router.route(path, args, \"debug\", opt)", "def vprint(string):\n if options.verbose:\n print(string)", "def debug(string):\n if verbose:\n print string\n return", "def show_details(name, f, is_partial=False):\n print '%s:' % name\n print '\\tobject:', f\n if not is_partial:\n print '\\t__name__:', f.__name__\n print '\\t__doc__', repr(f.__doc__)\n if is_partial:\n print '\\tfunc:', f.func\n print '\\targs:', f.args\n print '\\tkeywords:', f.keywords\n return", "def debug():\n def _debug(x):\n return e.String(x.as_source())\n yield (\"(λ any . str)\", _debug)", "def vprint(string):\n global verbose\n if verbose:\n print(string)", "def stk_logger(context, msg: str):\n if not context:\n logger.info(msg)\n return\n uc = context.use_case()\n if (msg[:2] != \"->\") or (uc == \"\"):\n logger.info(msg)\n return\n logger.info(f\"-> {msg[2:]} uc={uc}\")\n return", "def get_verbose_prefix():\n s = inspect.stack()\n module_name = inspect.getmodulename(s[1][1])\n func_name = s[1][3]\n return '%s->%s' % (module_name, func_name)", "def verbose_print(verbose, print_function=None):\n\n if verbose:\n return print_function or print\n else:\n def vprint(*args, **kwars):\n pass\n return vprint", "def show_details(name, f):\n print '%s:' % name\n print '\\tobject:', f\n print '\\t__name__:', \n try:\n print f.__name__\n except AttributeError:\n print '(no __name__)'\n print '\\t__doc__', repr(f.__doc__)\n print\n return", "def debug_print(text):\n if settings.debug:\n print(text)", "def trace(msg):\n if debug_level >= 2:\n print msg", "def pretty_print_default(enable=True):\n import sys\n if enable:\n sys.displayhook = pretty_print\n else:\n sys.displayhook = sys.__displayhook__", "def _print(self) -> None:\n if self.verb >= 3:\n print(self)", "def print_if_verbose(msg):\n if verbose:\n print(msg)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Print a `middleware_name` with a left arrow if `_VERBOSE_MODE` is on.
def _print_outwards(middleware_name):
    if _VERBOSE_MODE:
        print('<---{}'.format(middleware_name))
[ "def _print_inwards(middleware_name):\n if _VERBOSE_MODE:\n print('{}--->'.format(middleware_name))", "def _verboseHeader(self):\n\n if verbose:\n name = self._getName()\n methodName = self._getMethodName()\n\n title = f\"Running {name}.{methodName}\"\n print('{}\\n{}'.format(title, '-' * len(title)))", "def get_verbose_prefix():\n s = inspect.stack()\n module_name = inspect.getmodulename(s[1][1])\n func_name = s[1][3]\n return '%s->%s' % (module_name, func_name)", "def stk_logger(context, msg: str):\n if not context:\n logger.info(msg)\n return\n uc = context.use_case()\n if (msg[:2] != \"->\") or (uc == \"\"):\n logger.info(msg)\n return\n logger.info(f\"-> {msg[2:]} uc={uc}\")\n return", "def pretty_print_default(enable=True):\n import sys\n if enable:\n sys.displayhook = pretty_print\n else:\n sys.displayhook = sys.__displayhook__", "def debug():\n def _debug(x):\n return e.String(x.as_source())\n yield (\"(λ any . str)\", _debug)", "def print_debug(message):\n if current_app.debug:\n print(message)", "def do_debug(self, line):\n fields = line.strip().split()\n n = len(fields)\n if n == 0 :\n self.help_debug()\n else:\n try:\n (opt, path, args) = self._debug_parse_args(fields)\n except ArgParseError:\n self.help_debug()\n return \n if path not in self.router.app_path:\n print \"invalid job name : %s\" % path\n print \"use \\\"ls\\\" to see all job name \"\n return \n # def route(self, func, mixed_args=[], mode=\"normal\", opt={}):\n self.router.route(path, args, \"debug\", opt)", "def debug(string):\n if verbose:\n print string\n return", "def verbose_print(msg: str = '') -> None:\n assert isinstance(msg, str)\n if __verbose:\n print(msg)", "def debugargs(prefix='***'):\n def debug(func):\n @wraps(func)\n def wrapper(*args, **kwargs):\n print(prefix + ': ' + func.__qualname__)\n return func(*args, **kwargs)\n return wrapper\n return debug", "def trace(msg):\n if debug_level >= 2:\n print msg", "def vprint(string):\n if options.verbose:\n print(string)", "def show_details(name, f, is_partial=False):\n print '%s:' % name\n print '\\tobject:', f\n if not is_partial:\n print '\\t__name__:', f.__name__\n print '\\t__doc__', repr(f.__doc__)\n if is_partial:\n print '\\tfunc:', f.func\n print '\\targs:', f.args\n print '\\tkeywords:', f.keywords\n return", "def vprint(string):\n global verbose\n if verbose:\n print(string)", "def do_show(self, line):\n if line == \"director\" or line == \"firewall\":\n configkey = line + \"_config\"\n if not self.config[configkey]:\n print(\"[ERROR] '\" + configkey + \"' not defined in \" +\n \"configuration file!\")\n else:\n lines = utils.print_file(self.config[configkey])\n utils.pager(lines)\n else:\n print \"syntax: show <module>\"", "def debug_print(text):\n if settings.debug:\n print(text)", "def debug(data):\n for token in self.lexer.tokens():\n print(token)", "def create_print(prefix):\n def inner(*args):\n print prefix + str(args)\n return inner" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
This function is used to decorate generators with exactly two `yield` statements and turn them into middleware. For examples see documentation to this module and tests. Extra arguments beyond name are passed to the generator that is being decorated during instantiation. If they are not defined during interpretation of this module, then this function can be used as a regular callable and not as an annotation.
def middleware(name, *args, **kwargs):

    def new_annotate(g_fn):
        def new_middleware(handler):
            def new_handler(ctx):
                _print_inwards(name)

                g = g_fn(ctx, *args, **kwargs)

                changed_ctx = next(g)
                new_ctx = handler(changed_ctx)
                last_ctx = g.send(new_ctx)

                _print_outwards(name)

                return last_ctx

            return new_handler

        return new_middleware

    return new_annotate
[ "def consumer(func):\n\n from functools import wraps\n\n @wraps(func)\n def wrapper(*args,**kw):\n gen = func(*args, **kw)\n gen.next()\n return gen\n return wrapper", "def writer_wrapper_2(coroutine):\n yield from coroutine", "def with_outer(*args):\n def generator():\n for i in args:\n yield i\n return generator", "def construct_result_generator_middleware(result_generators):\n def result_generator_middleware(make_request, web3):\n def middleware(method, params):\n if method in result_generators:\n result = result_generators[method](method, params)\n return {'result': result}\n else:\n return make_request(method, params)\n return middleware\n return result_generator_middleware", "def inner(*args, **kwargs):\n gen = g(*args, **kwargs)\n next(gen)\n return gen", "def instance_generator(session: requests.Session, instance, generator):\n for index, arg in enumerate(generator):\n if type(arg) is tuple:\n with protection(instance.__name__, arg[0]):\n yield instance(session, *arg)\n else:\n with protection(instance.__name__, arg):\n yield instance(session, arg)", "def test_decorated(*args):\n for i in args:\n yield i", "def get_generator(generator: Generator, **kwargs) -> Generator:\n return generator(**kwargs)", "def _wrap_in_generator(func, source, namer, overload):\n\n nonlocals = []\n\n for var in six.get_function_code(func).co_freevars:\n # We must generate dummy vars so the generated function has the same closure\n # as the original function.\n free_template = 'var = None'\n nonlocal_node = templates.replace(free_template, var=var)\n nonlocals.extend(nonlocal_node)\n\n gen_fun_name = namer.new_symbol('gen_fun', set())\n template = \"\"\"\n def gen_fun(overload):\n nonlocals\n\n program\n\n return f_name\n \"\"\"\n\n ret = templates.replace(\n template,\n gen_fun=gen_fun_name,\n nonlocals=nonlocals,\n overload=overload.symbol_name,\n program=source,\n f_name=func.__name__)\n\n converted_module, _ = parsing.ast_to_object(ret)\n outer_func = getattr(converted_module, gen_fun_name)\n return outer_func(overload.module)", "def test_middleware_that_yields_too_much():\n\n def chatty_middleware(context):\n \"\"\"\n Middleware should only yield once\n \"\"\"\n yield\n yield\n\n data_manager = DataManager()\n data_manager.register_context_middleware(chatty_middleware)\n\n with pytest.raises(RuntimeError):\n with data_manager.dal():\n pass", "def test_middleware_that_does_not_yield():\n\n def bogus_middleware(context):\n \"\"\"\n This is a valid generator function because of the ``yield``\n but the ``yield`` will never occur so this is invalid middleware.\n Middleware must ``yield`` once to be valid.\n \"\"\"\n if False:\n yield\n\n data_manager = DataManager()\n data_manager.register_services(test=StateTestService())\n data_manager.register_context_middleware(bogus_middleware)\n\n with pytest.raises(MiddlewareSetupException):\n with data_manager.dal():\n pytest.fail(\"Context should not have continued\")", "def simple_generator():\n yield 'horse'\n # just going to do it...\n yield 'cow'\n yield 'mouse'", "def generator(func):\n\n @fn\n @wraps(func)\n def gen(*args, **kwargs):\n return Iter(func(*args, **kwargs))\n\n return gen", "def _wrap_generator(self, func):\n @functools.wraps(func)\n def generator_context(*args, **kwargs):\n gen = func(*args, **kwargs)\n\n # Generators are suspended and unsuspended at `yield`, hence we\n # make sure the grad mode is properly set every time the execution\n # flow returns into the wrapped generator and restored when it\n # returns through our `yield` to our 
caller (see PR #49017).\n cls = type(self)\n try:\n # Issuing `None` to a generator fires it up\n with cls():\n response = gen.send(None)\n\n while True:\n try:\n # Forward the response to our caller and get its next request\n request = yield response\n\n except GeneratorExit:\n # Inform the still active generator about its imminent closure\n with cls():\n gen.close()\n raise\n\n except BaseException:\n # Propagate the exception thrown at us by the caller\n with cls():\n response = gen.throw(*sys.exc_info())\n\n else:\n # Pass the last request to the generator and get its response\n with cls():\n response = gen.send(request)\n\n # We let the exceptions raised above by the generator's `.throw` or\n # `.send` methods bubble up to our caller, except for StopIteration\n except StopIteration as e:\n # The generator informed us that it is done: take whatever its\n # returned value (if any) was and indicate that we're done too\n # by returning it (see docs for python's return-statement).\n return e.value\n\n return generator_context", "def _wrap_generator(ctx_factory, func):\n @functools.wraps(func)\n def generator_context(*args, **kwargs):\n gen = func(*args, **kwargs)\n\n # Generators are suspended and unsuspended at `yield`, hence we\n # make sure the grad mode is properly set every time the execution\n # flow returns into the wrapped generator and restored when it\n # returns through our `yield` to our caller (see PR #49017).\n try:\n # Issuing `None` to a generator fires it up\n with ctx_factory():\n response = gen.send(None)\n\n while True:\n try:\n # Forward the response to our caller and get its next request\n request = yield response\n\n except GeneratorExit:\n # Inform the still active generator about its imminent closure\n with ctx_factory():\n gen.close()\n raise\n\n except BaseException:\n # Propagate the exception thrown at us by the caller\n with ctx_factory():\n response = gen.throw(*sys.exc_info())\n\n else:\n # Pass the last request to the generator and get its response\n with ctx_factory():\n response = gen.send(request)\n\n # We let the exceptions raised above by the generator's `.throw` or\n # `.send` methods bubble up to our caller, except for StopIteration\n except StopIteration as e:\n # The generator informed us that it is done: take whatever its\n # returned value (if any) was and indicate that we're done too\n # by returning it (see docs for python's return-statement).\n return e.value\n\n return generator_context", "def register_generator(name, maker):\n name = name.strip().upper()\n GENERATORS[name] = maker\n GENERATORS_ORDERED.append(name)", "def register_features_generator(features_generator_name: str) -> Callable[[FeaturesGenerator], FeaturesGenerator]:\r\n def decorator(features_generator: FeaturesGenerator) -> FeaturesGenerator:\r\n FEATURES_GENERATOR_REGISTRY[features_generator_name] = features_generator\r\n return features_generator\r\n\r\n return decorator", "def test_nested_yield():\n yield (yield (yield 1))", "def dummy_context(arg):\n yield arg" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
This function layers `middleware` left to right around the `handler` and calls it all with `ctx` as an argument. Setting `verbose` to `True` prints when handlers start their before and after sections.
def wrap_and_call(ctx, handler, *middleware, verbose=False):
    global _VERBOSE_MODE
    _VERBOSE_MODE = verbose

    middleware_ = list(middleware)
    return compose(*reversed(middleware_))(handler)(ctx)
[ "def _print_inwards(middleware_name):\n if _VERBOSE_MODE:\n print('{}--->'.format(middleware_name))", "def _print_outwards(middleware_name):\n if _VERBOSE_MODE:\n print('<---{}'.format(middleware_name))", "def middleware(f):\n @wraps(f)\n def outer(*args):\n def inner(next_dispatch):\n def inner_most(action):\n f(*args, next_dispatch, action)\n return inner_most\n return inner\n return outer", "def middleware(name, *args, **kwargs):\n\n def new_annotate(g_fn):\n def new_middleware(handler):\n def new_handler(ctx):\n _print_inwards(name)\n\n g = g_fn(ctx, *args, **kwargs)\n\n changed_ctx = next(g)\n new_ctx = handler(changed_ctx)\n last_ctx = g.send(new_ctx)\n\n _print_outwards(name)\n\n return last_ctx\n\n return new_handler\n\n return new_middleware\n\n return new_annotate", "def middleware_chain(self, req, res):\n for mw in self.http_application.middleware_chain(req):\n try:\n if asyncio.iscoroutine(mw):\n print(\"Running middleware coroutine:\", mw)\n self.loop.run_until_complete(mw(req, res))\n else:\n print(\"Running middleware:\", mw)\n mw(req, res)\n print(\" -> DONE\")\n except Exception as error:\n print(\" -> EXCEPTION OCCURED\", error)\n exc_type, exc_value, exc_traceback = sys.exc_info()\n traceback.print_tb(exc_traceback, limit=5, file=sys.stdout)\n # print(inspect.stack())\n for handler in self.http_application.next_error_handler(req):\n handler(req, res, error)\n return\n\n if not res.has_ended:\n raise HTTPErrorInternalServerError", "def wrap_in_middleware(app, global_conf, application_stack, **local_conf):\n stack = application_stack\n # Merge the global and local configurations\n conf = global_conf.copy()\n conf.update(local_conf)\n debug = asbool(conf.get('debug', False))\n # First put into place httpexceptions, which must be most closely\n # wrapped around the application (it can interact poorly with\n # other middleware):\n app = wrap_if_allowed(app, stack, httpexceptions.make_middleware, name='paste.httpexceptions', args=(conf,))\n # The recursive middleware allows for including requests in other\n # requests or forwarding of requests, all on the server side.\n if asbool(conf.get('use_recursive', True)):\n from paste import recursive\n app = wrap_if_allowed(app, stack, recursive.RecursiveMiddleware, args=(conf,))\n # Various debug middleware that can only be turned on if the debug\n # flag is set, either because they are insecure or greatly hurt\n # performance\n if debug:\n # Middleware to check for WSGI compliance\n if asbool(conf.get('use_lint', True)):\n from paste import lint\n app = wrap_if_allowed(app, stack, lint.make_middleware, name='paste.lint', args=(conf,))\n # Middleware to run the python profiler on each request\n if asbool(conf.get('use_profile', False)):\n import profile\n app = wrap_if_allowed(app, stack, profile.ProfileMiddleware, args=(conf,))\n # Middleware that intercepts print statements and shows them on the\n # returned page\n if asbool(conf.get('use_printdebug', True)):\n from paste.debug import prints\n app = wrap_if_allowed(app, stack, prints.PrintDebugMiddleware, args=(conf,))\n if debug and asbool(conf.get('use_interactive', False)):\n # Interactive exception debugging, scary dangerous if publicly\n # accessible, if not enabled we'll use the regular error printing\n # middleware.\n try:\n from weberror import evalexception\n app = wrap_if_allowed_or_fail(app, stack, evalexception.EvalException,\n args=(conf,),\n kwargs=dict(templating_formatters=build_template_error_formatters()))\n except MiddlewareWrapUnsupported as exc:\n 
log.warning(str(exc))\n import galaxy.web.framework.middleware.error\n app = wrap_if_allowed(app, stack, galaxy.web.framework.middleware.error.ErrorMiddleware, args=(conf,))\n else:\n # Not in interactive debug mode, just use the regular error middleware\n import galaxy.web.framework.middleware.error\n app = wrap_if_allowed(app, stack, galaxy.web.framework.middleware.error.ErrorMiddleware, args=(conf,))\n # Transaction logging (apache access.log style)\n if asbool(conf.get('use_translogger', True)):\n from paste.translogger import TransLogger\n app = wrap_if_allowed(app, stack, TransLogger)\n # X-Forwarded-Host handling\n from galaxy.web.framework.middleware.xforwardedhost import XForwardedHostMiddleware\n app = wrap_if_allowed(app, stack, XForwardedHostMiddleware)\n return app", "def middleware(request):\n with tracer.start_as_current_span(\n \"test\", kind=trace.SpanKind.SERVER\n ):\n app(request)", "def bogus_middleware(context):\n if False:\n yield", "def _inject_trace_middleware_to_args(trace_middleware, args, kwargs):\n # type: (Callable, Tuple, Dict) -> Tuple[Tuple, Dict]\n middlewares_arg = 8\n if _graphql_version >= (3, 2):\n # middleware is the 10th argument graphql.execute(..) version 3.2+\n middlewares_arg = 9\n\n # get middlewares from args or kwargs\n try:\n middlewares = get_argument_value(args, kwargs, middlewares_arg, \"middleware\") or []\n if isinstance(middlewares, MiddlewareManager):\n # First we must get the middlewares iterable from the MiddlewareManager then append\n # trace_middleware. For the trace_middleware to be called a new MiddlewareManager will\n # need to initialized. This is handled in graphql.execute():\n # https://github.com/graphql-python/graphql-core/blob/v3.2.1/src/graphql/execution/execute.py#L254\n middlewares = middlewares.middlewares # type: Iterable\n except ArgumentError:\n middlewares = []\n\n # Note - graphql middlewares are called in reverse order\n # add trace_middleware to the end of the list to wrap the execution of resolver and all middlewares\n middlewares = list(middlewares) + [trace_middleware]\n\n # update args and kwargs to contain trace_middleware\n args, kwargs = set_argument_value(args, kwargs, middlewares_arg, \"middleware\", middlewares)\n return args, kwargs", "async def raven_middleware(app, handler):\n async def middleware_handler(request):\n try:\n return await handler(request)\n except aiohttp.web.HTTPClientError:\n # Do not capture client errors\n raise\n except Exception:\n raven_client.captureException()\n raise\n return middleware_handler", "async def run_middleware(request: Request, call_next):\n response = await call_next(request)\n return response", "def instrument_flask():\n oc_trace_config = app.config.get('OPENCENSUS_TRACE', {})\n oc_trace_config.update({\n 'EXPORTER': trace_exporter.TraceExporter,\n 'PROPAGATOR': trace_context_http_header_format.TraceContextPropagator\n })\n app.config.update(OPENCENSUS_TRACE=oc_trace_config)\n return flask_middleware.FlaskMiddleware(app)", "def execute(ctx):\r\n modlog.log(ctx.MODLOG_FILTER, modlog.TRACE, \"Generating comment section...\")\r\n ctx.Sections.comment_section(ctx)\r\n modlog.log(ctx.MODLOG_FILTER, modlog.TRACE, \"Generating pre-compiled header section...\")\r\n ctx.Sections.pch_section(ctx)\r\n modlog.log(ctx.MODLOG_FILTER, modlog.TRACE, \"Generating head section...\")\r\n ctx.Sections.head_section(ctx)\r\n modlog.log(ctx.MODLOG_FILTER, modlog.TRACE, \"Generating include section...\")\r\n ctx.Sections.include_section(ctx)\r\n modlog.log(ctx.MODLOG_FILTER, 
modlog.TRACE, \"Generating type section...\")\r\n ctx.Sections.type_section(ctx)\r\n modlog.log(ctx.MODLOG_FILTER, modlog.TRACE, \"Generating vars section...\")\r\n ctx.Sections.vars_section(ctx)\r\n modlog.log(ctx.MODLOG_FILTER, modlog.TRACE, \"Generating cli section...\")\r\n ctx.Sections.cli_section(ctx)\r\n modlog.log(ctx.MODLOG_FILTER, modlog.TRACE, \"Generating node creation section...\")\r\n ctx.Sections.node_creation_section(ctx)\r\n modlog.log(ctx.MODLOG_FILTER, modlog.TRACE, \"Generating body section...\")\r\n ctx.Sections.body_section(ctx)\r\n modlog.log(ctx.MODLOG_FILTER, modlog.TRACE, \"Generating tail section...\")\r\n ctx.Sections.tail_section(ctx)\r\n modlog.log(ctx.MODLOG_FILTER, modlog.TRACE, \"ok\")", "def middleware(self):\n return ()", "def log_meta_context(**kwargs):\n if not hasattr(_meta_local, 'meta'):\n _meta_local.meta = []\n\n if len(_meta_local.meta):\n # Seems to be a nested context. Include meta from the parent\n # context\n d = _meta_local.meta[-1].to_dict()\n d.update(kwargs)\n kwargs = d\n\n _meta_local.meta.append(LogMeta(**kwargs))\n\n yield _meta_local.meta[-1]\n # Remove the current meta from the stack after the context exits\n _meta_local.meta.pop()", "def main():\n lambda_handler(\"event\", \"context\")", "def apply_middleware(self, page, html):\n for middleware in self.middleware:\n html = middleware(page, html)\n return html", "async def pre_middleware(\n self,\n event: T_contra,\n context_variables: Optional[dict] = None,\n ) -> Optional[List[BaseMiddleware]]:\n mw_instances = []\n\n for middleware in self.middlewares:\n mw_instance = middleware(event, view=self)\n await mw_instance.pre()\n if not mw_instance.can_forward:\n logger.debug(\"{} pre returned error {}\", mw_instance, mw_instance.error)\n return None\n\n mw_instances.append(mw_instance)\n\n if context_variables is not None:\n context_variables.update(mw_instance.context_update)\n\n return mw_instances", "def ContextLog(logger, oline, cline):\n logger('{}...'.format(oline))\n yield\n logger('{}.'.format(cline))" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
converts kml files to open airspace files
def kml_2_open_airspace_and_json_format(self, full_path):
    # read file
    f = open(full_path,'r')
    kml = f.readlines()
    f.close()
    # find airspaces
    """Placemark >
    < name > Bremen - Blumenthal
    Thermikplatte < / name >
    < styleUrl > # inline10</styleUrl>
    < Polygon >
    < tessellate > 1 < / tessellate >
    < outerBoundaryIs >
    < LinearRing >
    < coordinates >
    8.529121049900063, 53.19549566929423, 0
    8.52324583919868, 53.21131939607898, 0
    8.545439298799483, 53.23055800702935, 0
    8.588991466114615, 53.23047069814625, 0
    8.575289966189502, 53.20745451706468, 0
    8.560633120477348, 53.19724609335408, 0
    8.529121049900063, 53.19549566929423, 0
    < / coordinates >

    < / LinearRing >
    < / outerBoundaryIs >
    < / Polygon >
    < / Placemark >"""
    container = []
    idxLine = 0
    did_not_pass_main_folder = True
    list_of_airspace_types_included = []
    while idxLine < len(kml):
        #print(kml[idxLine])
        #if '<Folder>' in kml[idxLine] and did_not_pass_main_folder:
        # # we have to jump over the first folder
        # print(f'Reading everything inside folder: {kml[idxLine]}')
        # did_not_pass_main_folder = False
        if '<Folder>' in kml[idxLine]: # begin of airspace
            as_type = kml[idxLine+1].replace('\t','').replace('<name>','').replace('</name>\n','') # <name>B</name>
            print('Reading AS-types: ' + as_type)
            list_of_airspace_types_included.append(as_type)
            #if not (as_type == 'A' or as_type == 'B'):
            # print('#### Check Folder / Airspace Types, must be "A" or "B" and try again (current %s)' % as_type)
            # msgbox('Check Folder / Airspace Types, are not "A" or "B" (current %s). Airspace E will be used for export.' % as_type)
            # as_type = 'E'

        if '<Placemark' in kml[idxLine]: # begin of airspace
            container = []
        if '</Placemark' in kml[idxLine]: # end of airspace
            # make sure only Polygons are stored
            for as_line in container:
                if '<Polygon>' in as_line:
                    idx_lookAt_start = None
                    for idx, line_of_container in enumerate(container):
                        if "<LookAt>" in line_of_container:
                            idx_lookAt_start = idx
                        if "</LookAt>" in line_of_container:
                            idx_lookAt_end = idx
                    # Remove lookAt lines if necessary
                    if idx_lookAt_start:
                        container = container[0:idx_lookAt_start] + container[idx_lookAt_end+1::] # cut out look at part
                    # append airspace to airspace list as airspace class
                    self.airspaces.append(Airspace(lines=container, file_type='kml', as_type=as_type))
        container.append(kml[idxLine])
        idxLine += 1
    print('Loaded %d airspaces from KML-file (%s)' %(len(self.airspaces),full_path))
    # summary
    outlines = ['* KML conversion file, rename this line']
    json_dict = {"circles": [], "polygons": []}
    for airspace in self.airspaces:
        # prepare open-airspace formate
        outlines.append('\n\n') # separate airspaces
        outlines.extend(airspace.txt_lines)
        # prepare json
        json_dict['polygons'].append(airspace.json_dict)

    # write open airspace format
    target_path = full_path[:-4] + '_converted.txt'
    # uisave dialog

    target_path = filesavebox(default=target_path, filetypes="*.txt")
    if target_path is None:
        print('Airspace conversion was aborted by the user')
        quit()

    f = open(target_path,'w')
    f.writelines(outlines)
    f.close()
    print('Result was written to: %s' % target_path)

    # write json:
    target_path_json = target_path[:-4] + '.json'

    json_string = json.dumps(json_dict)
    json_file = open(target_path_json, "w")
    json_file.write(json_string)
    json_file.close()

    # write list of airspace files for index.html for leaflet map
    print('The following airspace types have been converted:')
    print(list_of_airspace_types_included)
[ "def keyholemarkup2x(file,output='df'):\n r = re.compile(r'(?<=\\.)km+[lz]?',re.I)\n try:\n extension = r.search(file).group(0) #(re.findall(r'(?<=\\.)[\\w]+',file))[-1]\n \n \n except IOError as e:\n logging.error(\"I/O error {0}\".format(e))\n if (extension.lower()=='kml') is True:\n buffer = file\n elif (extension.lower()=='kmz') is True:\n kmz = ZipFile(file, 'r')\n \n vmatch = np.vectorize(lambda x:bool(r.search(x)))\n A = np.array(kmz.namelist())\n sel = vmatch(A)\n buffer = kmz.open(A[sel][0],'r')\n \n else:\n raise ValueError('Incorrect file format entered. Please provide the '\n 'path to a valid KML or KMZ file.') \n \n \n parser = xml.sax.make_parser()\n handler = PlacemarkHandler()\n parser.setContentHandler(handler)\n parser.parse(buffer)\n \n try:\n kmz.close()\n except:\n pass\n \n df = pd.DataFrame(handler.mapping).T\n names = list(map(lambda x: x.lower(),df.columns))\n if 'description' in names:\n extradata = df.apply(PlacemarkHandler.htmlizer,axis=1)\n df = df.join(extradata)\n \n \n output = output.lower()\n \n if output=='df' or output=='dataframe' or output == None:\n result = df\n \n elif output=='csv':\n out_filename = file[:-3] + \"csv\"\n df.to_csv(out_filename,encoding='utf-8',sep=\"\\t\")\n result = (\"Successfully converted {0} to CSV and output to\"\n \" disk at {1}\".format(file,out_filename))\n \n elif output=='gpd' or output == 'gdf' or output=='geoframe' or output == 'geodataframe':\n try:\n import shapely\n from shapely.geometry import Polygon,LineString,Point\n except ImportError as e:\n raise ImportError('This operation requires shapely. {0}'.format(e))\n try:\n import fiona\n except ImportError as e:\n raise ImportError('This operation requires fiona. {0}'.format(e))\n try:\n import geopandas as gpd\n except ImportError as e:\n raise ImportError('This operation requires geopandas. {0}'.format(e))\n \n geos = gpd.GeoDataFrame(df.apply(PlacemarkHandler.spatializer,axis=1))\n result = gpd.GeoDataFrame(pd.concat([df,geos],axis=1))\n \n \n elif output=='geojson' or output=='json':\n try:\n import shapely\n from shapely.geometry import Polygon,LineString,Point\n except ImportError as e:\n raise ImportError('This operation requires shapely. {0}'.format(e))\n try:\n import fiona\n except ImportError as e:\n raise ImportError('This operation requires fiona. {0}'.format(e))\n try:\n import geopandas as gpd\n except ImportError as e:\n raise ImportError('This operation requires geopandas. {0}'.format(e))\n try:\n import geojson\n except ImportError as e:\n raise ImportError('This operation requires geojson. {0}'.format(e))\n \n geos = gpd.GeoDataFrame(df.apply(PlacemarkHandler.spatializer,axis=1))\n gdf = gpd.GeoDataFrame(pd.concat([df,geos],axis=1))\n out_filename = file[:-3] + \"geojson\"\n gdf.to_file(out_filename,driver='GeoJSON')\n validation = geojson.is_valid(geojson.load(open(out_filename)))['valid']\n if validation == 'yes':\n \n result = (\"Successfully converted {0} to GeoJSON and output to\"\n \" disk at {1}\".format(file,out_filename))\n else:\n raise ValueError('The geojson conversion did not create a '\n 'valid geojson object. Try to clean your '\n 'data or try another file.')\n \n elif output=='shapefile' or output=='shp' or output =='esri shapefile':\n try:\n import shapely\n from shapely.geometry import Polygon,LineString,Point\n except ImportError as e:\n raise ImportError('This operation requires shapely. {0}'.format(e))\n try:\n import fiona\n except ImportError as e:\n raise ImportError('This operation requires fiona. 
{0}'.format(e))\n \n try:\n import geopandas as gpd\n except ImportError as e:\n raise ImportError('This operation requires geopandas. {0}'.format(e))\n \n try:\n import shapefile\n except ImportError as e:\n raise ImportError('This operation requires pyshp. {0}'.format(e))\n \n \n geos = gpd.GeoDataFrame(df.apply(PlacemarkHandler.spatializer,axis=1))\n gdf = gpd.GeoDataFrame(pd.concat([df,geos],axis=1))\n out_filename = file[:-3] + \"shp\"\n gdf.to_file(out_filename,driver='ESRI Shapefile')\n sf = shapefile.Reader(out_filename)\n import shapefile\n sf = shapefile.Reader(out_filename)\n if len(sf.shapes())>0:\n validation = \"yes\"\n else:\n validation = \"no\"\n if validation == 'yes':\n \n result = (\"Successfully converted {0} to Shapefile and output to\"\n \" disk at {1}\".format(file,out_filename))\n else:\n raise ValueError('The Shapefile conversion did not create a '\n 'valid shapefile object. Try to clean your '\n 'data or try another file.') \n else:\n raise ValueError('The conversion returned no data; check if'\n ' you entered a correct output file type. '\n 'Valid output types are geojson, shapefile,'\n ' csv, geodataframe, and/or pandas dataframe.')\n \n return result", "def convert_kml_to_xml(kml_path, xml_path):\n # get the kml names\n kml_names = get_filenames(kml_path)\n\n # get the xml names\n xml_names = set(get_filenames(xml_path))\n\n # iterate through kml_names\n for kname in kml_names:\n # replace the .kml with .xml\n renamed = kname.replace('.kml', '.xml')\n\n # copy the file to the xml directory\n shutil.copyfile(f'./{kml_path}/{kname}', f'./{xml_path}/{renamed}')", "def convert_file(file_in):\n file_tree = ET.parse(file_in)\n root = file_tree.getroot()\n\n for document in root.findall('kml:Document', ns):\n for folder in document.findall('kml:Folder', ns):\n for place in folder.findall('kml:Placemark', ns):\n name = place.find('kml:name', ns)\n address = place.find('kml:address', ns)\n address_data = process_address(address.text)\n position = get_coordenate(name.text, address.text)\n if position:\n node = {\n \"name\" : name.text,\n \"address\" : address_data,\n \"position\" : [position[\"lat\"],position[\"lng\"]]\n }\n nodes_list.append(node)\n \n generate_osm_xml()", "def export_kml():\n arcpy.CheckOutExtension('3D')\n \n folder = 'kml'\n name = get_dataset_filename()\n \n # Create a kml folder in the temp directory if it does not exist\n temp_working_folder = os.path.join(temp_workspace,folder)\n create_folder(temp_working_folder, True)\n destination = os.path.join(temp_working_folder,name + '.kmz') \n \n # Make a feature layer (in memory)\n logger.debug('Generating KML file in memory from \"' + staging_feature_class + '\"')\n arcpy.MakeFeatureLayer_management(staging_feature_class, name, '', '')\n \n # Encode special characters that don't convert to KML correctly.\n # Replace any literal nulls <Null> with empty as these don't convert to KML correctly\n replace_literal_nulls(name)\n\n # Convert the layer to KML\n logger.debug('Exporting KML file (KMZ) to \"' + destination + '\"')\n arcpy.LayerToKML_conversion(name, destination, '20000', 'false', 'DEFAULT', '1024', '96')\n \n # Delete the in-memory feature layer and the file geodatabase\n logger.debug('Deleting in-memory feature layer:' + name)\n arcpy.Delete_management(name)\n\n # Publish the zipfile to the download folder\n publish_file(temp_working_folder, name + '.kmz','kml')", "def make_open_airspace_format(self):\n # Extract coordinates from KML\n for idxline in range(len(self.kml_lines)):\n if '<name>' in 
self.kml_lines[idxline]:\n self.name = self.kml_lines[idxline].replace('\\t', '').replace('<name>', '').replace('</name>', '').replace('\\n','')\n if not self.name.startswith('TS'):\n self.name = 'TS_' + self.name\n print('Type: %s | Name: %s' % (self.as_type, self.name))\n if '<coordinates>' in self.kml_lines[idxline]:\n self.coordinates_kml = self.kml_lines[idxline + 1].replace('\\t', '').replace('\\n', '')\n break\n # start conversion to airspace format\n \"\"\" AC A\n AN TS_Erzgeb\n AL FL98\n AH FL99\n DP 50:26:22 N 012:17:59 E\n DP 50:25:25 N 012:18:26 E\n DP 50:24:40 N 012:19:01 E\n DP 50:24:06 N 012:19:46 E\"\"\"\n\n # AC A\n self.txt_lines.append('AC %s\\n' % self.as_type)\n # AN TS_Erzgeb\n self.txt_lines.append('AN %s\\n' % self.name)\n # heights\n self.txt_lines.append('AL FL98\\n')\n self.txt_lines.append('AH FL99\\n')\n # coordinates\n for coo_pt in self.coordinates_kml.split(' ')[:-1]:\n # Target format: DP 50:26:22 N 012:17:59 E\n lat_long = coo_pt.split(',')\n # latitude\n latDecAsStr = lat_long[1].split('.')\n #if '.' not in latDecAsStr: # take care of case \"51\" instead of \"51.123456\"\n # latDecAsStr += '.000000'\n lat_degree = abs(int(latDecAsStr[0]))\n #print(f'latDecAsStr {latDecAsStr}')\n if len(latDecAsStr)==1:\n latDecAsStr.append('0')\n lat_secondDec = (float('0.' + latDecAsStr[1])*60) % 1\n lat_minute = round((float('0.' + latDecAsStr[1])*60) - lat_secondDec)\n lat_second = round(lat_secondDec*60)\n cooString = ('DP %02d:%02d:%02d' %(lat_degree,lat_minute,lat_second))\n if latDecAsStr[0].startswith('-'):\n cooString += ' S'\n else:\n cooString += ' N'\n # longitude\n #print(f'converting lat_long {lat_long}')\n # take care of case: no decimal sign included, case \"11\" instead of \"11.123456\"\n if '.' not in lat_long[0]:\n lat_long[0] += '.0'\n lonDecAsStr = lat_long[0].split('.')\n lon_degree = abs(int(lonDecAsStr[0]))\n lon_secondDec = (float('0.' + lonDecAsStr[1]) * 60) % 1\n lon_minute = round((float('0.' 
+ lonDecAsStr[1]) * 60) - lon_secondDec)\n lon_second = round(lon_secondDec * 60)\n cooString += (' %03d:%02d:%02d' % (lon_degree, lon_minute, lon_second))\n if lonDecAsStr[0].startswith('-'):\n cooString += ' W'\n else:\n cooString += ' E'\n cooString += '\\n'\n self.txt_lines.append(cooString)", "def KMLfldtofile(KMLfolder, filename):", "def make_kml_format(self,kml_template):\n if self.as_type == 'A':\n self.kml_lines = kml_template['good_subdivided']['placemark']\n elif self.as_type == 'B':\n self.kml_lines = kml_template['bad_subdivided']['placemark']\n else:\n print('Unknown airspace type')\n # get idx of name and coordinates\n idxLine = 0\n while idxLine < len(self.kml_lines):\n #print(self.kml_lines[idxLine]\n if self.kml_lines[idxLine].startswith('\\t\\t\\t\\t<name>'): # begin of airspace\n idx_name = idxLine\n if '\\t\\t\\t\\t\\t\\t\\t<coordinates>\\n' in self.kml_lines[idxLine]: # begin of airspace\n idx_coordinates = idxLine+1\n idxLine += 1\n # transform coordinates\n # add all coordinates: Format is:\n # source: 'DP 50:26:22 N 012:17:59 E\\n'\n # target: 9.025830271397426,53.46493577242719,0 8.986157446488383,53.46952117358134,0\n coo_list = [] # collect list of coorinates as strings\n for line in self.txt_lines:\n if line.startswith('AN'):\n self.name = line[3:].replace('\\n','')\n self.kml_lines[idx_name] = '\\t\\t\\t\\t<name>%s</name>\\n' % self.name\n\n if line.startswith('DP'):\n # lon\n lon_deg = float(line[14:17])\n lon_min = float(line[18:20])\n lon_sec = float(line[21:23])\n lon_dec = (lon_sec / 60 + lon_min) / 60 + lon_deg\n if line[24] == 'W':\n lon_dec *= -1 # negative if west\n # lat\n lat_deg = float(line[3:5])\n lat_min = float(line[6:8])\n lat_sec = float(line[9:11])\n lat_dec = (lat_sec / 60 + lat_min) / 60 + lat_deg\n if line[12] == 'S':\n lat_dec *= -1 # negative if west\n # attach coordinates\n coo_list.append('%1.16f,%1.16f,0 ' % (lon_dec,lat_dec))\n # store for later plotting\n self.lat_dec.append(lat_dec)\n self.lon_dec.append(lon_dec)\n\n # make sure that shape is closed --> first an last point must be the same\n if coo_list[0] != coo_list[-1]:\n coo_list.append(coo_list[0])\n self.lat_dec.append(self.lat_dec[0])\n self.lon_dec.append(self.lon_dec[0])\n\n # write coordinate strings into kml\n self.kml_lines[idx_coordinates] = '\\t\\t\\t\\t\\t\\t\\t\\t' # is prefix. Coordinates to be added as string below\n for pt in coo_list:\n self.kml_lines[idx_coordinates] += pt\n print('Converted airspace %s' % self.name)", "def importKML(filepath):\n\tf = open(filepath, 'r')\n\tstr = f.read()\n\treturn etree.fromstring(str)", "def make_input_data_kmls(rundata):\n \n import os\n from . 
import topotools, dtopotools\n\n regions2kml(rundata, combined=False)\n gauges2kml(rundata)\n\n topofiles = rundata.topo_data.topofiles\n for f in topofiles:\n topo_file_name = f[-1]\n topo_type = f[0]\n topo2kml(topo_file_name, topo_type)\n \n dtopofiles = rundata.dtopo_data.dtopofiles\n for f in dtopofiles:\n dtopo_file_name = f[-1]\n dtopo_type = f[0]\n dtopo2kml(dtopo_file_name, dtopo_type)", "def read_kmz(self):\n from zipfile import ZipFile\n from xml.etree import ElementTree\n\n try:\n _kmz = ZipFile(self.kmz_file, \"r\")\n kml = _kmz.open(\"doc.kml\", \"r\").read()\n _tree = ElementTree.fromstring(kml)\n _doc = _tree.getchildren()[0]\n self.read_xml(_doc)\n except Exception as e:\n print(self.kmz_file + \" read kmz \" + str(e))", "def convert_to_kml(file_name, output_file):\n\n with open(file_name, \"r\") as input_file:\n\n # Skip the first 5 lines\n for _ in range(SKIP_LINES):\n next(input_file)\n\n # CSV is read using csv.reader\n csv_reader = csv.reader(input_file, delimiter=\",\")\n\n # Set to store all the coordinates\n all_coordinates = list()\n\n # Iterating over each of the coordinates in the CSV\n for line in csv_reader:\n\n # If the line is of RMC type separate the data out\n if line[0] == TYPES_OF_DATA[0]:\n\n # I was getting a line with an empty string so a check for that!\n if line[3] != '' or line[4] != '' or line[5] != '' or line[6] != '':\n latitude = convert_to_degrees_mins(line[3], \"latitude\", line[4])\n longitude = convert_to_degrees_mins(line[5], \"longitude\", line[6])\n\n # You don't want duplicate data values and hence we don't pick GGA records\n else:\n continue\n all_coordinates.append((longitude, latitude, 1000))\n\n # To use only the unique values, i.e., deduplication\n all_coordinates = remove_duplicates(all_coordinates)\n\n for coordinate in all_coordinates:\n output_file.write(\"\\n\\t\\t\\t\\t\" + str(coordinate[0]) + \",\" + str(coordinate[1]) + \",\" + str(coordinate[-1]))\n\n # Closing tags for the KML\n output_file.write(\"\\n\\t\\t\\t</coordinates>\")\n output_file.write(\"\\n\\t\\t</LineString>\")\n output_file.write(\"\\n\\t</Placemark>\")", "def loadKml(self, scope, name, path, **kwargs):\r\n geodatabase = getConfigValue(\"geodatabase\")\r\n self.logger.logMessage(INFO,\"Loading %s to %s/%s as %s\\n\" % (path, geodatabase, scope, name))\r\n #if no dataset, make one\r\n if not arcpy.Exists(os.path.join(geodatabase,scope)):\r\n arcpy.CreateFeatureDataset_management(out_dataset_path = geodatabase,\r\n out_name = scope,\r\n spatial_reference = self.spatialRef)\r\n arcpy.KMLToLayer_conversion( in_kml_file = path,\r\n output_folder = os.path.dirname(path),\r\n output_data = name)\r\n # load resulting feature classes out of fgdb just created\r\n fgdb = os.path.join(os.path.dirname(path), name + '.gdb')\r\n arcpy.env.workspace = fgdb\r\n fclasses = arcpy.ListFeatureClasses(\r\n wild_card='*',\r\n feature_type='',\r\n feature_dataset=\"Placemarks\"\r\n )\r\n if fclasses:\r\n feature = fclasses[0]\r\n fcpath = fgdb + os.sep + \"Placemarks\" + os.sep + feature\r\n outDS = os.path.join(geodatabase, scope)\r\n outF = name + '_' + feature\r\n arcpy.FeatureClassToFeatureClass_conversion(in_features = fcpath,\r\n out_path = outDS,\r\n out_name = outF)\r\n return outDS + os.sep + outF\r\n return None", "def read_kml(self):\n from xml.etree import ElementTree\n\n try:\n import io\n\n kml = io.open(self.kml_file, mode=\"r\", encoding=\"utf-8\")\n _tree = ElementTree.parse(self.kml_file)\n _doc = _tree.getroot().getchildren()[0]\n self.read_xml(_doc)\n except 
Exception as e:\n print(self.kml_file + \" read kml \" + str(e))", "def tdump2kml(inputDir):\n # Check inputdir\n if not os.path.exists(inputDir):\n print(\"Entered directory is invalid.\")\n sys.exit()\n\n os.chdir(inputDir)\n\n # Main loop\n for run in os.walk('.').next()[1]:\n\n os.chdir(run)\n\n # Filter tdump files\n files = glob.glob(\"*.tdump\")\n\n # Conversion\n for entry in files:\n p = subprocess.Popen(\"C:\\\\hysplit4\\\\exec\\\\trajplot.exe -i%s -o%s.ps -a3 -v1 -l1\" % \\\n (entry, entry), shell=True, stdout=subprocess.PIPE)\n p.wait()\n os.remove(entry[:-6])\n #p_out = p.communicate()\n #print p_out[0], p_out[1]\n\n # Move all kmls into dir kmls\n #sys.stdout.flush()\n kmls = glob.glob(\"*.kml\")\n\n if not os.path.exists(\"kmls\"):\n os.makedirs(\"kmls\")\n\n for kml in kmls:\n os.rename(kml, \"kmls\\\\%s\" % kml)\n\n # Remove redundant ps files\n pss = glob.glob(\"*.ps\")\n\n for ps in pss:\n os.remove(ps)\n\n print \"DONE : %s %s\\kmls\" % (run, os.getcwd())\n os.chdir('../')", "def kml(self, file_handle):\n return self.bytes(file_handle, \"application/vnd.google-earth.kml+xml\")", "def convert_kml_to_geojson(labels_path):\n logger = logging.getLogger(__name__)\n for file in Path(labels_path).iterdir():\n if file.name.endswith(('.kml', 'kmz')):\n logger.info('Generating geojson from {}'.format(file.name))\n kmlpath = '{}/{}'.format(labels_path, file.name)\n k2g.convert(kmlpath, labels_path)", "def makepkl():\n # Old osgeo.ogr approach\n from osgeo import ogr\n # USTimeZones.kml source is unknown, but was freely available and\n # Has been converted to a pkl file\n kmlpath = os.path.join(os.path.dirname(__file__), 'USTimeZones.kml')\n driver = ogr.GetDriverByName('KML')\n datasource = driver.Open(kmlpath)\n layer = datasource.GetLayer()\n layerDefn = layer.GetLayerDefn()\n oldfeats = [i_ for i_ in layer]\n featDefn = layer.GetLayerDefn()\n feat = ogr.Feature(featDefn)\n nbFeat = layer.GetFeatureCount()\n outfeat = file(uspklpath, 'w')\n featout = [(feat.GetField(0), feat.GetGeometryRef().ExportToWkt()) for feat in oldfeats]\n pickle.dump(featout, file(uspklpath, 'w'))\n\n # WorldTimeZones.kml source is below and was freely available and\n # Has been converted to a pkl file\n # https://productforums.google.com/forum/?fromgroups=#!msg/gec-tools/EdR18tz_5k8/MRPV85OxXIkJ\n kmlpath = os.path.join(os.path.dirname(__file__), 'WorldTimeZones.kml')\n driver = ogr.GetDriverByName('KML')\n datasource = driver.Open(kmlpath)\n layer = datasource.GetLayer()\n layerDefn = layer.GetLayerDefn()\n oldfeats = [i_ for i_ in layer]\n featDefn = layer.GetLayerDefn()\n feat = ogr.Feature(featDefn)\n nbFeat = layer.GetFeatureCount()\n outfeat = file(worldpklpath, 'w')\n featout = [(feat.GetField(0), feat.GetGeometryRef().ExportToWkt()) for feat in oldfeats]\n pickle.dump(featout, file(worldpklpath, 'w'))", "def get_kml_object(filename: str) -> fastkml.kml.KML:\n\t\n\tkml_obj = fastkml.kml.KML()\n\t\n\twith open(filename) as file:\n\t\tkml_obj.from_string(file.read().encode(\"utf-8\"))\n\t\n\treturn kml_obj", "def read_kml():\n global kmldata\n global CONFIG\n if type(kmldata) == type(None):\n if not os.path.exists(CONFIG[\"kmlfile\"]):\n fiona.drvsupport.supported_drivers['KML'] = 'rw'\n kmldata = geopandas.read_file(CONFIG[\"kmlrepo\"], driver=\"KML\")\n os.makedirs(CONFIG[\"cachedir\"],exist_ok=True)\n with open(CONFIG[\"kmlfile\"], \"wb\") as fh:\n pickle.dump(kmldata,fh)\n else:\n with open(CONFIG[\"kmlfile\"], \"rb\") as fh:\n kmldata = pickle.load(fh)\n return kmldata" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
convert to open airspace format
def make_open_airspace_format(self):
    # Extract coordinates from KML
    for idxline in range(len(self.kml_lines)):
        if '<name>' in self.kml_lines[idxline]:
            self.name = self.kml_lines[idxline].replace('\t', '').replace('<name>', '').replace('</name>', '').replace('\n','')
            if not self.name.startswith('TS'):
                self.name = 'TS_' + self.name
            print('Type: %s | Name: %s' % (self.as_type, self.name))
        if '<coordinates>' in self.kml_lines[idxline]:
            self.coordinates_kml = self.kml_lines[idxline + 1].replace('\t', '').replace('\n', '')
            break
    # start conversion to airspace format
    """ AC A
        AN TS_Erzgeb
        AL FL98
        AH FL99
        DP 50:26:22 N 012:17:59 E
        DP 50:25:25 N 012:18:26 E
        DP 50:24:40 N 012:19:01 E
        DP 50:24:06 N 012:19:46 E"""

    # AC A
    self.txt_lines.append('AC %s\n' % self.as_type)
    # AN TS_Erzgeb
    self.txt_lines.append('AN %s\n' % self.name)
    # heights
    self.txt_lines.append('AL FL98\n')
    self.txt_lines.append('AH FL99\n')
    # coordinates
    for coo_pt in self.coordinates_kml.split(' ')[:-1]:
        # Target format: DP 50:26:22 N 012:17:59 E
        lat_long = coo_pt.split(',')
        # latitude
        latDecAsStr = lat_long[1].split('.')
        #if '.' not in latDecAsStr: # take care of case "51" instead of "51.123456"
        # latDecAsStr += '.000000'
        lat_degree = abs(int(latDecAsStr[0]))
        #print(f'latDecAsStr {latDecAsStr}')
        if len(latDecAsStr)==1:
            latDecAsStr.append('0')
        lat_secondDec = (float('0.' + latDecAsStr[1])*60) % 1
        lat_minute = round((float('0.' + latDecAsStr[1])*60) - lat_secondDec)
        lat_second = round(lat_secondDec*60)
        cooString = ('DP %02d:%02d:%02d' %(lat_degree,lat_minute,lat_second))
        if latDecAsStr[0].startswith('-'):
            cooString += ' S'
        else:
            cooString += ' N'
        # longitude
        #print(f'converting lat_long {lat_long}')
        # take care of case: no decimal sign included, case "11" instead of "11.123456"
        if '.' not in lat_long[0]:
            lat_long[0] += '.0'
        lonDecAsStr = lat_long[0].split('.')
        lon_degree = abs(int(lonDecAsStr[0]))
        lon_secondDec = (float('0.' + lonDecAsStr[1]) * 60) % 1
        lon_minute = round((float('0.' + lonDecAsStr[1]) * 60) - lon_secondDec)
        lon_second = round(lon_secondDec * 60)
        cooString += (' %03d:%02d:%02d' % (lon_degree, lon_minute, lon_second))
        if lonDecAsStr[0].startswith('-'):
            cooString += ' W'
        else:
            cooString += ' E'
        cooString += '\n'
        self.txt_lines.append(cooString)
[ "def kml_2_open_airspace_and_json_format(self, full_path):\n # read file\n f = open(full_path,'r')\n kml = f.readlines()\n f.close()\n # find airspaces\n \"\"\"Placemark >\n < name > Bremen - Blumenthal\n Thermikplatte < / name >\n < styleUrl > # inline10</styleUrl>\n < Polygon >\n < tessellate > 1 < / tessellate >\n < outerBoundaryIs >\n < LinearRing >\n < coordinates >\n 8.529121049900063, 53.19549566929423, 0\n 8.52324583919868, 53.21131939607898, 0\n 8.545439298799483, 53.23055800702935, 0\n 8.588991466114615, 53.23047069814625, 0\n 8.575289966189502, 53.20745451706468, 0\n 8.560633120477348, 53.19724609335408, 0\n 8.529121049900063, 53.19549566929423, 0\n < / coordinates >\n \n < / LinearRing >\n < / outerBoundaryIs >\n < / Polygon >\n < / Placemark >\"\"\"\n container = []\n idxLine = 0\n did_not_pass_main_folder = True\n list_of_airspace_types_included = []\n while idxLine < len(kml):\n #print(kml[idxLine])\n #if '<Folder>' in kml[idxLine] and did_not_pass_main_folder:\n # # we have to jump over the first folder\n # print(f'Reading everything inside folder: {kml[idxLine]}')\n # did_not_pass_main_folder = False\n if '<Folder>' in kml[idxLine]: # begin of airspace\n as_type = kml[idxLine+1].replace('\\t','').replace('<name>','').replace('</name>\\n','') # <name>B</name>\n print('Reading AS-types: ' + as_type)\n list_of_airspace_types_included.append(as_type)\n #if not (as_type == 'A' or as_type == 'B'):\n # print('#### Check Folder / Airspace Types, must be \"A\" or \"B\" and try again (current %s)' % as_type)\n # msgbox('Check Folder / Airspace Types, are not \"A\" or \"B\" (current %s). Airspace E will be used for export.' % as_type)\n # as_type = 'E'\n\n if '<Placemark' in kml[idxLine]: # begin of airspace\n container = []\n if '</Placemark' in kml[idxLine]: # end of airspace\n # make sure only Polygons are stored\n for as_line in container:\n if '<Polygon>' in as_line:\n idx_lookAt_start = None\n for idx, line_of_container in enumerate(container):\n if \"<LookAt>\" in line_of_container:\n idx_lookAt_start = idx\n if \"</LookAt>\" in line_of_container:\n idx_lookAt_end = idx\n # Remove lookAt lines if necessary\n if idx_lookAt_start:\n container = container[0:idx_lookAt_start] + container[idx_lookAt_end+1::] # cut out look at part\n # append airspace to airspace list as airspace class\n self.airspaces.append(Airspace(lines=container, file_type='kml', as_type=as_type))\n container.append(kml[idxLine])\n idxLine += 1\n print('Loaded %d airspaces from KML-file (%s)' %(len(self.airspaces),full_path))\n # summary\n outlines = ['* KML conversion file, rename this line']\n json_dict = {\"circles\": [], \"polygons\": []}\n for airspace in self.airspaces:\n # prepare open-airspace formate\n outlines.append('\\n\\n') # separate airspaces\n outlines.extend(airspace.txt_lines)\n # prepare json\n json_dict['polygons'].append(airspace.json_dict)\n\n # write open airspace format\n target_path = full_path[:-4] + '_converted.txt'\n # uisave dialog\n\n target_path = filesavebox(default=target_path, filetypes=\"*.txt\")\n if target_path is None:\n print('Airspace conversion was aborted by the user')\n quit()\n\n f = open(target_path,'w')\n f.writelines(outlines)\n f.close()\n print('Result was written to: %s' % target_path)\n\n # write json:\n target_path_json = target_path[:-4] + '.json'\n\n json_string = json.dumps(json_dict)\n json_file = open(target_path_json, \"w\")\n json_file.write(json_string)\n json_file.close()\n\n # write list of airspace files for index.html for leaflet map\n print('The 
following airspace types have been converted:')\n print(list_of_airspace_types_included)", "def airports(osm_path): \n return (retrieve(osm_path,'multipolygons',['aeroway'],**{'aeroway':[\"='aerodrome'\"]})).rename(columns={'aeroway': 'asset'})", "def make_kml_format(self,kml_template):\n if self.as_type == 'A':\n self.kml_lines = kml_template['good_subdivided']['placemark']\n elif self.as_type == 'B':\n self.kml_lines = kml_template['bad_subdivided']['placemark']\n else:\n print('Unknown airspace type')\n # get idx of name and coordinates\n idxLine = 0\n while idxLine < len(self.kml_lines):\n #print(self.kml_lines[idxLine]\n if self.kml_lines[idxLine].startswith('\\t\\t\\t\\t<name>'): # begin of airspace\n idx_name = idxLine\n if '\\t\\t\\t\\t\\t\\t\\t<coordinates>\\n' in self.kml_lines[idxLine]: # begin of airspace\n idx_coordinates = idxLine+1\n idxLine += 1\n # transform coordinates\n # add all coordinates: Format is:\n # source: 'DP 50:26:22 N 012:17:59 E\\n'\n # target: 9.025830271397426,53.46493577242719,0 8.986157446488383,53.46952117358134,0\n coo_list = [] # collect list of coorinates as strings\n for line in self.txt_lines:\n if line.startswith('AN'):\n self.name = line[3:].replace('\\n','')\n self.kml_lines[idx_name] = '\\t\\t\\t\\t<name>%s</name>\\n' % self.name\n\n if line.startswith('DP'):\n # lon\n lon_deg = float(line[14:17])\n lon_min = float(line[18:20])\n lon_sec = float(line[21:23])\n lon_dec = (lon_sec / 60 + lon_min) / 60 + lon_deg\n if line[24] == 'W':\n lon_dec *= -1 # negative if west\n # lat\n lat_deg = float(line[3:5])\n lat_min = float(line[6:8])\n lat_sec = float(line[9:11])\n lat_dec = (lat_sec / 60 + lat_min) / 60 + lat_deg\n if line[12] == 'S':\n lat_dec *= -1 # negative if west\n # attach coordinates\n coo_list.append('%1.16f,%1.16f,0 ' % (lon_dec,lat_dec))\n # store for later plotting\n self.lat_dec.append(lat_dec)\n self.lon_dec.append(lon_dec)\n\n # make sure that shape is closed --> first an last point must be the same\n if coo_list[0] != coo_list[-1]:\n coo_list.append(coo_list[0])\n self.lat_dec.append(self.lat_dec[0])\n self.lon_dec.append(self.lon_dec[0])\n\n # write coordinate strings into kml\n self.kml_lines[idx_coordinates] = '\\t\\t\\t\\t\\t\\t\\t\\t' # is prefix. 
Coordinates to be added as string below\n for pt in coo_list:\n self.kml_lines[idx_coordinates] += pt\n print('Converted airspace %s' % self.name)", "def buildOpAmpText(self,opamp):\n\t\tpinDict = {}\n\t\tpinDict['notUsed1'] = opamp.pinList[0]\n\t\tpinDict['negIn'] = opamp.pinList[1]\n\t\tpinDict['plusIn'] = opamp.pinList[2]\n\t\tpinDict['negSupply'] = opamp.pinList[3]\n\t\tpinDict['flag'] = opamp.pinList[4]\n\t\tpinDict['plusSupply'] = opamp.pinList[5]\n\t\tpinDict['out'] = opamp.pinList[6]\n\t\tpinDict['notUsed2'] = opamp.pinList[7]\n\t\topAmpNodeString = '%d %d %d %d %d' % (pinDict['plusIn'].Node.number,pinDict['negIn'].Node.number,pinDict['out'].Node.number,pinDict['plusSupply'].Node.number,pinDict['negSupply'].Node.number)\t\t\n\t\topAmpID = 'X%d' % id(opamp)\n\t\tsubCktID = 'opamp2'\n\t\tsubCktFileName = '%s.txt' % opamp.technicalName\n\t\tfin = open(subCktFileName)\n\t\topAmpSubCkt = fin.read()\n\t\topAmpCard = '%s %s %s\\n' % (opAmpID,opAmpNodeString,subCktID)\n\t\tfin.close()\n\t\treturn (opAmpCard,opAmpSubCkt)", "def create_an_ipv4_object_from_a_arista_output_command(context) -> None:\n\n context.object_05 = _arista_ipv4_converter(\n hostname=\"leaf03\",\n plateform=\"eos\",\n cmd_output=open_file(\n path=f\"{FEATURES_OUTPUT_PATH}arista_show_ip_interface.json\"\n ),\n filters={\n \"get_loopback\": True,\n \"get_physical\": True,\n \"get_vlan\": True,\n \"get_peerlink\": False,\n \"get_vni\": False\n }\n )", "def dump_to_amiitools(dump):\r\n internal = bytearray(dump)\r\n internal[0x000:0x008] = dump[0x008:0x010]\r\n internal[0x008:0x028] = dump[0x080:0x0A0]\r\n internal[0x028:0x04C] = dump[0x010:0x034]\r\n internal[0x04C:0x1B4] = dump[0x0A0:0x208]\r\n internal[0x1B4:0x1D4] = dump[0x034:0x054]\r\n internal[0x1D4:0x1DC] = dump[0x000:0x008]\r\n internal[0x1DC:0x208] = dump[0x054:0x080]\r\n return internal", "def igra2_ascii_to_dataframe(file=''):\n if debug:\n print(\"Running igra2_ascii_to_dataframe for: \", file) \n \n data = check_read_file(file=file, read=True)\n #source_file = [l for l in file.split('/') if '.txt' in l][0]\n read_data = [] # Lists containing the raw data from the ascii file, and the observation dates\n \"\"\" Data to be extracted and stored from the igra2 station files \n Some info is contained in the header of each ascent, some in the following data \"\"\"\n\n \"\"\" Initialize the variables that can be read from the igra2 files \"\"\"\n ident,year,month,day,hour,reltime,p_src,np_src,lat, lon = np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan \n lvltyp1,lvltyp2,etime,press,pflag,gph,zflag,temp,tflag,rh,dpdep,wdir,wspd = np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan # initialize to zeros\n stations_id = []\n idate = np.nan\n count = 0\n head_count = 0\n \n obs_id = 0\n \n def make_release_time(date_time, hour, release):\n \"\"\" build a sonde release time \n ex 2019 02 20 00 2349 \n ex 2019 01 10 00 0011 \n They round the release time to the closest hour. 
\n It can be the same day or the following !!!\n date_time = date_time pytohn object, \n date, time, release = original strings \n \"\"\"\n release_h = int(release[:2])\n release_m = int(release[2:4])\n \n if release_h == 99:\n return 0 #largest integer number int 64 \n \n else:\n if release_m == 99:\n release_m = 0\n release_date_time = date_time.replace(hour= release_h, minute= release_m) \n \n \"\"\" Here, I have to subtract one day to the release time stamp if the hour of the time stamp is in th evening,\n but the nominal time is reported at midnight hence in the following day. For example 2019 02 20 00 2349 from file VMM00048820 \"\"\"\n if hour == '00':\n if release_h > 20:\n release_date_time = release_date_time - timedelta(days=1)\n else:\n pass\n \n return release_date_time \n \n \n for i, line in enumerate(data):\n if line[0] == '#':\n head_count = head_count +1 \n # Info from the Header line of each ascent \n ident = line[1:12] # station identifier\n ident = ident[6:12]\n if ident not in stations_id:\n stations_id.append(ident)\n \n year = line[13:17] # year, months, day, hour of the observation\n month = line[18:20]\n day = line[21:23]\n hour = line[24:26] \n reltime = line[27:31] # release time of the sounding.\n numlev = int(line[32:36]) # number of levels in the sounding == number of data recorded in the ascent\n p_src = line[37:45] # data source code for the pressure levels \n np_src = line[46:54] # data source code for non-pressure levels\n lat = int(line[55:62]) / 10000. # latitude and longitude\n lon = int(line[63:71]) / 10000.\n #observation_id = i\n if int(hour) == 99:\n time = reltime + '00'\n else:\n time = hour + '0000'\n \n if '99' in time:\n time = time.replace('99', '00')\n\n idate = datetime.strptime(year + month + day + time, '%Y%m%d%H%M%S') # constructed according to CDM\n \n release_time = make_release_time(idate, hour, reltime) # making the release time \n \n \n iday = int(year + month + day)\n count = count + 1\n else:\n # Data of each ascent\n lvltyp1 = int(line[0]) # 1- 1 integer major level type indicator\n lvltyp2 = int(line[1]) # 2- 2 integer minor level type indicator\n etime = int(line[3:8]) # 4- 8 integer elapsed time since launch\n press = int(line[9:15]) # 10- 15 integer reported pressure\n \n if press == -9999:\n press = np.nan\n \n pflag = line[15] # 16- 16 character pressure processing flag\n \n gph = int(line[16:21]) # 17- 21 integer geopotential height [m]\n \n if gph == -9999 or gph == -8888: # reading the values andh check if they are missing or removed as -9999 or -8888 before dividing by 10 as the instructions say \n gph = np.nan # 23- 27 integer temperature, [Celsius to Kelvin ] \n \n zflag = line[21] # 22- 22 character gph processing flag, \n \n temp = int(line[22:27]) \n if temp != -9999 and temp != -8888: # reading the values andh check if they are missing or removed as -9999 or -8888 before dividing by 10 as the instructions say \n temp = temp / 10. + 273.15 # 23- 27 integer temperature, [Celsius to Kelvin ] \n else:\n temp = np.nan \n \n tflag = line[27] # 28- 28 character temperature processing flag\n \n rh = int(line[28:33]) # 30- 34 integer relative humidity [%] \n if rh != -8888 and rh != -9999:\n rh = rh / 1000. # converting from percentage to absolute ratio \n else:\n rh = np.nan\n \n dpdp = int(line[34:39]) \n if dpdp != -9999 and dpdp !=-8888: \n dpdp = dpdp / 10. # 36- 40 integer dew point depression (degrees to tenth e.g. 
11=1.1 C) \n else:\n dpdp = np.nan \n \n wdir = int(line[40:45]) # 41- 45 integer wind direction (degrees from north, 90 = east)\n if wdir == -8888 or wdir == -9999 :\n wdir = np.nan \n \n wspd = int(line[46:51]) # 47- 51 integer wind speed (meters per second to tenths, e.g. 11 = 1.1 m/s [m/s]\n if wspd != -8888 and wspd != -9999 :\n wspd = wspd / 10. \n else:\n wspd = np.nan \n if reltime == 9999.0:\n reltime = np.nan \n \n z_type = np.nan\n if not (np.isnan(press)):\n z_type = 1\n elif (np.isnan(press) and not np.isnan(gph) ) :\n z_type = 2 \n \n for value,var in zip([gph, temp, wspd, wdir, rh, dpdp], ['gph', 'temperature', 'wind_speed', 'wind_direction', 'relative_humidity' , 'dew_point_depression'] ):\n obs_id = obs_id +1 \n if not np.isnan(press): # when pressure is available, z_coord== pressure and z_type==1 \n z_type = 1 \n read_data.append ( ( 'IGRA2'.rjust(10), head_count, int(obs_id), idate, iday, ident, lat, lon, press, value, cdmvar_dic[var]['cdm_var'], int(cdmvar_dic[var]['cdm_unit']), numlev, z_type, release_time ) )\n elif (np.isnan(press) and not np.isnan(gph) ) : # when pressure is not available, z_coord== gph and z_type==2 \n z_type = 2 \n read_data.append ( ( 'IGRA2'.rjust(10), head_count, int(obs_id), idate, iday, ident, lat, lon, gph, value, cdmvar_dic[var]['cdm_var'], int(cdmvar_dic[var]['cdm_unit']), numlev, z_type, release_time ) )\n else:\n z_type = -2147483648 \n read_data.append ( ( 'IGRA2'.rjust(10), head_count, int(obs_id), idate, iday, ident, lat, lon, press, value, cdmvar_dic[var]['cdm_var'], int(cdmvar_dic[var]['cdm_unit']), numlev, z_type, release_time ) )\n\n\n df = pd.DataFrame(data= read_data, columns= column_names_igra2)\n \n df['observation_id'] = np.chararray.zfill( (df['observation_id'].astype(int)) .astype('S'+str(id_string_length ) ), id_string_length ) #converting to fixed length bite objects \n df['report_id'] = np.chararray.zfill( (df['report_id'].astype(int)).astype ('S'+str(id_string_length ) ), id_string_length )\n \n df = df.replace([-999.9, -9999, -999, -999.0, -99999.0, -99999.9, 99999.0, -99999.00 ], np.nan)\n \n df = df.sort_values(by = ['record_timestamp', 'vertco_reference_1@body' ] ) # FF check here !!!! \n \n return df, stations_id", "def _convert():", "def isochrone_to_aa(*args, **kwargs):\n return isochrone_xv_to_aa(*args, **kwargs)", "def to_AIR(self):\n container_air = [c.to_AIR() for c in self.containers]\n var_air = [v.to_AIR() for v in self.variables]\n types_air = [t.to_AIR() for t in self.types]\n\n # TODO actually create AIR objects for AIR -> GrFN\n # I think the AIR intermediate structure will need to change\n # to reflect changes in grfn such as typing.\n # C, V, T, D = dict(), dict(), dict(), dict()\n\n return {\"containers\": container_air, \"variables\": var_air, \"types\": types_air}", "def ros_to_aircraft(x_ros):\n if x_ros.ndim > 1:\n x_aircraft = numpy.zeros(x_ros.shape)\n for idx, x in enumerate(x_ros):\n x_aircraft[i] = ros_to_aircraft(x)\n return x_aircraft\n\n x_ros[1] *= -1.0\n x_ros[2] *= -1.0\n return x_ros", "def convert(self):", "def export_db_macserial(db, path, year):\n\n with open(path, 'w') as fh:\n print('#ifndef GENSERIAL_MODELINFO_AUTOGEN_H', file=fh)\n print('#define GENSERIAL_MODELINFO_AUTOGEN_H\\n', file=fh)\n print('// DO NOT EDIT! 
This is an autogenerated file.\\n', file=fh)\n print('#include \"macserial.h\"\\n', file=fh)\n\n print('typedef enum {', file=fh)\n\n for info in db:\n print(' {}, // {}'.format(\n info['SystemProductName'].replace(',', '_'),\n info['Specifications']['CPU'][0]\n ), file=fh)\n\n print('} AppleModel;\\n', file=fh)\n print('#define APPLE_MODEL_MAX {}\\n'.format(len(db)), file=fh)\n\n print('static PLATFORMDATA ApplePlatformData[] = {', file=fh)\n for info in db:\n print(' {{ \"{}\", \"{}\" }},'.format(\n info['SystemProductName'],\n info['SystemSerialNumber']\n ), file=fh)\n\n print('};\\n', file=fh)\n\n print('#define APPLE_MODEL_CODE_MAX {}'.format(max(len(info['AppleModelCode']) for info in db)), file=fh)\n print('static const char *AppleModelCode[][APPLE_MODEL_CODE_MAX] = {', file=fh)\n\n for info in db:\n print(' /* {:14} */ {{\"{}\"}},'.format(\n info['SystemProductName'],\n '\", \"'.join(info['AppleModelCode'])\n ), file=fh)\n\n print('};\\n', file=fh)\n\n print('#define APPLE_BOARD_CODE_MAX {}'.format(max(len(info['AppleBoardCode']) for info in db)), file=fh)\n print('static const char *AppleBoardCode[][APPLE_BOARD_CODE_MAX] = {', file=fh)\n\n for info in db:\n print(' /* {:14} */ {{\"{}\"}},'.format(\n info['SystemProductName'],\n '\", \"'.join(info['AppleBoardCode'])\n ), file=fh)\n\n print('};\\n', file=fh)\n\n print('#define APPLE_MODEL_YEAR_MAX {}'.format(max(len(info['AppleModelYear']) for info in db)), file=fh)\n print('static uint32_t AppleModelYear[][APPLE_MODEL_YEAR_MAX] = {', file=fh)\n for info in db:\n print(' /* {:14} */ {{{}}},'.format(\n info['SystemProductName'],\n ', '.join(str(year) for year in info['AppleModelYear'])\n ), file=fh)\n\n print('};\\n', file=fh)\n\n print('static uint32_t ApplePreferredModelYear[] = {', file=fh)\n for info in db:\n print(' /* {:14} */ {},'.format(\n info['SystemProductName'],\n info.get('MacserialModelYear', 0)\n ), file=fh)\n\n print('};\\n', file=fh)\n\n print('#endif // GENSERIAL_MODELINFO_AUTOGEN_H', file=fh)", "def xephemFormat(self):\n line = []\n #Field 1: names\n names = [self.getName()]\n identifiers = self.getIdentifiers()\n if identifiers[0] is not None:\n names.append(identifiers[0])\n for i in range(1,4):\n if identifiers[i] is not None:\n names.extend(identifiers[i])\n line.append(\"|\".join(names))\n\n #Field 2: type designation\n objType = self.getType()\n if objType in (\"Galaxy Pair\", \"Galaxy Triplet\", \"Group of galaxies\"):\n line.append(\"f|A\")\n elif objType == \"Globular Cluster\":\n line.append(\"f|C\")\n elif objType == \"Double star\":\n line.append(\"f|D\")\n elif objType in (\"HII Ionized region\", \"Nebula\"):\n line.append(\"f|F\")\n elif objType == \"Galaxy\":\n if self.getHubble().startswith(\"S\"):\n line.append(\"f|G\")\n else:\n line.append(\"f|H\")\n elif objType == \"Dark Nebula\":\n line.append(\"f|K\")\n elif objType in (\"Emission Nebula\", \"Reflection Nebula\"):\n line.append(\"f|N\")\n elif objType in (\"Association of stars\", \"Open Cluster\"):\n line.append(\"f|O\")\n elif objType == \"Planetary Nebula\":\n line.append(\"f|P\")\n elif objType == \"Supernova remnant\":\n line.append(\"f|R\")\n elif objType == \"Star\":\n line.append(\"f|S\")\n elif objType == \"Star cluster + Nebula\":\n line.append(\"f|U\")\n else:\n line.append(\"f\")\n\n #Field 3: Right Ascension\n line.append(self.getRA())\n\n #Field 4: Declination\n line.append(self.getDec())\n\n #Field 5: Magnitude\n #We use the first available magnitude in the sequence b,v,j,h,k\n for mag in self.getMagnitudes():\n if mag is not 
None:\n line.append(str(mag))\n break\n\n #Field 6: optional Epoch, we let it empty\n line.append(\"\")\n\n #Field 7: Dimensions\n dimensions = []\n #Xephem format wants axes espressed in arcsec, we have arcmin\n for value in (self.getDimensions()[0],self.getDimensions()[1]):\n if value is not None:\n dimensions.append(str(value*60))\n else:\n dimensions.append(\"\")\n if self.getDimensions()[2] is not None:\n dimensions.append(str(value))\n else:\n dimensions.append(\"\")\n line.append(\"|\".join(dimensions))\n\n return \",\".join(line)", "def make_aws(in_file, out_file):\n header = (\n (\n '\"programNumber\";\"platformId\";\"platformType\";\"platformModel\";'\n '\"platformName\";\"platformHexId\";\"satellite\";\"bestMsgDate\";\"duration\";'\n '\"nbMessage\";\"message120\";\"bestLevel\";\"frequency\";\"locationDate\";'\n '\"latitude\";\"longitude\";\"altitude\";\"locationClass\";\"gpsSpeed\";'\n '\"gpsHeading\";\"latitude2\";\"longitude2\";\"altitude2\";\"index\";\"nopc\";'\n '\"errorRadius\";\"semiMajor\";\"semiMinor\";\"orientation\";\"hdop\";'\n '\"bestDate\";\"compression\";\"type\";\"alarm\";\"concatenated\";\"date\";'\n '\"level\";\"doppler\";\"rawData\"'\n )\n .replace('\"', \"\")\n .split(\";\")\n )\n empty_row = [\"\"] * len(header)\n with csv23.open(in_file, \"r\") as csv_file:\n csv_reader = csv.reader(csv_file)\n with csv23.open(out_file, \"w\") as csv_file2:\n csv_writer = csv.writer(csv_file2, delimiter=\";\", quoting=csv.QUOTE_ALL)\n csv23.write(csv_writer, header)\n next(csv_reader) # throw away the header\n for row in csv_reader:\n row = csv23.fix(row)\n date = fix_date(row[1])\n lat = fix_lat(row[4])\n lon = fix_lon(row[5])\n if not date or not lat or not lon:\n continue # we must have a date/lat/long\n new_row = list(empty_row)\n new_row[0] = \"2433\"\n new_row[1] = row[0]\n new_row[7] = date\n new_row[13] = date\n new_row[17] = row[2] # LC\n new_row[23] = row[3] # IQ\n new_row[14] = lat\n new_row[15] = lon\n new_row[20] = fix_lat(row[6]) # Lat2\n new_row[21] = fix_lon(row[7]) # Lon2\n new_row[9] = int(\"0{0}\".format(row[8])) # Nb mes\n new_row[10] = int(\"0{0}\".format(row[9])) # Nb mes>-120dB\n new_row[11] = fix_level(row[10]) # Best level\n new_row[8] = fix_duration(row[11]) # Pass duration\n new_row[24] = fix_nopc(row[12]) # NOPC\n new_row[12] = fix_freq(row[13]) # Calcul freq\n new_row[16] = fix_alt(row[14]) # Altitude\n new_row[38] = \"06A88\" # junk to get it to process in the DB.\n csv23.write(csv_writer, new_row)", "def mac_ntoa(mac):\n return '%.2x:%.2x:%.2x:%.2x:%.2x:%.2x' % tuple(map(ord, list(mac)))", "def _convert_om5(self,):\n om5 = self.stage_dir + 'query.om5'\n convert_tmpl = Template('osmconvert $raw_osm -o=$om5')\n convert_cmd = convert_tmpl.safe_substitute({'raw_osm': self.raw_osm, 'om5': om5})\n proc = subprocess.Popen(convert_cmd, shell=True, executable='/bin/bash',\n stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n (stdout, stderr) = proc.communicate()\n returncode = proc.wait()\n if (returncode != 0):\n logger.error('%s', stderr)\n raise Exception, \"osmconvert process failed with returncode {0}: {1}\".format(returncode, stderr)\n return om5", "def operator_aircraft_info(self, apath):\r\n opfolder_path = apath.split(\"0 NEW\")[-1]\r\n opfolder = opfolder_path.replace(\"/\", \"\")\r\n opfolder = opfolder.replace(\"\\\\\", \"\")\r\n opfolder = opfolder.split(\" \")\r\n operator = opfolder[0].strip()\r\n aircraft = opfolder[1].strip()\r\n return operator, aircraft", "def aircraft(lcd_mono: LcdInfo):\n from dcspy.aircraft import Aircraft\n return 
Aircraft(lcd_mono)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
uses template in order to make kml format
def make_kml_format(self,kml_template): if self.as_type == 'A': self.kml_lines = kml_template['good_subdivided']['placemark'] elif self.as_type == 'B': self.kml_lines = kml_template['bad_subdivided']['placemark'] else: print('Unknown airspace type') # get idx of name and coordinates idxLine = 0 while idxLine < len(self.kml_lines): #print(self.kml_lines[idxLine] if self.kml_lines[idxLine].startswith('\t\t\t\t<name>'): # begin of airspace idx_name = idxLine if '\t\t\t\t\t\t\t<coordinates>\n' in self.kml_lines[idxLine]: # begin of airspace idx_coordinates = idxLine+1 idxLine += 1 # transform coordinates # add all coordinates: Format is: # source: 'DP 50:26:22 N 012:17:59 E\n' # target: 9.025830271397426,53.46493577242719,0 8.986157446488383,53.46952117358134,0 coo_list = [] # collect list of coorinates as strings for line in self.txt_lines: if line.startswith('AN'): self.name = line[3:].replace('\n','') self.kml_lines[idx_name] = '\t\t\t\t<name>%s</name>\n' % self.name if line.startswith('DP'): # lon lon_deg = float(line[14:17]) lon_min = float(line[18:20]) lon_sec = float(line[21:23]) lon_dec = (lon_sec / 60 + lon_min) / 60 + lon_deg if line[24] == 'W': lon_dec *= -1 # negative if west # lat lat_deg = float(line[3:5]) lat_min = float(line[6:8]) lat_sec = float(line[9:11]) lat_dec = (lat_sec / 60 + lat_min) / 60 + lat_deg if line[12] == 'S': lat_dec *= -1 # negative if west # attach coordinates coo_list.append('%1.16f,%1.16f,0 ' % (lon_dec,lat_dec)) # store for later plotting self.lat_dec.append(lat_dec) self.lon_dec.append(lon_dec) # make sure that shape is closed --> first an last point must be the same if coo_list[0] != coo_list[-1]: coo_list.append(coo_list[0]) self.lat_dec.append(self.lat_dec[0]) self.lon_dec.append(self.lon_dec[0]) # write coordinate strings into kml self.kml_lines[idx_coordinates] = '\t\t\t\t\t\t\t\t' # is prefix. Coordinates to be added as string below for pt in coo_list: self.kml_lines[idx_coordinates] += pt print('Converted airspace %s' % self.name)
[ "def generate_document_kml(self, title, content):\n return \"\"\"\\\n<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<kml xmlns=\"http://earth.google.com/kml/2.1\">\n <Document>\n <name>%s</name>\n <description></description>\n <Style>\n <ListStyle id=\"hideChildren\">\n <listItemType>checkHideChildren</listItemType>\n </ListStyle>\n </Style>\n%s\n </Document>\n</kml>\"\"\" % (title.replace('\\\\','/'), content)", "def json_to_kml():", "def generate_map(self):\r\n case_details = TollsCase.get_case_details(self.case_id)\r\n kml_header = \"\"\"\r\n <?xml version=\"1.0\" encoding=\"UTF-8\" ?>\r\n <kml xmlns=\"http://www.opengis.net/kml/2.2\" xmlns:gx=\"http://www.google.com/kml/ext/2.2\">\r\n <Document>\r\n <name>{casenum} ({agency}) :: {targetnum}</name>\r\n <Snippet maxLines=\"1\">\r\n <![CDATA[ {casenum} ]]>\r\n </Snippet>\r\n <open>1</open>\r\n <Style>\r\n <IconStyle>\r\n <Icon />\r\n </IconStyle>\r\n <BalloonStyle>\r\n <text>\r\n <![CDATA[ $[description] ]]>\r\n </text>\r\n </BalloonStyle>\r\n </Style>\r\n <description>\r\n <![CDATA[ {casenum} ({agency}) location data for target number {targetnum}.\r\n Prepared for {agent} by {analyst}. ]]>\r\n </description>\r\n <StyleMap id=\"Map1\">\r\n <Pair>\r\n <key>normal</key>\r\n <styleUrl>#NormalMap1</styleUrl>\r\n </Pair>\r\n <Pair>\r\n <key>highlight</key>\r\n <styleUrl>#HighlightMap1</styleUrl>\r\n </Pair>\r\n </StyleMap>\r\n <Style id=\"NormalMap1\">\r\n <IconStyle>\r\n <scale>1</scale>\r\n <Icon>\r\n <href>http://maps.google.com/mapfiles/kml/paddle/ylw-blank.png</href>\r\n </Icon>\r\n <color>FF00FFFF</color>\r\n </IconStyle>\r\n <LabelStyle>\r\n <color>FFFFFFFF</color>\r\n <scale>1</scale>\r\n </LabelStyle>\r\n <LineStyle>\r\n <color>FFFF00FF</color>\r\n <width>2</width>\r\n </LineStyle>\r\n <PolyStyle>\r\n <fill>0</fill>\r\n <outline>1</outline>\r\n <color>00FF00FF</color>\r\n </PolyStyle>\r\n <BalloonStyle>\r\n <text>\r\n <![CDATA[ $[description] ]]>\r\n </text>\r\n </BalloonStyle>\r\n </Style>\r\n <Style id=\"HighlightMap1\">\r\n <IconStyle>\r\n <scale>1.1</scale>\r\n <Icon>\r\n <href>http://maps.google.com/mapfiles/kml/paddle/ylw-blank.png</href>\r\n </Icon>\r\n <color>FF00FFFF</color>\r\n </IconStyle>\r\n <LabelStyle>\r\n <color>FFFFFFFF</color>\r\n <scale>1.1</scale>\r\n </LabelStyle>\r\n <LineStyle>\r\n <color>FFFF00FF</color>\r\n <width>3</width>\r\n </LineStyle>\r\n <PolyStyle>\r\n <fill>0</fill>\r\n <outline>1</outline>\r\n <color>70FF00FF</color>\r\n </PolyStyle>\r\n <BalloonStyle>\r\n <text>\r\n <![CDATA[ $[description] ]]>\r\n </text>\r\n </BalloonStyle>\r\n </Style>\"\"\".format(casenum=self.xml_safe(case_details['Case Number']),\r\n targetnum=self.xml_safe(case_details['Target Number']),\r\n agency=self.xml_safe(case_details['Agency']),\r\n agent=self.xml_safe(case_details['Agent']),\r\n analyst=self.xml_safe(case_details['Analyst']))\r\n kml_footer = \"\"\"\r\n </Document>\r\n </kml>\"\"\"\r\n\r\n cdrs_with_location_data = CDR.get_cdrs_with_location_data(self.case_id)\r\n\r\n with open(os.path.join(self.report_path, self.report_name), 'wb') as f:\r\n f.write(self.strip_whitespace(kml_header))\r\n\r\n for cdr_id in cdrs_with_location_data:\r\n cdr_details = CDR.get_cdr_details(cdr_id, self.case_id)\r\n tower_details = Tower.get_tower_location(self.case_id,\r\n cdr_details['Cell Site ID'],\r\n cdr_details['Sector'])\r\n # not an essential feature -- for future expansion\r\n # timespan_data = ''\r\n # if cdr_details['Start Date'] and cdr_details['Start Date'].strip() != '' and cdr_details['End Date'] \\\r\n # and 
cdr_details['End Date'].strip() != '':\r\n # timespan_data = \"\"\"\r\n # <TimeSpan>\r\n # <begin>2013-06-15T09:20:00Z</begin>\r\n # <end>2013-06-15T09:31:00Z</end>\r\n # </TimeSpan>\"\"\"\r\n\r\n placemark_data = \"\"\"\r\n <Placemark>\r\n <name><![CDATA[ {pk} ]]></name>\r\n <Snippet maxLines=\"0\" />\r\n <styleUrl>#Map1</styleUrl>\r\n <ExtendedData />\r\n <LookAt>\r\n <longitude>{longitude}</longitude>\r\n <latitude>{latitude}</latitude>\r\n <range>1000</range>\r\n <altitudeMode>relativeToGround</altitudeMode>\r\n <tilt>0</tilt>\r\n <heading>0</heading>\r\n </LookAt>\r\n <Point>\r\n <altitudeMode>clampToGround</altitudeMode>\r\n <extrude>0</extrude>\r\n <coordinates>{longitude},{latitude},0</coordinates>\r\n </Point>\r\n <description>\r\n {description}\r\n </description>\r\n </Placemark>\"\"\".format(pk=cdr_id,\r\n longitude=self.xml_safe(tower_details['Longitude']),\r\n latitude=self.xml_safe(tower_details['Latitude']),\r\n description=CDR.generate_cdata(cdr_id, self.case_id))\r\n\r\n f.write(self.strip_whitespace(placemark_data))\r\n\r\n f.write(self.strip_whitespace(kml_footer))\r\n\r\n return self.report_name", "def kml(self):\n kml = simplekml.Kml()\n # Main itinerary\n geom3d = self.geom_3d.transform(4326, clone=True) # KML uses WGS84\n line = kml.newlinestring(name=self.name,\n description=plain_text(self.description),\n coords=geom3d.coords)\n line.style.linestyle.color = simplekml.Color.red # Red\n line.style.linestyle.width = 4 # pixels\n # Place marks\n for poi in self.pois:\n place = poi.geom_3d.transform(settings.API_SRID, clone=True)\n kml.newpoint(name=poi.name,\n description=plain_text(poi.description),\n coords=[place.coords])\n return kml._genkml()", "def write_kml(self,varnames):\n if type(varnames) is str:\n varnames=(varnames,)\n content=[]\n for varname in varnames:\n content.append(self.image2kml(varname))\n kml=self.__class__.kmlstr % \\\n {'content':'\\n'.join(content),\\\n 'prog':self.__class__.progname}\n f=open(self.__class__.kmlname,'w')\n f.write(kml)\n f.close()", "def cmd_template(self):", "def gen_wtml(base_dir, depth, **kwargs):\n kwargs.setdefault('FolderName', 'Toasty')\n kwargs.setdefault('BandPass', 'Visible')\n kwargs.setdefault('Name', 'Toasty map')\n kwargs.setdefault('Credits', 'Toasty')\n kwargs.setdefault('CreditsUrl', 'http://github.com/ChrisBeaumont/toasty')\n kwargs.setdefault('ThumbnailUrl', '')\n kwargs['url'] = base_dir\n kwargs['depth'] = depth\n\n template = ('<Folder Name=\"{FolderName}\">\\n'\n '<ImageSet Generic=\"False\" DataSetType=\"Sky\" '\n 'BandPass=\"{BandPass}\" Name=\"{Name}\" '\n 'Url=\"{url}/{{1}}/{{3}}/{{3}}_{{2}}.png\" BaseTileLevel=\"0\" '\n 'TileLevels=\"{depth}\" BaseDegreesPerTile=\"180\" '\n 'FileType=\".png\" BottomsUp=\"False\" Projection=\"Toast\" '\n 'QuadTreeMap=\"\" CenterX=\"0\" CenterY=\"0\" OffsetX=\"0\" '\n 'OffsetY=\"0\" Rotation=\"0\" Sparse=\"False\" '\n 'ElevationModel=\"False\">\\n'\n '<Credits> {Credits} </Credits>\\n'\n '<CreditsUrl>{CreditsUrl}</CreditsUrl>\\n'\n '<ThumbnailUrl>{ThumbnailUrl}</ThumbnailUrl>\\n'\n '<Description/>\\n</ImageSet>\\n</Folder>')\n return template.format(**kwargs)", "def initNameTemplate(self):\n\n nameTemplate = {\n \"locations\": [\"L\", \"R\", \"M\"],\n \"mirrorMap\": {\n \"L\": \"R\",\n \"R\": \"L\",\n \"M\": \"M\"\n },\n \"separator\": \"_\",\n \"types\": {\n \"default\": \"null\",\n \"Component\": \"\",\n \"ComponentGroup\": \"cmp\",\n \"ComponentInput\": \"cmpIn\",\n \"ComponentOutput\": \"cmpOut\",\n \"Container\": \"\",\n \"Control\": \"ctrl\",\n \"Curve\": 
\"crv\",\n \"HierarchyGroup\": \"hrc\",\n \"Joint\": \"def\",\n \"Layer\": \"\",\n \"Locator\": \"loc\",\n \"Space\": \"space\",\n \"CtrlSpace\": \"ctrlSpace\",\n \"OrientationConstraint\": \"oriCns\",\n \"PoseConstraint\": \"poseCns\",\n \"ParentConstraint\": \"parCns\",\n \"PositionConstraint\": \"posCns\",\n \"ScaleConstraint\": \"sclCns\",\n \"KLOperator\": \"klOp\",\n \"CanvasOperator\": \"canvasOp\"\n },\n \"formats\":\n {\n \"Container\": [\"name\"],\n \"Layer\": [\"container\", \"sep\", \"name\"],\n \"ComponentGroup\": [\"name\", \"sep\", \"location\", \"sep\", \"type\"],\n \"default\": [\"component\", \"sep\", \"location\", \"sep\", \"name\", \"sep\", \"type\"],\n \"KLOperator\": [\"component\", \"sep\", \"location\", \"sep\", \"name\", \"sep\", \"solverSource\", \"sep\", \"solverName\", \"sep\", \"type\"],\n \"CanvasOperator\": [\"component\", \"sep\", \"location\", \"sep\", \"name\", \"sep\", \"solverSource\", \"sep\", \"solverName\", \"sep\", \"type\"]\n }\n }\n\n return nameTemplate", "def KMLfldtofile(KMLfolder, filename):", "def buildkml(day):\n base = '<?xml version=\"1.0\" encoding=\"UTF-8\"?><kml xmlns=\"http://www.opengis.net/kml/2.2\"><Document><name>' + day.strftime('%Y-%m-%d Validation') + '</name>'\n end = '</Document></kml>'\n rootpath = day.strftime('data/%Y/%m/%d/')\n\n try:\n outlook1300data = fshelper.loaddata(rootpath + 'outlook_1300.json')\n outlook1630data = fshelper.loaddata(rootpath + 'outlook_1630.json')\n outlook2000data = fshelper.loaddata(rootpath + 'outlook_2000.json')\n except FileNotFoundError:\n # If there are any errors while loading data, don't build KML and exit early\n return\n\n maxkey = outlook1300data['probabilistic']['tornado']['max']\n o13 = outlook1300data['probabilistic']['tornado'][maxkey]\n\n maxkey = outlook1630data['probabilistic']['tornado']['max']\n o16 = outlook1630data['probabilistic']['tornado'][maxkey]\n\n maxkey = outlook2000data['probabilistic']['tornado']['max']\n o20 = outlook2000data['probabilistic']['tornado'][maxkey]\n\n outlook1300 = buildpolygon('1300z Outlook', o13)\n outlook1630 = buildpolygon('1630z Outlook', o16)\n outlook2000 = buildpolygon('2000z Outlook', o20)\n\n kml = ''.join([base, outlook1300, outlook1630, outlook2000, end])\n\n with open(rootpath + 'validation.kml', 'w') as textfile:\n textfile.write(kml)", "def render(data_dict, template=None):", "def make_template():\n raise NotImplementedError", "def _get_uml_template(proto_module: ModuleType) -> str:\n classes, relationships = _process_module(proto_module)\n uml_template = Template(\"\"\"\ndigraph \"Protobuf UML class diagram\" {\n fontname=\"Bitstream Vera Sans\"\n fontsize=10\n node[shape=record,style=filled,fillcolor=gray95,fontname=\"Bitstream Vera Sans\",fontsize=8]\n edge[fontname=\"Bitstream Vera Sans\",fontsize=8]\n\n$classes\n\n$relationships\n}\"\"\")\n return uml_template.substitute(\n classes=\"\\n\".join(classes),\n relationships=\"\\n\".join(relationships))", "def generate_template(sentence, sent_ent, kb_arr, domain_dict, node_list):\n dup_list = {\"名称\", \"地铁\", \"电话\", \"地址\", \"评分\", \"周边景点\", \"周边餐馆\", \"周边酒店\"}\n domain = \"\"\n sketch_response = []\n gold_sketch = []\n if sent_ent == []:\n sketch_response = sentence.split()\n else:\n for word in sentence.split():\n if word not in sent_ent:\n sketch_response.append(word)\n else:\n ent_type = None\n for kb_item in kb_arr:\n if word == kb_item[0]:\n try:\n for k in node_list:\n if k[0] == kb_item:\n domain = domain_dict[k[1]]\n except:\n print(k)\n print(node_list)\n assert False\n # if 
kb_item[1] is not None:\n ent_type = kb_item[1]\n break\n if ent_type in dup_list:\n ent_type = domain + '_' + ent_type\n assert ent_type is not None, print(str(word) + \" \" + str(domain))\n # sketch type就是带有@的实体属性种类标记\n sketch_response.append('@' + ent_type)\n gold_sketch.append('@' + ent_type)\n sketch_response = \" \".join(sketch_response)\n return sketch_response, gold_sketch", "def kml(self):\n kml = simplekml.Kml()\n # Main itinerary\n geom3d = self.geom_3d.transform(4326, clone=True) # KML uses WGS84\n line = kml.newlinestring(name=self.name,\n description=plain_text(self.description),\n coords=simplify_coords(geom3d.coords))\n line.style.linestyle.color = simplekml.Color.red # Red\n line.style.linestyle.width = 4 # pixels\n # Place marks\n for poi in self.published_pois:\n place = poi.geom_3d.transform(settings.API_SRID, clone=True)\n kml.newpoint(name=poi.name,\n description=plain_text(poi.description),\n coords=simplify_coords([place.coords]))\n return kml.kml()", "def format(self, rq, *args):\n return {\n 'title':'Okasha simple format templates',\n 'key1':'val1','key2':'val2','args':'/'.join(args)\n }", "def get_kml_dict(self, tx, ty_tms, tz, image_format, draworder = 0):\n d = {}\n\n d[\"south\"], d[\"west\"], d[\"north\"], d[\"east\"] = self.tileswne(tx, ty_tms, tz)\n\n image_filename = get_tile_filename(tx, ty_tms, tz, format_extension[image_format],False)\n d[\"image_filename\"] = image_filename\n d[\"image_filename\"] = d[\"image_filename\"].replace(\"\\\\\",\"/\")\n\n if self.options.url is None:\n d[\"image_url\"] = \"../../%s\" % image_filename\n else:\n d[\"image_url\"] = \"%s%s\" % (self.options.url, image_filename)\n d[\"image_url\"] = d[\"image_url\"].replace(\"\\\\\",\"/\")\n\n url = self.options.url\n if url is None:\n # Top level KML is linked from `doc.kml' and it needs different path.\n if tz == self.tminz:\n url = \"\"\n else:\n url = \"../../\"\n\n if self.options.kmz:\n extension = \"kmz\"\n else:\n extension = \"kml\"\n\n d[\"link_url\"] = \"%s%s\" % (url, get_tile_filename(tx, ty_tms, tz, extension,False))\n d[\"link_url\"] = d[\"link_url\"].replace(\"\\\\\",\"/\")\n\n d[\"minlodpixels\"] = int(self.tilesize / 2)\n d[\"maxlodpixels\"] = -1 # int(self.tilesize * 8)\n\n if tx == 0:\n d[\"draw_order\"] = draworder + 2 * tz + 1\n else:\n d[\"draw_order\"] = draworder + 2 * tz\n\n return d", "def constructKML(urls, course_name):\n\tdoc = GX.kml()\n\tfor url in urls:\n\t\t# print url #see if it prints urls \n\t\tname = url.split('/')[-1]\n\t\tpoint = points_dict.get(url)\n\t\tif point:\n\t\t\tlangt, longt = point.strip('POINT()').split(' ')\n\t\t\tpm = GX.Placemark(\n\t\t\t\tGX.name(name),\n\t\t\t\tGX.Point(\n\t\t\t\t\tGX.coordinates(langt + ',' + longt))\n\t\t\t\t)\n\t\t\tdoc.append(pm)\n\txml = lxml.etree.tostring(doc, pretty_print=True, xml_declaration=True)\n\twith open(OUTPUT_FOLDER+course_name+'.kml', 'w') as f:\n\t\tf.write(xml)", "def GetTemplateText(self, space):\r\n s = \"\"\r\n if space:\r\n s = \" \"\r\n \r\n invert = \"\"\r\n if self.Invert.Checked:\r\n invert = \"(!)\"\r\n \r\n return \"{\" + s + self.Prefix.Text + \"<\" + self.Template + \"(\" + self.TextBox.Text + \")\" + invert + \">\" + self.Postfix.Text + \"}\"" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
New() -> itkTernaryAddImageFilterID2ID2ID2ID2_Superclass Create a new object of the class itkTernaryAddImageFilterID2ID2ID2ID2_Superclass and set the input and the parameters if some named or non-named arguments are passed to that method. New() tries to assign all the non named parameters to the input of the new objects the first non named parameter in the first input, etc. The named parameters are used by calling the method with the same name prefixed by 'Set'.
def New(*args, **kargs): obj = itkTernaryAddImageFilterID2ID2ID2ID2_Superclass.__New_orig__() import itkTemplate itkTemplate.New(obj, *args, **kargs) return obj
[ "def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkAddImageFilterID2ID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkCosImageFilterID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkAddImageFilterIF2IF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkCosImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID3ID3ID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkScalarChanAndVeseSparseLevelSetImageFilterID2ID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkNotImageFilterISS2ISS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkSLICImageFilterIF2IULL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkAddImageFilterIUL2IUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkScalarChanAndVeseSparseLevelSetImageFilterIF2IF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkNotImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkSLICImageFilterIF2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkSubtractImageFilterID2ID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(itkLightObject obj) -> itkTernaryAddImageFilterID2ID2ID2ID2_Superclass
def itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args): return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args)
[ "def itkNotImageFilterIF2IF2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_Superclass_cast(obj)", "def itkNotImageFilterIUS2IUS2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_Superclass_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkSLICImageFilterIF2IULL2_Superclass *\":\n return _itkSLICImageFilterPython.itkSLICImageFilterIF2IULL2_Superclass_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_Superclass_cast(obj)", "def itkNotImageFilterISS2ISS2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_Superclass_cast(obj)", "def itkSLICImageFilterIF2IULL2_Superclass_cast(obj: 'itkLightObject') -> \"itkSLICImageFilterIF2IULL2_Superclass *\":\n return _itkSLICImageFilterPython.itkSLICImageFilterIF2IULL2_Superclass_cast(obj)", "def itkNotImageFilterIUC2IUC2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_Superclass_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkSLICImageFilterVIUS2IULL2_Superclass *\":\n return _itkSLICImageFilterPython.itkSLICImageFilterVIUS2IULL2_Superclass_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkSLICImageFilterVIUC2IULL2_Superclass *\":\n return _itkSLICImageFilterPython.itkSLICImageFilterVIUC2IULL2_Superclass_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkSLICImageFilterVISS2IULL2_Superclass *\":\n return _itkSLICImageFilterPython.itkSLICImageFilterVISS2IULL2_Superclass_cast(obj)", "def itkSLICImageFilterVIUS2IULL2_Superclass_cast(obj: 'itkLightObject') -> \"itkSLICImageFilterVIUS2IULL2_Superclass *\":\n return _itkSLICImageFilterPython.itkSLICImageFilterVIUS2IULL2_Superclass_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_Superclass_cast(obj)", "def itkSLICImageFilterVIUC2IULL2_Superclass_cast(obj: 'itkLightObject') -> \"itkSLICImageFilterVIUC2IULL2_Superclass *\":\n return _itkSLICImageFilterPython.itkSLICImageFilterVIUC2IULL2_Superclass_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkSLICImageFilterIUC2IULL2_Superclass *\":\n return _itkSLICImageFilterPython.itkSLICImageFilterIUC2IULL2_Superclass_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_Superclass_cast(obj)", "def itkSLICImageFilterVIF2IULL2_Superclass_cast(obj: 'itkLightObject') -> \"itkSLICImageFilterVIF2IULL2_Superclass *\":\n return _itkSLICImageFilterPython.itkSLICImageFilterVIF2IULL2_Superclass_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_Superclass_cast(obj)", "def itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args)", "def cast(obj: 'itkLightObject') -> \"itkSLICImageFilterVIF2IULL2_Superclass *\":\n return _itkSLICImageFilterPython.itkSLICImageFilterVIF2IULL2_Superclass_cast(obj)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
New() -> itkTernaryAddImageFilterID3ID3ID3ID3_Superclass Create a new object of the class itkTernaryAddImageFilterID3ID3ID3ID3_Superclass and set the input and the parameters if some named or non-named arguments are passed to that method. New() tries to assign all the non named parameters to the input of the new objects the first non named parameter in the first input, etc. The named parameters are used by calling the method with the same name prefixed by 'Set'.
def New(*args, **kargs): obj = itkTernaryAddImageFilterID3ID3ID3ID3_Superclass.__New_orig__() import itkTemplate itkTemplate.New(obj, *args, **kargs) return obj
[ "def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkAddImageFilterID3ID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkAddImageFilterIF3IF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkCosImageFilterID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkCosImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkScalarChanAndVeseSparseLevelSetImageFilterID3ID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkAddImageFilterIUL3IUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkSLICImageFilterIF3IULL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkScalarChanAndVeseSparseLevelSetImageFilterIF3IF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkNotImageFilterISS3ISS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkAddImageFilterIUC3IUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkNotImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkSubtractImageFilterID3ID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkSLICImageFilterIF3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(itkLightObject obj) -> itkTernaryAddImageFilterID3ID3ID3ID3_Superclass
def itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args): return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args)
[ "def itkNotImageFilterIF3IF3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_Superclass_cast(obj)", "def itkNotImageFilterISS3ISS3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_Superclass_cast(obj)", "def itkSLICImageFilterIF3IULL3_Superclass_cast(obj: 'itkLightObject') -> \"itkSLICImageFilterIF3IULL3_Superclass *\":\n return _itkSLICImageFilterPython.itkSLICImageFilterIF3IULL3_Superclass_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkSLICImageFilterIF3IULL3_Superclass *\":\n return _itkSLICImageFilterPython.itkSLICImageFilterIF3IULL3_Superclass_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_Superclass_cast(obj)", "def itkSLICImageFilterVIUS3IULL3_Superclass_cast(obj: 'itkLightObject') -> \"itkSLICImageFilterVIUS3IULL3_Superclass *\":\n return _itkSLICImageFilterPython.itkSLICImageFilterVIUS3IULL3_Superclass_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkSLICImageFilterVIUS3IULL3_Superclass *\":\n return _itkSLICImageFilterPython.itkSLICImageFilterVIUS3IULL3_Superclass_cast(obj)", "def itkNotImageFilterIUC3IUC3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_Superclass_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkSLICImageFilterVIUC3IULL3_Superclass *\":\n return _itkSLICImageFilterPython.itkSLICImageFilterVIUC3IULL3_Superclass_cast(obj)", "def itkSLICImageFilterVISS3IULL3_Superclass_cast(obj: 'itkLightObject') -> \"itkSLICImageFilterVISS3IULL3_Superclass *\":\n return _itkSLICImageFilterPython.itkSLICImageFilterVISS3IULL3_Superclass_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_Superclass_cast(obj)", "def itkSLICImageFilterVIUC3IULL3_Superclass_cast(obj: 'itkLightObject') -> \"itkSLICImageFilterVIUC3IULL3_Superclass *\":\n return _itkSLICImageFilterPython.itkSLICImageFilterVIUC3IULL3_Superclass_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkSLICImageFilterIUC3IULL3_Superclass *\":\n return _itkSLICImageFilterPython.itkSLICImageFilterIUC3IULL3_Superclass_cast(obj)", "def itkSLICImageFilterVIF3IULL3_Superclass_cast(obj: 'itkLightObject') -> \"itkSLICImageFilterVIF3IULL3_Superclass *\":\n return _itkSLICImageFilterPython.itkSLICImageFilterVIF3IULL3_Superclass_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkSLICImageFilterISS3IULL3_Superclass *\":\n return _itkSLICImageFilterPython.itkSLICImageFilterISS3IULL3_Superclass_cast(obj)", "def itkNotImageFilterIUS3IUS3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_Superclass_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_Superclass_cast(obj)", "def itkSLICImageFilterIF3IUS3_Superclass_cast(obj: 'itkLightObject') -> \"itkSLICImageFilterIF3IUS3_Superclass *\":\n return _itkSLICImageFilterPython.itkSLICImageFilterIF3IUS3_Superclass_cast(obj)", "def itkSLICImageFilterVISS3IUS3_Superclass_cast(obj: 'itkLightObject') -> \"itkSLICImageFilterVISS3IUS3_Superclass *\":\n return 
_itkSLICImageFilterPython.itkSLICImageFilterVISS3IUS3_Superclass_cast(obj)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
New() -> itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass Create a new object of the class itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass and set the input and the parameters if some named or non-named arguments are passed to that method. New() tries to assign all the non named parameters to the input of the new objects the first non named parameter in the first input, etc. The named parameters are used by calling the method with the same name prefixed by 'Set'.
def New(*args, **kargs): obj = itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass.__New_orig__() import itkTemplate itkTemplate.New(obj, *args, **kargs) return obj
[ "def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID2ID2ID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkAddImageFilterIF2IF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkCosImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF2IF2IF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkNotImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkAddImageFilterID2ID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkSLICImageFilterIF2IULL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkScalarChanAndVeseSparseLevelSetImageFilterIF2IF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkSubtractImageFilterIF2IF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkCosImageFilterID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkAddImageFilterIUL2IUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkSLICImageFilterIF2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkNotImageFilterISS2ISS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID2ID2ID2ID2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(itkLightObject obj) -> itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass
def itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args): return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args)
[ "def itkNotImageFilterIF2IF2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_Superclass_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_Superclass_cast(obj)", "def itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args)", "def itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args)", "def itkSLICImageFilterIF2IULL2_Superclass_cast(obj: 'itkLightObject') -> \"itkSLICImageFilterIF2IULL2_Superclass *\":\n return _itkSLICImageFilterPython.itkSLICImageFilterIF2IULL2_Superclass_cast(obj)", "def itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args)", "def cast(obj: 'itkLightObject') -> \"itkSLICImageFilterIF2IULL2_Superclass *\":\n return _itkSLICImageFilterPython.itkSLICImageFilterIF2IULL2_Superclass_cast(obj)", "def itkNotImageFilterIUC2IUC2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_Superclass_cast(obj)", "def itkProjectedLandweberDeconvolutionImageFilterIF2IF2_Superclass_cast(obj: 'itkLightObject') -> \"itkProjectedLandweberDeconvolutionImageFilterIF2IF2_Superclass *\":\n return _itkProjectedLandweberDeconvolutionImageFilterPython.itkProjectedLandweberDeconvolutionImageFilterIF2IF2_Superclass_cast(obj)", "def itkNotImageFilterIUS2IUS2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_Superclass_cast(obj)", "def itkSLICImageFilterVIF2IULL2_Superclass_cast(obj: 'itkLightObject') -> \"itkSLICImageFilterVIF2IULL2_Superclass *\":\n return _itkSLICImageFilterPython.itkSLICImageFilterVIF2IULL2_Superclass_cast(obj)", "def itkNotImageFilterISS2ISS2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_Superclass_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkSLICImageFilterVIF2IULL2_Superclass *\":\n return _itkSLICImageFilterPython.itkSLICImageFilterVIF2IULL2_Superclass_cast(obj)", "def itkSLICImageFilterIF2IUS2_Superclass_cast(obj: 'itkLightObject') -> \"itkSLICImageFilterIF2IUS2_Superclass *\":\n return _itkSLICImageFilterPython.itkSLICImageFilterIF2IUS2_Superclass_cast(obj)", "def itkSLICImageFilterVIF2IUS2_Superclass_cast(obj: 'itkLightObject') -> \"itkSLICImageFilterVIF2IUS2_Superclass *\":\n return _itkSLICImageFilterPython.itkSLICImageFilterVIF2IUS2_Superclass_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkSLICImageFilterVIUS2IULL2_Superclass *\":\n return _itkSLICImageFilterPython.itkSLICImageFilterVIUS2IULL2_Superclass_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkSLICImageFilterVIUC2IULL2_Superclass *\":\n return _itkSLICImageFilterPython.itkSLICImageFilterVIUC2IULL2_Superclass_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_Superclass_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkProjectedLandweberDeconvolutionImageFilterIF2IF2_Superclass *\":\n return 
_itkProjectedLandweberDeconvolutionImageFilterPython.itkProjectedLandweberDeconvolutionImageFilterIF2IF2_Superclass_cast(obj)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
New() -> itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass Create a new object of the class itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass and set the input and the parameters if some named or non-named arguments are passed to that method. New() tries to assign all the non named parameters to the input of the new objects the first non named parameter in the first input, etc. The named parameters are used by calling the method with the same name prefixed by 'Set'.
def New(*args, **kargs): obj = itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass.__New_orig__() import itkTemplate itkTemplate.New(obj, *args, **kargs) return obj
[ "def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID3ID3ID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkAddImageFilterIF3IF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkCosImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkAddImageFilterID3ID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkScalarChanAndVeseSparseLevelSetImageFilterIF3IF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkNotImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkScalarChanAndVeseSparseLevelSetImageFilterID3ID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkSLICImageFilterIF3IULL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkCosImageFilterID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF3IF3IF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkAddImageFilterIUL3IUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkNotImageFilterISS3ISS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkSLICImageFilterIF3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkSubtractImageFilterIF3IF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkAddImageFilterIUC3IUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }